diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index d0b3c350f12bd871c0b6d26406d486fa6c8b4ccd..9fcdbb5b92e19df60af02600124d264abe2b897f 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -17,11 +17,11 @@ object SparkBuild extends Build {
   //val HADOOP_VERSION = "2.0.0-mr1-cdh4.1.1"
   //val HADOOP_MAJOR_VERSION = "2"
 
-  lazy val root = Project("root", file("."), settings = rootSettings) aggregate(core, /*repl,*/ examples, bagel)
+  lazy val root = Project("root", file("."), settings = rootSettings) aggregate(core, repl, examples, bagel)
 
   lazy val core = Project("core", file("core"), settings = coreSettings)
 
-//  lazy val repl = Project("repl", file("repl"), settings = replSettings) dependsOn (core)
+  lazy val repl = Project("repl", file("repl"), settings = replSettings) dependsOn (core)
 
   lazy val examples = Project("examples", file("examples"), settings = examplesSettings) dependsOn (core)
 
@@ -35,7 +35,7 @@ object SparkBuild extends Build {
     organization       := "org.spark-project",
     version            := "0.7.0-SNAPSHOT",
     scalaVersion       := "2.10.0",
-    scalacOptions      := Seq(/*"-deprecation",*/ "-unchecked", "-optimize"), // -deprecation is too noisy due to usage of old Hadoop API, enable it once that's no longer an issue
+    scalacOptions      := Seq("-unchecked", "-optimize"),
     unmanagedJars in Compile <<= baseDirectory map { base => (base / "lib" ** "*.jar").classpath },
     retrieveManaged := true,
     retrievePattern := "[type]s/[artifact](-[revision])(-[classifier]).[ext]",
@@ -136,7 +136,9 @@ object SparkBuild extends Build {
         "io.spray"           %% "spray-json"       % "1.2.3",
         "colt"                % "colt"             % "1.2.0",
         "org.apache.mesos"    % "mesos"            % "0.9.0-incubating",
-        "org.scala-lang"      % "scala-actors"     % "2.10.0"
+        "org.scala-lang"      % "scala-actors"     % "2.10.0",
+        "org.scala-lang"      % "jline"            % "2.10.0",
+        "org.scala-lang"      % "scala-reflect"    % "2.10.0"
       ) ++ (if (HADOOP_MAJOR_VERSION == "2")
         Some("org.apache.hadoop" % "hadoop-client" % HADOOP_VERSION) else None).toSeq,
     unmanagedSourceDirectories in Compile <+= baseDirectory{ _ / ("src/hadoop" + HADOOP_MAJOR_VERSION + "/scala") }
@@ -146,10 +148,11 @@ object SparkBuild extends Build {
     publish := {}
   )
 
-/*  def replSettings = sharedSettings ++ Seq(
+  def replSettings = sharedSettings ++ Seq(
     name := "spark-repl",
-    libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _)
-  )*/
+    // libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _)
+    libraryDependencies += "org.scala-lang" % "scala-compiler" % "2.10.0"
+  )
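+
+  // With the repl project re-enabled, something like `sbt "project repl" run`
+  // should launch the shell (illustrative invocation; the exact task depends
+  // on the sbt version in use).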
 
   def examplesSettings = sharedSettings ++ Seq(
     name := "spark-examples"
diff --git a/repl/lib/scala-jline.jar b/repl/lib/scala-jline.jar
deleted file mode 100644
index 2f18c95cdd7aa00e103e80f2c589ecd715030c70..0000000000000000000000000000000000000000
Binary files a/repl/lib/scala-jline.jar and /dev/null differ
diff --git a/repl/src/main/scala/spark/repl/Main.scala b/repl/src/main/scala/spark/repl/Main.scala
index 58809ab646a96219b3c572ac8ba345afe481b76d..b0a78e0bb811e32f1950d19ea750b1a7d4865b5e 100644
--- a/repl/src/main/scala/spark/repl/Main.scala
+++ b/repl/src/main/scala/spark/repl/Main.scala
@@ -3,12 +3,12 @@ package spark.repl
 import scala.collection.mutable.Set
 
 object Main {
-  private var _interp: SparkILoop = null
-  
+  private var _interp: SparkILoop = _
+
   def interp = _interp
-  
+
   def interp_=(i: SparkILoop) { _interp = i }
-  
+
   def main(args: Array[String]) {
     _interp = new SparkILoop
     _interp.process(args)
diff --git a/repl/src/main/scala/spark/repl/SparkExprTyper.scala b/repl/src/main/scala/spark/repl/SparkExprTyper.scala
new file mode 100644
index 0000000000000000000000000000000000000000..c55797eb54ef036dcc2ec742c0eb7134135ec108
--- /dev/null
+++ b/repl/src/main/scala/spark/repl/SparkExprTyper.scala
@@ -0,0 +1,109 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author  Paul Phillips
+ */
+
+package spark.repl
+
+import scala.tools.nsc._
+import scala.tools.nsc.interpreter._
+
+import scala.reflect.internal.util.BatchSourceFile
+import scala.tools.nsc.ast.parser.Tokens.EOF
+
+import spark.Logging
+
+trait SparkExprTyper extends Logging {
+  val repl: SparkIMain
+
+  import repl._
+  import global.{ reporter => _, Import => _, _ }
+  import definitions._
+  import syntaxAnalyzer.{ UnitParser, UnitScanner, token2name }
+  import naming.freshInternalVarName
+
+  object codeParser extends { val global: repl.global.type = repl.global } with CodeHandlers[Tree] {
+    def applyRule[T](code: String, rule: UnitParser => T): T = {
+      reporter.reset()
+      val scanner = newUnitParser(code)
+      val result  = rule(scanner)
+
+      if (!reporter.hasErrors)
+        scanner.accept(EOF)
+
+      result
+    }
+
+    def defns(code: String) = stmts(code) collect { case x: DefTree => x }
+    def expr(code: String)  = applyRule(code, _.expr())
+    def stmts(code: String) = applyRule(code, _.templateStats())
+    def stmt(code: String)  = stmts(code).last  // guaranteed nonempty
+  }
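+
+  // Illustrative behavior: codeParser.expr("x + 1") parses a single expression
+  // tree, while codeParser.stmts("val x = 1; def f = x") yields one tree per
+  // template statement and defns keeps only the DefTrees among them.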
+
+  /** Parse a line into a sequence of trees. Returns None if the input is incomplete. */
+  def parse(line: String): Option[List[Tree]] = debugging(s"""parse("$line")""") {
+    var isIncomplete = false
+    reporter.withIncompleteHandler((_, _) => isIncomplete = true) {
+      val trees = codeParser.stmts(line)
+      if (reporter.hasErrors) Some(Nil)
+      else if (isIncomplete) None
+      else Some(trees)
+    }
+  }
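+  // Sketch of the three outcomes:
+  //   parse("class C {")  => None        (incomplete input)
+  //   parse("val x = 1")  => Some(trees) (parses cleanly)
+  //   parse("val = 1")    => Some(Nil)   (parse error)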
+  // def parsesAsExpr(line: String) = {
+  //   import codeParser._
+  //   (opt expr line).isDefined
+  // }
+
+  def symbolOfLine(code: String): Symbol = {
+    def asExpr(): Symbol = {
+      val name  = freshInternalVarName()
+      // Typing it with a lazy val would give us the right type, but runs
+      // into compiler bugs with things like existentials, so we compile it
+      // behind a def and strip the NullaryMethodType which wraps the expr.
+      val line = "def " + name + " = {\n" + code + "\n}"
+
+      interpretSynthetic(line) match {
+        case IR.Success =>
+          val sym0 = symbolOfTerm(name)
+          // drop NullaryMethodType
+          val sym = sym0.cloneSymbol setInfo afterTyper(sym0.info.finalResultType)
+          if (sym.info.typeSymbol eq UnitClass) NoSymbol
+          else sym
+        case _          => NoSymbol
+      }
+    }
+    def asDefn(): Symbol = {
+      val old = repl.definedSymbolList.toSet
+
+      interpretSynthetic(code) match {
+        case IR.Success =>
+          repl.definedSymbolList filterNot old match {
+            case Nil        => NoSymbol
+            case sym :: Nil => sym
+            case syms       => NoSymbol.newOverloaded(NoPrefix, syms)
+          }
+        case _ => NoSymbol
+      }
+    }
+    beQuietDuring(asExpr()) orElse beQuietDuring(asDefn())
+  }
+
+  private var typeOfExpressionDepth = 0
+  def typeOfExpression(expr: String, silent: Boolean = true): Type = {
+    if (typeOfExpressionDepth > 2) {
+      logDebug("Terminating typeOfExpression recursion for expression: " + expr)
+      return NoType
+    }
+    typeOfExpressionDepth += 1
+    // We don't presently have a good way to suppress undesirable success output
+    // while letting errors through, so we first try silently: if there is an
+    // error, and errors are desired, we re-evaluate non-silently to induce the
+    // error message.
+    try beSilentDuring(symbolOfLine(expr).tpe) match {
+      case NoType if !silent => symbolOfLine(expr).tpe // generate error
+      case tpe               => tpe
+    }
+    finally typeOfExpressionDepth -= 1
+  }
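+
+  // e.g. typeOfExpression("1 to 5") would quietly yield Range.Inclusive, and
+  // NoType for an expression that fails to typecheck; with silent = false the
+  // failing line is re-run so the compiler error reaches the user.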
+}
diff --git a/repl/src/main/scala/spark/repl/SparkILoop.scala b/repl/src/main/scala/spark/repl/SparkILoop.scala
index 22bcb4be8a8f8b37c9a9c2a73b6565eebc300ce9..5a1e54c929b0722df1782c984859dd22d53e5fee 100644
--- a/repl/src/main/scala/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoop.scala
@@ -1,26 +1,38 @@
 /* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
  * @author Alexander Spoon
  */
 
 package spark.repl
 
+
 import scala.tools.nsc._
 import scala.tools.nsc.interpreter._
 
+import scala.tools.nsc.interpreter.{ Results => IR }
 import Predef.{ println => _, _ }
-import java.io.{ BufferedReader, FileReader, PrintWriter }
+import java.io.{ BufferedReader, FileReader }
+import java.util.concurrent.locks.ReentrantLock
 import scala.sys.process.Process
-import session._
-import scala.tools.nsc.interpreter.{ Results => IR }
-import scala.tools.util.{ SignalManager, Signallable, Javap }
+import scala.tools.nsc.interpreter.session._
+import scala.util.Properties.{ jdkHome, javaVersion }
+import scala.tools.util.{ Javap }
 import scala.annotation.tailrec
-import scala.util.control.Exception.{ ignoring }
 import scala.collection.mutable.ListBuffer
 import scala.concurrent.ops
-import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
-import interpreter._
-import io.{ File, Sources }
+import scala.tools.nsc.util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
+import scala.tools.nsc.interpreter._
+import scala.tools.nsc.io.{ File, Directory }
+import scala.reflect.NameTransformer._
+import scala.tools.nsc.util.ScalaClassLoader
+import scala.tools.nsc.util.ScalaClassLoader._
+import scala.tools.util._
+import scala.language.{implicitConversions, existentials}
+import scala.reflect.{ClassTag, classTag}
+import scala.tools.reflect.StdRuntimeTags._
+import java.lang.{Class => jClass}
+import scala.reflect.api.{Mirror, TypeCreator, Universe => ApiUniverse}
 
 import spark.Logging
 import spark.SparkContext
@@ -37,45 +49,86 @@ import spark.SparkContext
  *  @author  Lex Spoon
  *  @version 1.2
  */
-class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master: Option[String])
+class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
+                 val master: Option[String])
                 extends AnyRef
                    with LoopCommands
+                   with SparkILoopInit
                    with Logging
 {
-  def this(in0: BufferedReader, out: PrintWriter, master: String) = this(Some(in0), out, Some(master))
-  def this(in0: BufferedReader, out: PrintWriter) = this(Some(in0), out, None)
-  def this() = this(None, new PrintWriter(Console.out, true), None)
-  
+  def this(in0: BufferedReader, out: JPrintWriter, master: String) = this(Some(in0), out, Some(master))
+  def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out, None)
+  def this() = this(None, new JPrintWriter(Console.out, true), None)
+
   var in: InteractiveReader = _   // the input stream from which commands come
   var settings: Settings = _
   var intp: SparkIMain = _
 
-  /*
-  lazy val power = {
-    val g = intp.global
-    Power[g.type](this, g)
+  @deprecated("Use `intp` instead.", "2.9.0") def interpreter = intp
+  @deprecated("Use `intp` instead.", "2.9.0") def interpreter_= (i: SparkIMain): Unit = intp = i
+
+  /** Having inherited the difficult "var-ness" of the repl instance,
+   *  I'm trying to work around it by moving operations into a class from
+   *  which it will appear to be a stable prefix.
+   */
+  private def onIntp[T](f: SparkIMain => T): T = f(intp)
+
+  class IMainOps[T <: SparkIMain](val intp: T) {
+    import intp._
+    import global._
+
+    def printAfterTyper(msg: => String) =
+      intp.reporter printMessage afterTyper(msg)
+
+    /** Strip NullaryMethodType artifacts. */
+    private def replInfo(sym: Symbol) = {
+      sym.info match {
+        case NullaryMethodType(restpe) if sym.isAccessor  => restpe
+        case info                                         => info
+      }
+    }
+    def echoTypeStructure(sym: Symbol) =
+      printAfterTyper("" + deconstruct.show(replInfo(sym)))
+
+    def echoTypeSignature(sym: Symbol, verbose: Boolean) = {
+      if (verbose) SparkILoop.this.echo("// Type signature")
+      printAfterTyper("" + replInfo(sym))
+
+      if (verbose) {
+        SparkILoop.this.echo("\n// Internal Type structure")
+        echoTypeStructure(sym)
+      }
+    }
   }
-  */
-  
-  // TODO
-  // object opt extends AestheticSettings
-  // 
-  @deprecated("Use `intp` instead.", "2.9.0")
-  def interpreter = intp
-  
-  @deprecated("Use `intp` instead.", "2.9.0")
-  def interpreter_= (i: SparkIMain): Unit = intp = i
-  
+  implicit def stabilizeIMain(intp: SparkIMain) = new IMainOps[intp.type](intp)
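+  // The conversion above lets call sites treat the mutable `intp` var as a
+  // stable prefix: IMainOps captures the instance once as a val, so imports
+  // and path-dependent members typecheck against that snapshot.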
+
+  /** TODO -
+   *  -n normalize
+   *  -l label with case class parameter names
+   *  -c complete - leave nothing out
+   */
+  private def typeCommandInternal(expr: String, verbose: Boolean): Result = {
+    onIntp { intp =>
+      val sym = intp.symbolOfLine(expr)
+      if (sym.exists) intp.echoTypeSignature(sym, verbose)
+      else ""
+    }
+  }
+
+  var sparkContext: SparkContext = _
+
+  override def echoCommandMessage(msg: String) {
+    intp.reporter printMessage msg
+  }
+
+  // def isAsync = !settings.Yreplsync.value
+  def isAsync = false
+  // lazy val power = new Power(intp, new StdReplVals(this))(tagOfStdReplVals, classTag[StdReplVals])
   def history = in.history
 
   /** The context class loader at the time this object was created */
   protected val originalClassLoader = Thread.currentThread.getContextClassLoader
 
-  // Install a signal handler so we can be prodded.
-  private val signallable =
-    /*if (isReplDebug) Signallable("Dump repl state.")(dumpCommand())
-    else*/ null
-    
   // classpath entries added via :cp
   var addedClasspath: String = ""
 
@@ -87,74 +140,49 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
 
   /** Record a command for replay should the user request a :replay */
   def addReplay(cmd: String) = replayCommandStack ::= cmd
+
+  def savingReplayStack[T](body: => T): T = {
+    val saved = replayCommandStack
+    try body
+    finally replayCommandStack = saved
+  }
+  def savingReader[T](body: => T): T = {
+    val saved = in
+    try body
+    finally in = saved
+  }
   
-  /** Try to install sigint handler: ignore failure.  Signal handler
-   *  will interrupt current line execution if any is in progress.
-   * 
-   *  Attempting to protect the repl from accidental exit, we only honor
-   *  a single ctrl-C if the current buffer is empty: otherwise we look
-   *  for a second one within a short time.
-   */
-  private def installSigIntHandler() {
-    def onExit() {
-      Console.println("") // avoiding "shell prompt in middle of line" syndrome
-      sys.exit(1)
-    }
-    ignoring(classOf[Exception]) {
-      SignalManager("INT") = {
-        if (intp == null)
-          onExit()
-        else if (intp.lineManager.running)
-          intp.lineManager.cancel()
-        else if (in.currentLine != "") {
-          // non-empty buffer, so make them hit ctrl-C a second time
-          SignalManager("INT") = onExit()
-          io.timer(5)(installSigIntHandler())  // and restore original handler if they don't
-        }
-        else onExit()
-      }
+  def sparkCleanUp() {
+    echo("Stopping spark context.")
+    intp.beQuietDuring {
+      command("sc.stop()")
     }
   }
-
   /** Close the interpreter and set the var to null. */
   def closeInterpreter() {
     if (intp ne null) {
-      intp.close
+      sparkCleanUp()
+      intp.close()
       intp = null
-      Thread.currentThread.setContextClassLoader(originalClassLoader)
     }
   }
-  
+
   class SparkILoopInterpreter extends SparkIMain(settings, out) {
+    outer =>
+
     override lazy val formatting = new Formatting {
       def prompt = SparkILoop.this.prompt
     }
-    override protected def createLineManager() = new Line.Manager {
-      override def onRunaway(line: Line[_]): Unit = {
-        val template = """
-          |// She's gone rogue, captain! Have to take her out!
-          |// Calling Thread.stop on runaway %s with offending code:
-          |// scala> %s""".stripMargin
-        
-        echo(template.format(line.thread, line.code))
-        // XXX no way to suppress the deprecation warning
-        line.thread.stop()
-        in.redrawLine()
-      }
-    }
-    override protected def parentClassLoader = {
-      SparkHelper.explicitParentLoader(settings).getOrElse( classOf[SparkILoop].getClassLoader )
-    }
+    override protected def parentClassLoader =
+      SparkHelper.explicitParentLoader(settings).getOrElse(classOf[SparkILoop].getClassLoader)
   }
 
   /** Create a new interpreter. */
   def createInterpreter() {
     if (addedClasspath != "")
       settings.classpath append addedClasspath
-      
+
     intp = new SparkILoopInterpreter
-    intp.setContextClassLoader()
-    installSigIntHandler()
   }
 
   /** print a friendly help message */
@@ -168,10 +196,10 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
   private def helpSummary() = {
     val usageWidth  = commands map (_.usageMsg.length) max
     val formatStr   = "%-" + usageWidth + "s %s %s"
-    
+
     echo("All commands can be abbreviated, e.g. :he instead of :help.")
     echo("Those marked with a * have more detailed help, e.g. :help imports.\n")
-    
+
     commands foreach { cmd =>
       val star = if (cmd.hasLongHelp) "*" else " "
       echo(formatStr.format(cmd.usageMsg, star, cmd.help))
@@ -182,7 +210,7 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
       case Nil  => echo(cmd + ": no such command.  Type :help for help.")
       case xs   => echo(cmd + " is ambiguous: did you mean " + xs.map(":" + _.name).mkString(" or ") + "?")
     }
-    Result(true, None)    
+    Result(true, None)
   }
   private def matchingCommands(cmd: String) = commands filter (_.name startsWith cmd)
   private def uniqueCommand(cmd: String): Option[LoopCommand] = {
@@ -193,31 +221,16 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
       case xs       => xs find (_.name == cmd)
     }
   }
-  
-  /** Print a welcome message */
-  def printWelcome() {
-    echo("""Welcome to
-      ____              __  
-     / __/__  ___ _____/ /__
-    _\ \/ _ \/ _ `/ __/  '_/
-   /___/ .__/\_,_/_/ /_/\_\   version 0.7.0
-      /_/                  
-""")
-    import Properties._
-    val welcomeMsg = "Using Scala %s (%s, Java %s)".format(
-      versionString, javaVmName, javaVersion) 
-    echo(welcomeMsg)
-  }
-  
+
   /** Show the history */
   lazy val historyCommand = new LoopCommand("history", "show the history (optional num is commands to show)") {
     override def usage = "[num]"
     def defaultLines = 20
-    
+
     def apply(line: String): Result = {
       if (history eq NoHistory)
         return "No history available."
-      
+
       val xs      = words(line)
       val current = history.index
       val count   = try xs.head.toInt catch { case _: Exception => defaultLines }
@@ -229,32 +242,38 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
     }
   }
 
-  private def echo(msg: String) = {
+  // When you know you are most likely breaking into the middle
+  // of a line being typed.  This softens the blow.
+  protected def echoAndRefresh(msg: String) = {
+    echo("\n" + msg)
+    in.redrawLine()
+  }
+  protected def echo(msg: String) = {
     out println msg
     out.flush()
   }
-  private def echoNoNL(msg: String) = {
+  protected def echoNoNL(msg: String) = {
     out print msg
     out.flush()
   }
-  
+
   /** Search the history */
   def searchHistory(_cmdline: String) {
     val cmdline = _cmdline.toLowerCase
     val offset  = history.index - history.size + 1
-    
+
     for ((line, index) <- history.asStrings.zipWithIndex ; if line.toLowerCase contains cmdline)
       echo("%d %s".format(index + offset, line))
   }
-  
+
   private var currentPrompt = Properties.shellPromptString
   def setPrompt(prompt: String) = currentPrompt = prompt
   /** Prompt to print when awaiting input */
   def prompt = currentPrompt
-  
+
   import LoopCommand.{ cmd, nullary }
 
-  /** Standard commands **/
+  /** Standard commands */
   lazy val standardCommands = List(
     cmd("cp", "<path>", "add a jar or directory to the classpath", addClasspath),
     cmd("help", "[command]", "print this summary or command-specific help", helpCommand),
@@ -263,53 +282,30 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
     cmd("imports", "[name name ...]", "show import history, identifying sources of names", importsCommand),
     cmd("implicits", "[-v]", "show the implicits in scope", implicitsCommand),
     cmd("javap", "<path|class>", "disassemble a file or class name", javapCommand),
-    nullary("keybindings", "show how ctrl-[A-Z] and other keys are bound", keybindingsCommand),
     cmd("load", "<path>", "load and interpret a Scala file", loadCommand),
     nullary("paste", "enter paste mode: all input up to ctrl-D compiled together", pasteCommand),
-    //nullary("power", "enable power user mode", powerCmd),
-    nullary("quit", "exit the interpreter", () => Result(false, None)),
+//    nullary("power", "enable power user mode", powerCmd),
+    nullary("quit", "exit the repl", () => Result(false, None)),
     nullary("replay", "reset execution and replay all previous commands", replay),
+    nullary("reset", "reset the repl to its initial state, forgetting all session entries", resetCommand),
     shCommand,
     nullary("silent", "disable/enable automatic printing of results", verbosity),
-    cmd("type", "<expr>", "display the type of an expression without evaluating it", typeCommand)
+    cmd("type", "[-v] <expr>", "display the type of an expression without evaluating it", typeCommand),
+    nullary("warnings", "show the suppressed warnings from the most recent line which had any", warningsCommand)
   )
-  
+
   /** Power user commands */
   lazy val powerCommands: List[LoopCommand] = List(
-    //nullary("dump", "displays a view of the interpreter's internal state", dumpCommand),
-    //cmd("phase", "<phase>", "set the implicit phase for power commands", phaseCommand),
-    cmd("wrap", "<method>", "name of method to wrap around each repl line", wrapCommand) withLongHelp ("""
-      |:wrap
-      |:wrap clear
-      |:wrap <method>
-      |
-      |Installs a wrapper around each line entered into the repl.
-      |Currently it must be the simple name of an existing method
-      |with the specific signature shown in the following example.
-      |
-      |def timed[T](body: => T): T = {
-      |  val start = System.nanoTime
-      |  try body
-      |  finally println((System.nanoTime - start) + " nanos elapsed.")
-      |}
-      |:wrap timed
-      |
-      |If given no argument, :wrap names the wrapper installed.
-      |An argument of clear will remove the wrapper if any is active.
-      |Note that wrappers do not compose (a new one replaces the old
-      |one) and also that the :phase command uses the same machinery,
-      |so setting :wrap will clear any :phase setting.       
-    """.stripMargin.trim)
+    // cmd("phase", "<phase>", "set the implicit phase for power commands", phaseCommand)
   )
-  
-  /*
-  private def dumpCommand(): Result = {
-    echo("" + power)
-    history.asStrings takeRight 30 foreach echo
-    in.redrawLine()
-  }
-  */
-  
+
+  // private def dumpCommand(): Result = {
+  //   echo("" + power)
+  //   history.asStrings takeRight 30 foreach echo
+  //   in.redrawLine()
+  // }
+  // private def valsCommand(): Result = power.valsDescription
+
   private val typeTransforms = List(
     "scala.collection.immutable." -> "immutable.",
     "scala.collection.mutable."   -> "mutable.",
@@ -317,7 +313,7 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
     "java.lang."                  -> "jl.",
     "scala.runtime."              -> "runtime."
   )
-  
+
   private def importsCommand(line: String): Result = {
     val tokens    = words(line)
     val handlers  = intp.languageWildcardHandlers ++ intp.importHandlers
@@ -333,7 +329,7 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
         val implicitMsg    = if (imps.isEmpty) "" else imps.size + " are implicit"
         val foundMsg       = if (found.isEmpty) "" else found.mkString(" // imports: ", ", ", "")
         val statsMsg       = List(typeMsg, termMsg, implicitMsg) filterNot (_ == "") mkString ("(", ", ", ")")
-        
+
         intp.reporter.printMessage("%2d) %-30s %s%s".format(
           idx + 1,
           handler.importString,
@@ -342,12 +338,11 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
         ))
     }
   }
-  
-  private def implicitsCommand(line: String): Result = {
-    val intp = SparkILoop.this.intp
+
+  private def implicitsCommand(line: String): Result = onIntp { intp =>
     import intp._
-    import global.Symbol
-    
+    import global._
+
     def p(x: Any) = intp.reporter.printMessage("" + x)
 
     // If an argument is given, only show a source with that
@@ -360,17 +355,17 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
           else (args exists (source.name.toString contains _))
         }
     }
-    
+
     if (filtered.isEmpty)
       return "No implicits have been imported other than those in Predef."
-      
+
     filtered foreach {
       case (source, syms) =>
         p("/* " + syms.size + " implicit members imported from " + source.fullName + " */")
-        
+
         // This groups the members by where the symbol is defined
         val byOwner = syms groupBy (_.owner)
-        val sortedOwners = byOwner.toList sortBy { case (owner, _) => intp.afterTyper(source.info.baseClasses indexOf owner) }
+        val sortedOwners = byOwner.toList sortBy { case (owner, _) => afterTyper(source.info.baseClasses indexOf owner) }
 
         sortedOwners foreach {
           case (owner, members) =>
@@ -388,10 +383,10 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
 
               xss map (xs => xs sortBy (_.name.toString))
             }
-          
-            val ownerMessage = if (owner == source) " defined in " else " inherited from "            
+
+            val ownerMessage = if (owner == source) " defined in " else " inherited from "
             p("  /* " + members.size + ownerMessage + owner.fullName + " */")
-            
+
             memberGroups foreach { group =>
               group foreach (s => p("  " + intp.symbolDefString(s)))
               p("")
@@ -400,158 +395,182 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
         p("")
     }
   }
-  
-  protected def newJavap() = new Javap(intp.classLoader, new SparkIMain.ReplStrippingWriter(intp)) {
-    override def tryClass(path: String): Array[Byte] = {
-      // Look for Foo first, then Foo$, but if Foo$ is given explicitly,
-      // we have to drop the $ to find object Foo, then tack it back onto
-      // the end of the flattened name.
-      def className  = intp flatName path
-      def moduleName = (intp flatName path.stripSuffix("$")) + "$"
 
-      val bytes = super.tryClass(className)
-      if (bytes.nonEmpty) bytes
-      else super.tryClass(moduleName)
+  private def findToolsJar() = {
+    val jdkPath = Directory(jdkHome)
+    val jar     = jdkPath / "lib" / "tools.jar" toFile;
+
+    if (jar isFile)
+      Some(jar)
+    else if (jdkPath.isDirectory)
+      jdkPath.deepFiles find (_.name == "tools.jar")
+    else None
+  }
+  private def addToolsJarToLoader() = {
+    val cl = findToolsJar match {
+      case Some(tools) => ScalaClassLoader.fromURLs(Seq(tools.toURL), intp.classLoader)
+      case _           => intp.classLoader
+    }
+    if (Javap.isAvailable(cl)) {
+      logDebug(":javap available.")
+      cl
+    }
+    else {
+      logDebug(":javap unavailable: no tools.jar at " + jdkHome)
+      intp.classLoader
+    }
+  }
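+
+  // Assumed JDK layout: a jdkHome such as /usr/lib/jvm/java-6-openjdk carries
+  // lib/tools.jar (home of javap); a bare JRE does not, hence the recursive
+  // search and the graceful fallback to the plain repl class loader above.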
+
+  protected def newJavap() = new JavapClass(addToolsJarToLoader(), new SparkIMain.ReplStrippingWriter(intp)) {
+    override def tryClass(path: String): Array[Byte] = {
+      val hd :: rest = path split '.' toList;
+      // If there are dots in the name, the first segment is the
+      // key to finding it.
+      if (rest.nonEmpty) {
+        intp optFlatName hd match {
+          case Some(flat) =>
+            val clazz = flat :: rest mkString NAME_JOIN_STRING
+            val bytes = super.tryClass(clazz)
+            if (bytes.nonEmpty) bytes
+            else super.tryClass(clazz + MODULE_SUFFIX_STRING)
+          case _          => super.tryClass(path)
+        }
+      }
+      else {
+        // Look for Foo first, then Foo$, but if Foo$ is given explicitly,
+        // we have to drop the $ to find object Foo, then tack it back onto
+        // the end of the flattened name.
+        def className  = intp flatName path
+        def moduleName = (intp flatName path.stripSuffix(MODULE_SUFFIX_STRING)) + MODULE_SUFFIX_STRING
+
+        val bytes = super.tryClass(className)
+        if (bytes.nonEmpty) bytes
+        else super.tryClass(moduleName)
+      }
     }
   }
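+  // Illustrative resolution: given dotted input like "Foo.Bar" where Foo was
+  // defined in the repl, the head segment is mapped through optFlatName to its
+  // flattened wrapper name and rejoined with NAME_JOIN_STRING; a bare "Foo"
+  // is tried as-is and then with the module suffix "$".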
+  // private lazy val javap = substituteAndLog[Javap]("javap", NoJavap)(newJavap())
   private lazy val javap =
     try newJavap()
     catch { case _: Exception => null }
-  
-  private def typeCommand(line: String): Result = {
-    intp.typeOfExpression(line) match {
-      case Some(tp) => tp.toString
-      case _        => "Failed to determine type."
+
+  // Still todo: modules.
+  private def typeCommand(line0: String): Result = {
+    line0.trim match {
+      case ""                      => ":type [-v] <expression>"
+      case s if s startsWith "-v " => typeCommandInternal(s stripPrefix "-v " trim, true)
+      case s                       => typeCommandInternal(s, false)
     }
   }
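+  // Usage sketch: ":type 1 + 1" prints Int without evaluating the expression;
+  // ":type -v 1 + 1" additionally dumps the internal type structure via
+  // echoTypeSignature.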
-  
+
+  private def warningsCommand(): Result = {
+    if (intp.lastWarnings.isEmpty)
+      "Can't find any cached warnings."
+    else
+      intp.lastWarnings foreach { case (pos, msg) => intp.reporter.warning(pos, msg) }
+  }
+
   private def javapCommand(line: String): Result = {
     if (javap == null)
-      return ":javap unavailable on this platform."
-    if (line == "")
-      return ":javap [-lcsvp] [path1 path2 ...]"
-    
-    javap(words(line)) foreach { res =>
-      if (res.isError) return "Failed: " + res.value
-      else res.show()
-    }
-  }
-  private def keybindingsCommand(): Result = {
-    if (in.keyBindings.isEmpty) "Key bindings unavailable."
-    else {
-      echo("Reading jline properties for default key bindings.")
-      echo("Accuracy not guaranteed: treat this as a guideline only.\n")
-      in.keyBindings foreach (x => echo ("" + x))
-    }
+      ":javap unavailable, no tools.jar at %s.  Set JDK_HOME.".format(jdkHome)
+    else if (javaVersion startsWith "1.7")
+      ":javap not yet working with java 1.7"
+    else if (line == "")
+      ":javap [-lcsvp] [path1 path2 ...]"
+    else
+      javap(words(line)) foreach { res =>
+        if (res.isError) return "Failed: " + res.value
+        else res.show()
+      }
   }
+
   private def wrapCommand(line: String): Result = {
     def failMsg = "Argument to :wrap must be the name of a method with signature [T](=> T): T"
-    val intp = SparkILoop.this.intp
-    val g: intp.global.type = intp.global
-    import g._
-
-    words(line) match {
-      case Nil            =>
-        intp.executionWrapper match {
-          case ""   => "No execution wrapper is set."
-          case s    => "Current execution wrapper: " + s
-        }
-      case "clear" :: Nil =>
-        intp.executionWrapper match {
-          case ""   => "No execution wrapper is set."
-          case s    => intp.clearExecutionWrapper() ; "Cleared execution wrapper."
-        }
-      case wrapper :: Nil =>
-        intp.typeOfExpression(wrapper) match {
-          case Some(PolyType(List(targ), MethodType(List(arg), restpe))) =>
-            intp setExecutionWrapper intp.pathToTerm(wrapper)
-            "Set wrapper to '" + wrapper + "'"
-          case Some(x) =>
-            failMsg + "\nFound: " + x
-          case _ =>
-            failMsg + "\nFound: <unknown>"
-        }
-      case _ => failMsg
+    onIntp { intp =>
+      import intp._
+      import global._
+
+      words(line) match {
+        case Nil            =>
+          intp.executionWrapper match {
+            case ""   => "No execution wrapper is set."
+            case s    => "Current execution wrapper: " + s
+          }
+        case "clear" :: Nil =>
+          intp.executionWrapper match {
+            case ""   => "No execution wrapper is set."
+            case s    => intp.clearExecutionWrapper() ; "Cleared execution wrapper."
+          }
+        case wrapper :: Nil =>
+          intp.typeOfExpression(wrapper) match {
+            case PolyType(List(targ), MethodType(List(arg), restpe)) =>
+              intp setExecutionWrapper intp.pathToTerm(wrapper)
+              "Set wrapper to '" + wrapper + "'"
+            case tp =>
+              failMsg + "\nFound: <unknown>"
+          }
+        case _ => failMsg
+      }
     }
   }
 
   private def pathToPhaseWrapper = intp.pathToTerm("$r") + ".phased.atCurrent"
-  /*
-  private def phaseCommand(name: String): Result = {
-    // This line crashes us in TreeGen:
-    //
-    //   if (intp.power.phased set name) "..."
-    //
-    // Exception in thread "main" java.lang.AssertionError: assertion failed: ._7.type
-    //  at scala.Predef$.assert(Predef.scala:99)
-    //  at scala.tools.nsc.ast.TreeGen.mkAttributedQualifier(TreeGen.scala:69)
-    //  at scala.tools.nsc.ast.TreeGen.mkAttributedQualifier(TreeGen.scala:44)
-    //  at scala.tools.nsc.ast.TreeGen.mkAttributedRef(TreeGen.scala:101)
-    //  at scala.tools.nsc.ast.TreeGen.mkAttributedStableRef(TreeGen.scala:143)
-    //
-    // But it works like so, type annotated.
-    val phased: Phased = power.phased
-    import phased.NoPhaseName
-
-    if (name == "clear") {
-      phased.set(NoPhaseName)
-      intp.clearExecutionWrapper()
-      "Cleared active phase."
-    }
-    else if (name == "") phased.get match {
-      case NoPhaseName => "Usage: :phase <expr> (e.g. typer, erasure.next, erasure+3)"
-      case ph          => "Active phase is '%s'.  (To clear, :phase clear)".format(phased.get)
-    }
-    else {
-      val what = phased.parse(name)
-      if (what.isEmpty || !phased.set(what)) 
-        "'" + name + "' does not appear to represent a valid phase."
-      else {
-        intp.setExecutionWrapper(pathToPhaseWrapper)
-        val activeMessage =
-          if (what.toString.length == name.length) "" + what
-          else "%s (%s)".format(what, name)
-        
-        "Active phase is now: " + activeMessage
-      }
-    }
-  }
-  */
-  
+  // private def phaseCommand(name: String): Result = {
+  //   val phased: Phased = power.phased
+  //   import phased.NoPhaseName
+
+  //   if (name == "clear") {
+  //     phased.set(NoPhaseName)
+  //     intp.clearExecutionWrapper()
+  //     "Cleared active phase."
+  //   }
+  //   else if (name == "") phased.get match {
+  //     case NoPhaseName => "Usage: :phase <expr> (e.g. typer, erasure.next, erasure+3)"
+  //     case ph          => "Active phase is '%s'.  (To clear, :phase clear)".format(phased.get)
+  //   }
+  //   else {
+  //     val what = phased.parse(name)
+  //     if (what.isEmpty || !phased.set(what))
+  //       "'" + name + "' does not appear to represent a valid phase."
+  //     else {
+  //       intp.setExecutionWrapper(pathToPhaseWrapper)
+  //       val activeMessage =
+  //         if (what.toString.length == name.length) "" + what
+  //         else "%s (%s)".format(what, name)
+
+  //       "Active phase is now: " + activeMessage
+  //     }
+  //   }
+  // }
+
   /** Available commands */
-  def commands: List[LoopCommand] = standardCommands /* ++ (
+  def commands: List[LoopCommand] = standardCommands /*++ (
     if (isReplPower) powerCommands else Nil
   )*/
-  
+
   val replayQuestionMessage =
-    """|The repl compiler has crashed spectacularly. Shall I replay your
-       |session? I can re-run all lines except the last one.
+    """|That entry seems to have slain the compiler.  Shall I replay
+       |your session? I can re-run each line except the last one.
        |[y/n]
     """.trim.stripMargin
 
-  private val crashRecovery: PartialFunction[Throwable, Unit] = {
+  private val crashRecovery: PartialFunction[Throwable, Boolean] = {
     case ex: Throwable =>
-      if (settings.YrichExes.value) {
-        val sources = implicitly[Sources]
-        echo("\n" + ex.getMessage)
-        echo(
-          if (isReplDebug) "[searching " + sources.path + " for exception contexts...]"
-          else "[searching for exception contexts...]"
-        )
-        echo(Exceptional(ex).force().context())
-      }
-      else {
-        echo(util.stackTraceString(ex))
-      }
+      echo(intp.global.throwableAsString(ex))
+
       ex match {
         case _: NoSuchMethodError | _: NoClassDefFoundError =>
-          echo("Unrecoverable error.")
+          echo("\nUnrecoverable error.")
           throw ex
         case _  =>
-          def fn(): Boolean = in.readYesOrNo(replayQuestionMessage, { echo("\nYou must enter y or n.") ; fn() })
+          def fn(): Boolean =
+            try in.readYesOrNo(replayQuestionMessage, { echo("\nYou must enter y or n.") ; fn() })
+            catch { case _: RuntimeException => false }
+
           if (fn()) replay()
           else echo("\nAbandoning crashed session.")
       }
+      true
   }
 
   /** The main read-eval-print loop for the repl.  It calls
@@ -564,66 +583,88 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
       in readLine prompt
     }
     // return false if repl should exit
-    def processLine(line: String): Boolean =
+    def processLine(line: String): Boolean = {
+      if (isAsync) {
+        if (!awaitInitialized()) return false
+        runThunks()
+      }
       if (line eq null) false               // assume null means EOF
       else command(line) match {
         case Result(false, _)           => false
         case Result(_, Some(finalLine)) => addReplay(finalLine) ; true
         case _                          => true
       }
-
-    while (true) {
-      try if (!processLine(readOneLine)) return
-      catch crashRecovery
     }
+    def innerLoop() {
+      if ( try processLine(readOneLine()) catch crashRecovery )
+        innerLoop()
+    }
+    innerLoop()
   }
 
   /** interpret all lines from a specified file */
-  def interpretAllFrom(file: File) {    
-    val oldIn = in
-    val oldReplay = replayCommandStack
-    
-    try file applyReader { reader =>
-      in = SimpleReader(reader, out, false)
-      echo("Loading " + file + "...")
-      loop()
-    }
-    finally {
-      in = oldIn
-      replayCommandStack = oldReplay
+  def interpretAllFrom(file: File) {
+    savingReader {
+      savingReplayStack {
+        file applyReader { reader =>
+          in = SimpleReader(reader, out, false)
+          echo("Loading " + file + "...")
+          loop()
+        }
+      }
     }
   }
 
-  /** create a new interpreter and replay all commands so far */
+  /** create a new interpreter and replay the given commands */
   def replay() {
-    closeInterpreter()
-    createInterpreter()
-    for (cmd <- replayCommands) {
+    reset()
+    if (replayCommandStack.isEmpty)
+      echo("Nothing to replay.")
+    else for (cmd <- replayCommands) {
       echo("Replaying: " + cmd)  // flush because maybe cmd will have its own output
       command(cmd)
       echo("")
     }
   }
-  
+  def resetCommand() {
+    echo("Resetting repl state.")
+    if (replayCommandStack.nonEmpty) {
+      echo("Forgetting this session history:\n")
+      replayCommands foreach echo
+      echo("")
+      replayCommandStack = Nil
+    }
+    if (intp.namedDefinedTerms.nonEmpty)
+      echo("Forgetting all expression results and named terms: " + intp.namedDefinedTerms.mkString(", "))
+    if (intp.definedTypes.nonEmpty)
+      echo("Forgetting defined types: " + intp.definedTypes.mkString(", "))
+
+    reset()
+  }
+  def reset() {
+    intp.reset()
+    // unleashAndSetPhase()
+  }
+
   /** fork a shell and run a command */
   lazy val shCommand = new LoopCommand("sh", "run a shell command (result is implicitly => List[String])") {
     override def usage = "<command line>"
     def apply(line: String): Result = line match {
       case ""   => showUsage()
-      case _    => 
+      case _    =>
         val toRun = classOf[ProcessResult].getName + "(" + string2codeQuoted(line) + ")"
         intp interpret toRun
         ()
     }
   }
-  
+
   def withFile(filename: String)(action: File => Unit) {
     val f = File(filename)
-    
+
     if (f.exists) action(f)
     else echo("That file does not exist")
   }
-  
+
   def loadCommand(arg: String) = {
     var shouldReplay: Option[String] = None
     withFile(arg)(f => {
@@ -643,23 +684,36 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
     }
     else echo("The path '" + f + "' doesn't seem to exist.")
   }
-  
+
   def powerCmd(): Result = {
     if (isReplPower) "Already in power mode."
-    else enablePowerMode()
+    else enablePowerMode(false)
   }
-  def enablePowerMode() = {
-    //replProps.power setValue true
-    //power.unleash()
-    //echo(power.banner)
+
+  def enablePowerMode(isDuringInit: Boolean) = {
+    // replProps.power setValue true
+    // unleashAndSetPhase()
+    // asyncEcho(isDuringInit, power.banner)
   }
-  
+  // private def unleashAndSetPhase() {
+  //   if (isReplPower) {
+  //     // power.unleash()
+  //     // Set the phase to "typer"
+  //     intp beSilentDuring phaseCommand("typer")
+  //   }
+  // }
+
+  def asyncEcho(async: Boolean, msg: => String) {
+    if (async) asyncMessage(msg)
+    else echo(msg)
+  }
+
   def verbosity() = {
-    val old = intp.printResults
-    intp.printResults = !old
-    echo("Switched " + (if (old) "off" else "on") + " result printing.")
+    // val old = intp.printResults
+    // intp.printResults = !old
+    // echo("Switched " + (if (old) "off" else "on") + " result printing.")
   }
-  
+
   /** Run one command submitted by the user.  Two values are returned:
     * (1) whether to keep running, (2) the line to record for replay,
     * if any. */
@@ -674,11 +728,11 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
     else if (intp.global == null) Result(false, None)  // Notice failure to create compiler
     else Result(true, interpretStartingWith(line))
   }
-  
+
   private def readWhile(cond: String => Boolean) = {
     Iterator continually in.readLine("") takeWhile (x => x != null && cond(x))
   }
-  
+
   def pasteCommand(): Result = {
     echo("// Entering paste mode (ctrl-D to finish)\n")
     val code = readWhile(_ => true) mkString "\n"
@@ -686,23 +740,19 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
     intp interpret code
     ()
   }
-    
+
   private object paste extends Pasted {
     val ContinueString = "     | "
     val PromptString   = "scala> "
-    
+
     def interpret(line: String): Unit = {
       echo(line.trim)
       intp interpret line
       echo("")
     }
-    
+
     def transcript(start: String) = {
-      // Printing this message doesn't work very well because it's buried in the
-      // transcript they just pasted.  Todo: a short timer goes off when
-      // lines stop coming which tells them to hit ctrl-D.
-      //
-      // echo("// Detected repl transcript paste: ctrl-D to finish.")
+      echo("\n// Detected repl transcript paste: ctrl-D to finish.\n")
       apply(Iterator(start) ++ readWhile(_.trim != PromptString.trim))
     }
   }
@@ -717,7 +767,7 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
   def interpretStartingWith(code: String): Option[String] = {
     // signal to the completion machinery that non-completion input has been received
     in.completion.resetVerbosity()
-    
+
     def reallyInterpret = {
       val reallyResult = intp.interpret(code)
       (reallyResult, reallyResult match {
@@ -727,7 +777,7 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
           if (in.interactive && code.endsWith("\n\n")) {
             echo("You typed two blank lines.  Starting a new command.")
             None
-          } 
+          }
           else in.readLine(ContinueString) match {
             case null =>
               // we know compilation is going to fail since we're at EOF and the
@@ -741,10 +791,10 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
           }
       })
     }
-    
+
     /** Here we place ourselves between the user and the interpreter and examine
      *  the input they are ostensibly submitting.  We intervene in several cases:
-     * 
+     *
      *  1) If the line starts with "scala> " it is assumed to be an interpreter paste.
      *  2) If the line starts with "." (but not ".." or "./") it is treated as an invocation
      *     on the previous result.
@@ -759,28 +809,12 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
     else if (Completion.looksLikeInvocation(code) && intp.mostRecentVar != "") {
       interpretStartingWith(intp.mostRecentVar + code)
     }
-    else {
-      def runCompletion = in.completion execute code map (intp bindValue _)
-      /** Due to my accidentally letting file completion execution sneak ahead
-       *  of actual parsing this now operates in such a way that the scala
-       *  interpretation always wins.  However to avoid losing useful file
-       *  completion I let it fail and then check the others.  So if you
-       *  type /tmp it will echo a failure and then give you a Directory object.
-       *  It's not pretty: maybe I'll implement the silence bits I need to avoid
-       *  echoing the failure.
-       */
-      if (intp isParseable code) {
-        val (code, result) = reallyInterpret
-        //if (power != null && code == IR.Error)
-        //  runCompletion
-        
-        result
-      }
-      else runCompletion match {
-        case Some(_)  => None // completion hit: avoid the latent error
-        case _        => reallyInterpret._2  // trigger the latent error
-      }
+    else if (code.trim startsWith "//") {
+      // line comment, do nothing
+      None
     }
+    else
+      reallyInterpret._2
   }
 
   // runs :load `file` on any files passed via -i
@@ -794,7 +828,7 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
       }
     case _ =>
   }
-  
+
   /** Tries to create a JLineReader, falling back to SimpleReader:
    *  unless settings or properties are such that it should start
    *  with SimpleReader.
@@ -802,7 +836,7 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
   def chooseReader(settings: Settings): InteractiveReader = {
     if (settings.Xnojline.value || Properties.isEmacsShell)
       SimpleReader()
-    else try SparkJLineReader(
+    else try new SparkJLineReader(
       if (settings.noCompletion.value) NoCompletion
       else new SparkJLineCompletion(intp)
     )
@@ -813,96 +847,99 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
     }
   }
 
-  def initializeSpark() {
-    intp.beQuietDuring {
-      command("""
-        spark.repl.Main.interp.out.println("Creating SparkContext...");
-        spark.repl.Main.interp.out.flush();
-        @transient val sc = spark.repl.Main.interp.createSparkContext();
-        spark.repl.Main.interp.out.println("Spark context available as sc.");
-        spark.repl.Main.interp.out.flush();
-        """)
-      command("import spark.SparkContext._");
-    }
-    echo("Type in expressions to have them evaluated.")
-    echo("Type :help for more information.")
-  }
-
-  var sparkContext: SparkContext = null
-
-  def createSparkContext(): SparkContext = {
-    val master = this.master match {
-      case Some(m) => m
-      case None => {
-        val prop = System.getenv("MASTER")
-        if (prop != null) prop else "local"
-      }
-    }
-    sparkContext = new SparkContext(master, "Spark shell")
-    sparkContext
-  }
-
-  def process(settings: Settings): Boolean = {
-    // Ensure logging is initialized before any Spark threads try to use logs
-    // (because SLF4J initialization is not thread safe)
-    initLogging()
-
-    printWelcome()
-    echo("Initializing interpreter...")
+  val u: scala.reflect.runtime.universe.type = scala.reflect.runtime.universe
+  val m = u.runtimeMirror(getClass.getClassLoader)
+  private def tagOfStaticClass[T: ClassTag]: u.TypeTag[T] =
+    u.TypeTag[T](
+      m,
+      new TypeCreator {
+        def apply[U <: ApiUniverse with Singleton](m: Mirror[U]): U # Type =
+          m.staticClass(classTag[T].runtimeClass.getName).toTypeConstructor.asInstanceOf[U # Type]
+      })
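+
+  // This manufactures a TypeTag from an erased ClassTag alone, which suffices
+  // for binding the non-generic SparkIMain below; it would be lossy for
+  // parameterized types, so it is no general substitute for typeTag.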
 
+  def process(settings: Settings): Boolean = savingContextLoader {
     this.settings = settings
     createInterpreter()
-    
+
     // sets in to some kind of reader depending on environmental cues
     in = in0 match {
       case Some(reader) => SimpleReader(reader, out, true)
-      case None         => chooseReader(settings)
+      case None         =>
+        // some post-initialization
+        chooseReader(settings) match {
+          case x: SparkJLineReader => addThunk(x.consoleReader.postInit) ; x
+          case x                   => x
+        }
     }
+    lazy val tagOfSparkIMain = tagOfStaticClass[spark.repl.SparkIMain]
+    // Bind intp somewhere out of the regular namespace where
+    // we can get at it in generated code.
+    addThunk(intp.quietBind(NamedParam[SparkIMain]("$intp", intp)(tagOfSparkIMain, classTag[SparkIMain])))
+    addThunk({
+      import scala.tools.nsc.io._
+      import Properties.userHome
+      import scala.compat.Platform.EOL
+      val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp())
+      if (autorun.isDefined) intp.quietRun(autorun.get)
+    })
+
+    addThunk(printWelcome())
+    addThunk(initializeSpark())
 
     loadFiles(settings)
     // it is broken on startup; go ahead and exit
     if (intp.reporter.hasErrors)
       return false
-    
-    try {      
-      // this is about the illusion of snappiness.  We call initialize()
-      // which spins off a separate thread, then print the prompt and try 
-      // our best to look ready.  Ideally the user will spend a
-      // couple seconds saying "wow, it starts so fast!" and by the time
-      // they type a command the compiler is ready to roll.
-      intp.initialize()
-      initializeSpark()
-      if (isReplPower) {
-        echo("Starting in power mode, one moment...\n")
-        enablePowerMode()
-      }
-      loop()
+
+    // This is about the illusion of snappiness.  We call initialize()
+    // which spins off a separate thread, then print the prompt and try
+    // our best to look ready.  The interlocking lazy vals tend to
+    // inter-deadlock, so we break the cycle with a single asynchronous
+    // message to an actor.
+    if (isAsync) {
+      intp initialize initializedCallback()
+      createAsyncListener() // listens for signal to run postInitialization
     }
+    else {
+      intp.initializeSynchronous()
+      postInitialization()
+    }
+    // printWelcome()
+
+    try loop()
+    catch AbstractOrMissingHandler()
     finally closeInterpreter()
+
     true
   }
 
+  def createSparkContext(): SparkContext = {
+    val master = this.master match {
+      case Some(m) => m
+      case None => {
+        val prop = System.getenv("MASTER")
+        if (prop != null) prop else "local"
+      }
+    }
+    sparkContext = new SparkContext(master, "Spark shell")
+    echo("Created spark context..")
+    sparkContext
+  }
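+
+  // e.g. launching the shell with MASTER=spark://host:7077 in the environment
+  // (or constructing SparkILoop with an explicit master) targets that cluster;
+  // with neither supplied, the context falls back to "local".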
+
   /** process command-line arguments and do as they request */
   def process(args: Array[String]): Boolean = {
-    val command = new CommandLine(args.toList, msg => echo("scala: " + msg))
+    val command = new CommandLine(args.toList, echo)
     def neededHelp(): String =
       (if (command.settings.help.value) command.usageMsg + "\n" else "") +
       (if (command.settings.Xhelp.value) command.xusageMsg + "\n" else "")
-    
+
     // if they asked for no help and command is valid, we call the real main
     neededHelp() match {
       case ""     => command.ok && process(command.settings)
       case help   => echoNoNL(help) ; true
     }
   }
-  
-  @deprecated("Use `process` instead", "2.9.0")
-  def main(args: Array[String]): Unit = {
-    if (isReplDebug)
-      System.out.println(new java.util.Date)
-    
-    process(args)
-  }
+
   @deprecated("Use `process` instead", "2.9.0")
   def main(settings: Settings): Unit = process(settings)
 }
@@ -916,10 +953,10 @@ object SparkILoop {
   // like if you'd just typed it into the repl.
   def runForTranscript(code: String, settings: Settings): String = {
     import java.io.{ BufferedReader, StringReader, OutputStreamWriter }
-    
+
     stringFromStream { ostream =>
       Console.withOut(ostream) {
-        val output = new PrintWriter(new OutputStreamWriter(ostream), true) {
+        val output = new JPrintWriter(new OutputStreamWriter(ostream), true) {
           override def write(str: String) = {
             // completely skip continuation lines
             if (str forall (ch => ch.isWhitespace || ch == '|')) ()
@@ -945,19 +982,19 @@ object SparkILoop {
       }
     }
   }
-  
+
   /** Creates an interpreter loop with default settings and feeds
    *  the given code to it as input.
    */
   def run(code: String, sets: Settings = new Settings): String = {
     import java.io.{ BufferedReader, StringReader, OutputStreamWriter }
-    
+
     stringFromStream { ostream =>
       Console.withOut(ostream) {
         val input    = new BufferedReader(new StringReader(code))
-        val output   = new PrintWriter(new OutputStreamWriter(ostream), true)
-        val repl     = new SparkILoop(input, output)
-        
+        val output   = new JPrintWriter(new OutputStreamWriter(ostream), true)
+        val repl     = new SparkILoop(input, output)
+
         if (sets.classpath.isDefault)
           sets.classpath.value = sys.props("java.class.path")
 
@@ -966,32 +1003,4 @@ object SparkILoop {
     }
   }
   def run(lines: List[String]): String = run(lines map (_ + "\n") mkString)
-
-  // provide the enclosing type T
-  // in order to set up the interpreter's classpath and parent class loader properly
-  def breakIf[T: Manifest](assertion: => Boolean, args: NamedParam*): Unit =
-    if (assertion) break[T](args.toList)
-
-  // start a repl, binding supplied args
-  def break[T: Manifest](args: List[NamedParam]): Unit = {
-    val msg = if (args.isEmpty) "" else "  Binding " + args.size + " value%s.".format(
-      if (args.size == 1) "" else "s"
-    )
-    echo("Debug repl starting." + msg)
-    val repl = new SparkILoop {
-      override def prompt = "\ndebug> "
-    }
-    repl.settings = new Settings(echo)
-    repl.settings.embeddedDefaults[T]
-    repl.createInterpreter()
-    repl.in = SparkJLineReader(repl)
-    
-    // rebind exit so people don't accidentally call sys.exit by way of predef
-    repl.quietRun("""def exit = println("Type :quit to resume program execution.")""")
-    args foreach (p => repl.bind(p.name, p.tpe, p.value))
-    repl.loop()
-
-    echo("\nDebug repl exiting.")
-    repl.closeInterpreter()
-  }  
 }
diff --git a/repl/src/main/scala/spark/repl/SparkILoopInit.scala b/repl/src/main/scala/spark/repl/SparkILoopInit.scala
new file mode 100644
index 0000000000000000000000000000000000000000..6ae535c4e6f2486d461a0fc9e1e8603838ea2756
--- /dev/null
+++ b/repl/src/main/scala/spark/repl/SparkILoopInit.scala
@@ -0,0 +1,143 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package spark.repl
+
+import scala.tools.nsc._
+import scala.tools.nsc.interpreter._
+
+import scala.reflect.internal.util.Position
+import scala.util.control.Exception.ignoring
+import scala.tools.nsc.util.stackTraceString
+
+/**
+ *  Machinery for the asynchronous initialization of the repl.
+ */
+trait SparkILoopInit {
+  self: SparkILoop =>
+
+  /** Print a welcome message */
+  def printWelcome() {
+    echo("""Welcome to
+      ____              __
+     / __/__  ___ _____/ /__
+    _\ \/ _ \/ _ `/ __/  '_/
+   /___/ .__/\_,_/_/ /_/\_\   version 0.7.1-SNAPSHOT
+      /_/
+""")
+    import Properties._
+    val welcomeMsg = "Using Scala %s (%s, Java %s)".format(
+      versionString, javaVmName, javaVersion)
+    echo(welcomeMsg)
+    echo("Type in expressions to have them evaluated.")
+    echo("Type :help for more information.")
+  }
+
+  protected def asyncMessage(msg: String) {
+    if (isReplInfo || isReplPower)
+      echoAndRefresh(msg)
+  }
+
+  private val initLock = new java.util.concurrent.locks.ReentrantLock()
+  private val initCompilerCondition = initLock.newCondition() // signal the compiler is initialized
+  private val initLoopCondition = initLock.newCondition()     // signal the whole repl is initialized
+  private val initStart = System.nanoTime
+
+  private def withLock[T](body: => T): T = {
+    initLock.lock()
+    try body
+    finally initLock.unlock()
+  }
+  // a condition used to ensure serial access to the compiler.
+  @volatile private var initIsComplete = false
+  @volatile private var initError: String = null
+  private def elapsed() = "%.3f".format((System.nanoTime - initStart).toDouble / 1000000000L)
+
+  // the method to be called when the interpreter is initialized.
+  // Very important this method does nothing synchronous (i.e. do
+  // not try to use the interpreter) because until it returns, the
+  // repl's lazy val `global` is still locked.
+  protected def initializedCallback() = withLock(initCompilerCondition.signal())
+
+  // Spins off a thread which awaits a single message once the interpreter
+  // has been initialized.
+  protected def createAsyncListener() = {
+    io.spawn {
+      withLock(initCompilerCondition.await())
+      asyncMessage("[info] compiler init time: " + elapsed() + " s.")
+      postInitialization()
+    }
+  }
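+
+  // Initialization handshake, in order (a sketch of the flow above):
+  //   1. the compiler thread finishes its lazy init and calls
+  //      initializedCallback(), signalling initCompilerCondition;
+  //   2. the listener spawned above wakes and runs postInitialization(),
+  //      which runs the queued thunks and signals initLoopCondition;
+  //   3. the main loop, parked in awaitInitialized(), wakes and proceeds.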
+
+  // called from main repl loop
+  protected def awaitInitialized(): Boolean = {
+    if (!initIsComplete)
+      withLock { while (!initIsComplete) initLoopCondition.await() }
+    if (initError != null) {
+      println("""
+        |Failed to initialize the REPL due to an unexpected error.
+        |This is a bug; please report it along with the error diagnostics printed below.
+        |%s.""".stripMargin.format(initError)
+      )
+      false
+    } else true
+  }
+  // private def warningsThunks = List(
+  //   () => intp.bind("lastWarnings", "" + typeTag[List[(Position, String)]], intp.lastWarnings _),
+  // )
+
+  protected def postInitThunks = List[Option[() => Unit]](
+    Some(intp.setContextClassLoader _),
+    if (isReplPower) Some(() => enablePowerMode(true)) else None
+  ).flatten
+  // ++ (
+  //   warningsThunks
+  // )
+  // called once after init condition is signalled
+  protected def postInitialization() {
+    try {
+      postInitThunks foreach (f => addThunk(f()))
+      runThunks()
+    } catch {
+      case ex: Throwable =>
+        initError = stackTraceString(ex)
+        throw ex
+    } finally {
+      initIsComplete = true
+
+      if (isAsync) {
+        asyncMessage("[info] total init time: " + elapsed() + " s.")
+        withLock(initLoopCondition.signal())
+      }
+    }
+  }
+
+  def initializeSpark() {
+    intp.beQuietDuring {
+      command("""
+        @transient val sc = spark.repl.Main.interp.createSparkContext();
+        """)
+      command("import spark.SparkContext._")
+    }
+    echo("Spark context available as sc.")
+  }
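+
+  // Once initializeSpark has run, a session can use sc immediately, e.g.
+  // (hypothetical transcript):
+  //   scala> sc.parallelize(1 to 1000).filter(_ % 2 == 0).count()
+  //   res0: Long = 500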
+
+  // code to be executed only after the interpreter is initialized
+  // and the lazy val `global` can be accessed without risk of deadlock.
+  private var pendingThunks: List[() => Unit] = Nil
+  protected def addThunk(body: => Unit) = synchronized {
+    pendingThunks :+= (() => body)
+  }
+  protected def runThunks(): Unit = synchronized {
+    if (pendingThunks.nonEmpty)
+      logDebug("Clearing " + pendingThunks.size + " thunks.")
+
+    while (pendingThunks.nonEmpty) {
+      val thunk = pendingThunks.head
+      pendingThunks = pendingThunks.tail
+      thunk()
+    }
+  }
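+
+  // Thunks run in FIFO order, and a thunk may itself call addThunk: both
+  // methods synchronize on the same (reentrant) monitor, so a newly added
+  // thunk is appended and picked up by the same while loop.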
+}
diff --git a/repl/src/main/scala/spark/repl/SparkIMain.scala b/repl/src/main/scala/spark/repl/SparkIMain.scala
index 43b6a6c9500302acac56155a44a5b4c658b62912..9894429ec1f4f109119287302b1e74a51c6e6d20 100644
--- a/repl/src/main/scala/spark/repl/SparkIMain.scala
+++ b/repl/src/main/scala/spark/repl/SparkIMain.scala
@@ -1,5 +1,5 @@
 /* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
  * @author  Martin Odersky
  */
 
@@ -9,304 +9,334 @@ import scala.tools.nsc._
 import scala.tools.nsc.interpreter._
 
 import Predef.{ println => _, _ }
-import java.io.{ PrintWriter }
-import java.lang.reflect
+import util.stringFromWriter
+import scala.reflect.internal.util._
 import java.net.URL
-import util.{ Set => _, _ }
-import io.{ AbstractFile, PlainFile, VirtualDirectory }
-import reporters.{ ConsoleReporter, Reporter }
-import symtab.{ Flags, Names }
-import scala.tools.nsc.interpreter.{ Results => IR }
+import scala.sys.BooleanProp
+import io.{AbstractFile, PlainFile, VirtualDirectory}
+
+import reporters._
+import symtab.Flags
+import scala.reflect.internal.Names
 import scala.tools.util.PathResolver
-import scala.tools.nsc.util.{ ScalaClassLoader, Exceptional }
+import scala.tools.nsc.util.ScalaClassLoader
 import ScalaClassLoader.URLClassLoader
-import Exceptional.unwrap
+import scala.tools.nsc.util.Exceptional.unwrap
 import scala.collection.{ mutable, immutable }
-import scala.PartialFunction.{ cond, condOpt }
 import scala.util.control.Exception.{ ultimately }
-import scala.reflect.NameTransformer
 import SparkIMain._
+import java.util.concurrent.Future
+import typechecker.Analyzer
+import scala.language.implicitConversions
+import scala.reflect.runtime.{ universe => ru }
+import scala.reflect.{ ClassTag, classTag }
+import scala.tools.reflect.StdRuntimeTags._
+import scala.util.control.ControlThrowable
+import util.stackTraceString
 
 import spark.HttpServer
 import spark.Utils
 import spark.SparkEnv
+import spark.Logging
+
+// /** directory to save .class files to */
+// private class ReplVirtualDirectory(out: JPrintWriter) extends VirtualDirectory("((memory))", None) {
+//   private def pp(root: AbstractFile, indentLevel: Int) {
+//     val spaces = "    " * indentLevel
+//     out.println(spaces + root.name)
+//     if (root.isDirectory)
+//       root.toList sortBy (_.name) foreach (x => pp(x, indentLevel + 1))
+//   }
+//   // print the contents hierarchically
+//   def show() = pp(this, 0)
+// }
+
+  /** An interpreter for Scala code.
+   *
+   *  The main public entry points are compile(), interpret(), and bind().
+   *  The compile() method loads a complete Scala file.  The interpret() method
+   *  executes one line of Scala code at the request of the user.  The bind()
+   *  method binds an object to a variable that can then be used by later
+   *  interpreted code.
+   *
+   *  The overall approach is based on compiling the requested code and then
+   *  using a Java classloader and Java reflection to run the code
+   *  and access its results.
+   *
+   *  In more detail, a single compiler instance is used
+   *  to accumulate all successfully compiled or interpreted Scala code.  To
+   *  "interpret" a line of code, the compiler generates a fresh object that
+   *  includes the line of code and which has public member(s) to export
+   *  all variables defined by that code.  To extract the result of an
+   *  interpreted line to show the user, a second "result object" is created
+   *  which imports the variables exported by the above object and then
+   *  exports members called "$eval" and "$print". To accommodate user expressions
+   *  that read from variables or methods defined in previous statements, "import"
+   *  statements are used.
+   *
+   *  This interpreter shares the strengths and weaknesses of using the
+   *  full compiler-to-Java.  The main strength is that interpreted code
+   *  behaves exactly as does compiled code, including running at full speed.
+   *  The main weakness is that redefining classes and methods is not handled
+   *  properly, because rebinding at the Java level is technically difficult.
+   *
+   *  @author Moez A. Abdel-Gawad
+   *  @author Lex Spoon
+   */
+  class SparkIMain(initialSettings: Settings, val out: JPrintWriter) extends SparkImports with Logging {
+    imain =>
 
-/** An interpreter for Scala code.
- *  
- *  The main public entry points are compile(), interpret(), and bind().
- *  The compile() method loads a complete Scala file.  The interpret() method
- *  executes one line of Scala code at the request of the user.  The bind()
- *  method binds an object to a variable that can then be used by later
- *  interpreted code.
- *  
- *  The overall approach is based on compiling the requested code and then
- *  using a Java classloader and Java reflection to run the code
- *  and access its results.
- *  
- *  In more detail, a single compiler instance is used
- *  to accumulate all successfully compiled or interpreted Scala code.  To
- *  "interpret" a line of code, the compiler generates a fresh object that
- *  includes the line of code and which has public member(s) to export
- *  all variables defined by that code.  To extract the result of an
- *  interpreted line to show the user, a second "result object" is created
- *  which imports the variables exported by the above object and then
- *  exports a single member named "$export".  To accomodate user expressions
- *  that read from variables or methods defined in previous statements, "import"
- *  statements are used.
- *  
- *  This interpreter shares the strengths and weaknesses of using the
- *  full compiler-to-Java.  The main strength is that interpreted code
- *  behaves exactly as does compiled code, including running at full speed.
- *  The main weakness is that redefining classes and methods is not handled
- *  properly, because rebinding at the Java level is technically difficult.
- *
- *  @author Moez A. Abdel-Gawad
- *  @author Lex Spoon
- */
-class SparkIMain(val settings: Settings, protected val out: PrintWriter) extends SparkImports {
-  imain =>
-  
-  /** construct an interpreter that reports to Console */
-  def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
-  def this() = this(new Settings())
-
-  /** whether to print out result lines */
-  var printResults: Boolean = true
-  
-  /** whether to print errors */
-  var totalSilence: Boolean = false
-
-  private val RESULT_OBJECT_PREFIX = "RequestResult$"
-  
-  lazy val formatting: Formatting = new Formatting {
-    val prompt = Properties.shellPromptString
-  }
-  import formatting._
-  
-  val SPARK_DEBUG_REPL: Boolean = (System.getenv("SPARK_DEBUG_REPL") == "1")
-
-  /** Local directory to save .class files too */
-  val outputDir = {
-    val tmp = System.getProperty("java.io.tmpdir")
-    val rootDir = System.getProperty("spark.repl.classdir", tmp)
-    Utils.createTempDir(rootDir)
-  }
-  if (SPARK_DEBUG_REPL) {
-    echo("Output directory: " + outputDir)
-  }
 
-  /** Scala compiler virtual directory for outputDir */
-  val virtualDirectory = new PlainFile(outputDir)
+    val SPARK_DEBUG_REPL: Boolean = (System.getenv("SPARK_DEBUG_REPL") == "1")
 
-  /** Jetty server that will serve our classes to worker nodes */
-  val classServer = new HttpServer(outputDir)
+    /** Local directory to save .class files to */
+    val outputDir = {
+      val tmp = System.getProperty("java.io.tmpdir")
+      val rootDir = System.getProperty("spark.repl.classdir", tmp)
+      Utils.createTempDir(rootDir)
+    }
+    if (SPARK_DEBUG_REPL) {
+      echo("Output directory: " + outputDir)
+    }
 
-  // Start the classServer and store its URI in a spark system property
-  // (which will be passed to executors so that they can connect to it)
-  classServer.start()
-  System.setProperty("spark.repl.class.uri", classServer.uri)
-  if (SPARK_DEBUG_REPL) {
-    echo("Class server started, URI = " + classServer.uri)
-  }
+    val virtualDirectory                              = new PlainFile(outputDir) // "directory" for classfiles
+    val classServer                                   = new HttpServer(outputDir) // Jetty server that will serve our classes to worker nodes
+    private var currentSettings: Settings             = initialSettings
+    var printResults                                  = true      // whether to print result lines
+    var totalSilence                                  = false     // whether to print anything
+    private var _initializeComplete                   = false     // compiler is initialized
+    private var _isInitialized: Future[Boolean]       = null      // set up initialization future
+    private var bindExceptions                        = true      // whether to bind the lastException variable
+    private var _executionWrapper                     = ""        // code to be wrapped around all lines
+
+    // Start the classServer and store its URI in a spark system property
+    // (which will be passed to executors so that they can connect to it)
+    classServer.start()
+    System.setProperty("spark.repl.class.uri", classServer.uri)
+    if (SPARK_DEBUG_REPL) {
+      echo("Class server started, URI = " + classServer.uri)
+    }
+
+    /** We're going to go to some trouble to initialize the compiler asynchronously.
+     *  It's critical that nothing call into it until it's been initialized or we will
+     *  run into unrecoverable issues, but the perceived repl startup time goes
+     *  through the roof if we wait for it.  So we initialize it with a future and
+     *  use a lazy val to ensure that any attempt to use the compiler object waits
+     *  on the future.
+     */
+    private var _classLoader: AbstractFileClassLoader = null                              // active classloader
+    private val _compiler: Global                     = newCompiler(settings, reporter)   // our private compiler
 
-  /*
-  // directory to save .class files to
-  val virtualDirectory = new VirtualDirectory("(memory)", None) {
-    private def pp(root: io.AbstractFile, indentLevel: Int) {
-      val spaces = "    " * indentLevel
-      out.println(spaces + root.name)
-      if (root.isDirectory)
-        root.toList sortBy (_.name) foreach (x => pp(x, indentLevel + 1))
+    private val nextReqId = {
+      var counter = 0
+      () => { counter += 1 ; counter }
     }
-    // print the contents hierarchically
-    def show() = pp(this, 0)
-  }
-  */
-
-  /** reporter */
-  lazy val reporter: ConsoleReporter = new SparkIMain.ReplReporter(this)
-  import reporter.{ printMessage, withoutTruncating }
-  
-  // not sure if we have some motivation to print directly to console
-  private def echo(msg: String) { Console println msg }
-  
-  // protected def defaultImports: List[String] = List("_root_.scala.sys.exit")
-
-  /** We're going to go to some trouble to initialize the compiler asynchronously.
-   *  It's critical that nothing call into it until it's been initialized or we will
-   *  run into unrecoverable issues, but the perceived repl startup time goes
-   *  through the roof if we wait for it.  So we initialize it with a future and
-   *  use a lazy val to ensure that any attempt to use the compiler object waits
-   *  on the future.
-   */
-  private val _compiler: Global = newCompiler(settings, reporter)
-  private var _initializeComplete = false
-  def isInitializeComplete = _initializeComplete
-
-  private def _initialize(): Boolean = {
-    val source = """
-      |class $repl_$init {
-      |  List(1) map (_ + 1)
-      |}
-      |""".stripMargin
-    
-    val result = try {
-      new _compiler.Run() compileSources List(new BatchSourceFile("<init>", source))
-      if (isReplDebug || settings.debug.value) {
-        // Can't use printMessage here, it deadlocks
-        Console.println("Repl compiler initialized.")
-      }
-      // addImports(defaultImports: _*)
-      true
-    } 
-    catch {
-      case x: AbstractMethodError =>
-        printMessage("""
-          |Failed to initialize compiler: abstract method error.
-          |This is most often remedied by a full clean and recompile.
-          |""".stripMargin
-        )
-        x.printStackTrace()
-        false
-      case x: MissingRequirementError => printMessage("""
-        |Failed to initialize compiler: %s not found.
-        |** Note that as of 2.8 scala does not assume use of the java classpath.
-        |** For the old behavior pass -usejavacp to scala, or if using a Settings
-        |** object programatically, settings.usejavacp.value = true.""".stripMargin.format(x.req)
+
+    def compilerClasspath: Seq[URL] = (
+      if (isInitializeComplete) global.classPath.asURLs
+      else new PathResolver(settings).result.asURLs  // the compiler's classpath
       )
-      false
+    def settings = currentSettings
+    def mostRecentLine = prevRequestList match {
+      case Nil      => ""
+      case req :: _ => req.originalLine
+    }
+    // Run the code body with settings.nowarn temporarily set to true.
+    def withoutWarnings[T](body: => T): T = beQuietDuring {
+      val saved = settings.nowarn.value
+      if (!saved)
+        settings.nowarn.value = true
+
+      try body
+      finally if (!saved) settings.nowarn.value = false
     }
-    
-    try result
-    finally _initializeComplete = result
-  }
-  
-  // set up initialization future
-  private var _isInitialized: () => Boolean = null
-  def initialize() = synchronized { 
-    if (_isInitialized == null)
-      _isInitialized = scala.concurrent.ops future _initialize()
-  }
 
-  /** the public, go through the future compiler */
-  lazy val global: Global = {
-    initialize()
+    /** construct an interpreter that reports to Console */
+    def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true))
+    def this() = this(new Settings())
 
-    // blocks until it is ; false means catastrophic failure
-    if (_isInitialized()) _compiler
-    else null
-  }
-  @deprecated("Use `global` for access to the compiler instance.", "2.9.0")
-  lazy val compiler: global.type = global
-
-  import global._
-  
-  object naming extends {
-    val global: imain.global.type = imain.global
-  } with Naming {
-    // make sure we don't overwrite their unwisely named res3 etc.
-    override def freshUserVarName(): String = {
-      val name = super.freshUserVarName()
-      if (definedNameMap contains name) freshUserVarName()
-      else name
+    lazy val repllog: Logger = new Logger {
+      val out: JPrintWriter = imain.out
+      val isInfo: Boolean  = BooleanProp keyExists "scala.repl.info"
+      val isDebug: Boolean = BooleanProp keyExists "scala.repl.debug"
+      val isTrace: Boolean = BooleanProp keyExists "scala.repl.trace"
     }
-  }
-  import naming._
-
-  // object dossiers extends {
-  //   val intp: imain.type = imain
-  // } with Dossiers { }
-  // import dossiers._
-  
-  lazy val memberHandlers = new {
-    val intp: imain.type = imain
-  } with SparkMemberHandlers
-  import memberHandlers._
-  
-  def atPickler[T](op: => T): T = atPhase(currentRun.picklerPhase)(op)
-  def afterTyper[T](op: => T): T = atPhase(currentRun.typerPhase.next)(op)
-
-  /** Temporarily be quiet */
-  def beQuietDuring[T](operation: => T): T = {    
-    val wasPrinting = printResults    
-    ultimately(printResults = wasPrinting) {
-      if (isReplDebug) echo(">> beQuietDuring")
-      else printResults = false
-      
-      operation
+    lazy val formatting: Formatting = new Formatting {
+      val prompt = Properties.shellPromptString
     }
-  }
-  def beSilentDuring[T](operation: => T): T = {
-    val saved = totalSilence
-    totalSilence = true
-    try operation
-    finally totalSilence = saved
-  }
-  
-  def quietRun[T](code: String) = beQuietDuring(interpret(code))
-
-  /** whether to bind the lastException variable */
-  private var bindLastException = true
-  
-  /** A string representing code to be wrapped around all lines. */
-  private var _executionWrapper: String = ""
-  def executionWrapper = _executionWrapper
-  def setExecutionWrapper(code: String) = _executionWrapper = code
-  def clearExecutionWrapper() = _executionWrapper = ""
-  
-  /** Temporarily stop binding lastException */
-  def withoutBindingLastException[T](operation: => T): T = {
-    val wasBinding = bindLastException
-    ultimately(bindLastException = wasBinding) {
-      bindLastException = false
-      operation
+    lazy val reporter: ConsoleReporter = new SparkIMain.ReplReporter(this)
+
+    import formatting._
+    import reporter.{ printMessage, withoutTruncating }
+
+    // This exists mostly because using the reporter too early leads to deadlock.
+    private def echo(msg: String) { Console println msg }
+    private def _initSources = List(new BatchSourceFile("<init>", "class $repl_$init { }"))
+    private def _initialize() = {
+      try {
+        // todo. if this crashes, REPL will hang
+        new _compiler.Run() compileSources _initSources
+        _initializeComplete = true
+        true
+      }
+      catch AbstractOrMissingHandler()
+    }
+    private def tquoted(s: String) = "\"\"\"" + s + "\"\"\""
+
+    // argument is a thunk to execute after init is done
+    def initialize(postInitSignal: => Unit) {
+      synchronized {
+        if (_isInitialized == null) {
+          _isInitialized = io.spawn {
+            try _initialize()
+            finally postInitSignal
+          }
+        }
+      }
+    }
+    def initializeSynchronous(): Unit = {
+      if (!isInitializeComplete) {
+        _initialize()
+        assert(global != null, global)
+      }
+    }
+    def isInitializeComplete = _initializeComplete
+
+    /** the public, go through the future compiler */
+    lazy val global: Global = {
+      if (isInitializeComplete) _compiler
+      else {
+        // If init hasn't been called yet you're on your own.
+        if (_isInitialized == null) {
+          logWarning("Warning: compiler accessed before init set up.  Assuming no postInit code.")
+          initialize(())
+        }
+        // blocks until it is; false means catastrophic failure
+        if (_isInitialized.get()) _compiler
+        else null
+      }
+    }
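+    // Any code path touching `global` therefore blocks until _initialize()
+    // completes; a null result signals catastrophic compiler failure.
+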
+    @deprecated("Use `global` for access to the compiler instance.", "2.9.0")
+    lazy val compiler: global.type = global
+
+    import global._
+    import definitions.{ScalaPackage, JavaLangPackage, termMember, typeMember}
+    import rootMirror.{RootClass, getClassIfDefined, getModuleIfDefined, getRequiredModule, getRequiredClass}
+
+    implicit class ReplTypeOps(tp: Type) {
+      def orElse(other: => Type): Type    = if (tp ne NoType) tp else other
+      def andAlso(fn: Type => Type): Type = if (tp eq NoType) tp else fn(tp)
+    }
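+    // e.g. (illustrative) `typeOfTerm("x") orElse typeOfTerm("X")` yields the
+    // fallback when the first lookup returns NoType; andAlso applies its
+    // function only when the type is not NoType.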
+
+    // TODO: If we try to make naming a lazy val, we run into big time
+    // scalac unhappiness with what look like cycles.  It has not been easy to
+    // reduce, but name resolution clearly takes different paths.
+    object naming extends {
+      val global: imain.global.type = imain.global
+    } with Naming {
+      // make sure we don't overwrite their unwisely named res3 etc.
+      def freshUserTermName(): TermName = {
+        val name = newTermName(freshUserVarName())
+        if (definedNameMap contains name) freshUserTermName()
+        else name
+      }
+      def isUserTermName(name: Name) = isUserVarName("" + name)
+      def isInternalTermName(name: Name) = isInternalVarName("" + name)
+    }
+    import naming._
+
+    object deconstruct extends {
+      val global: imain.global.type = imain.global
+    } with StructuredTypeStrings
+
+    lazy val memberHandlers = new {
+      val intp: imain.type = imain
+    } with SparkMemberHandlers
+    import memberHandlers._
+
+    /** Temporarily be quiet */
+    def beQuietDuring[T](body: => T): T = {
+      val saved = printResults
+      printResults = false
+      try body
+      finally printResults = saved
+    }
+    def beSilentDuring[T](operation: => T): T = {
+      val saved = totalSilence
+      totalSilence = true
+      try operation
+      finally totalSilence = saved
+    }
+
+    def quietRun[T](code: String) = beQuietDuring(interpret(code))
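+    // e.g. quietRun("import spark.SparkContext._") interprets the line while
+    // beQuietDuring suppresses the usual result echo.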
+
+    private def logAndDiscard[T](label: String, alt: => T): PartialFunction[Throwable, T] = {
+      case t: ControlThrowable => throw t
+      case t: Throwable        =>
+        logDebug(label + ": " + unwrap(t))
+        logDebug(stackTraceString(unwrap(t)))
+        alt
+    }
+    /** takes AnyRef because it may be binding a Throwable or an Exceptional */
+
+    private def withLastExceptionLock[T](body: => T, alt: => T): T = {
+      assert(bindExceptions, "withLastExceptionLock called incorrectly.")
+      bindExceptions = false
+
+      try     beQuietDuring(body)
+      catch   logAndDiscard("withLastExceptionLock", alt)
+      finally bindExceptions = true
     }
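+
+    // The bindExceptions flag prevents recursion: if binding `lastException`
+    // itself fails, bindError (in ReadEvalPrint below) rethrows rather than
+    // attempting another bind.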
-  }
-  
-  protected def createLineManager(): Line.Manager = new Line.Manager
-  lazy val lineManager = createLineManager()
-
-  /** interpreter settings */
-  lazy val isettings = new SparkISettings(this)
-
-  /** Instantiate a compiler.  Subclasses can override this to
-   *  change the compiler class used by this interpreter. */
-  protected def newCompiler(settings: Settings, reporter: Reporter) = {
-    settings.outputDirs setSingleOutput virtualDirectory
-    settings.exposeEmptyPackage.value = true
-    new Global(settings, reporter)
-  }
-  
-  /** the compiler's classpath, as URL's */
-  lazy val compilerClasspath: List[URL] = new PathResolver(settings) asURLs
 
-  /* A single class loader is used for all commands interpreted by this Interpreter.
+    def executionWrapper = _executionWrapper
+    def setExecutionWrapper(code: String) = _executionWrapper = code
+    def clearExecutionWrapper() = _executionWrapper = ""
+
+    /** interpreter settings */
+    lazy val isettings = new SparkISettings(this)
+
+    /** Instantiate a compiler.  Overridable. */
+    protected def newCompiler(settings: Settings, reporter: Reporter): ReplGlobal = {
+      settings.outputDirs setSingleOutput virtualDirectory
+      settings.exposeEmptyPackage.value = true
+      new Global(settings, reporter) with ReplGlobal {
+        override def toString: String = "<global>"
+      }
+    }
+
+    /** Parent classloader.  Overridable. */
+    protected def parentClassLoader: ClassLoader =
+      SparkHelper.explicitParentLoader(settings).getOrElse( this.getClass.getClassLoader() )
+
+    /* A single class loader is used for all commands interpreted by this Interpreter.
      It would also be possible to create a new class loader for each command
      to interpret.  The advantages of the current approach are:
 
-       - Expressions are only evaluated one time.  This is especially
-         significant for I/O, e.g. "val x = Console.readLine"
-
-     The main disadvantage is:
-
-       - Objects, classes, and methods cannot be rebound.  Instead, definitions
-         shadow the old ones, and old code objects refer to the old
-         definitions.
-  */
-  private var _classLoader: AbstractFileClassLoader = null
-  def resetClassLoader() = _classLoader = makeClassLoader()
-  def classLoader: AbstractFileClassLoader = {
-    if (_classLoader == null)
-      resetClassLoader()
-    
-    _classLoader
-  }
-  private def makeClassLoader(): AbstractFileClassLoader = {
-    val parent =
-      if (parentClassLoader == null)  ScalaClassLoader fromURLs compilerClasspath
-      else                            new URLClassLoader(compilerClasspath, parentClassLoader)
+       - Expressions are only evaluated one time.  This is especially
+         significant for I/O, e.g. "val x = Console.readLine"
+
+    The main disadvantage is:
 
-    new AbstractFileClassLoader(virtualDirectory, parent) {
+       - Objects, classes, and methods cannot be rebound.  Instead, definitions
+         shadow the old ones, and old code objects refer to the old
+         definitions.
+    */
+    def resetClassLoader() = {
+      logDebug("Setting new classloader: was " + _classLoader)
+      _classLoader = null
+      ensureClassLoader()
+    }
+    final def ensureClassLoader() {
+      if (_classLoader == null)
+        _classLoader = makeClassLoader()
+    }
+    def classLoader: AbstractFileClassLoader = {
+      ensureClassLoader()
+      _classLoader
+    }
+    private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(virtualDirectory, parent) {
       /** Overridden here to try translating a simple name to the generated
        *  class name if the original attempt fails.  This method is used by
        *  getResourceAsStream as well as findClass.
@@ -314,223 +344,300 @@ class SparkIMain(val settings: Settings, protected val out: PrintWriter) extends
       override protected def findAbstractFile(name: String): AbstractFile = {
         super.findAbstractFile(name) match {
           // deadlocks on startup if we try to translate names too early
-          case null if isInitializeComplete => generatedName(name) map (x => super.findAbstractFile(x)) orNull
-          case file                         => file
+          case null if isInitializeComplete =>
+            generatedName(name) map (x => super.findAbstractFile(x)) orNull
+          case file                         =>
+            file
         }
       }
     }
-  }
-  private def loadByName(s: String): JClass =
-    (classLoader tryToInitializeClass s) getOrElse sys.error("Failed to load expected class: '" + s + "'")
-  
-  protected def parentClassLoader: ClassLoader =
-    SparkHelper.explicitParentLoader(settings).getOrElse( this.getClass.getClassLoader() )
-
-  def getInterpreterClassLoader() = classLoader
-
-  // Set the current Java "context" class loader to this interpreter's class loader
-  def setContextClassLoader() = classLoader.setAsContext()
-
-  /** Given a simple repl-defined name, returns the real name of
-   *  the class representing it, e.g. for "Bippy" it may return
-   *
-   *    $line19.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$Bippy
-   */
-  def generatedName(simpleName: String): Option[String] = {
-    if (simpleName endsWith "$") optFlatName(simpleName.init) map (_ + "$")
-    else optFlatName(simpleName)
-  }
-  def flatName(id: String)    = optFlatName(id) getOrElse id
-  def optFlatName(id: String) = requestForIdent(id) map (_ fullFlatName id)
-
-  def allDefinedNames = definedNameMap.keys.toList sortBy (_.toString)
-  def pathToType(id: String): String = pathToName(newTypeName(id))
-  def pathToTerm(id: String): String = pathToName(newTermName(id))
-  def pathToName(name: Name): String = {
-    if (definedNameMap contains name)
-      definedNameMap(name) fullPath name
-    else name.toString
-  }
+    private def makeClassLoader(): AbstractFileClassLoader =
+      new TranslatingClassLoader(parentClassLoader match {
+        case null   => ScalaClassLoader fromURLs compilerClasspath
+        case p      => new URLClassLoader(compilerClasspath, p)
+      })
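+
+    // Loader chain (sketch): the parent is either an explicitly configured
+    // parent loader or one built from compilerClasspath; TranslatingClassLoader
+    // layers the repl's virtualDirectory on top, so classes compiled from
+    // interpreted lines resolve locally while classServer ships the same
+    // files to remote executors.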
+
+    def getInterpreterClassLoader() = classLoader
+
+    // Set the current Java "context" class loader to this interpreter's class loader
+    def setContextClassLoader() = classLoader.setAsContext()
+
+    /** Given a simple repl-defined name, returns the real name of
+     *  the class representing it, e.g. for "Bippy" it may return
+     *  {{{
+     *    $line19.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$Bippy
+     *  }}}
+     */
+    def generatedName(simpleName: String): Option[String] = {
+      if (simpleName endsWith nme.MODULE_SUFFIX_STRING) optFlatName(simpleName.init) map (_ + nme.MODULE_SUFFIX_STRING)
+      else optFlatName(simpleName)
+    }
+    def flatName(id: String)    = optFlatName(id) getOrElse id
+    def optFlatName(id: String) = requestForIdent(id) map (_ fullFlatName id)
+
+    def allDefinedNames = definedNameMap.keys.toList.sorted
+    def pathToType(id: String): String = pathToName(newTypeName(id))
+    def pathToTerm(id: String): String = pathToName(newTermName(id))
+    def pathToName(name: Name): String = {
+      if (definedNameMap contains name)
+        definedNameMap(name) fullPath name
+      else name.toString
+    }
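+    // e.g. (illustrative) after interpreting `val x = 1`, pathToTerm("x")
+    // returns the fully wrapped "$lineN..." path described in the
+    // generatedName scaladoc above, rather than the bare name.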
 
-  /** Most recent tree handled which wasn't wholly synthetic. */
-  private def mostRecentlyHandledTree: Option[Tree] = {
-    prevRequests.reverse foreach { req =>
-      req.handlers.reverse foreach {
-        case x: MemberDefHandler if x.definesValue && !isInternalVarName(x.name.toString)  => return Some(x.member)
-        case _ => ()
+    /** Most recent tree handled which wasn't wholly synthetic. */
+    private def mostRecentlyHandledTree: Option[Tree] = {
+      prevRequests.reverse foreach { req =>
+        req.handlers.reverse foreach {
+          case x: MemberDefHandler if x.definesValue && !isInternalTermName(x.name) => return Some(x.member)
+          case _ => ()
+        }
       }
+      None
     }
-    None
-  }
-  
-  /** Stubs for work in progress. */
-  def handleTypeRedefinition(name: TypeName, old: Request, req: Request) = {
-    for (t1 <- old.simpleNameOfType(name) ; t2 <- req.simpleNameOfType(name)) {
-      DBG("Redefining type '%s'\n  %s -> %s".format(name, t1, t2))
+
+    /** Stubs for work in progress. */
+    def handleTypeRedefinition(name: TypeName, old: Request, req: Request) = {
+      for (t1 <- old.simpleNameOfType(name) ; t2 <- req.simpleNameOfType(name)) {
+        logDebug("Redefining type '%s'\n  %s -> %s".format(name, t1, t2))
+      }
     }
-  }
 
-  def handleTermRedefinition(name: TermName, old: Request, req: Request) = {
-    for (t1 <- old.compilerTypeOf get name ; t2 <- req.compilerTypeOf get name) {
-      // Printing the types here has a tendency to cause assertion errors, like
-      //   assertion failed: fatal: <refinement> has owner value x, but a class owner is required
-      // so DBG is by-name now to keep it in the family.  (It also traps the assertion error,
-      // but we don't want to unnecessarily risk hosing the compiler's internal state.)
-      DBG("Redefining term '%s'\n  %s -> %s".format(name, t1, t2))
+    def handleTermRedefinition(name: TermName, old: Request, req: Request) = {
+      for (t1 <- old.compilerTypeOf get name ; t2 <- req.compilerTypeOf get name) {
+        // Printing the types here has a tendency to cause assertion errors, like
+        //   assertion failed: fatal: <refinement> has owner value x, but a class owner is required
+        // so logDebug is by-name now to keep it in the family.  (It also traps the assertion error,
+        // but we don't want to unnecessarily risk hosing the compiler's internal state.)
+        logDebug("Redefining term '%s'\n  %s -> %s".format(name, t1, t2))
+      }
     }
-  }
-  def recordRequest(req: Request) {
-    if (req == null || referencedNameMap == null)
-      return
-
-    prevRequests += req
-    req.referencedNames foreach (x => referencedNameMap(x) = req)
-    
-    // warning about serially defining companions.  It'd be easy
-    // enough to just redefine them together but that may not always
-    // be what people want so I'm waiting until I can do it better.
-    if (!settings.nowarnings.value) {
+
+    def recordRequest(req: Request) {
+      if (req == null || referencedNameMap == null)
+        return
+
+      prevRequests += req
+      req.referencedNames foreach (x => referencedNameMap(x) = req)
+
+      // warning about serially defining companions.  It'd be easy
+      // enough to just redefine them together but that may not always
+      // be what people want so I'm waiting until I can do it better.
       for {
         name   <- req.definedNames filterNot (x => req.definedNames contains x.companionName)
         oldReq <- definedNameMap get name.companionName
         newSym <- req.definedSymbols get name
         oldSym <- oldReq.definedSymbols get name.companionName
+        if Seq(oldSym, newSym).permutations exists { case Seq(s1, s2) => s1.isClass && s2.isModule }
       } {
-        printMessage("warning: previously defined %s is not a companion to %s.".format(oldSym, newSym))
-        printMessage("Companions must be defined together; you may wish to use :paste mode for this.")
+        afterTyper(replwarn(s"warning: previously defined $oldSym is not a companion to $newSym."))
+        replwarn("Companions must be defined together; you may wish to use :paste mode for this.")
       }
-    }
-    
-    // Updating the defined name map
-    req.definedNames foreach { name =>
-      if (definedNameMap contains name) {
-        if (name.isTypeName) handleTypeRedefinition(name.toTypeName, definedNameMap(name), req)
-        else handleTermRedefinition(name.toTermName, definedNameMap(name), req)
+
+      // Updating the defined name map
+      req.definedNames foreach { name =>
+        if (definedNameMap contains name) {
+          if (name.isTypeName) handleTypeRedefinition(name.toTypeName, definedNameMap(name), req)
+          else handleTermRedefinition(name.toTermName, definedNameMap(name), req)
+        }
+        definedNameMap(name) = req
       }
-      definedNameMap(name) = req
     }
-  }
 
-  /** Parse a line into a sequence of trees. Returns None if the input is incomplete. */
-  def parse(line: String): Option[List[Tree]] = {
-    var justNeedsMore = false
-    reporter.withIncompleteHandler((pos,msg) => {justNeedsMore = true}) {
-      // simple parse: just parse it, nothing else
-      def simpleParse(code: String): List[Tree] = {
-        reporter.reset()
-        val unit = new CompilationUnit(new BatchSourceFile("<console>", code))
-        val scanner = new syntaxAnalyzer.UnitParser(unit)
-        
-        scanner.templateStatSeq(false)._2
-      }
-      val trees = simpleParse(line)
-      
-      if (reporter.hasErrors)   Some(Nil)  // the result did not parse, so stop
-      else if (justNeedsMore)   None
-      else                      Some(trees)
+    def replwarn(msg: => String) {
+      if (!settings.nowarnings.value)
+        printMessage(msg)
     }
-  }
-  
-  def isParseable(line: String): Boolean = {
-    beSilentDuring {
-      parse(line) match {
-        case Some(xs) => xs.nonEmpty  // parses as-is
-        case None     => true         // incomplete
+
+    def isParseable(line: String): Boolean = {
+      beSilentDuring {
+        try parse(line) match {
+          case Some(xs) => xs.nonEmpty  // parses as-is
+          case None     => true         // incomplete
+        }
+        catch { case x: Exception =>    // crashed the compiler
+          replwarn("Exception in isParseable(\"" + line + "\"): " + x)
+          false
+        }
       }
     }
+
+    def compileSourcesKeepingRun(sources: SourceFile*) = {
+      val run = new Run()
+      reporter.reset()
+      run compileSources sources.toList
+      (!reporter.hasErrors, run)
+    }
+
+    /** Compile an nsc SourceFile.  Returns true if there are
+     *  no compilation errors, or false otherwise.
+     */
+    def compileSources(sources: SourceFile*): Boolean =
+      compileSourcesKeepingRun(sources: _*)._1
+
+    /** Compile a string.  Returns true if there are no
+     *  compilation errors, or false otherwise.
+     */
+    def compileString(code: String): Boolean =
+      compileSources(new BatchSourceFile("<script>", code))
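+
+    // e.g. (hypothetical snippet) compileString("object Probe { def ping = 42 }")
+    // returns true on success and leaves the classfiles in virtualDirectory,
+    // where the classloader above (and classServer) can find them.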
+
+    /** Build a request from the user. `trees` is `line` after being parsed.
+     */
+    private def buildRequest(line: String, trees: List[Tree]): Request = {
+      executingRequest = new Request(line, trees)
+      executingRequest
+    }
+
+  // rewriting "5 // foo" to "val x = { 5 // foo }" creates broken code because
+  // the close brace is commented out.  Strip single-line comments.
+  // ... but for error message output reasons this is not used, and rather than
+  // enclosing in braces it is constructed like "val x =\n5 // foo".
+  private def removeComments(line: String): String = {
+    showCodeIfDebugging(line) // as we're about to lose our // show
+    line.lines map (s => s indexOf "//" match {
+      case -1   => s
+      case idx  => s take idx
+    }) mkString "\n"
   }
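+
+  // e.g. removeComments("val x = 5 // foo\n2 + 2") yields "val x = 5 \n2 + 2"
+  // (currently unused, per the note above).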
 
-  /** Compile an nsc SourceFile.  Returns true if there are
-   *  no compilation errors, or false otherwise.
-   */
-  def compileSources(sources: SourceFile*): Boolean = {
-    reporter.reset()
-    new Run() compileSources sources.toList
-    !reporter.hasErrors
+  private def safePos(t: Tree, alt: Int): Int =
+    try t.pos.startOrPoint
+    catch { case _: UnsupportedOperationException => alt }
+
+  // Given an expression like 10 * 10 * 10 we receive the parent tree positioned
+  // at a '*'.  So look at each subtree and find the earliest of all positions.
+  private def earliestPosition(tree: Tree): Int = {
+    var pos = Int.MaxValue
+    tree foreach { t =>
+      pos = math.min(pos, safePos(t, Int.MaxValue))
+    }
+    pos
   }
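+
+  // e.g. for "10 * 10 * 10" the root tree is positioned at a '*', but walking
+  // every subtree finds position 0, the start of the first literal.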
 
-  /** Compile a string.  Returns true if there are no
-   *  compilation errors, or false otherwise.
-   */
-  def compileString(code: String): Boolean =
-    compileSources(new BatchSourceFile("<script>", code))
 
-  /** Build a request from the user. `trees` is `line` after being parsed.
-   */
-  private def buildRequest(line: String, trees: List[Tree]): Request = new Request(line, trees)
-  
   private def requestFromLine(line: String, synthetic: Boolean): Either[IR.Result, Request] = {
-    val trees = parse(indentCode(line)) match {
+    val content = indentCode(line)
+    val trees = parse(content) match {
       case None         => return Left(IR.Incomplete)
       case Some(Nil)    => return Left(IR.Error) // parse error or empty input
       case Some(trees)  => trees
     }
-    
-    // use synthetic vars to avoid filling up the resXX slots
-    def varName = if (synthetic) freshInternalVarName() else freshUserVarName()
-
-    // Treat a single bare expression specially. This is necessary due to it being hard to
-    // modify code at a textual level, and it being hard to submit an AST to the compiler.
-    if (trees.size == 1) trees.head match {
-      case _:Assign                         => // we don't want to include assignments
-      case _:TermTree | _:Ident | _:Select  => // ... but do want these as valdefs.
-        requestFromLine("val %s =\n%s".format(varName, line), synthetic) match {
+    logDebug(
+      trees map (t => {
+        // [Eugene to Paul] previously it just said `t map ...`
+        // because there was an implicit conversion from Tree to a list of Trees
+        // however Martin and I have removed the conversion
+        // (it was conflicting with the new reflection API),
+        // so I had to rewrite this a bit
+        val subs = t collect { case sub => sub }
+        subs map (t0 =>
+          "  " + safePos(t0, -1) + ": " + t0.shortClass + "\n"
+        ) mkString ""
+      }) mkString "\n"
+    )
+    // If the last tree is a bare expression, pinpoint where it begins using the
+    // AST node position and snap the line off there.  Rewrite the code embodied
+    // by the last tree as a ValDef instead, so we can access the value.
+    trees.last match {
+      case _:Assign                         => // we don't want to include assignments
+      case _:TermTree | _:Ident | _:Select  => // ... but do want other unnamed terms.
+        val varName = if (synthetic) freshInternalVarName() else freshUserVarName()
+        val rewrittenLine = (
+          // In theory this would come out the same without the 1-specific test, but
+          // it's a cushion against any more sneaky parse-tree position vs. code mismatches:
+          // this way such issues will only arise on multiple-statement repl input lines,
+          // which most people don't use.
+          if (trees.size == 1) "val " + varName + " =\n" + content
+          else {
+            // The position of the last tree
+            val lastpos0 = earliestPosition(trees.last)
+            // Oh boy, the parser throws away parens so "(2+2)" is mispositioned,
+            // with increasingly hard to decipher positions as we move on to "() => 5",
+            // (x: Int) => x + 1, and more.  So I abandon attempts to finesse and just
+            // look for semicolons and newlines, which I'm sure is also buggy.
+            val (raw1, raw2) = content splitAt lastpos0
+            logDebug("[raw] " + raw1 + "   <--->   " + raw2)
+
+            val adjustment = (raw1.reverse takeWhile (ch => (ch != ';') && (ch != '\n'))).size
+            val lastpos = lastpos0 - adjustment
+
+            // the source code split at the laboriously determined position.
+            val (l1, l2) = content splitAt lastpos
+            logDebug("[adj] " + l1 + "   <--->   " + l2)
+
+            val prefix   = if (l1.trim == "") "" else l1 + ";\n"
+            // Note to self: val source needs to have this precise structure so that
+            // error messages print the user-submitted part without the "val res0 = " part.
+            val combined = prefix + "val " + varName + " =\n" + l2
+
+            logDebug(List(
+              "    line" -> line,
+              " content" -> content,
+              "     was" -> l2,
+              "combined" -> combined) map {
+                case (label, s) => label + ": '" + s + "'"
+              } mkString "\n"
+            )
+            combined
+          }
+        )
+        // Rewriting    "foo ; bar ; 123"
+        // to           "foo ; bar ; val resXX = 123"
+        requestFromLine(rewrittenLine, synthetic) match {
           case Right(req) => return Right(req withOriginalLine line)
           case x          => return x
         }
-      case _                                =>
+      case _ =>
     }
-        
-    // figure out what kind of request
     Right(buildRequest(line, trees))
   }
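+
+  // Worked example (sketch): for input "foo ; 2 + 2" the last tree is a bare
+  // expression, so the code is rewritten roughly to "foo ;\nval resXX =\n2 + 2"
+  // and resubmitted, making the value accessible afterwards as resXX.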
 
-  /** 
-   *    Interpret one line of input.  All feedback, including parse errors
-   *    and evaluation results, are printed via the supplied compiler's 
-   *    reporter.  Values defined are available for future interpreted
-   *    strings.
-   *  
-   *  
-   *    The return value is whether the line was interpreter successfully,
-   *    e.g. that there were no parse errors.
-   *  
+  // normalize non-public types so we don't see protected aliases like Self
+  def normalizeNonPublic(tp: Type) = tp match {
+    case TypeRef(_, sym, _) if sym.isAliasType && !sym.isPublic => tp.dealias
+    case _                                                      => tp
+  }
+
+  /**
+   *  Interpret one line of input. All feedback, including parse errors
+   *  and evaluation results, are printed via the supplied compiler's
+   *  reporter. Values defined are available for future interpreted strings.
    *
-   *  @param line ...
-   *  @return     ...
+   *  The return value is whether the line was interpreted successfully,
+   *  e.g. that there were no parse errors.
    */
   def interpret(line: String): IR.Result = interpret(line, false)
+  def interpretSynthetic(line: String): IR.Result = interpret(line, true)
   def interpret(line: String, synthetic: Boolean): IR.Result = {
     def loadAndRunReq(req: Request) = {
+      classLoader.setAsContext()
       val (result, succeeded) = req.loadAndRun
+
       /** To our displeasure, ConsoleReporter offers only printMessage,
        *  which tacks a newline on the end.  Since that breaks all the
        *  output checking, we have to take one off to balance.
        */
-      def show() = {
-        if (result == "") ()
-        else printMessage(result stripSuffix "\n")
-      }
-
       if (succeeded) {
-        if (printResults)
-          show()
+        if (printResults && result != "")
+          printMessage(result stripSuffix "\n")
+        else if (isReplDebug) // show quiet-mode activity
+          printMessage(result.trim.lines map ("[quiet] " + _) mkString "\n")
+
         // Book-keeping.  Have to record synthetic requests too,
         // as they may have been issued for information, e.g. :type
         recordRequest(req)
         IR.Success
       }
-      else {
-        // don't truncate stack traces
-        withoutTruncating(show())
-        IR.Error
-      }
+      else {
+        // don't truncate stack traces
+        withoutTruncating(printMessage(result))
+        IR.Error
+      }
     }
-    
+
     if (global == null) IR.Error
     else requestFromLine(line, synthetic) match {
       case Left(result) => result
-      case Right(req)   => 
+      case Right(req)   =>
         // null indicates a disallowed statement type; otherwise compile and
         // fail if false (implying e.g. a type error)
         if (req == null || !req.compile) IR.Error
@@ -546,23 +653,39 @@ class SparkIMain(val settings: Settings, protected val out: PrintWriter) extends
    *  @param value     the object value to bind to it
    *  @return          an indication of whether the binding succeeded
    */
-  def bind(name: String, boundType: String, value: Any): IR.Result = {
+  def bind(name: String, boundType: String, value: Any, modifiers: List[String] = Nil): IR.Result = {
     val bindRep = new ReadEvalPrint()
     val run = bindRep.compile("""
-        |object %s {
-        |  var value: %s = _
-        |  def set(x: Any) = value = x.asInstanceOf[%s]
-        |}
-      """.stripMargin.format(bindRep.evalName, boundType, boundType)
-      )
-    bindRep.callOpt("set", value) match {
-      case Some(_)  => interpret("val %s = %s.value".format(name, bindRep.evalPath))
-      case _        => DBG("Set failed in bind(%s, %s, %s)".format(name, boundType, value)) ; IR.Error
+        |object %s {
+        |  var value: %s = _
+        |  def set(x: Any) = value = x.asInstanceOf[%s]
+        |}
+      """.stripMargin.format(bindRep.evalName, boundType, boundType)
+    )
+    bindRep.callEither("set", value) match {
+      case Left(ex) =>
+        logDebug("Set failed in bind(%s, %s, %s)".format(name, boundType, value))
+        logDebug(util.stackTraceString(ex))
+        IR.Error
+
+      case Right(_) =>
+        val line = "%sval %s = %s.value".format(modifiers map (_ + " ") mkString, name, bindRep.evalPath)
+      logDebug("Interpreting: " + line)
+      interpret(line)
     }
   }
+  def directBind(name: String, boundType: String, value: Any): IR.Result = {
+    val result = bind(name, boundType, value)
+    if (result == IR.Success)
+      directlyBoundNames += newTermName(name)
+    result
+  }
+  def directBind(p: NamedParam): IR.Result                                    = directBind(p.name, p.tpe, p.value)
+  def directBind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = directBind((name, value))
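+
+  // e.g. (illustrative) directBind("answer", "Int", 42) makes `answer`
+  // available to later interpreted lines and records it in directlyBoundNames.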
+
   def rebind(p: NamedParam): IR.Result = {
     val name     = p.name
-    val oldType  = typeOfTerm(name) getOrElse { return IR.Error }
+    val oldType  = typeOfTerm(name) orElse { return IR.Error }
     val newType  = p.tpe
     val tempName = freshInternalVarName()
 
@@ -570,23 +693,27 @@ class SparkIMain(val settings: Settings, protected val out: PrintWriter) extends
     quietRun("val %s = %s.asInstanceOf[%s]".format(name, tempName, newType))
   }
   def quietImport(ids: String*): IR.Result = beQuietDuring(addImports(ids: _*))
-  def addImports(ids: String*): IR.Result = 
+  def addImports(ids: String*): IR.Result =
     if (ids.isEmpty) IR.Success
     else interpret("import " + ids.mkString(", "))
 
-  def quietBind(p: NamedParam): IR.Result                  = beQuietDuring(bind(p))
-  def bind(p: NamedParam): IR.Result                       = bind(p.name, p.tpe, p.value)
-  def bind[T: Manifest](name: String, value: T): IR.Result = bind((name, value))
-  def bindValue(x: Any): IR.Result                         = bind(freshUserVarName(), TypeStrings.fromValue(x), x)
+  def quietBind(p: NamedParam): IR.Result                               = beQuietDuring(bind(p))
+  def bind(p: NamedParam): IR.Result                                    = bind(p.name, p.tpe, p.value)
+  def bind[T: ru.TypeTag : ClassTag](name: String, value: T): IR.Result = bind((name, value))
+  def bindSyntheticValue(x: Any): IR.Result                             = bindValue(freshInternalVarName(), x)
+  def bindValue(x: Any): IR.Result                                      = bindValue(freshUserVarName(), x)
+  def bindValue(name: String, x: Any): IR.Result                        = bind(name, TypeStrings.fromValue(x), x)
 
   /** Reset this interpreter, forgetting all user-specified requests. */
   def reset() {
-    //virtualDirectory.clear()
-    virtualDirectory.delete()
-    virtualDirectory.create()
+    clearExecutionWrapper()
     resetClassLoader()
     resetAllCreators()
     prevRequests.clear()
+    referencedNameMap.clear()
+    definedNameMap.clear()
+    virtualDirectory.delete()
+    virtualDirectory.create()
   }
 
   /** This instance is no longer needed, so release any resources
@@ -596,9 +723,9 @@ class SparkIMain(val settings: Settings, protected val out: PrintWriter) extends
     reporter.flush()
     classServer.stop()
   }
-  
+
   /** Here is where we:
-   * 
+   *
    *  1) Read some source code, and put it in the "read" object.
    *  2) Evaluate the read object, and put the result in the "eval" object.
    *  3) Create a String for human consumption, and put it in the "print" object.
@@ -608,115 +735,172 @@ class SparkIMain(val settings: Settings, protected val out: PrintWriter) extends
   class ReadEvalPrint(lineId: Int) {
     def this() = this(freshLineId())
 
-    val packageName = "$line" + lineId
-    val readName    = "$read"
-    val evalName    = "$eval"
-    val printName   = "$print"
-    val valueMethod = "$result"   // no-args method giving result
-    
+    private var lastRun: Run = _
+    private var evalCaught: Option[Throwable] = None
+    private var conditionalWarnings: List[ConditionalWarning] = Nil
+
+    val packageName = sessionNames.line + lineId
+    val readName    = sessionNames.read
+    val evalName    = sessionNames.eval
+    val printName   = sessionNames.print
+    val resultName  = sessionNames.result
+
+    def bindError(t: Throwable) = {
+      if (!bindExceptions) // avoid looping if already binding
+        throw t
+
+      val unwrapped = unwrap(t)
+      withLastExceptionLock[String]({
+        directBind[Throwable]("lastException", unwrapped)(tagOfThrowable, classTag[Throwable])
+        util.stackTraceString(unwrapped)
+      }, util.stackTraceString(unwrapped))
+    }
+
     // TODO: split it out into a package object and a regular
     // object and we can do that much less wrapping.
     def packageDecl = "package " + packageName
-    
+
     def pathTo(name: String)   = packageName + "." + name
     def packaged(code: String) = packageDecl + "\n\n" + code
 
     def readPath  = pathTo(readName)
     def evalPath  = pathTo(evalName)
     def printPath = pathTo(printName)
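+
+    // e.g. with the default session names and lineId 3: packageName is
+    // "$line3", readPath is "$line3.$read", and evalPath is "$line3.$eval".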
-    
-    def call(name: String, args: Any*): AnyRef = 
-      evalMethod(name).invoke(evalClass, args.map(_.asInstanceOf[AnyRef]): _*)
-    
+
+    def call(name: String, args: Any*): AnyRef = {
+      val m = evalMethod(name)
+      logDebug("Invoking: " + m)
+      if (args.nonEmpty)
+        logDebug("  with args: " + args.mkString(", "))
+
+      m.invoke(evalClass, args.map(_.asInstanceOf[AnyRef]): _*)
+    }
+
+    def callEither(name: String, args: Any*): Either[Throwable, AnyRef] =
+      try Right(call(name, args: _*))
+      catch { case ex: Throwable => Left(ex) }
+
     def callOpt(name: String, args: Any*): Option[AnyRef] =
       try Some(call(name, args: _*))
-      catch { case ex: Exception =>
-        quietBind("lastException", ex)
-        None
-      }
-    
-    lazy val evalClass = loadByName(evalPath)
-    lazy val evalValue = callOpt(valueMethod)
+      catch { case ex: Throwable => bindError(ex) ; None }
 
-    def compile(source: String): Boolean = compileAndSaveRun("<console>", source)
-    def lineAfterTyper[T](op: => T): T = {
-      assert(lastRun != null, "Internal error: trying to use atPhase, but Run is null." + this)
-      atPhase(lastRun.typerPhase.next)(op)
+    class EvalException(msg: String, cause: Throwable) extends RuntimeException(msg, cause) { }
+
+    private def evalError(path: String, ex: Throwable) =
+      throw new EvalException("Failed to load '" + path + "': " + ex.getMessage, ex)
+
+    private def load(path: String): Class[_] = {
+      try Class.forName(path, true, classLoader)
+      catch { case ex: Throwable => evalError(path, unwrap(ex)) }
+    }
+
+    lazy val evalClass = load(evalPath)
+    lazy val evalValue = callEither(resultName) match {
+      case Left(ex)      => evalCaught = Some(ex) ; None
+      case Right(result) => Some(result)
     }
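+    // For illustration, the load/call machinery here is plain Java reflection
+    // against the compiled wrapper; roughly (assuming session line 3):
+    //   val cls = Class.forName("$line3.$eval", true, classLoader)
+    //   val m   = (cls.getMethods filter (_.getName == "$print")).head
+    //   m.invoke(cls)   // $eval's class carries static forwarders, so the
+    //                   // receiver passed to invoke is effectively ignored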
-    
+
+    def compile(source: String): Boolean = compileAndSaveRun("<console>", source)
+
     /** The innermost object inside the wrapper, found by
-      * following accessPath into the outer one.
-      */
+     * following accessPath into the outer one.
+     */
     def resolvePathToSymbol(accessPath: String): Symbol = {
-      //val readRoot  = definitions.getModule(readPath)   // the outermost wrapper
-      // MATEI: changed this to getClass because the root object is no longer a module (Scala singleton object)
-      val readRoot  = definitions.getClass(readPath)   // the outermost wrapper
-      (accessPath split '.').foldLeft(readRoot) { (sym, name) =>
-        if (name == "") sym else
-        lineAfterTyper(sym.info member newTermName(name))
+      // val readRoot  = getRequiredModule(readPath)   // the outermost wrapper
+      // MATEI: Changed this to getClass because the root object is no longer a module (Scala singleton object)
+
+      val readRoot  = definitions.getClass(newTypeName(readPath))   // the outermost wrapper
+      (accessPath split '.').foldLeft(readRoot: Symbol) {
+        case (sym, "")    => sym
+        case (sym, name)  => afterTyper(termMember(sym, name))
       }
     }
-    
-    // def compileAndTypeExpr(expr: String): Option[Typer] = {
-    //   class TyperRun extends Run {
-    //     override def stopPhase(name: String) = name == "superaccessors"
-    //   }
-    // }
-    private var lastRun: Run = _
-    private def evalMethod(name: String) = {
-      val methods = evalClass.getMethods filter (_.getName == name)
-      assert(methods.size == 1, "Internal error - eval object method " + name + " is overloaded: " + methods)
-      methods.head
+    /** We get a bunch of repeated warnings for reasons I haven't
+     *  entirely figured out yet.  For now, squash.
+     */
+    private def updateRecentWarnings(run: Run) {
+      def loop(xs: List[(Position, String)]): List[(Position, String)] = xs match {
+        case Nil                  => Nil
+        case ((pos, msg)) :: rest =>
+          val filtered = rest filter { case (pos0, msg0) =>
+            (msg != msg0) || (pos.lineContent.trim != pos0.lineContent.trim) || {
+              // same messages and same line content after whitespace removal
+              // but we want to let through multiple warnings on the same line
+              // from the same run.  The untrimmed line will be the same since
+              // there's no whitespace indenting blowing it out.
+              (pos.lineContent == pos0.lineContent)
+            }
+          }
+          ((pos, msg)) :: loop(filtered)
+      }
+      // PRASHANT: This leads to a NoSuchMethodError for _.warnings. Yet to figure out its purpose.
+      // val warnings = loop(run.allConditionalWarnings flatMap (_.warnings))
+      // if (warnings.nonEmpty)
+      //   mostRecentWarnings = warnings
+    }
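+    // Illustration of the squashing (schematic): two "match is not exhaustive"
+    // warnings whose lines have identical trimmed content collapse to the
+    // first; multiple warnings on the very same line all survive, since their
+    // untrimmed lineContent is equal and the filter lets them through.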
+    private def evalMethod(name: String) = evalClass.getMethods filter (_.getName == name) match {
+      case Array(method) => method
+      case xs            => sys.error("Internal error: eval object " + evalClass + ", " + xs.mkString("\n", "\n", ""))
     }
     private def compileAndSaveRun(label: String, code: String) = {
       showCodeIfDebugging(code)
-      reporter.reset()
-      lastRun = new Run()
-      lastRun.compileSources(List(new BatchSourceFile(label, packaged(code))))
-      !reporter.hasErrors
+      val (success, run) = compileSourcesKeepingRun(new BatchSourceFile(label, packaged(code)))
+      updateRecentWarnings(run)
+      lastRun = run
+      success
     }
   }
 
   /** One line of code submitted by the user for interpretation */
-  // private 
+  // private
   class Request(val line: String, val trees: List[Tree]) {
-    val lineRep     = new ReadEvalPrint()
-    import lineRep.lineAfterTyper
-    
+    val reqId = nextReqId()
+    val lineRep = new ReadEvalPrint()
+
     private var _originalLine: String = null
     def withOriginalLine(s: String): this.type = { _originalLine = s ; this }
     def originalLine = if (_originalLine == null) line else _originalLine
 
     /** handlers for each tree in this request */
     val handlers: List[MemberHandler] = trees map (memberHandlers chooseHandler _)
+    def defHandlers = handlers collect { case x: MemberDefHandler => x }
 
     /** all (public) names defined by these statements */
     val definedNames = handlers flatMap (_.definedNames)
 
     /** list of names used by this expression */
     val referencedNames: List[Name] = handlers flatMap (_.referencedNames)
-    
+
     /** def and val names */
     def termNames = handlers flatMap (_.definesTerm)
     def typeNames = handlers flatMap (_.definesType)
+    def definedOrImported = handlers flatMap (_.definedOrImported)
+    def definedSymbolList = defHandlers flatMap (_.definedSymbols)
+
+    def definedTypeSymbol(name: String) = definedSymbols(newTypeName(name))
+    def definedTermSymbol(name: String) = definedSymbols(newTermName(name))
 
     /** Code to import bound names from previous lines - accessPath is code to
-      * append to objectName to access anything bound by request.
-      */
-    val ComputedImports(importsPreamble, importsTrailer, accessPath) =
+     * append to objectName to access anything bound by request.
+     */
+    val SparkComputedImports(importsPreamble, importsTrailer, accessPath) =
       importsCode(referencedNames.toSet)
 
     /** Code to access a variable with the specified name */
-    def fullPath(vname: String) = (
-      //lineRep.readPath + accessPath + ".`%s`".format(vname)
+    def fullPath(vname: String) = {
+      // lineRep.readPath + accessPath + ".`%s`".format(vname)
       lineRep.readPath + ".INSTANCE" + accessPath + ".`%s`".format(vname)
-    )
-    /** Same as fullpath, but after it has been flattened, so:
-     *  $line5.$iw.$iw.$iw.Bippy      // fullPath
-     *  $line5.$iw$$iw$$iw$Bippy      // fullFlatName
-     */
-    def fullFlatName(name: String) =
-      lineRep.readPath + accessPath.replace('.', '$') + "$" + name
+    }
+    /** Same as fullPath, but after it has been flattened, so:
+     *  $line5.$iw.$iw.$iw.Bippy      // fullPath
+     *  $line5.$iw$$iw$$iw$Bippy      // fullFlatName
+     */
+    def fullFlatName(name: String) =
+      // lineRep.readPath + accessPath.replace('.', '$') + nme.NAME_JOIN_STRING + name
+      lineRep.readPath + ".INSTANCE" + accessPath.replace('.', '$') + nme.NAME_JOIN_STRING + name
+
+    /** The unmangled symbol name, but supplemented with line info. */
+    def disambiguated(name: Name): String = name + " (in " + lineRep + ")"
 
     /** Code to access a variable with the specified name */
     def fullPath(vname: Name): String = fullPath(vname.toString)
@@ -726,52 +910,66 @@ class SparkIMain(val settings: Settings, protected val out: PrintWriter) extends
 
     /** generate the source code for the object that computes this request */
     private object ObjectSourceCode extends CodeAssembler[MemberHandler] {
+      def path = pathToTerm("$intp")
+      def envLines = {
+        if (!isReplPower) Nil // power mode only for now
+        // $intp is not bound; punt, but include the line.
+        else if (path == "$intp") List(
+          "def $line = " + tquoted(originalLine),
+          "def $trees = Nil"
+        )
+        else List(
+          "def $line  = " + tquoted(originalLine),
+          "def $req = %s.requestForReqId(%s).orNull".format(path, reqId),
+          "def $trees = if ($req eq null) Nil else $req.trees".format(lineRep.readName, path, reqId)
+        )
+      }
+
       val preamble = """
         |class %s extends Serializable {
-        |  %s%s
-      """.stripMargin.format(lineRep.readName, importsPreamble, indentCode(toCompute))
+        |  %s%s%s
+      """.stripMargin.format(lineRep.readName, envLines.map("  " + _ + ";\n").mkString, importsPreamble, indentCode(toCompute))
       val postamble = importsTrailer + "\n}" + "\n" +
         "object " + lineRep.readName + " {\n" +
         "  val INSTANCE = new " + lineRep.readName + "();\n" +
         "}\n"
       val generate = (m: MemberHandler) => m extraCodeToEvaluate Request.this
+
       /*
       val preamble = """
-        |object %s {
-        |  %s%s
-      """.stripMargin.format(lineRep.readName, importsPreamble, indentCode(toCompute))
+        |object %s extends Serializable {
+        |%s%s%s
+      """.stripMargin.format(lineRep.readName, envLines.map("  " + _ + ";\n").mkString, importsPreamble, indentCode(toCompute))
       val postamble = importsTrailer + "\n}"
       val generate = (m: MemberHandler) => m extraCodeToEvaluate Request.this
       */
+
     }
-    
+
     private object ResultObjectSourceCode extends CodeAssembler[MemberHandler] {
       /** We only want to generate this code when the result
        *  is a value which can be referred to as-is.
-       */      
+       */
       val evalResult =
         if (!handlers.last.definesValue) ""
         else handlers.last.definesTerm match {
           case Some(vname) if typeOf contains vname =>
-            """
-            |lazy val $result = {
-            |  $export
-            |  %s
-            |}""".stripMargin.format(fullPath(vname))
+            "lazy val %s = %s".format(lineRep.resultName, fullPath(vname))
           case _  => ""
         }
+
       // first line evaluates object to make sure constructor is run
       // initial "" so later code can uniformly be: + etc
       val preamble = """
       |object %s {
       |  %s
-      |  val $export: String = %s {
+      |  val %s: String = %s {
       |    %s
       |    (""
       """.stripMargin.format(
-        lineRep.evalName, evalResult, executionWrapper, lineRep.readName + ".INSTANCE" + accessPath
+        lineRep.evalName, evalResult, lineRep.printName,
+        executionWrapper, lineRep.readName + ".INSTANCE" + accessPath
       )
-      
       val postamble = """
       |    )
       |  }
@@ -785,7 +983,7 @@ class SparkIMain(val settings: Settings, protected val out: PrintWriter) extends
     def getEval: Option[AnyRef] = {
       // ensure it has been compiled
       compile
-      // try to load it and call the value method      
+      // try to load it and call the value method
       lineRep.evalValue filterNot (_ == null)
     }
 
@@ -797,114 +995,54 @@ class SparkIMain(val settings: Settings, protected val out: PrintWriter) extends
 
       // compile the object containing the user's code
       lineRep.compile(ObjectSourceCode(handlers)) && {
-        // extract and remember types 
+        // extract and remember types
         typeOf
         typesOfDefinedTerms
 
+        // Assign symbols to the original trees
+        // TODO - just use the new trees.
+        defHandlers foreach { dh =>
+          val name = dh.member.name
+          definedSymbols get name foreach { sym =>
+            dh.member setSymbol sym
+           logDebug("Set symbol of " + name + " to " + sym.defString)
+          }
+        }
+
         // compile the result-extraction object
-        lineRep compile ResultObjectSourceCode(handlers)
+        withoutWarnings(lineRep compile ResultObjectSourceCode(handlers))
       }
     }
 
     lazy val resultSymbol = lineRep.resolvePathToSymbol(accessPath)
-    def applyToResultMember[T](name: Name, f: Symbol => T) = lineAfterTyper(f(resultSymbol.info.nonPrivateDecl(name)))
+    def applyToResultMember[T](name: Name, f: Symbol => T) = afterTyper(f(resultSymbol.info.nonPrivateDecl(name)))
 
     /* typeOf lookup with encoding */
-    def lookupTypeOf(name: Name) = {
-      typeOf.getOrElse(name, typeOf(global.encode(name.toString)))
-    }
-    def simpleNameOfType(name: TypeName) = {
-      (compilerTypeOf get name) map (_.typeSymbol.simpleName)
-    }
-    
-    private def typeMap[T](f: Type => T): Map[Name, T] = {
-      def toType(name: Name): T = {
-        // the types are all =>T; remove the =>
-        val tp1 = lineAfterTyper(resultSymbol.info.nonPrivateDecl(name).tpe match {
-          case NullaryMethodType(tp)  => tp
-          case tp                 => tp
-        })
-        // normalize non-public types so we don't see protected aliases like Self
-        lineAfterTyper(tp1 match {
-          case TypeRef(_, sym, _) if !sym.isPublic  => f(tp1.normalize)
-          case tp                                   => f(tp)
-        })
-      }
-      termNames ++ typeNames map (x => x -> toType(x)) toMap
-    }
+    def lookupTypeOf(name: Name) = typeOf.getOrElse(name, typeOf(global.encode(name.toString)))
+    def simpleNameOfType(name: TypeName) = (compilerTypeOf get name) map (_.typeSymbol.simpleName)
+
+    private def typeMap[T](f: Type => T) =
+      mapFrom[Name, Name, T](termNames ++ typeNames)(x => f(cleanMemberDecl(resultSymbol, x)))
+
     /** Types of variables defined by this request. */
-    lazy val compilerTypeOf = typeMap[Type](x => x)
+    lazy val compilerTypeOf = typeMap[Type](x => x) withDefaultValue NoType
     /** String representations of same. */
-    lazy val typeOf         = typeMap[String](_.toString)
-    
+    lazy val typeOf         = typeMap[String](tp => afterTyper(tp.toString))
+
     // lazy val definedTypes: Map[Name, Type] = {
     //   typeNames map (x => x -> afterTyper(resultSymbol.info.nonPrivateDecl(x).tpe)) toMap
     // }
-    lazy val definedSymbols: Map[Name, Symbol] = (
+    lazy val definedSymbols = (
       termNames.map(x => x -> applyToResultMember(x, x => x)) ++
-      typeNames.map(x => x -> compilerTypeOf.get(x).map(_.typeSymbol).getOrElse(NoSymbol))
-    ).toMap
-
-    lazy val typesOfDefinedTerms: Map[Name, Type] =
-      termNames map (x => x -> applyToResultMember(x, _.tpe)) toMap
-    
-    private def bindExceptionally(t: Throwable) = {
-      val ex: Exceptional =
-        if (isettings.showInternalStackTraces) Exceptional(t)
-        else new Exceptional(t) {
-          override def spanFn(frame: JavaStackFrame) = !(frame.className startsWith lineRep.evalPath)
-          override def contextPrelude = super.contextPrelude + "/* The repl internal portion of the stack trace is elided. */\n"
-        }
-        
-      quietBind("lastException", ex)
-      ex.contextHead + "\n(access lastException for the full trace)"
-    }
-    private def bindUnexceptionally(t: Throwable) = {
-      quietBind("lastException", t)
-      stackTraceString(t)
-    }
+      typeNames.map(x => x -> compilerTypeOf(x).typeSymbolDirect)
+    ).toMap[Name, Symbol] withDefaultValue NoSymbol
+
+    lazy val typesOfDefinedTerms = mapFrom[Name, Name, Type](termNames)(x => applyToResultMember(x, _.tpe))
 
     /** load and run the code using reflection */
     def loadAndRun: (String, Boolean) = {
-      import interpreter.Line._
-      
-      def handleException(t: Throwable) = {
-        /** We turn off the binding to accomodate ticket #2817 */
-        withoutBindingLastException {
-          val message =
-            if (opt.richExes) bindExceptionally(unwrap(t))
-            else bindUnexceptionally(unwrap(t))
-          
-          (message, false)
-        }
-      }
-      
-      try {
-        val execution = lineManager.set(originalLine) {
-          // MATEI: set the right SparkEnv for our SparkContext, because
-          // this execution will happen in a separate thread
-          val sc = spark.repl.Main.interp.sparkContext
-          if (sc != null && sc.env != null)
-            SparkEnv.set(sc.env)
-          // Execute the line
-          lineRep call "$export"
-        }
-        execution.await()
-        
-        execution.state match {
-          case Done       => ("" + execution.get(), true)
-          case Threw      =>
-            val ex = execution.caught()
-            if (isReplDebug)
-              ex.printStackTrace()
-
-            if (bindLastException) handleException(ex)
-            else throw ex
-          case Cancelled  => ("Execution interrupted by signal.\n", false)
-          case Running    => ("Execution still running! Seems impossible.", false)
-        }
-      }
-      finally lineManager.clear()
+      try   { ("" + (lineRep call sessionNames.print), true) }
+      catch { case ex: Throwable => (lineRep.bindError(ex), false) }
     }
 
     override def toString = "Request(line=%s, %s trees)".format(line, trees.size)
@@ -922,136 +1060,157 @@ class SparkIMain(val settings: Settings, protected val out: PrintWriter) extends
       case ModuleDef(_, name, _)    => name
       case _                        => naming.mostRecentVar
     })
-  
-  private def requestForName(name: Name): Option[Request] = {
+
+  private var mostRecentWarnings: List[(global.Position, String)] = Nil
+  def lastWarnings = mostRecentWarnings
+
+  def treesForRequestId(id: Int): List[Tree] =
+    requestForReqId(id).toList flatMap (_.trees)
+
+  def requestForReqId(id: Int): Option[Request] =
+    if (executingRequest != null && executingRequest.reqId == id) Some(executingRequest)
+    else prevRequests find (_.reqId == id)
+
+  def requestForName(name: Name): Option[Request] = {
     assert(definedNameMap != null, "definedNameMap is null")
     definedNameMap get name
   }
 
-  private def requestForIdent(line: String): Option[Request] = 
+  def requestForIdent(line: String): Option[Request] =
     requestForName(newTermName(line)) orElse requestForName(newTypeName(line))
-    
-  def safeClass(name: String): Option[Symbol] = {
-    try Some(definitions.getClass(newTypeName(name)))
-    catch { case _: MissingRequirementError => None }
-  }
-  def safeModule(name: String): Option[Symbol] = {
-    try Some(definitions.getModule(newTermName(name)))
-    catch { case _: MissingRequirementError => None }
-  }
+
+  def requestHistoryForName(name: Name): List[Request] =
+    prevRequests.toList.reverse filter (_.definedNames contains name)
 
   def definitionForName(name: Name): Option[MemberHandler] =
     requestForName(name) flatMap { req =>
       req.handlers find (_.definedNames contains name)
     }
-  
+
   def valueOfTerm(id: String): Option[AnyRef] =
-    requestForIdent(id) flatMap (_.getEval)
+    requestForName(newTermName(id)) flatMap (_.getEval)
 
   def classOfTerm(id: String): Option[JClass] =
-    valueOfTerm(id) map (_.getClass)    
+    valueOfTerm(id) map (_.getClass)
 
-  def typeOfTerm(id: String): Option[Type] = newTermName(id) match {
-    case nme.ROOTPKG  => Some(definitions.RootClass.tpe)
-    case name         => requestForName(name) flatMap (_.compilerTypeOf get name)
+  def typeOfTerm(id: String): Type = newTermName(id) match {
+    case nme.ROOTPKG  => RootClass.tpe
+    case name         => requestForName(name).fold(NoType: Type)(_ compilerTypeOf name)
   }
+
+  def symbolOfType(id: String): Symbol =
+    requestForName(newTypeName(id)).fold(NoSymbol: Symbol)(_ definedTypeSymbol id)
+
   def symbolOfTerm(id: String): Symbol =
-    requestForIdent(id) flatMap (_.definedSymbols get newTermName(id)) getOrElse NoSymbol
+    requestForName(newTermName(id)).fold(NoSymbol: Symbol)(_ definedTermSymbol id)
 
   def runtimeClassAndTypeOfTerm(id: String): Option[(JClass, Type)] = {
-    for {
-      clazz <- classOfTerm(id)
-      tpe <- runtimeTypeOfTerm(id)
-      nonAnon <- new RichClass(clazz).supers.find(c => !(new RichClass(c).isScalaAnonymous))
-    } yield {
-      (nonAnon, tpe)
-    }
-  }
-  
-  def runtimeTypeOfTerm(id: String): Option[Type] = {
-    for {
-      tpe <- typeOfTerm(id)
-      clazz <- classOfTerm(id)
-      val staticSym = tpe.typeSymbol
-      runtimeSym <- safeClass(clazz.getName)
-      if runtimeSym != staticSym
-      if runtimeSym isSubClass staticSym
-    } yield {
-      runtimeSym.info
+    classOfTerm(id) flatMap { clazz =>
+      new RichClass(clazz).supers find(c => !(new RichClass(c).isScalaAnonymous)) map { nonAnon =>
+        (nonAnon, runtimeTypeOfTerm(id))
+      }
     }
   }
-  
-  // XXX literals.
-  // 1) Identifiers defined in the repl.
-  // 2) A path loadable via getModule.
-  // 3) Try interpreting it as an expression.
-  private var typeOfExpressionDepth = 0
-  def typeOfExpression(expr: String): Option[Type] = {
-    DBG("typeOfExpression(" + expr + ")")
-    if (typeOfExpressionDepth > 2) {
-      DBG("Terminating typeOfExpression recursion for expression: " + expr)
-      return None
-    }
 
-    def asQualifiedImport = {
-      val name = expr.takeWhile(_ != '.')
-      importedTermNamed(name) flatMap { sym =>
-        typeOfExpression(sym.fullName + expr.drop(name.length))
-      }
+  def runtimeTypeOfTerm(id: String): Type = {
+    typeOfTerm(id) andAlso { tpe =>
+      val clazz      = classOfTerm(id) getOrElse { return NoType }
+      val staticSym  = tpe.typeSymbol
+      val runtimeSym = getClassIfDefined(clazz.getName)
+
+      if ((runtimeSym != NoSymbol) && (runtimeSym != staticSym) && (runtimeSym isSubClass staticSym))
+        runtimeSym.info
+      else NoType
     }
-    def asModule = safeModule(expr) map (_.tpe)
-    def asExpr = beSilentDuring {
-      val lhs = freshInternalVarName()
-      val line = "lazy val " + lhs + " = { " + expr + " } "
-
-      interpret(line, true) match {
-        case IR.Success => typeOfExpression(lhs)
-        case _          => None
+  }
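+  // For example (hypothetical session): after `val xs: Seq[Int] = List(1, 2)`,
+  // typeOfTerm("xs") is the declared Seq[Int], while runtimeTypeOfTerm("xs")
+  // inspects the value's class and narrows to List[Int]; when the runtime
+  // class adds nothing beyond the static type, NoType is returned instead.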
+  def cleanMemberDecl(owner: Symbol, member: Name): Type = afterTyper {
+    normalizeNonPublic {
+      owner.info.nonPrivateDecl(member).tpe match {
+        case NullaryMethodType(tp) => tp
+        case tp                    => tp
       }
     }
-    
-    typeOfExpressionDepth += 1
-    try typeOfTerm(expr) orElse asModule orElse asExpr orElse asQualifiedImport
-    finally typeOfExpressionDepth -= 1
   }
-  // def compileAndTypeExpr(expr: String): Option[Typer] = {
-  //   class TyperRun extends Run {
-  //     override def stopPhase(name: String) = name == "superaccessors"
-  //   }
-  // }
-    
+
+  object exprTyper extends {
+    val repl: SparkIMain.this.type = imain
+  } with SparkExprTyper { }
+
+  def parse(line: String): Option[List[Tree]] = exprTyper.parse(line)
+
+  def symbolOfLine(code: String): Symbol =
+    exprTyper.symbolOfLine(code)
+
+  def typeOfExpression(expr: String, silent: Boolean = true): Type =
+    exprTyper.typeOfExpression(expr, silent)
+
   protected def onlyTerms(xs: List[Name]) = xs collect { case x: TermName => x }
   protected def onlyTypes(xs: List[Name]) = xs collect { case x: TypeName => x }
-    
-  def definedTerms   = onlyTerms(allDefinedNames) filterNot (x => isInternalVarName(x.toString))
-  def definedTypes   = onlyTypes(allDefinedNames)
-  def definedSymbols = prevRequests.toSet flatMap ((x: Request) => x.definedSymbols.values)
-  
+
+  def definedTerms      = onlyTerms(allDefinedNames) filterNot isInternalTermName
+  def definedTypes      = onlyTypes(allDefinedNames)
+  def definedSymbols    = prevRequestList.flatMap(_.definedSymbols.values).toSet[Symbol]
+  def definedSymbolList = prevRequestList flatMap (_.definedSymbolList) filterNot (s => isInternalTermName(s.name))
+
+  // Terms with user-given names (i.e. not res0 and not synthetic)
+  def namedDefinedTerms = definedTerms filterNot (x => isUserVarName("" + x) || directlyBoundNames(x))
+
+  private def findName(name: Name) = definedSymbols find (_.name == name) getOrElse NoSymbol
+
+  /** Translate a repl-defined identifier into a Symbol.
+   */
+  def apply(name: String): Symbol =
+    types(name) orElse terms(name)
+
+  def types(name: String): Symbol = {
+    val tpname = newTypeName(name)
+    findName(tpname) orElse getClassIfDefined(tpname)
+  }
+  def terms(name: String): Symbol = {
+    val termname = newTermName(name)
+    findName(termname) orElse getModuleIfDefined(termname)
+  }
+  // [Eugene to Paul] possibly you could make use of TypeTags here
+  def types[T: ClassTag] : Symbol = types(classTag[T].runtimeClass.getName)
+  def terms[T: ClassTag] : Symbol = terms(classTag[T].runtimeClass.getName)
+  def apply[T: ClassTag] : Symbol = apply(classTag[T].runtimeClass.getName)
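+  // Hypothetical queries against a session that defined `case class Bippy(x: Int)`:
+  //   intp.types("Bippy")   // the class symbol
+  //   intp.terms("Bippy")   // the companion module symbol
+  //   intp("Bippy")         // tries types first, then terms
+  //   intp.types[String]    // resolves via the runtime class name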
+
+  def classSymbols  = allDefSymbols collect { case x: ClassSymbol => x }
+  def methodSymbols = allDefSymbols collect { case x: MethodSymbol => x }
+
   /** the previous requests this interpreter has processed */
-  private lazy val prevRequests      = mutable.ListBuffer[Request]()
-  private lazy val referencedNameMap = mutable.Map[Name, Request]()
-  private lazy val definedNameMap    = mutable.Map[Name, Request]()
-  protected def prevRequestList      = prevRequests.toList
-  private def allHandlers            = prevRequestList flatMap (_.handlers)
-  def allSeenTypes                   = prevRequestList flatMap (_.typeOf.values.toList) distinct
-  def allImplicits                   = allHandlers filter (_.definesImplicit) flatMap (_.definedNames)
-  def importHandlers                 = allHandlers collect { case x: ImportHandler => x }
-  
+  private var executingRequest: Request = _
+  private val prevRequests       = mutable.ListBuffer[Request]()
+  private val referencedNameMap  = mutable.Map[Name, Request]()
+  private val definedNameMap     = mutable.Map[Name, Request]()
+  private val directlyBoundNames = mutable.Set[Name]()
+
+  def allHandlers    = prevRequestList flatMap (_.handlers)
+  def allDefHandlers = allHandlers collect { case x: MemberDefHandler => x }
+  def allDefSymbols  = allDefHandlers map (_.symbol) filter (_ ne NoSymbol)
+
+  def lastRequest         = if (prevRequests.isEmpty) null else prevRequests.last
+  def prevRequestList     = prevRequests.toList
+  def allSeenTypes        = prevRequestList flatMap (_.typeOf.values.toList) distinct
+  def allImplicits        = allHandlers filter (_.definesImplicit) flatMap (_.definedNames)
+  def importHandlers      = allHandlers collect { case x: ImportHandler => x }
+
   def visibleTermNames: List[Name] = definedTerms ++ importedTerms distinct
 
   /** Another entry point for tab-completion, ids in scope */
   def unqualifiedIds = visibleTermNames map (_.toString) filterNot (_ contains "$") sorted
-  
+
   /** Parse the ScalaSig to find type aliases */
   def aliasForType(path: String) = ByteCode.aliasForType(path)
-  
+
   def withoutUnwrapping(op: => Unit): Unit = {
     val saved = isettings.unwrapStrings
     isettings.unwrapStrings = false
     try op
     finally isettings.unwrapStrings = saved
   }
-  
+
   def symbolDefString(sym: Symbol) = {
     TypeStrings.quieter(
       afterTyper(sym.defString),
@@ -1059,38 +1218,41 @@ class SparkIMain(val settings: Settings, protected val out: PrintWriter) extends
       sym.owner.fullName + "."
     )
   }
-  
+
   def showCodeIfDebugging(code: String) {
     /** Secret bookcase entrance for repl debuggers: end the line
      *  with "// show" and see what's going on.
      */
-    if (SPARK_DEBUG_REPL || code.lines.exists(_.trim endsWith "// show")) {
-      echo(code)
-      parse(code) foreach (ts => ts foreach (t => withoutUnwrapping(DBG(asCompactString(t)))))
+    def isShow    = code.lines exists (_.trim endsWith "// show")
+    def isShowRaw = code.lines exists (_.trim endsWith "// raw")
+
+    // old style
+    beSilentDuring(parse(code)) foreach { ts =>
+      ts foreach { t =>
+        if (isShow || isShowRaw) withoutUnwrapping(echo(asCompactString(t)))
+        else withoutUnwrapping(logDebug(asCompactString(t)))
+      }
     }
   }
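+  // For example, ending an interpreted line with the magic comment:
+  //   scala> List(1, 2).map(_ + 1)   // show
+  // echoes the parsed trees for that line before it is compiled.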
+
   // debugging
   def debugging[T](msg: String)(res: T) = {
-    DBG(msg + " " + res)
+    logDebug(msg + " " + res)
     res
   }
-  def DBG(s: => String) = if (isReplDebug) {
-    //try repldbg(s)
-    //catch { case x: AssertionError => repldbg("Assertion error printing debug string:\n  " + x) }
-  }
 }
 
 /** Utility methods for the Interpreter. */
 object SparkIMain {
   // The two name forms this is catching are the two sides of this assignment:
   //
-  // $line3.$read.$iw.$iw.Bippy = 
+  // $line3.$read.$iw.$iw.Bippy =
   //   $line3.$read$$iw$$iw$Bippy@4a6a00ca
   private def removeLineWrapper(s: String) = s.replaceAll("""\$line\d+[./]\$(read|eval|print)[$.]""", "")
   private def removeIWPackages(s: String)  = s.replaceAll("""\$(iw|iwC|read|eval|print)[$.]""", "")
   private def removeSparkVals(s: String)   = s.replaceAll("""\$VAL[0-9]+[$.]""", "")
+
   def stripString(s: String)               = removeSparkVals(removeIWPackages(removeLineWrapper(s)))
-  
+
   trait CodeAssembler[T] {
     def preamble: String
     def generate: T => String
@@ -1102,7 +1264,7 @@ object SparkIMain {
       code println postamble
     }
   }
-  
+
   trait StrippingWriter {
     def isStripping: Boolean
     def stripImpl(str: String): String
@@ -1112,17 +1274,17 @@ object SparkIMain {
     def maxStringLength: Int
     def isTruncating: Boolean
     def truncate(str: String): String = {
-      if (isTruncating && str.length > maxStringLength)
+      if (isTruncating && (maxStringLength != 0 && str.length > maxStringLength))
         (str take maxStringLength - 3) + "..."
       else str
     }
   }
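+  // Worked example, assuming isTruncating and maxStringLength = 10:
+  //   truncate("abcdefghijk")   // => "abcdefg...", 7 chars kept plus "..."
+  //   (maxStringLength = 0 disables truncation entirely, per the check above)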
-  abstract class StrippingTruncatingWriter(out: PrintWriter)
-          extends PrintWriter(out)
+  abstract class StrippingTruncatingWriter(out: JPrintWriter)
+          extends JPrintWriter(out)
              with StrippingWriter
              with TruncatingWriter {
     self =>
- 
+
     def clean(str: String): String = truncate(strip(str))
     override def write(str: String) = super.write(clean(str))
   }
@@ -1132,18 +1294,7 @@ object SparkIMain {
     def isStripping        = isettings.unwrapStrings
     def isTruncating       = reporter.truncationOK
 
-    def stripImpl(str: String): String = {
-      val cleaned = stripString(str)
-      var ctrlChars = 0
-      cleaned map { ch =>
-        if (ch.isControl && !ch.isWhitespace) {
-          ctrlChars += 1
-          if (ctrlChars > 5) return "[line elided for control chars: possibly a scala signature]"
-          else '?'
-        }
-        else ch
-      }
-    }
+    def stripImpl(str: String): String = naming.unmangle(str)
   }
 
   class ReplReporter(intp: SparkIMain) extends ConsoleReporter(intp.settings, null, new ReplStrippingWriter(intp)) {
@@ -1156,5 +1307,55 @@ object SparkIMain {
       }
       else Console.println(msg)
     }
-  }  
+  }
+}
+
+class SparkISettings(intp: SparkIMain) extends Logging {
+  /** A list of paths where :load should look */
+  var loadPath = List(".")
+
+  /** Set this to true to see repl machinery under -Yrich-exceptions.
+   */
+  var showInternalStackTraces = false
+
+  /** The maximum length of toString to use when printing the result
+   *  of an evaluation.  0 means no maximum.  If a printout requires
+   *  more than this number of characters, then the printout is
+   *  truncated.
+   */
+  var maxPrintString = 800
+
+  /** The maximum number of completion candidates to print for tab
+   *  completion without requiring confirmation.
+   */
+  var maxAutoprintCompletion = 250
+
+  /** String unwrapping can be disabled if it is causing issues.
+   *  Setting this to false means you will see Strings like "$iw.$iw.".
+   */
+  var unwrapStrings = true
+
+  def deprecation_=(x: Boolean) = {
+    val old = intp.settings.deprecation.value
+    intp.settings.deprecation.value = x
+    if (!old && x) logDebug("Enabled -deprecation output.")
+    else if (old && !x) logDebug("Disabled -deprecation output.")
+  }
+
+  def deprecation: Boolean = intp.settings.deprecation.value
+
+  def allSettings = Map(
+    "maxPrintString" -> maxPrintString,
+    "maxAutoprintCompletion" -> maxAutoprintCompletion,
+    "unwrapStrings" -> unwrapStrings,
+    "deprecation" -> deprecation
+  )
+
+  private def allSettingsString =
+    allSettings.toList sortBy (_._1) map { case (k, v) => "  " + k + " = " + v + "\n" } mkString
+
+  override def toString = """
+    | SparkISettings {
+    | %s
+    | }""".stripMargin.format(allSettingsString)
 }
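
A hypothetical interaction with these settings from the interpreter side:

    val isettings = new SparkISettings(intp)
    isettings.maxPrintString = 0     // print results without truncation
    isettings.deprecation = true     // flips -deprecation on intp.settings
    println(isettings)               // dumps allSettings, one entry per line
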
diff --git a/repl/src/main/scala/spark/repl/SparkISettings.scala b/repl/src/main/scala/spark/repl/SparkISettings.scala
deleted file mode 100644
index 8ebb01d14602171c3ab8c38eaf9df79ec629e0a8..0000000000000000000000000000000000000000
--- a/repl/src/main/scala/spark/repl/SparkISettings.scala
+++ /dev/null
@@ -1,63 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
- * @author Alexander Spoon
- */
-
-package spark.repl
-
-import scala.tools.nsc._
-import scala.tools.nsc.interpreter._
-
-/** Settings for the interpreter
- *
- * @version 1.0
- * @author Lex Spoon, 2007/3/24
- **/
-class SparkISettings(intp: SparkIMain) {
-  /** A list of paths where :load should look */
-  var loadPath = List(".")
-  
-  /** Set this to true to see repl machinery under -Yrich-exceptions.
-   */
-  var showInternalStackTraces = false
-
-  /** The maximum length of toString to use when printing the result
-   *  of an evaluation.  0 means no maximum.  If a printout requires
-   *  more than this number of characters, then the printout is
-   *  truncated.
-   */
-  var maxPrintString = 800
-  
-  /** The maximum number of completion candidates to print for tab
-   *  completion without requiring confirmation.
-   */
-  var maxAutoprintCompletion = 250
-  
-  /** String unwrapping can be disabled if it is causing issues.
-   *  Settings this to false means you will see Strings like "$iw.$iw.".
-   */
-  var unwrapStrings = true
-  
-  def deprecation_=(x: Boolean) = {
-    val old = intp.settings.deprecation.value
-    intp.settings.deprecation.value = x
-    if (!old && x) println("Enabled -deprecation output.")
-    else if (old && !x) println("Disabled -deprecation output.")
-  }
-  def deprecation: Boolean = intp.settings.deprecation.value
-  
-  def allSettings = Map(
-    "maxPrintString" -> maxPrintString,
-    "maxAutoprintCompletion" -> maxAutoprintCompletion,
-    "unwrapStrings" -> unwrapStrings,
-    "deprecation" -> deprecation
-  )
-  
-  private def allSettingsString =
-    allSettings.toList sortBy (_._1) map { case (k, v) => "  " + k + " = " + v + "\n" } mkString
-    
-  override def toString = """
-    | SparkISettings {
-    | %s
-    | }""".stripMargin.format(allSettingsString)
-}
diff --git a/repl/src/main/scala/spark/repl/SparkImports.scala b/repl/src/main/scala/spark/repl/SparkImports.scala
index 5caf5ca51a407d573abd728004b7aa123a3e94b8..20345aa9eef807b376afa1fd0ea5594fdb852901 100644
--- a/repl/src/main/scala/spark/repl/SparkImports.scala
+++ b/repl/src/main/scala/spark/repl/SparkImports.scala
@@ -1,9 +1,10 @@
 /* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
  * @author  Paul Phillips
  */
 
-package spark.repl
+package spark
+package repl
 
 import scala.tools.nsc._
 import scala.tools.nsc.interpreter._
@@ -12,30 +13,34 @@ import scala.collection.{ mutable, immutable }
 
 trait SparkImports {
   self: SparkIMain =>
-  
+
   import global._
   import definitions.{ ScalaPackage, JavaLangPackage, PredefModule }
   import memberHandlers._
-  
+
+  def isNoImports = settings.noimports.value
+  def isNoPredef  = settings.nopredef.value
+
   /** Synthetic import handlers for the language defined imports. */
   private def makeWildcardImportHandler(sym: Symbol): ImportHandler = {
     val hd :: tl = sym.fullName.split('.').toList map newTermName
     val tree = Import(
       tl.foldLeft(Ident(hd): Tree)((x, y) => Select(x, y)),
-      List(ImportSelector(nme.WILDCARD, -1, null, -1))
+      ImportSelector.wildList
     )
     tree setSymbol sym
     new ImportHandler(tree)
   }
-  
+
   /** Symbols whose contents are language-defined to be imported. */
   def languageWildcardSyms: List[Symbol] = List(JavaLangPackage, ScalaPackage, PredefModule)
   def languageWildcards: List[Type] = languageWildcardSyms map (_.tpe)
   def languageWildcardHandlers = languageWildcardSyms map makeWildcardImportHandler
-  
-  def importedTerms  = onlyTerms(importHandlers flatMap (_.importedNames))
-  def importedTypes  = onlyTypes(importHandlers flatMap (_.importedNames))
-  
+
+  def allImportedNames = importHandlers flatMap (_.importedNames)
+  def importedTerms    = onlyTerms(allImportedNames)
+  def importedTypes    = onlyTypes(allImportedNames)
+
   /** Types which have been wildcard imported, such as:
    *    val x = "abc" ; import x._  // type java.lang.String
    *    import java.lang.String._   // object java.lang.String
@@ -48,30 +53,28 @@ trait SparkImports {
    *  into the compiler scopes.
    */
   def sessionWildcards: List[Type] = {
-    importHandlers flatMap {
-      case x if x.importsWildcard => x.targetType
-      case _                      => None
-    } distinct
+    importHandlers filter (_.importsWildcard) map (_.targetType) distinct
   }
   def wildcardTypes = languageWildcards ++ sessionWildcards
-  
+
   def languageSymbols        = languageWildcardSyms flatMap membersAtPickler
   def sessionImportedSymbols = importHandlers flatMap (_.importedSymbols)
   def importedSymbols        = languageSymbols ++ sessionImportedSymbols
   def importedTermSymbols    = importedSymbols collect { case x: TermSymbol => x }
   def importedTypeSymbols    = importedSymbols collect { case x: TypeSymbol => x }
   def implicitSymbols        = importedSymbols filter (_.isImplicit)
-  
-  def importedTermNamed(name: String) = importedTermSymbols find (_.name.toString == name)
+
+  def importedTermNamed(name: String): Symbol =
+    importedTermSymbols find (_.name.toString == name) getOrElse NoSymbol
 
   /** Tuples of (source, imported symbols) in the order they were imported.
    */
   def importedSymbolsBySource: List[(Symbol, List[Symbol])] = {
     val lang    = languageWildcardSyms map (sym => (sym, membersAtPickler(sym)))
-    val session = importHandlers filter (_.targetType.isDefined) map { mh =>
-      (mh.targetType.get.typeSymbol, mh.importedSymbols)
+    val session = importHandlers filter (_.targetType != NoType) map { mh =>
+      (mh.targetType.typeSymbol, mh.importedSymbols)
     }
-    
+
     lang ++ session
   }
   def implicitSymbolsBySource: List[(Symbol, List[Symbol])] = {
@@ -79,7 +82,7 @@ trait SparkImports {
       case (k, vs) => (k, vs filter (_.isImplicit))
     } filterNot (_._2.isEmpty)
   }
-  
+
   /** Compute imports that allow definitions from previous
    *  requests to be visible in a new request.  Returns
    *  three pieces of related code:
@@ -90,7 +93,7 @@ trait SparkImports {
    *  2. A code fragment that should go after the code
    *  of the new request.
    *
-   *  3. An access path which can be traverested to access
+   *  3. An access path which can be traversed to access
    *  any bindings inside code wrapped by #1 and #2 .
    *
    * The argument is a set of Names that need to be imported.
@@ -103,27 +106,27 @@ trait SparkImports {
    * (3) It imports multiple same-named implicits, but only the
    * last one imported is actually usable.
    */
-  case class ComputedImports(prepend: String, append: String, access: String)
-  protected def importsCode(wanted: Set[Name]): ComputedImports = {
-    /** Narrow down the list of requests from which imports 
+  case class SparkComputedImports(prepend: String, append: String, access: String)
+
+  protected def importsCode(wanted: Set[Name]): SparkComputedImports = {
+    /** Narrow down the list of requests from which imports
      *  should be taken.  Removes requests which cannot contribute
      *  useful imports for the specified set of wanted names.
      */
     case class ReqAndHandler(req: Request, handler: MemberHandler) { }
-    
-    def reqsToUse: List[ReqAndHandler] = {      
+
+    def reqsToUse: List[ReqAndHandler] = {
       /** Loop through a list of MemberHandlers and select which ones to keep.
         * 'wanted' is the set of names that need to be imported.
        */
       def select(reqs: List[ReqAndHandler], wanted: Set[Name]): List[ReqAndHandler] = {
-        val isWanted = wanted contains _
         // Single symbol imports might be implicits! See bug #1752.  Rather than
         // try to finesse this, we will mimic all imports for now.
         def keepHandler(handler: MemberHandler) = handler match {
           case _: ImportHandler => true
-          case x                => x.definesImplicit || (x.definedNames exists isWanted)
+          case x                => x.definesImplicit || (x.definedNames exists wanted)
         }
-                   
+
         reqs match {
           case Nil                                    => Nil
           case rh :: rest if !keepHandler(rh.handler) => select(rest, wanted)
@@ -133,7 +136,7 @@ trait SparkImports {
             rh :: select(rest, newWanted)
         }
       }
-      
+
       /** Flatten the handlers out and pair each with the original request */
       select(allReqAndHandlers reverseMap { case (r, h) => ReqAndHandler(r, h) }, wanted).reverse
     }
@@ -147,8 +150,13 @@ trait SparkImports {
       code append "class %sC extends Serializable {\n".format(impname)
       trailingBraces append "}\nval " + impname + " = new " + impname + "C;\n"
       accessPath append ("." + impname)
-      
+
       currentImps.clear
+      // code append "object %s {\n".format(impname)
+      // trailingBraces append "}\n"
+      // accessPath append ("." + impname)
+
+      // currentImps.clear
     }
 
     addWrapper()
@@ -159,36 +167,28 @@ trait SparkImports {
         // If the user entered an import, then just use it; add an import wrapping
         // level if the import might conflict with some other import
         case x: ImportHandler =>
-          if (x.importsWildcard || (currentImps exists (x.importedNames contains _)))
+          if (x.importsWildcard || currentImps.exists(x.importedNames contains _))
             addWrapper()
-          
+
           code append (x.member + "\n")
-          
+
           // give wildcard imports a import wrapper all to their own
           if (x.importsWildcard) addWrapper()
           else currentImps ++= x.importedNames
 
         // For other requests, import each defined name.
         // import them explicitly instead of with _, so that
-        // ambiguity errors will not be generated. Also, quote 
-        // the name of the variable, so that we don't need to 
-        // handle quoting keywords separately. 
+        // ambiguity errors will not be generated. Also, quote
+        // the name of the variable, so that we don't need to
+        // handle quoting keywords separately.
         case x =>
           for (imv <- x.definedNames) {
-            // MATEI: Changed this check because it was messing up for case classes
-            // (trying to import them twice within the same wrapper), but that is more likely
-            // due to a miscomputation of names that makes the code think they're unique.
-            // Need to evaluate whether having so many wrappers is a bad thing.
-            /*if (currentImps contains imv)*/
-            val imvName = imv.toString
-            if (currentImps exists (_.toString == imvName)) addWrapper()
-        
+            if (currentImps contains imv) addWrapper()
             val objName = req.lineRep.readPath
             val valName = "$VAL" + newValId();
             code.append("val " + valName + " = " + objName + ".INSTANCE;\n")
             code.append("import " + valName + req.accessPath + ".`" + imv + "`;\n")
-
-            //code append ("import %s\n" format (req fullPath imv))
+            // code append ("import " + (req fullPath imv) + "\n")
             currentImps += imv
           }
       }
@@ -196,14 +196,14 @@ trait SparkImports {
     // add one extra wrapper, to prevent warnings in the common case of
     // redefining the value bound in the last interpreter request.
     addWrapper()
-    ComputedImports(code.toString, trailingBraces.toString, accessPath.toString)
+    SparkComputedImports(code.toString, trailingBraces.toString, accessPath.toString)
   }
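+  // To make the three pieces concrete, a schematic result for a request that
+  // references `x` defined on line 1 (wrapper and $VAL numbering vary):
+  //   prepend:  class $iwC extends Serializable {
+  //               val $VAL1 = $line1.$read.INSTANCE;
+  //               import $VAL1.$iw.`x`;
+  //   append:   }
+  //             val $iw = new $iwC;
+  //   access:   ".$iw"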
-  
+
   private def allReqAndHandlers =
     prevRequestList flatMap (req => req.handlers map (req -> _))
 
   private def membersAtPickler(sym: Symbol): List[Symbol] =
-    atPickler(sym.info.nonPrivateMembers)
+    beforePickler(sym.info.nonPrivateMembers.toList)
 
   private var curValId = 0
 
diff --git a/repl/src/main/scala/spark/repl/SparkJLineCompletion.scala b/repl/src/main/scala/spark/repl/SparkJLineCompletion.scala
index 0069d8b2f49c74d565f85a3a7bad12362f5ff3ec..0e1170688db9cd6e290835b1ee88a8c02ab2a43f 100644
--- a/repl/src/main/scala/spark/repl/SparkJLineCompletion.scala
+++ b/repl/src/main/scala/spark/repl/SparkJLineCompletion.scala
@@ -1,9 +1,11 @@
 /* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
  * @author Paul Phillips
  */
 
-package spark.repl
+package spark
+package repl
+
 
 import scala.tools.nsc._
 import scala.tools.nsc.interpreter._
@@ -11,30 +13,31 @@ import scala.tools.nsc.interpreter._
 import scala.tools.jline._
 import scala.tools.jline.console.completer._
 import Completion._
-import collection.mutable.ListBuffer
+import scala.collection.mutable.ListBuffer
+import spark.Logging
 
 // REPL completor - queries supplied interpreter for valid
 // completions based on current contents of buffer.
-class SparkJLineCompletion(val intp: SparkIMain) extends Completion with CompletionOutput {
+class SparkJLineCompletion(val intp: SparkIMain) extends Completion with CompletionOutput with Logging {
   val global: intp.global.type = intp.global
   import global._
-  import definitions.{ PredefModule, RootClass, AnyClass, AnyRefClass, ScalaPackage, JavaLangPackage }
+  import definitions.{ PredefModule, AnyClass, AnyRefClass, ScalaPackage, JavaLangPackage }
+  import rootMirror.{ RootClass, getClassIfDefined, getModuleIfDefined }
   type ExecResult = Any
-  import intp.{ DBG, debugging, afterTyper }
-  
+  import intp.{ debugging }
+
   // verbosity goes up with consecutive tabs
   private var verbosity: Int = 0
   def resetVerbosity() = verbosity = 0
-    
-  def getType(name: String, isModule: Boolean) = {
-    val f = if (isModule) definitions.getModule(_: Name) else definitions.getClass(_: Name)
-    try Some(f(name).tpe)
-    catch { case _: MissingRequirementError => None }
-  }
-  
-  def typeOf(name: String) = getType(name, false)
-  def moduleOf(name: String) = getType(name, true)
-    
+
+  def getSymbol(name: String, isModule: Boolean) = (
+    if (isModule) getModuleIfDefined(name)
+    else getClassIfDefined(name)
+  )
+  def getType(name: String, isModule: Boolean) = getSymbol(name, isModule).tpe
+  def typeOf(name: String)                     = getType(name, false)
+  def moduleOf(name: String)                   = getType(name, true)
+
   trait CompilerCompletion {
     def tp: Type
     def effectiveTp = tp match {
@@ -48,16 +51,16 @@ class SparkJLineCompletion(val intp: SparkIMain) extends Completion with Complet
     private def anyMembers = AnyClass.tpe.nonPrivateMembers
     def anyRefMethodsToShow = Set("isInstanceOf", "asInstanceOf", "toString")
 
-    def tos(sym: Symbol) = sym.name.decode.toString
-    def memberNamed(s: String) = members find (x => tos(x) == s)
-    def hasMethod(s: String) = methods exists (x => tos(x) == s)
+    def tos(sym: Symbol): String = sym.decodedName
+    def memberNamed(s: String) = afterTyper(effectiveTp member newTermName(s))
+    def hasMethod(s: String) = memberNamed(s).isMethod
 
     // XXX we'd like to say "filterNot (_.isDeprecated)" but this causes the
     // compiler to crash for reasons not yet known.
-    def members     = afterTyper((effectiveTp.nonPrivateMembers ++ anyMembers) filter (_.isPublic))
-    def methods     = members filter (_.isMethod)
-    def packages    = members filter (_.isPackage)
-    def aliases     = members filter (_.isAliasType)
+    def members     = afterTyper((effectiveTp.nonPrivateMembers.toList ++ anyMembers) filter (_.isPublic))
+    def methods     = members.toList filter (_.isMethod)
+    def packages    = members.toList filter (_.isPackage)
+    def aliases     = members.toList filter (_.isAliasType)
 
     def memberNames   = members map tos
     def methodNames   = methods map tos
@@ -65,12 +68,19 @@ class SparkJLineCompletion(val intp: SparkIMain) extends Completion with Complet
     def aliasNames    = aliases map tos
   }
 
+  object NoTypeCompletion extends TypeMemberCompletion(NoType) {
+    override def memberNamed(s: String) = NoSymbol
+    override def members = Nil
+    override def follow(s: String) = None
+    override def alternativesFor(id: String) = Nil
+  }
+
   object TypeMemberCompletion {
     def apply(tp: Type, runtimeType: Type, param: NamedParam): TypeMemberCompletion = {
       new TypeMemberCompletion(tp) {
         var upgraded = false
         lazy val upgrade = {
-          intp rebind param          
+          intp rebind param
           intp.reporter.printMessage("\nRebinding stable value %s from %s to %s".format(param.name, tp, param.tpe))
           upgraded = true
           new TypeMemberCompletion(runtimeType)
@@ -92,7 +102,8 @@ class SparkJLineCompletion(val intp: SparkIMain) extends Completion with Complet
       }
     }
     def apply(tp: Type): TypeMemberCompletion = {
-      if (tp.typeSymbol.isPackageClass) new PackageCompletion(tp)
+      if (tp eq NoType) NoTypeCompletion
+      else if (tp.typeSymbol.isPackageClass) new PackageCompletion(tp)
       else new TypeMemberCompletion(tp)
     }
     def imported(tp: Type) = new ImportCompletion(tp)
@@ -103,9 +114,9 @@ class SparkJLineCompletion(val intp: SparkIMain) extends Completion with Complet
     def excludeEndsWith: List[String] = Nil
     def excludeStartsWith: List[String] = List("<") // <byname>, <repeated>, etc.
     def excludeNames: List[String] = (anyref.methodNames filterNot anyRefMethodsToShow) :+ "_root_"
-    
+
     def methodSignatureString(sym: Symbol) = {
       SparkIMain stripString afterTyper(new MethodSymbolOutput(sym).methodString())
     }
 
     def exclude(name: String): Boolean = (
@@ -118,10 +129,10 @@ class SparkJLineCompletion(val intp: SparkIMain) extends Completion with Complet
 
     def completions(verbosity: Int) =
       debugging(tp + " completions ==> ")(filtered(memberNames))
-    
+
     override def follow(s: String): Option[CompletionAware] =
-      debugging(tp + " -> '" + s + "' ==> ")(memberNamed(s) map (x => TypeMemberCompletion(x.tpe)))      
-    
+      debugging(tp + " -> '" + s + "' ==> ")(Some(TypeMemberCompletion(memberNamed(s).tpe)) filterNot (_ eq NoTypeCompletion))
+
     override def alternativesFor(id: String): List[String] =
       debugging(id + " alternatives ==> ") {
         val alts = members filter (x => x.isMethod && tos(x) == id) map methodSignatureString
@@ -131,7 +142,7 @@ class SparkJLineCompletion(val intp: SparkIMain) extends Completion with Complet
 
     override def toString = "%s (%d members)".format(tp, members.size)
   }
-  
+
   class PackageCompletion(tp: Type) extends TypeMemberCompletion(tp) {
     override def excludeNames = anyref.methodNames
   }
@@ -142,43 +153,44 @@ class SparkJLineCompletion(val intp: SparkIMain) extends Completion with Complet
       case _    => memberNames
     }
   }
-  
+
   class ImportCompletion(tp: Type) extends TypeMemberCompletion(tp) {
     override def completions(verbosity: Int) = verbosity match {
       case 0    => filtered(members filterNot (_.isSetter) map tos)
       case _    => super.completions(verbosity)
     }
   }
-  
+
   // not for completion but for excluding
   object anyref extends TypeMemberCompletion(AnyRefClass.tpe) { }
-  
+
   // the unqualified vals/defs/etc visible in the repl
   object ids extends CompletionAware {
     override def completions(verbosity: Int) = intp.unqualifiedIds ++ List("classOf") //, "_root_")
     // now we use the compiler for everything.
-    override def follow(id: String) = {
-      if (completions(0) contains id) {
-        intp typeOfExpression id map { tpe =>
-          def default = TypeMemberCompletion(tpe)
-
-          // only rebinding vals in power mode for now.
-          if (!isReplPower) default
-          else intp runtimeClassAndTypeOfTerm id match {
-            case Some((clazz, runtimeType)) =>
-              val sym = intp.symbolOfTerm(id)
-              if (sym.isStable) {
-                val param = new NamedParam.Untyped(id, intp valueOfTerm id getOrElse null)
-                TypeMemberCompletion(tpe, runtimeType, param)
-              }
-              else default
-            case _        =>
-              default
+    override def follow(id: String): Option[CompletionAware] = {
+      if (!completions(0).contains(id))
+        return None
+
+      val tpe = intp typeOfExpression id
+      if (tpe == NoType)
+        return None
+
+      def default = Some(TypeMemberCompletion(tpe))
+
+      // only rebinding vals in power mode for now.
+      if (!isReplPower) default
+      else intp runtimeClassAndTypeOfTerm id match {
+        case Some((clazz, runtimeType)) =>
+          val sym = intp.symbolOfTerm(id)
+          if (sym.isStable) {
+            val param = new NamedParam.Untyped(id, intp valueOfTerm id getOrElse null)
+            Some(TypeMemberCompletion(tpe, runtimeType, param))
           }
-        }
+          else default
+        case _        =>
+          default
       }
-      else
-        None
     }
     override def toString = "<repl ids> (%s)".format(completions(0).size)
   }
@@ -187,17 +199,10 @@ class SparkJLineCompletion(val intp: SparkIMain) extends Completion with Complet
   private def imported = intp.sessionWildcards map TypeMemberCompletion.imported
 
   // literal Ints, Strings, etc.
-  object literals extends CompletionAware {    
-    def simpleParse(code: String): Tree = {
-      val unit    = new CompilationUnit(new util.BatchSourceFile("<console>", code))
-      val scanner = new syntaxAnalyzer.UnitParser(unit)
-      val tss     = scanner.templateStatSeq(false)._2
-
-      if (tss.size == 1) tss.head else EmptyTree
-    }
-  
+  object literals extends CompletionAware {
+    def simpleParse(code: String): Tree = newUnitParser(code).templateStats().last
     def completions(verbosity: Int) = Nil
-    
+
     override def follow(id: String) = simpleParse(id) match {
       case x: Literal   => Some(new LiteralCompletion(x))
       case _            => None
@@ -210,18 +215,18 @@ class SparkJLineCompletion(val intp: SparkIMain) extends Completion with Complet
     override def follow(id: String) = id match {
       case "_root_" => Some(this)
       case _        => super.follow(id)
-    }    
+    }
   }
   // members of Predef
   object predef extends TypeMemberCompletion(PredefModule.tpe) {
     override def excludeEndsWith    = super.excludeEndsWith ++ List("Wrapper", "ArrayOps")
     override def excludeStartsWith  = super.excludeStartsWith ++ List("wrap")
     override def excludeNames       = anyref.methodNames
-    
+
     override def exclude(name: String) = super.exclude(name) || (
       (name contains "2")
     )
-    
+
     override def completions(verbosity: Int) = verbosity match {
       case 0    => Nil
       case _    => super.completions(verbosity)
@@ -234,7 +239,7 @@ class SparkJLineCompletion(val intp: SparkIMain) extends Completion with Complet
     override def exclude(name: String) = super.exclude(name) || (
       skipArity(name)
     )
-    
+
     override def completions(verbosity: Int) = verbosity match {
       case 0    => filtered(packageNames ++ aliasNames)
       case _    => super.completions(verbosity)
@@ -244,7 +249,7 @@ class SparkJLineCompletion(val intp: SparkIMain) extends Completion with Complet
   object javalang extends PackageCompletion(JavaLangPackage.tpe) {
     override lazy val excludeEndsWith   = super.excludeEndsWith ++ List("Exception", "Error")
     override lazy val excludeStartsWith = super.excludeStartsWith ++ List("CharacterData")
-    
+
     override def completions(verbosity: Int) = verbosity match {
       case 0    => filtered(packageNames)
       case _    => super.completions(verbosity)
@@ -256,7 +261,7 @@ class SparkJLineCompletion(val intp: SparkIMain) extends Completion with Complet
   lazy val topLevelBase: List[CompletionAware] = List(ids, rootClass, predef, scalalang, javalang, literals)
   def topLevel = topLevelBase ++ imported
   def topLevelThreshold = 50
-  
+
   // the first tier of top level objects (doesn't include file completion)
   def topLevelFor(parsed: Parsed): List[String] = {
     val buf = new ListBuffer[String]
@@ -280,19 +285,6 @@ class SparkJLineCompletion(val intp: SparkIMain) extends Completion with Complet
     if (parsed.isEmpty) xs map ("." + _) else xs
   }
 
-  // chasing down results which won't parse
-  def execute(line: String): Option[ExecResult] = {
-    val parsed = Parsed(line)
-    def noDotOrSlash = line forall (ch => ch != '.' && ch != '/')
-    
-    if (noDotOrSlash) None  // we defer all unqualified ids to the repl.
-    else {
-      (ids executionFor parsed) orElse
-      (rootClass executionFor parsed) orElse
-      (FileCompletion executionFor line)
-    }
-  }
-  
   // generic interface for querying (e.g. interpreter loop, testing)
   def completions(buf: String): List[String] =
     topLevelFor(Parsed.dotted(buf + ".", buf.length + 1))
@@ -327,11 +319,11 @@ class SparkJLineCompletion(val intp: SparkIMain) extends Completion with Complet
     // This is jline's entry point for completion.
     override def complete(buf: String, cursor: Int): Candidates = {
       verbosity = if (isConsecutiveTabs(buf, cursor)) verbosity + 1 else 0
-      DBG("\ncomplete(%s, %d) last = (%s, %d), verbosity: %s".format(buf, cursor, lastBuf, lastCursor, verbosity))
+      logDebug("\ncomplete(%s, %d) last = (%s, %d), verbosity: %s".format(buf, cursor, lastBuf, lastCursor, verbosity))
 
       // we don't try lower priority completions unless higher ones return no results.
       def tryCompletion(p: Parsed, completionFunction: Parsed => List[String]): Option[Candidates] = {
-        val winners = completionFunction(p) 
+        val winners = completionFunction(p)
         if (winners.isEmpty)
           return None
         val newCursor =
@@ -340,39 +332,45 @@ class SparkJLineCompletion(val intp: SparkIMain) extends Completion with Complet
             val advance = commonPrefix(winners)
             lastCursor = p.position + advance.length
             lastBuf = (buf take p.position) + advance
-            DBG("tryCompletion(%s, _) lastBuf = %s, lastCursor = %s, p.position = %s".format(
+            logDebug("tryCompletion(%s, _) lastBuf = %s, lastCursor = %s, p.position = %s".format(
               p, lastBuf, lastCursor, p.position))
             p.position
           }
-        
+
         Some(Candidates(newCursor, winners))
       }
-      
+
       def mkDotted      = Parsed.dotted(buf, cursor) withVerbosity verbosity
       def mkUndelimited = Parsed.undelimited(buf, cursor) withVerbosity verbosity
 
       // a single dot is special cased to completion on the previous result
       def lastResultCompletion =
-        if (!looksLikeInvocation(buf)) None            
+        if (!looksLikeInvocation(buf)) None
         else tryCompletion(Parsed.dotted(buf drop 1, cursor), lastResultFor)
 
-      def regularCompletion = tryCompletion(mkDotted, topLevelFor)
-      def fileCompletion    = 
-        if (!looksLikePath(buf)) None
-        else tryCompletion(mkUndelimited, FileCompletion completionsFor _.buffer)
-      
-      /** This is the kickoff point for all manner of theoretically possible compiler
-       *  unhappiness - fault may be here or elsewhere, but we don't want to crash the
-       *  repl regardless.  Hopefully catching Exception is enough, but because the
-       *  compiler still throws some Errors it may not be.
+      def tryAll = (
+                  lastResultCompletion
+           orElse tryCompletion(mkDotted, topLevelFor)
+        getOrElse Candidates(cursor, Nil)
+      )
+
+      /**
+       *  This is the kickoff point for all manner of theoretically
+       *  possible compiler unhappiness. The fault may be here or
+       *  elsewhere, but we don't want to crash the repl regardless.
+       *  Catching Throwable is unavoidable: the compiler has an
+       *  unfortunate tendency to throw java.lang.Errors and
+       *  AssertionErrors at the drop of a hat. Some spots throw an
+       *  assertion once, then work on the next attempt, so a failed
+       *  completion is logged rather than allowed to escape.
+       */
-      try {
-        (lastResultCompletion orElse regularCompletion orElse fileCompletion) getOrElse Candidates(cursor, Nil)
-      }
-      catch {
-        case ex: Exception =>
-          DBG("Error: complete(%s, %s) provoked %s".format(buf, cursor, ex))
-          Candidates(cursor, List(" ", "<completion error: " + ex.getMessage +  ">"))
+      try tryAll
+      catch { case ex: Throwable =>
+        logWarning("Error: complete(%s, %s) provoked".format(buf, cursor) + ex)
+        Candidates(cursor,
+          if (isReplDebug) List("<error:" + ex + ">")
+          else Nil
+        )
       }
     }
   }
diff --git a/repl/src/main/scala/spark/repl/SparkJLineReader.scala b/repl/src/main/scala/spark/repl/SparkJLineReader.scala
index ef6b6e092e04df8143d0a0d72ac489a32707bed5..00aca60f343d5e73cb6bf94d88b3058736c3e003 100644
--- a/repl/src/main/scala/spark/repl/SparkJLineReader.scala
+++ b/repl/src/main/scala/spark/repl/SparkJLineReader.scala
@@ -1,5 +1,5 @@
 /* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
  * @author Stepan Koltsov
  */
 
@@ -15,28 +15,33 @@ import scala.collection.JavaConverters._
 import Completion._
 import io.Streamable.slurp
 
-/** Reads from the console using JLine */
-class SparkJLineReader(val completion: Completion) extends InteractiveReader {
+/**
+ *  Reads from the console using JLine.
+ */
+class SparkJLineReader(_completion: => Completion) extends InteractiveReader {
   val interactive = true
+  val consoleReader = new JLineConsoleReader()
+
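+  // The completion is passed by name and forced lazily, so this reader can be
+  // constructed before the interpreter's compiler has finished initializing.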
+  lazy val completion = _completion
   lazy val history: JLineHistory = JLineHistory()
-  lazy val keyBindings =
-    try KeyBinding parse slurp(term.getDefaultBindings)
-    catch { case _: Exception => Nil }
 
   private def term = consoleReader.getTerminal()
   def reset() = term.reset()
   def init()  = term.init()
-  
+
   def scalaToJline(tc: ScalaCompleter): Completer = new Completer {
     def complete(_buf: String, cursor: Int, candidates: JList[CharSequence]): Int = {
-      val buf   = if (_buf == null) "" else _buf      
+      val buf   = if (_buf == null) "" else _buf
       val Candidates(newCursor, newCandidates) = tc.complete(buf, cursor)
       newCandidates foreach (candidates add _)
       newCursor
     }
   }
-    
+
   class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper {
+    if ((history: History) ne NoHistory)
+      this setHistory history
+
     // working around protected/trait/java insufficiencies.
     def goBack(num: Int): Unit = back(num)
     def readOneKey(prompt: String) = {
@@ -46,34 +51,28 @@ class SparkJLineReader(val completion: Completion) extends InteractiveReader {
     }
     def eraseLine() = consoleReader.resetPromptLine("", "", 0)
     def redrawLineAndFlush(): Unit = { flush() ; drawLine() ; flush() }
-    
-    this setBellEnabled false
-    if (history ne NoHistory)
-      this setHistory history
-    
-    if (completion ne NoCompletion) {
-      val argCompletor: ArgumentCompleter =
-        new ArgumentCompleter(new JLineDelimiter, scalaToJline(completion.completer()))
-      argCompletor setStrict false
-      
-      this addCompleter argCompletor
-      this setAutoprintThreshold 400 // max completion candidates without warning
+    // override def readLine(prompt: String): String
+
+    // A hook for running code after the repl is done initializing.
+    lazy val postInit: Unit = {
+      this setBellEnabled false
+
+      if (completion ne NoCompletion) {
+        val argCompletor: ArgumentCompleter =
+          new ArgumentCompleter(new JLineDelimiter, scalaToJline(completion.completer()))
+        argCompletor setStrict false
+
+        this addCompleter argCompletor
+        this setAutoprintThreshold 400 // max completion candidates without warning
+      }
     }
   }
-  
-  val consoleReader: JLineConsoleReader = new JLineConsoleReader()
 
-  def currentLine: String = consoleReader.getCursorBuffer.buffer.toString
+  def currentLine = consoleReader.getCursorBuffer.buffer.toString
   def redrawLine() = consoleReader.redrawLineAndFlush()
-  def eraseLine() = {
-    while (consoleReader.delete()) { }
-    // consoleReader.eraseLine()
-  }
+  def eraseLine() = consoleReader.eraseLine()
+  // Alternate implementation, not sure if/when I need this.
+  // def eraseLine() = while (consoleReader.delete()) { }
   def readOneLine(prompt: String) = consoleReader readLine prompt
   def readOneKey(prompt: String)  = consoleReader readOneKey prompt
 }
-
-object SparkJLineReader {
-  def apply(intp: SparkIMain): SparkJLineReader = apply(new SparkJLineCompletion(intp))
-  def apply(comp: Completion): SparkJLineReader = new SparkJLineReader(comp)
-}
diff --git a/repl/src/main/scala/spark/repl/SparkMemberHandlers.scala b/repl/src/main/scala/spark/repl/SparkMemberHandlers.scala
index 2980dfcd76834652b0a4e205253f99c93d8b6789..2d894a0356b2e40381670c2c90aadc66d746c2f1 100644
--- a/repl/src/main/scala/spark/repl/SparkMemberHandlers.scala
+++ b/repl/src/main/scala/spark/repl/SparkMemberHandlers.scala
@@ -1,22 +1,24 @@
 /* NSC -- new Scala compiler
- * Copyright 2005-2011 LAMP/EPFL
+ * Copyright 2005-2013 LAMP/EPFL
  * @author  Martin Odersky
  */
 
-package spark.repl
+package spark
+package repl
 
 import scala.tools.nsc._
 import scala.tools.nsc.interpreter._
 
 import scala.collection.{ mutable, immutable }
 import scala.PartialFunction.cond
-import scala.reflect.NameTransformer
-import util.Chars
+import scala.reflect.internal.Chars
+import scala.reflect.internal.Flags._
+import scala.language.implicitConversions
 
 trait SparkMemberHandlers {
   val intp: SparkIMain
 
-  import intp.{ Request, global, naming, atPickler }
+  import intp.{ Request, global, naming }
   import global._
   import naming._
 
@@ -29,7 +31,7 @@ trait SparkMemberHandlers {
     front + (xs map string2codeQuoted mkString " + ")
   }
   private implicit def name2string(name: Name) = name.toString
-  
+
   /** A traverser that finds all mentioned identifiers, i.e. things
    *  that need to be imported.  It might return extra names.
    */
@@ -54,26 +56,28 @@ trait SparkMemberHandlers {
   }
 
   def chooseHandler(member: Tree): MemberHandler = member match {
-    case member: DefDef               => new DefHandler(member)
-    case member: ValDef               => new ValHandler(member)
-    case member@Assign(Ident(_), _)   => new AssignHandler(member)
-    case member: ModuleDef            => new ModuleHandler(member)
-    case member: ClassDef             => new ClassHandler(member)
-    case member: TypeDef              => new TypeAliasHandler(member)
-    case member: Import               => new ImportHandler(member)
-    case DocDef(_, documented)        => chooseHandler(documented)
-    case member                       => new GenericHandler(member)
+    case member: DefDef        => new DefHandler(member)
+    case member: ValDef        => new ValHandler(member)
+    case member: Assign        => new AssignHandler(member)
+    case member: ModuleDef     => new ModuleHandler(member)
+    case member: ClassDef      => new ClassHandler(member)
+    case member: TypeDef       => new TypeAliasHandler(member)
+    case member: Import        => new ImportHandler(member)
+    case DocDef(_, documented) => chooseHandler(documented)
+    case member                => new GenericHandler(member)
   }
-  
+
   sealed abstract class MemberDefHandler(override val member: MemberDef) extends MemberHandler(member) {
+    def symbol          = if (member.symbol eq null) NoSymbol else member.symbol
     def name: Name      = member.name
     def mods: Modifiers = member.mods
     def keyword         = member.keyword
-    def prettyName      = NameTransformer.decode(name)
-    
+    def prettyName      = name.decode
+
     override def definesImplicit = member.mods.isImplicit
     override def definesTerm: Option[TermName] = Some(name.toTermName) filter (_ => name.isTermName)
     override def definesType: Option[TypeName] = Some(name.toTypeName) filter (_ => name.isTypeName)
+    override def definedSymbols = if (symbol eq NoSymbol) Nil else List(symbol)
   }
 
   /** Class to handle one member among all the members included
@@ -82,11 +86,8 @@ trait SparkMemberHandlers {
   sealed abstract class MemberHandler(val member: Tree) {
     def definesImplicit = false
     def definesValue    = false
-    def isLegalTopLevel = member match {
-      case _: ModuleDef | _: ClassDef | _: Import => true
-      case _                                      => false
-    }
-    
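+    // Modules, classes, and imports are the only legal top-level members;
+    // their handlers override this to return true.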
+    def isLegalTopLevel = false
+
     def definesTerm     = Option.empty[TermName]
     def definesType     = Option.empty[TypeName]
 
@@ -94,6 +95,7 @@ trait SparkMemberHandlers {
     def importedNames        = List[Name]()
     def definedNames         = definesTerm.toList ++ definesType.toList
     def definedOrImported    = definedNames ++ importedNames
+    def definedSymbols       = List[Symbol]()
 
     def extraCodeToEvaluate(req: Request): String = ""
     def resultExtractionCode(req: Request): String = ""
@@ -103,11 +105,11 @@ trait SparkMemberHandlers {
   }
 
   class GenericHandler(member: Tree) extends MemberHandler(member)
-  
+
   class ValHandler(member: ValDef) extends MemberDefHandler(member) {
-    val maxStringElements = 1000  // no need to mkString billions of elements    
+    val maxStringElements = 1000  // no need to mkString billions of elements
     override def definesValue = true
-    
+
     override def resultExtractionCode(req: Request): String = {
       val isInternal = isUserVarName(name) && req.lookupTypeOf(name) == "Unit"
       if (!mods.isPublic || isInternal) ""
@@ -116,22 +118,27 @@ trait SparkMemberHandlers {
         val resultString =
           if (mods.isLazy) codegenln(false, "<lazy>")
           else any2stringOf(req fullPath name, maxStringElements)
-      
-        """ + "%s: %s = " + %s""".format(prettyName, string2code(req typeOf name), resultString)
+
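+        // When repl vids are enabled, append the value's identity hash so
+        // distinct instances with equal string forms can be told apart.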
+        val vidString =
+          if (replProps.vids) """" + " @ " + "%%8x".format(System.identityHashCode(%s)) + " """.trim.format(req fullPath name)
+          else ""
+
+        """ + "%s%s: %s = " + %s""".format(string2code(prettyName), vidString, string2code(req typeOf name), resultString)
       }
     }
   }
 
   class DefHandler(member: DefDef) extends MemberDefHandler(member) {
     private def vparamss = member.vparamss
-    // true if 0-arity
-    override def definesValue = vparamss.isEmpty || vparamss.head.isEmpty
+    private def isMacro = member.symbol hasFlag MACRO
+    // true if not a macro and 0-arity
+    override def definesValue = !isMacro && flattensToEmpty(vparamss)
     override def resultExtractionCode(req: Request) =
       if (mods.isPublic) codegenln(name, ": ", req.typeOf(name)) else ""
   }
 
   class AssignHandler(member: Assign) extends MemberHandler(member) {
-    val lhs = member.lhs.asInstanceOf[Ident] // an unfortunate limitation
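+    // Destructure via pattern match so any assignment target is accepted,
+    // not just a bare Ident as before.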
+    val Assign(lhs, rhs) = member
     val name = newTermName(freshInternalVarName())
 
     override def definesTerm = Some(name)
@@ -142,15 +149,15 @@ trait SparkMemberHandlers {
     /** Print out lhs instead of the generated varName */
     override def resultExtractionCode(req: Request) = {
       val lhsType = string2code(req lookupTypeOf name)
-      val res = string2code(req fullPath name)
-      
-      """ + "%s: %s = " + %s + "\n" """.format(lhs, lhsType, res) + "\n"
+      val res     = string2code(req fullPath name)
+      """ + "%s: %s = " + %s + "\n" """.format(string2code(lhs.toString), lhsType, res) + "\n"
     }
   }
 
   class ModuleHandler(module: ModuleDef) extends MemberDefHandler(module) {
     override def definesTerm = Some(name)
     override def definesValue = true
+    override def isLegalTopLevel = true
 
     override def resultExtractionCode(req: Request) = codegenln("defined module ", name)
   }
@@ -158,7 +165,8 @@ trait SparkMemberHandlers {
   class ClassHandler(member: ClassDef) extends MemberDefHandler(member) {
     override def definesType = Some(name.toTypeName)
     override def definesTerm = Some(name.toTermName) filter (_ => mods.isCase)
-    
+    override def isLegalTopLevel = true
+
     override def resultExtractionCode(req: Request) =
       codegenln("defined %s %s".format(keyword, name))
   }
@@ -173,26 +181,42 @@ trait SparkMemberHandlers {
 
   class ImportHandler(imp: Import) extends MemberHandler(imp) {
     val Import(expr, selectors) = imp
-    def targetType = intp.typeOfExpression("" + expr)
-    
+    def targetType: Type = intp.typeOfExpression("" + expr)
+    override def isLegalTopLevel = true
+
+    def createImportForName(name: Name): String = {
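+      // If an explicit selector (possibly a rename) matches the name, reproduce
+      // that selector; otherwise import the name directly from the prefix.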
+      selectors foreach {
+        case sel @ ImportSelector(old, _, `name`, _) => return "import %s.{ %s }".format(expr, sel)
+        case _ => ()
+      }
+      "import %s.%s".format(expr, name)
+    }
+    // TODO: Need to track these specially to honor Predef masking attempts,
+    // because they must be the leading imports in the code generated for each
+    // line.  We can use the same machinery as Contexts now, anyway.
+    def isPredefImport = isReferenceToPredef(expr)
+
     // wildcard imports, e.g. import foo._
     private def selectorWild    = selectors filter (_.name == nme.USCOREkw)
     // renamed imports, e.g. import foo.{ bar => baz }
     private def selectorRenames = selectors map (_.rename) filterNot (_ == null)
-    
+
     /** Whether this import includes a wildcard import */
     val importsWildcard = selectorWild.nonEmpty
-    
+
+    /** Whether anything imported is implicit. */
+    def importsImplicit = implicitSymbols.nonEmpty
+
     def implicitSymbols = importedSymbols filter (_.isImplicit)
     def importedSymbols = individualSymbols ++ wildcardSymbols
-    
+
     lazy val individualSymbols: List[Symbol] =
-      atPickler(targetType.toList flatMap (tp => individualNames map (tp nonPrivateMember _)))
+      beforePickler(individualNames map (targetType nonPrivateMember _))
 
     lazy val wildcardSymbols: List[Symbol] =
-      if (importsWildcard) atPickler(targetType.toList flatMap (_.nonPrivateMembers))
+      if (importsWildcard) beforePickler(targetType.nonPrivateMembers.toList)
       else Nil
-    
+
     /** Complete list of names imported by a wildcard */
     lazy val wildcardNames: List[Name]   = wildcardSymbols map (_.name)
     lazy val individualNames: List[Name] = selectorRenames filterNot (_ == nme.USCOREkw) flatMap (_.bothNames)
@@ -200,7 +224,7 @@ trait SparkMemberHandlers {
     /** The names imported by this statement */
     override lazy val importedNames: List[Name] = wildcardNames ++ individualNames
     lazy val importsSymbolNamed: Set[String] = importedNames map (_.toString) toSet
-    
+
     def importString = imp.toString
     override def resultExtractionCode(req: Request) = codegenln(importString) + "\n"
   }
diff --git a/repl/src/test/scala/spark/repl/ReplSuite.scala b/repl/src/test/scala/spark/repl/ReplSuite.scala
index db78d06d4f0b68cb4731685c3eb2c84f3f7470ca..4dfd3127bf4f9f3f0de5bb38a36093ecd89772c5 100644
--- a/repl/src/test/scala/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/spark/repl/ReplSuite.scala
@@ -1,51 +1,14 @@
 package spark.repl
 
-import java.io._
-import java.net.URLClassLoader
-
-import scala.collection.mutable.ArrayBuffer
-import scala.collection.JavaConversions._
+import java.io.FileWriter
 
 import org.scalatest.FunSuite
+
 import com.google.common.io.Files
 
-class ReplSuite extends FunSuite {
-  def runInterpreter(master: String, input: String): String = {
-    val in = new BufferedReader(new StringReader(input + "\n"))
-    val out = new StringWriter()
-    val cl = getClass.getClassLoader
-    var paths = new ArrayBuffer[String]
-    if (cl.isInstanceOf[URLClassLoader]) {
-      val urlLoader = cl.asInstanceOf[URLClassLoader]
-      for (url <- urlLoader.getURLs) {
-        if (url.getProtocol == "file") {
-          paths += url.getFile
-        }
-      }
-    }
-    val interp = new SparkILoop(in, new PrintWriter(out), master)
-    spark.repl.Main.interp = interp
-    val separator = System.getProperty("path.separator")
-    interp.process(Array("-classpath", paths.mkString(separator)))
-    spark.repl.Main.interp = null
-    if (interp.sparkContext != null)
-      interp.sparkContext.stop()
-    // To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
-    System.clearProperty("spark.master.port")
-    return out.toString
-  }
-  
-  def assertContains(message: String, output: String) {
-    assert(output contains message,
-           "Interpreter output did not contain '" + message + "':\n" + output)
-  }
-  
-  def assertDoesNotContain(message: String, output: String) {
-    assert(!(output contains message),
-           "Interpreter output contained '" + message + "':\n" + output)
-  }
-  
-  test ("simple foreach with accumulator") {
+class ReplSuite extends FunSuite with ReplSuiteMixin {
+
+  test("simple foreach with accumulator") {
     val output = runInterpreter("local", """
       val accum = sc.accumulator(0)
       sc.parallelize(1 to 10).foreach(x => accum += x)
@@ -55,8 +18,8 @@ class ReplSuite extends FunSuite {
     assertDoesNotContain("Exception", output)
     assertContains("res1: Int = 55", output)
   }
-  
-  test ("external vars") {
+
+  test("external vars") {
     val output = runInterpreter("local", """
       var v = 7
       sc.parallelize(1 to 10).map(x => v).collect.reduceLeft(_+_)
@@ -69,7 +32,7 @@ class ReplSuite extends FunSuite {
     assertContains("res1: Int = 100", output)
   }
 
-  test ("external classes") {
+  test("external classes") {
     val output = runInterpreter("local", """
       class C {
         def foo = 5
@@ -81,7 +44,7 @@ class ReplSuite extends FunSuite {
     assertContains("res0: Int = 50", output)
   }
 
-  test ("external functions") {
+  test("external functions") {
     val output = runInterpreter("local", """
       def double(x: Int) = x + x
       sc.parallelize(1 to 10).map(x => double(x)).collect.reduceLeft(_+_)
@@ -91,7 +54,7 @@ class ReplSuite extends FunSuite {
     assertContains("res0: Int = 110", output)
   }
 
-  test ("external functions that access vars") {
+  test("external functions that access vars") {
     val output = runInterpreter("local", """
       var v = 7
       def getV() = v
@@ -104,8 +67,8 @@ class ReplSuite extends FunSuite {
     assertContains("res0: Int = 70", output)
     assertContains("res1: Int = 100", output)
   }
-  
-  test ("broadcast vars") {
+
+  test("broadcast vars") {
     // Test that the value that a broadcast var had when it was created is used,
     // even if that variable is then modified in the driver program
     // TODO: This doesn't actually work for arrays when we run in local mode!
@@ -122,7 +85,7 @@ class ReplSuite extends FunSuite {
     assertContains("res2: Array[Int] = Array(5, 0, 0, 0, 0)", output)
   }
 
-  test ("interacting with files") {
+  test("interacting with files") {
     val tempDir = Files.createTempDir()
     val out = new FileWriter(tempDir + "/input")
     out.write("Hello world!\n")
@@ -143,7 +106,7 @@ class ReplSuite extends FunSuite {
   }
 
   if (System.getenv("MESOS_NATIVE_LIBRARY") != null) {
-    test ("running on Mesos") {
+    test("running on Mesos") {
       val output = runInterpreter("localquiet", """
         var v = 7
         def getV() = v
@@ -164,4 +127,5 @@ class ReplSuite extends FunSuite {
       assertContains("res4: Array[Int] = Array(0, 0, 0, 0, 0)", output)
     }
   }
+
 }
diff --git a/repl/src/test/scala/spark/repl/ReplSuiteMixin.scala b/repl/src/test/scala/spark/repl/ReplSuiteMixin.scala
new file mode 100644
index 0000000000000000000000000000000000000000..fd1a1b1e7cd50def36de4a981338231d48281cdc
--- /dev/null
+++ b/repl/src/test/scala/spark/repl/ReplSuiteMixin.scala
@@ -0,0 +1,60 @@
+package spark.repl
+
+import java.io.BufferedReader
+import java.io.PrintWriter
+import java.io.StringReader
+import java.io.StringWriter
+import java.net.URLClassLoader
+
+import scala.collection.mutable.ArrayBuffer
+import scala.concurrent.ExecutionContext.Implicits.global
+import scala.concurrent.future
+
+import spark.deploy.master.Master
+import spark.deploy.worker.Worker
+
+trait ReplSuiteMixin {
+  val localIp = "127.0.1.2"
+  val port = "7089"
+  val sparkUrl = s"spark://$localIp:$port"
+  def setupStandaloneCluster() {
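+    // Start the master on a background thread, then give it time to bind
+    // before launching the worker that registers with it.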
+    future { Master.main(Array("-i", localIp, "-p", port, "--webui-port", "0")) }
+    Thread.sleep(2000)
+    future { Worker.main(Array(sparkUrl, "--webui-port", "0")) }
+  }
+
+  def runInterpreter(master: String, input: String): String = {
+    val in = new BufferedReader(new StringReader(input + "\n"))
+    val out = new StringWriter()
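+    // Harvest file: URLs from the test classloader so the spawned repl
+    // compiles against the same classpath as the suite.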
+    val cl = getClass.getClassLoader
+    val paths = new ArrayBuffer[String]
+    if (cl.isInstanceOf[URLClassLoader]) {
+      val urlLoader = cl.asInstanceOf[URLClassLoader]
+      for (url <- urlLoader.getURLs) {
+        if (url.getProtocol == "file") {
+          paths += url.getFile
+        }
+      }
+    }
+
+    val interp = new SparkILoop(in, new PrintWriter(out), master)
+    spark.repl.Main.interp = interp
+    val separator = System.getProperty("path.separator")
+    interp.process(Array("-classpath", paths.mkString(separator)))
+    interp.closeInterpreter()
+    spark.repl.Main.interp = null
+    // To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
+    System.clearProperty("spark.master.port")
+    out.toString
+  }
+
+  def assertContains(message: String, output: String) {
+    assert(output contains message,
+      "Interpreter output did not contain '" + message + "':\n" + output)
+  }
+
+  def assertDoesNotContain(message: String, output: String) {
+    assert(!(output contains message),
+      "Interpreter output contained '" + message + "':\n" + output)
+  }
+}
diff --git a/repl/src/test/scala/spark/repl/StandaloneClusterReplSuite.scala b/repl/src/test/scala/spark/repl/StandaloneClusterReplSuite.scala
new file mode 100644
index 0000000000000000000000000000000000000000..0822770fe2774d67b4f2eed249d228929f74d281
--- /dev/null
+++ b/repl/src/test/scala/spark/repl/StandaloneClusterReplSuite.scala
@@ -0,0 +1,103 @@
+package spark.repl
+
+import java.io.FileWriter
+
+import org.scalatest.FunSuite
+
+import com.google.common.io.Files
+
+class StandaloneClusterReplSuite extends FunSuite with ReplSuiteMixin {
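+  // Spin up a local standalone master and worker once, shared by all tests in this suite.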
+  setupStandaloneCluster()
+
+  test("simple collect") {
+    val output = runInterpreter(sparkUrl, """
+      var x = 123
+      val data = sc.parallelize(1 to 3).map(_ + x)
+      data.take(3)
+      """)
+    assertDoesNotContain("error:", output)
+    assertDoesNotContain("Exception", output)
+    assertContains("124", output)
+    assertContains("125", output)
+    assertContains("126", output)
+  }
+
+  test("simple foreach with accumulator") {
+    val output = runInterpreter(sparkUrl, """
+      val accum = sc.accumulator(0)
+      sc.parallelize(1 to 10).foreach(x => accum += x)
+      accum.value
+      """)
+    assertDoesNotContain("error:", output)
+    assertDoesNotContain("Exception", output)
+    assertContains("res1: Int = 55", output)
+  }
+
+  test("external vars") {
+    val output = runInterpreter(sparkUrl, """
+      var v = 7
+      sc.parallelize(1 to 10).map(x => v).take(10).reduceLeft(_+_)
+      v = 10
+      sc.parallelize(1 to 10).map(x => v).take(10).reduceLeft(_+_)
+      """)
+    assertDoesNotContain("error:", output)
+    assertDoesNotContain("Exception", output)
+    assertContains("res0: Int = 70", output)
+    assertContains("res1: Int = 100", output)
+  }
+
+  test("external classes") {
+    val output = runInterpreter(sparkUrl, """
+      class C {
+        def foo = 5
+      }
+      sc.parallelize(1 to 10).map(x => (new C).foo).take(10).reduceLeft(_+_)
+      """)
+    assertDoesNotContain("error:", output)
+    assertDoesNotContain("Exception", output)
+    assertContains("res0: Int = 50", output)
+  }
+
+  test("external functions") {
+    val output = runInterpreter(sparkUrl, """
+      def double(x: Int) = x + x
+      sc.parallelize(1 to 10).map(x => double(x)).take(10).reduceLeft(_+_)
+      """)
+    assertDoesNotContain("error:", output)
+    assertDoesNotContain("Exception", output)
+    assertContains("res0: Int = 110", output)
+  }
+
+ test("external functions that access vars") {
+   val output = runInterpreter(sparkUrl, """
+     var v = 7
+     def getV() = v
+     sc.parallelize(1 to 10).map(x => getV()).take(10).reduceLeft(_+_)
+     v = 10
+     sc.parallelize(1 to 10).map(x => getV()).take(10).reduceLeft(_+_)
+     """)
+   assertDoesNotContain("error:", output)
+   assertDoesNotContain("Exception", output)
+   assertContains("res0: Int = 70", output)
+   assertContains("res1: Int = 100", output)
+ }
+
+  test("broadcast vars") {
+    // Test that the value that a broadcast var had when it was created is used,
+    // even if that variable is then modified in the driver program
+
+    val output = runInterpreter(sparkUrl, """
+      var array = new Array[Int](5)
+      val broadcastArray = sc.broadcast(array)
+      sc.parallelize(0 to 4).map(x => broadcastArray.value(x)).take(5)
+      array(0) = 5
+      sc.parallelize(0 to 4).map(x => broadcastArray.value(x)).take(5)
+      """)
+    assertDoesNotContain("error:", output)
+    assertDoesNotContain("Exception", output)
+    assertContains("res0: Array[Int] = Array(0, 0, 0, 0, 0)", output)
+    assertContains("res2: Array[Int] = Array(5, 0, 0, 0, 0)", output)
+  }
+
+}
diff --git a/run b/run
index eb93db66db310aeaaa76826befb4aa5c19954bb7..16fb0efc6e887220dac532d3a25eccb5e5a9bf1a 100755
--- a/run
+++ b/run
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-SCALA_VERSION=2.9.2
+SCALA_VERSION=2.10
 
 # Figure out where the Scala framework is installed
 FWDIR="$(cd `dirname $0`; pwd)"