diff --git a/repl/src/main/scala/spark/repl/SparkILoop.scala b/repl/src/main/scala/spark/repl/SparkILoop.scala
index 904a72244f1217e8714f3a8b2bab5526e3480625..28a7c161f67c93876c48706202dac5453ca6b209 100644
--- a/repl/src/main/scala/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoop.scala
@@ -885,6 +885,8 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
       val autorun = replProps.replAutorunCode.option flatMap (f => io.File(f).safeSlurp())
       if (autorun.isDefined) intp.quietRun(autorun.get)
     })
+
+    addThunk(printWelcome())
     addThunk(initializeSpark())
 
     loadFiles(settings)
@@ -905,7 +907,7 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
       intp.initializeSynchronous()
       postInitialization()
     }
-    printWelcome()
+    // printWelcome()
 
     try loop()
     catch AbstractOrMissingHandler()
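
The SparkILoop change above relies on the REPL's init-thunk queue running queued bodies in registration order once the interpreter is up, so queuing printWelcome() ahead of initializeSpark() makes the banner appear before the "Spark context available as sc." line, and the direct printWelcome() call later in process() becomes redundant. Below is a minimal, self-contained sketch of that ordering under a simplified queue; InitQueue, addThunk and runThunks are illustrative stand-ins, not the actual scala.tools.nsc interpreter internals.

    // Minimal sketch of an init-thunk queue; names are illustrative, not the
    // real scala.tools.nsc.interpreter API.
    import scala.collection.mutable.ListBuffer

    object InitQueue {
      private val thunks = ListBuffer.empty[() => Unit]

      // Takes the body by name and only queues it; nothing runs yet.
      def addThunk(body: => Unit): Unit = thunks += (() => body)

      // Runs the queued bodies in registration order (postInitialization
      // plays this role in the real REPL).
      def runThunks(): Unit = {
        thunks.foreach(f => f())
        thunks.clear()
      }
    }

    object ThunkOrderDemo {
      def main(args: Array[String]): Unit = {
        InitQueue.addThunk(println("Welcome to Spark!"))               // printWelcome()
        InitQueue.addThunk(println("Spark context available as sc."))  // initializeSpark()
        InitQueue.runThunks()  // banner prints before the sc message
      }
    }
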
diff --git a/repl/src/main/scala/spark/repl/SparkILoopInit.scala b/repl/src/main/scala/spark/repl/SparkILoopInit.scala
index c3d7f45dc9aa34e0b39b990e70b6242cb098fe0d..6ae535c4e6f2486d461a0fc9e1e8603838ea2756 100644
--- a/repl/src/main/scala/spark/repl/SparkILoopInit.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoopInit.scala
@@ -117,11 +117,11 @@ trait SparkILoopInit {
  def initializeSpark() {
     intp.beQuietDuring {
       command("""
-         @transient lazy val sc = spark.repl.Main.interp.createSparkContext();
+         @transient val sc = spark.repl.Main.interp.createSparkContext();
         """)
       command("import spark.SparkContext._");
     }
-   echo("... Spark context available as sc.")
+   echo("Spark context available as sc.")
   }
 
   // code to be executed only after the interpreter is initialized
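
The SparkILoopInit change swaps @transient lazy val sc for a strict val, so the SparkContext is created as soon as the quoted line is interpreted inside initializeSpark(), rather than on the first use of sc mid-session; the echo text is shortened to match. A minimal sketch of that timing difference follows, with createContext standing in for spark.repl.Main.interp.createSparkContext().

    // Minimal sketch of the lazy-vs-strict val timing difference; createContext
    // is a hypothetical stand-in for spark.repl.Main.interp.createSparkContext().
    object LazyVsStrictDemo {
      def createContext(tag: String): String = {
        println(s"[$tag] creating SparkContext now")
        "sc"
      }

      def main(args: Array[String]): Unit = {
        lazy val scLazy = createContext("lazy")    // nothing happens yet
        val scStrict    = createContext("strict")  // context created right here

        println("-- REPL is now interactive --")
        println(scLazy)    // lazy: context only created on this first reference
        println(scStrict)
      }
    }
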