From b49d1be65b21b486f2c52e6d9aa023a5a5b0c6e9 Mon Sep 17 00:00:00 2001
From: Matei Zaharia <matei@eecs.berkeley.edu>
Date: Tue, 31 May 2011 23:54:48 -0700
Subject: [PATCH] Ensure logging is initialized before any Spark threads run in the REPL

---
 repl/src/main/scala/spark/repl/SparkILoop.scala | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/repl/src/main/scala/spark/repl/SparkILoop.scala b/repl/src/main/scala/spark/repl/SparkILoop.scala
index aec398b965..c0558f4ec4 100644
--- a/repl/src/main/scala/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/spark/repl/SparkILoop.scala
@@ -22,6 +22,7 @@ import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
 import interpreter._
 import io.{ File, Sources }
 
+import spark.Logging
 import spark.SparkContext
 
 /** The Scala interactive shell. It provides a read-eval-print loop
@@ -39,6 +40,7 @@ import spark.SparkContext
 class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master: Option[String])
     extends AnyRef
        with LoopCommands
+       with Logging
 {
   def this(in0: BufferedReader, out: PrintWriter, master: String) = this(Some(in0), out, Some(master))
   def this(in0: BufferedReader, out: PrintWriter) = this(Some(in0), out, None)
@@ -842,6 +844,10 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
   }
 
   def process(settings: Settings): Boolean = {
+    // Ensure logging is initialized before any Spark threads try to use logs
+    // (because SLF4J initialization is not thread safe)
+    initLogging()
+
     printWelcome()
     echo("Initializing interpreter...")
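
The fix works by forcing logger creation on the REPL's main thread at the top of process(), before anything that could start Spark threads, so SLF4J's one-time initialization never runs concurrently. The trait below is a minimal sketch of what a Logging mixin with an initLogging() hook might look like; the patch does not show spark.Logging itself, so the field names and method bodies here are illustrative assumptions (and they assume slf4j-api is on the classpath).

    import org.slf4j.{Logger, LoggerFactory}

    // Hypothetical sketch only; not the actual spark.Logging source.
    trait Logging {
      // Logger is created lazily on first use; that first use triggers
      // SLF4J's one-time binding lookup and initialization.
      @transient private var log_ : Logger = null

      protected def log: Logger = {
        if (log_ == null) {
          log_ = LoggerFactory.getLogger(this.getClass.getName)
        }
        log_
      }

      // Touch the logger eagerly so SLF4J initializes on the calling thread,
      // before any background threads can race on the same initialization.
      protected def initLogging(): Unit = { log }
    }

Under this assumption, calling initLogging() first in process() guarantees the racy one-time setup happens exactly once, on a single thread, which is why the patch inserts it ahead of printWelcome() and the interpreter start-up.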