diff --git a/core/src/main/resources/org/apache/spark/default-log4j.properties b/core/src/main/resources/org/apache/spark/default-log4j.properties
new file mode 100644
index 0000000000000000000000000000000000000000..d72dbadc3904f327effddf99594045067be2f529
--- /dev/null
+++ b/core/src/main/resources/org/apache/spark/default-log4j.properties
@@ -0,0 +1,8 @@
+# Set everything to be logged to the console
+log4j.rootCategory=INFO, console
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
+
+# Ignore messages below warning level from Jetty, because it's a bit verbose
+log4j.logger.org.eclipse.jetty=WARN
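
For reference, the `ConversionPattern` above prints the timestamp, the level (`%p`), the last component of the logger name (`%c{1}`), and the message, so a typical line would look like this (illustrative output):

```
13/12/01 14:05:32 INFO SparkContext: Starting job: count at <console>:15
```

This file is only a fallback: if a user's own `log4j.properties` is on the classpath, log4j's automatic configuration picks it up as usual and the bundled defaults are never loaded (see the `Logging` change below).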
diff --git a/core/src/main/scala/org/apache/spark/Logging.scala b/core/src/main/scala/org/apache/spark/Logging.scala
index 6a973ea4951c3a389a202f6c5dfd77f355061648..a8892737fad15b3544accfacb7e109ebc0c614f4 100644
--- a/core/src/main/scala/org/apache/spark/Logging.scala
+++ b/core/src/main/scala/org/apache/spark/Logging.scala
@@ -17,8 +17,8 @@
 
 package org.apache.spark
 
-import org.slf4j.Logger
-import org.slf4j.LoggerFactory
+import org.apache.log4j.{LogManager, PropertyConfigurator}
+import org.slf4j.{Logger, LoggerFactory}
 
 /**
  * Utility trait for classes that want to log data. Creates a SLF4J logger for the class and allows
@@ -91,5 +91,17 @@ trait Logging {
 
   // Method for ensuring that logging is initialized, to avoid having multiple
   // threads do it concurrently (as SLF4J initialization is not thread safe).
-  protected def initLogging() { log }
+  protected def initLogging() {
+    // If Log4j doesn't seem initialized, load a default properties file
+    def log4jInitialized = LogManager.getRootLogger.getAllAppenders.hasMoreElements
+    if (!log4jInitialized) {
+      val defaultLogProps = "org/apache/spark/default-log4j.properties"
+      val classLoader = this.getClass.getClassLoader
+      Option(classLoader.getResource(defaultLogProps)) match {
+        case Some(url) => PropertyConfigurator.configure(url)
+        case None => System.err.println(s"Spark was unable to load $defaultLogProps")
+      }
+    }
+    log
+  }
 }
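
The guard above relies on log4j's automatic configuration: merely touching `LogManager` triggers it, and if no `log4j.properties` or `log4j.xml` was found, the root logger is left with no appenders, which is exactly what `getAllAppenders.hasMoreElements` detects. The trailing `log` reference then forces the SLF4J logger to be created eagerly, which was already the purpose of `initLogging()`. A minimal standalone sketch of the same idiom (the object and method names here are illustrative, not part of the patch):

```scala
import org.apache.log4j.{LogManager, PropertyConfigurator}

object LogDefaults {
  /** Load `resource` into log4j unless some configuration already took effect. */
  def configureIfUnset(resource: String) {
    // Touching LogManager runs log4j's automatic configuration; a root logger
    // with no appenders means no log4j.properties/log4j.xml was found.
    val alreadyConfigured = LogManager.getRootLogger.getAllAppenders.hasMoreElements
    if (!alreadyConfigured) {
      Option(getClass.getClassLoader.getResource(resource)) match {
        case Some(url) => PropertyConfigurator.configure(url)
        case None => System.err.println(s"Unable to load $resource")
      }
    }
  }
}
```

Note that configuring log4j directly only has an effect because SLF4J is bound to log4j here (Spark ships the `slf4j-log4j12` binding at this point in its history).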
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 523fd1222dd71dff1770f1488c21ceb73617b9f6..b2a181545938c2265b7196046441743950996fb0 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -60,6 +60,8 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
   def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out, None)
   def this() = this(None, new JPrintWriter(Console.out, true), None)
 
+  initLogging()
+
   var in: InteractiveReader = _   // the input stream from which commands come
   var settings: Settings = _
   var intp: SparkIMain = _
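
Running `initLogging()` in the REPL's constructor (available through the `Logging` trait that `SparkILoop` mixes in) installs the fallback configuration before the shell, or the `SparkContext` it creates, logs anything; without it, log4j would print its "No appenders could be found" warning and drop those messages. Any other entry point can do the same; a hypothetical sketch:

```scala
// Hypothetical driver class, not part of the patch: mixing in Logging and
// calling initLogging() early forces the fallback configuration to load
// before the first log message is emitted.
class MyDriver extends Logging {
  initLogging()

  def run() {
    logInfo("driver starting")  // formatted per default-log4j.properties
  }
}
```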
diff --git a/spark-class b/spark-class
index 802e4aa1045e483fc96f061b44640d53f781df9b..1c4323ee53200b483bf0efe22f029503ee1ba81d 100755
--- a/spark-class
+++ b/spark-class
@@ -115,16 +115,18 @@ if [ ! -f "$FWDIR/RELEASE" ]; then
   fi
 fi
 
-TOOLS_DIR="$FWDIR"/tools
-SPARK_TOOLS_JAR=""
-if [ -e "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar ]; then
-  # Use the JAR from the SBT build
-  export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar`
-fi
-if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
-  # Use the JAR from the Maven build
-  # TODO: this also needs to become an assembly!
-  export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar`
+if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
+  TOOLS_DIR="$FWDIR"/tools
+  SPARK_TOOLS_JAR=""
+  if [ -e "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar ]; then
+    # Use the JAR from the SBT build
+    export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar`
+  fi
+  if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
+    # Use the JAR from the Maven build
+    # TODO: this also needs to become an assembly!
+    export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar`
+  fi
 fi
 
 # Compute classpath using external script
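
The tools jar, in other words, is now located and exported via `SPARK_TOOLS_JAR` only when `spark-class` is launching the API completeness checker, rather than on every invocation, so ordinary commands no longer pick up the tools assembly on the classpath that the external script computes. The guarded branch still fires for the one caller that needs it:

```
./spark-class org.apache.spark.tools.JavaAPICompletenessChecker
```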