Skip to content
Snippets Groups Projects
Commit cffe1c1d authored by Patrick Wendell's avatar Patrick Wendell
Browse files

SPARK-1008: Logging improvements

1. Adds a default log4j file that gets loaded if users haven't specified a log4j file.
2. Isolates use of the tools assembly jar. I found this produced SLF4J warnings
   after building with SBT (and I've seen similar warnings on the mailing list).
parent 72a17b69
No related branches found
No related tags found
No related merge requests found
# Spark's bundled default log4j configuration, loaded by Logging.initLogging()
# only when the user has not supplied their own log4j configuration.
# Set everything to be logged to the console
log4j.rootCategory=INFO, console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.layout=org.apache.log4j.PatternLayout
# Pattern: date, level, last logger-name component, message, newline
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
# Ignore messages below warning level from Jetty, because it's a bit verbose
log4j.logger.org.eclipse.jetty=WARN
...@@ -17,8 +17,8 @@ ...@@ -17,8 +17,8 @@
package org.apache.spark package org.apache.spark
import org.slf4j.Logger import org.apache.log4j.{LogManager, PropertyConfigurator}
import org.slf4j.LoggerFactory import org.slf4j.{Logger, LoggerFactory}
/** /**
* Utility trait for classes that want to log data. Creates a SLF4J logger for the class and allows * Utility trait for classes that want to log data. Creates a SLF4J logger for the class and allows
...@@ -91,5 +91,17 @@ trait Logging { ...@@ -91,5 +91,17 @@ trait Logging {
// Method for ensuring that logging is initialized, to avoid having multiple // Method for ensuring that logging is initialized, to avoid having multiple
// threads do it concurrently (as SLF4J initialization is not thread safe). // threads do it concurrently (as SLF4J initialization is not thread safe).
// Ensures logging is initialized exactly once before use; callers invoke this
// early so that multiple threads do not race on SLF4J setup (SLF4J
// initialization is not thread safe).
protected def initLogging() {
  // Heuristic for "Log4j was never configured": the root logger has no
  // appenders attached. In that case, fall back to Spark's bundled defaults.
  if (!LogManager.getRootLogger.getAllAppenders.hasMoreElements) {
    val defaultLogProps = "org/apache/spark/default-log4j.properties"
    val resource = Option(this.getClass.getClassLoader.getResource(defaultLogProps))
    resource match {
      case Some(url) => PropertyConfigurator.configure(url)
      case None => System.err.println(s"Spark was unable to load $defaultLogProps")
    }
  }
  // Force the lazy logger to materialize now, from this single thread.
  log
}
} }
...@@ -60,6 +60,8 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter, ...@@ -60,6 +60,8 @@ class SparkILoop(in0: Option[BufferedReader], protected val out: JPrintWriter,
// NOTE(review): these lines come from a side-by-side diff render; each line's
// text appears twice (old | new). Restore single copies from the repository
// before compiling.
// Auxiliary constructors delegating to the primary constructor.
def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out, None) def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out, None)
def this() = this(None, new JPrintWriter(Console.out, true), None) def this() = this(None, new JPrintWriter(Console.out, true), None)
// New in this commit: eagerly initialize logging (loads the bundled default
// log4j properties if none configured) before the REPL produces any output.
initLogging()
// Mutable REPL wiring, assigned during startup — presumably by a process()
// style entry point outside this view; TODO confirm against the full file.
var in: InteractiveReader = _ // the input stream from which commands come var in: InteractiveReader = _ // the input stream from which commands come
var settings: Settings = _ var settings: Settings = _
var intp: SparkIMain = _ var intp: SparkIMain = _
......
...@@ -115,16 +115,18 @@ if [ ! -f "$FWDIR/RELEASE" ]; then ...@@ -115,16 +115,18 @@ if [ ! -f "$FWDIR/RELEASE" ]; then
fi fi
fi fi
# Locate the spark-tools assembly jar, but ONLY for the one entry point that
# needs it (JavaAPICompletenessChecker). Unconditionally adding the tools
# assembly to the classpath pulled in a second SLF4J binding and produced
# SLF4J warnings after SBT builds — hence the guard (SPARK-1008).
#
# NOTE(review): as rendered here this hunk had old and new diff columns
# concatenated per line; this is the reconstructed post-commit text.
if [ "$1" == "org.apache.spark.tools.JavaAPICompletenessChecker" ]; then
  TOOLS_DIR="$FWDIR"/tools
  SPARK_TOOLS_JAR=""
  # NOTE(review): `[ -e glob ]` errors if the glob matches more than one jar;
  # assumes at most one assembly jar is present — TODO confirm.
  if [ -e "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar ]; then
    # Use the JAR from the SBT build
    export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/scala-$SCALA_VERSION/*assembly*[0-9Tg].jar`
  fi
  if [ -e "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar ]; then
    # Use the JAR from the Maven build
    # TODO: this also needs to become an assembly!
    export SPARK_TOOLS_JAR=`ls "$TOOLS_DIR"/target/spark-tools*[0-9Tg].jar`
  fi
fi
# Compute classpath using external script # Compute classpath using external script
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment