Commit 74737264 authored by Aaron Davidson

Spark shell exits if it cannot create SparkContext

Mainly, this occurs if you provide a malformed MASTER URL (one that doesn't match
any of our regexes). Previously, we would default to Mesos, fail, and then start the
shell anyway, except that every Spark command would fail.
parent fc26e5b8
@@ -217,21 +217,20 @@ class SparkContext(
         scheduler.initialize(backend)
         scheduler
 
-      case _ =>
-        if (MESOS_REGEX.findFirstIn(master).isEmpty) {
-          logWarning("Master %s does not match expected format, parsing as Mesos URL".format(master))
-        }
+      case MESOS_REGEX(mesosUrl) =>
         MesosNativeLibrary.load()
         val scheduler = new ClusterScheduler(this)
         val coarseGrained = System.getProperty("spark.mesos.coarse", "false").toBoolean
-        val masterWithoutProtocol = master.replaceFirst("^mesos://", "") // Strip initial mesos://
         val backend = if (coarseGrained) {
-          new CoarseMesosSchedulerBackend(scheduler, this, masterWithoutProtocol, appName)
+          new CoarseMesosSchedulerBackend(scheduler, this, mesosUrl, appName)
         } else {
-          new MesosSchedulerBackend(scheduler, this, masterWithoutProtocol, appName)
+          new MesosSchedulerBackend(scheduler, this, mesosUrl, appName)
         }
         scheduler.initialize(backend)
         scheduler
+
+      case _ =>
+        throw new SparkException("Could not parse Master URL: '" + master + "'")
     }
   }
   taskScheduler.start()
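The new case MESOS_REGEX(mesosUrl) arm relies on Scala regex extractors: a Regex with a
capture group can be used directly as a pattern, binding the group on a full match and
falling through otherwise. That is what turns the trailing case _ into a true
"unrecognized URL" branch rather than a Mesos fallback. A minimal, self-contained sketch
of this dispatch style (the regexes and names below are simplified stand-ins, not Spark's
actual definitions):

object MasterUrlSketch {
  // Stand-in regexes; Spark's real ones live in SparkContext.
  val SPARK_REGEX = """spark://(.*)""".r
  val MESOS_REGEX = """mesos://(.*)""".r

  def describe(master: String): String = master match {
    case SPARK_REGEX(sparkUrl) => "standalone cluster at " + sparkUrl
    case MESOS_REGEX(mesosUrl) => "Mesos cluster at " + mesosUrl
    case _ =>
      // Fail fast, as the commit now does, instead of assuming Mesos.
      throw new IllegalArgumentException("Could not parse Master URL: '" + master + "'")
  }

  def main(args: Array[String]): Unit = {
    println(describe("spark://host:7077")) // standalone cluster at host:7077
    println(describe("mesos://host:5050")) // Mesos cluster at host:5050
    describe("host:5050")                  // throws: Could not parse Master URL
  }
}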
@@ -845,7 +845,14 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
     val jars = Option(System.getenv("ADD_JARS")).map(_.split(','))
       .getOrElse(new Array[String](0))
       .map(new java.io.File(_).getAbsolutePath)
-    sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+    try {
+      sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+    } catch {
+      case e: Exception =>
+        e.printStackTrace()
+        echo("Failed to create SparkContext, exiting...")
+        sys.exit(1)
+    }
     sparkContext
   }
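The REPL-side change applies the matching fail-fast policy: if SparkContext construction
throws (now including the SparkException for an unparseable master URL), print the stack
trace and exit instead of starting a shell whose every Spark command would fail. The shape
of that wrapper as a self-contained sketch (initOrExit is a hypothetical helper; the
actual commit inlines the try/catch in SparkILoop):

object FailFastSketch {
  // Hypothetical helper illustrating the try/catch-then-exit pattern above.
  def initOrExit[T](what: String)(create: => T): T =
    try {
      create
    } catch {
      case e: Exception =>
        e.printStackTrace()
        Console.err.println("Failed to create " + what + ", exiting...")
        sys.exit(1) // returns Nothing, so the catch arm type-checks as T
    }

  def main(args: Array[String]): Unit = {
    // Succeeds: returns the constructed value.
    val ok = initOrExit("greeting") { "hello" }
    println(ok)
    // Fails: prints the trace and the message, then exits with status 1.
    initOrExit("SparkContext") { throw new RuntimeException("bad master URL") }
  }
}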