diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index 0aafc0a2fc7d072a3d1451eb966a78a60940da09..3ed9caa242b841d57a9c7d964d2154ddefe4f520 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -217,21 +217,20 @@ class SparkContext(
         scheduler.initialize(backend)
         scheduler
 
-      case _ =>
-        if (MESOS_REGEX.findFirstIn(master).isEmpty) {
-          logWarning("Master %s does not match expected format, parsing as Mesos URL".format(master))
-        }
+      case MESOS_REGEX(mesosUrl) =>
         MesosNativeLibrary.load()
         val scheduler = new ClusterScheduler(this)
         val coarseGrained = System.getProperty("spark.mesos.coarse", "false").toBoolean
-        val masterWithoutProtocol = master.replaceFirst("^mesos://", "")  // Strip initial mesos://
         val backend = if (coarseGrained) {
-          new CoarseMesosSchedulerBackend(scheduler, this, masterWithoutProtocol, appName)
+          new CoarseMesosSchedulerBackend(scheduler, this, mesosUrl, appName)
         } else {
-          new MesosSchedulerBackend(scheduler, this, masterWithoutProtocol, appName)
+          new MesosSchedulerBackend(scheduler, this, mesosUrl, appName)
         }
         scheduler.initialize(backend)
         scheduler
+
+      case _ =>
+        throw new SparkException("Could not parse Master URL: '" + master + "'")
     }
   }
   taskScheduler.start()
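
The hunk above replaces the old fall-through behavior, where any unrecognized master string was parsed as a Mesos URL with only a warning, with an explicit MESOS_REGEX(mesosUrl) extractor case and a catch-all that fails hard with a SparkException. Below is a minimal, self-contained Scala sketch of the same extractor-plus-fail-fast pattern; the regex shown is a hypothetical stand-in, since the real MESOS_REGEX definition lives elsewhere in SparkContext.scala and is not part of this hunk.

object MasterUrlDemo {
  // Hypothetical extractor regex: captures everything after "mesos://".
  // The actual MESOS_REGEX in SparkContext.scala may differ.
  private val MESOS_REGEX = """mesos://(.*)""".r

  def parse(master: String): String = master match {
    // A scala.util.matching.Regex can be used directly as an extractor in a
    // match; the capture group is bound to mesosUrl on a full-string match.
    case MESOS_REGEX(mesosUrl) => "mesos master at " + mesosUrl
    case _ =>
      // Unrecognized URLs now fail fast instead of being treated as Mesos.
      throw new IllegalArgumentException("Could not parse Master URL: '" + master + "'")
  }

  def main(args: Array[String]): Unit = {
    println(parse("mesos://host:5050"))  // prints: mesos master at host:5050
    println(parse("not-a-master"))       // throws IllegalArgumentException
  }
}
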
diff --git a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
index 36f54a22cf30449d421364ab74d8dcb2db459783..48a8fa93288cfa3269a9da574f5ff03c32a68582 100644
--- a/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
+++ b/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala
@@ -845,7 +845,14 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
     val jars = Option(System.getenv("ADD_JARS")).map(_.split(','))
                                                 .getOrElse(new Array[String](0))
                                                 .map(new java.io.File(_).getAbsolutePath)
-    sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+    try {
+      sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+    } catch {
+      case e: Exception =>
+        e.printStackTrace()
+        echo("Failed to create SparkContext, exiting...")
+        sys.exit(1)
+    }
     sparkContext
   }
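
The SparkILoop change applies the same fail-fast idea to the shell: if SparkContext construction throws (for example, on a master URL that no longer parses), the REPL prints the stack trace and exits instead of starting with an unusable context. Below is a minimal sketch of that try/catch-and-exit pattern; the names are illustrative and not taken from SparkILoop.

object FailFastShellDemo {
  // Stand-in for SparkContext construction: succeeds for recognizable
  // masters, throws otherwise. Purely illustrative.
  def createContext(master: String): String = {
    if (master.startsWith("local") || master.startsWith("spark://")) "context(" + master + ")"
    else throw new IllegalArgumentException("Could not parse Master URL: '" + master + "'")
  }

  def main(args: Array[String]): Unit = {
    val master = args.headOption.getOrElse("local")
    val context =
      try {
        createContext(master)
      } catch {
        case e: Exception =>
          // Report the failure and exit rather than continue without a
          // context, mirroring the behavior added to createSparkContext.
          e.printStackTrace()
          Console.err.println("Failed to create context, exiting...")
          sys.exit(1)
      }
    println("Created " + context)
  }
}
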