Merge pull request #71 from aarondav/scdefaults

Spark shell exits if it cannot create SparkContext

Mainly, this occurs if you provide a malformed MASTER URL (one that doesn't match any
of our regexes). Previously, we would default to Mesos, fail, and then start the shell
anyway, so every subsequent Spark command would fail. Simply exiting seems clearer.
Matei Zaharia committed 2013-10-18 20:24:10 -07:00
commit 8d528af829
2 changed files with 14 additions and 8 deletions
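
As a rough illustration of the behavior change (the master string, app name, and object
name below are hypothetical, and the two-argument SparkContext constructor relies on the
default values of its remaining parameters), a mistyped master URL now fails fast while
the context is being constructed instead of being parsed as a Mesos URL:

// Hypothetical sketch: with this change, a master URL that matches none of the
// known patterns makes the SparkContext constructor throw a SparkException.
import org.apache.spark.{SparkContext, SparkException}

object BadMasterUrlExample {
  def main(args: Array[String]): Unit = {
    try {
      new SparkContext("locall[4]", "bad-master-example") // typo for "local[4]"
    } catch {
      case e: SparkException =>
        println("Could not create SparkContext: " + e.getMessage)
    }
  }
}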

core/src/main/scala/org/apache/spark/SparkContext.scala

@@ -217,21 +217,20 @@ class SparkContext(
         scheduler.initialize(backend)
         scheduler
 
-      case _ =>
-        if (MESOS_REGEX.findFirstIn(master).isEmpty) {
-          logWarning("Master %s does not match expected format, parsing as Mesos URL".format(master))
-        }
+      case MESOS_REGEX(mesosUrl) =>
         MesosNativeLibrary.load()
         val scheduler = new ClusterScheduler(this)
         val coarseGrained = System.getProperty("spark.mesos.coarse", "false").toBoolean
-        val masterWithoutProtocol = master.replaceFirst("^mesos://", "") // Strip initial mesos://
         val backend = if (coarseGrained) {
-          new CoarseMesosSchedulerBackend(scheduler, this, masterWithoutProtocol, appName)
+          new CoarseMesosSchedulerBackend(scheduler, this, mesosUrl, appName)
         } else {
-          new MesosSchedulerBackend(scheduler, this, masterWithoutProtocol, appName)
+          new MesosSchedulerBackend(scheduler, this, mesosUrl, appName)
         }
         scheduler.initialize(backend)
         scheduler
+
+      case _ =>
+        throw new SparkException("Could not parse Master URL: '" + master + "'")
     }
   }
   taskScheduler.start()
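
The rewritten dispatch above leans on Scala's regex extractor patterns: a Regex with a
capture group can appear directly in a match case, binding the group when the whole
string matches. A self-contained sketch of that mechanic (the regex and the exception
type below are assumed stand-ins, not SparkContext's exact definitions):

// Standalone sketch of extractor-based master-URL dispatch; only the matching
// mechanics mirror the diff above.
import scala.util.Try

object MasterUrlSketch {
  val MESOS_REGEX = """mesos://(.*)""".r

  def describe(master: String): String = master match {
    case MESOS_REGEX(mesosUrl) => "Mesos master at " + mesosUrl // group bound on a full match
    case _ => throw new IllegalArgumentException("Could not parse Master URL: '" + master + "'")
  }

  def main(args: Array[String]): Unit = {
    println(describe("mesos://host:5050"))      // Mesos master at host:5050
    println(Try(describe("bogus://host:5050"))) // Failure(java.lang.IllegalArgumentException: ...)
  }
}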

repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala

@@ -845,7 +845,14 @@ class SparkILoop(in0: Option[BufferedReader], val out: PrintWriter, val master:
     val jars = Option(System.getenv("ADD_JARS")).map(_.split(','))
       .getOrElse(new Array[String](0))
       .map(new java.io.File(_).getAbsolutePath)
-    sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+    try {
+      sparkContext = new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), jars)
+    } catch {
+      case e: Exception =>
+        e.printStackTrace()
+        echo("Failed to create SparkContext, exiting...")
+        sys.exit(1)
+    }
     sparkContext
   }
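
Outside of the REPL plumbing, the same fail-fast idea reduces to wrapping the fallible
constructor and terminating the process on failure. A minimal sketch, assuming exiting
is acceptable for the caller (echo is a SparkILoop helper, so plain stderr output is
used here instead, and the object name is made up):

// Illustrative only: build the context or exit, rather than continuing with a
// shell whose every command would fail against a missing SparkContext.
import org.apache.spark.SparkContext

object FailFastInit {
  def createContextOrExit(master: String): SparkContext = {
    try {
      new SparkContext(master, "Spark shell", System.getenv("SPARK_HOME"), Nil)
    } catch {
      case e: Exception =>
        e.printStackTrace()
        Console.err.println("Failed to create SparkContext, exiting...")
        sys.exit(1) // returns Nothing, so the result type is still SparkContext
    }
  }
}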