SPARK-1375. Additional spark-submit cleanup
Author: Sandy Ryza <sandy@cloudera.com> Closes #278 from sryza/sandy-spark-1375 and squashes the following commits: 5fbf1e9 [Sandy Ryza] SPARK-1375. Additional spark-submit cleanup
This commit is contained in:
parent
f1fa617023
commit
16b8308887
|
@ -79,20 +79,23 @@ object SparkSubmit {
|
|||
printErrorAndExit("master must start with yarn, mesos, spark, or local")
|
||||
}
|
||||
|
||||
// Because "yarn-standalone" and "yarn-client" encapsulate both the master
|
||||
// Because "yarn-cluster" and "yarn-client" encapsulate both the master
|
||||
// and deploy mode, we have some logic to infer the master and deploy mode
|
||||
// from each other if only one is specified, or exit early if they are at odds.
|
||||
if (appArgs.deployMode == null && appArgs.master == "yarn-standalone") {
|
||||
if (appArgs.deployMode == null &&
|
||||
(appArgs.master == "yarn-standalone" || appArgs.master == "yarn-cluster")) {
|
||||
appArgs.deployMode = "cluster"
|
||||
}
|
||||
if (appArgs.deployMode == "cluster" && appArgs.master == "yarn-client") {
|
||||
printErrorAndExit("Deploy mode \"cluster\" and master \"yarn-client\" are not compatible")
|
||||
}
|
||||
if (appArgs.deployMode == "client" && appArgs.master == "yarn-standalone") {
|
||||
printErrorAndExit("Deploy mode \"client\" and master \"yarn-standalone\" are not compatible")
|
||||
if (appArgs.deployMode == "client" &&
|
||||
(appArgs.master == "yarn-standalone" || appArgs.master == "yarn-cluster")) {
|
||||
printErrorAndExit("Deploy mode \"client\" and master \"" + appArgs.master
|
||||
+ "\" are not compatible")
|
||||
}
|
||||
if (appArgs.deployMode == "cluster" && appArgs.master.startsWith("yarn")) {
|
||||
appArgs.master = "yarn-standalone"
|
||||
appArgs.master = "yarn-cluster"
|
||||
}
|
||||
if (appArgs.deployMode != "cluster" && appArgs.master.startsWith("yarn")) {
|
||||
appArgs.master = "yarn-client"
|
||||
|
|
|
@ -171,7 +171,7 @@ private[spark] class SparkSubmitArguments(args: Array[String]) {
|
|||
outStream.println("Unknown/unsupported param " + unknownParam)
|
||||
}
|
||||
outStream.println(
|
||||
"""Usage: spark-submit <primary binary> [options]
|
||||
"""Usage: spark-submit <app jar> [options]
|
||||
|Options:
|
||||
| --master MASTER_URL spark://host:port, mesos://host:port, yarn, or local.
|
||||
| --deploy-mode DEPLOY_MODE Mode to deploy the app in, either 'client' or 'cluster'.
|
||||
|
|
|
@ -56,7 +56,7 @@ The recommended way to launch a compiled Spark application is through the spark-
|
|||
bin directory), which takes care of setting up the classpath with Spark and its dependencies, as well as
|
||||
provides a layer over the different cluster managers and deploy modes that Spark supports. Its usage is
|
||||
|
||||
spark-submit `<jar>` `<options>`
|
||||
spark-submit `<app jar>` `<options>`
|
||||
|
||||
Where options are any of:
|
||||
|
||||
|
|
Loading…
Reference in a new issue