Allow users to set the application name for Spark on Yarn
This commit is contained in:
parent
714fdabd99
commit
bc3b20abdc
|
@@ -50,6 +50,7 @@ The command to launch the YARN Client is as follows:
|
|||
--master-memory <MEMORY_FOR_MASTER> \
|
||||
--worker-memory <MEMORY_PER_WORKER> \
|
||||
--worker-cores <CORES_PER_WORKER> \
|
||||
--name <application_name> \
|
||||
--queue <queue_name>
|
||||
|
||||
For example:
|
||||
|
|
|
@@ -106,7 +106,7 @@ class Client(conf: Configuration, args: ClientArguments) extends YarnClientImpl
|
|||
logInfo("Setting up application submission context for ASM")
|
||||
val appContext = Records.newRecord(classOf[ApplicationSubmissionContext])
|
||||
appContext.setApplicationId(appId)
|
||||
appContext.setApplicationName("Spark")
|
||||
appContext.setApplicationName(args.appName)
|
||||
return appContext
|
||||
}
|
||||
|
||||
|
|
|
@@ -32,6 +32,7 @@ class ClientArguments(val args: Array[String]) {
|
|||
var numWorkers = 2
|
||||
var amQueue = System.getProperty("QUEUE", "default")
|
||||
var amMemory: Int = 512
|
||||
var appName: String = "Spark"
|
||||
// TODO
|
||||
var inputFormatInfo: List[InputFormatInfo] = null
|
||||
|
||||
|
@@ -78,6 +79,10 @@ class ClientArguments(val args: Array[String]) {
|
|||
amQueue = value
|
||||
args = tail
|
||||
|
||||
case ("--name") :: value :: tail =>
|
||||
appName = value
|
||||
args = tail
|
||||
|
||||
case Nil =>
|
||||
if (userJar == null || userClass == null) {
|
||||
printUsageAndExit(1)
|
||||
|
@@ -108,6 +113,7 @@ class ClientArguments(val args: Array[String]) {
|
|||
" --worker-cores NUM Number of cores for the workers (Default: 1). This is unsused right now.\n" +
|
||||
" --master-memory MEM Memory for Master (e.g. 1000M, 2G) (Default: 512 Mb)\n" +
|
||||
" --worker-memory MEM Memory per Worker (e.g. 1000M, 2G) (Default: 1G)\n" +
|
||||
" --name NAME The name of your application\n" +
|
||||
" --queue QUEUE The hadoop queue to use for allocation requests (Default: 'default')"
|
||||
)
|
||||
System.exit(exitCode)
|
||||
|
|
Loading…
Reference in a new issue