[SPARK-3303][core] fix SparkContextSchedulerCreationSuite test error
Running the test on the master branch with this command (when the mesos native lib is set): sbt/sbt -Phive "test-only org.apache.spark.SparkContextSchedulerCreationSuite" produces this error: [info] SparkContextSchedulerCreationSuite: [info] - bad-master [info] - local [info] - local-* [info] - local-n [info] - local--n-failures [info] - local-n-failures [info] - bad-local-n [info] - bad-local-n-failures [info] - local-default-parallelism [info] - simr [info] - local-cluster [info] - yarn-cluster [info] - yarn-standalone [info] - yarn-client [info] - mesos fine-grained [info] - mesos coarse-grained ** FAILED *** [info] Executor Spark home `spark.mesos.executor.home` is not set! Since `executorSparkHome` is only used in `createCommand`, move `val executorSparkHome...` into `createCommand` to fix this issue. Author: scwf <wangfei1@huawei.com> Author: wangfei <wangfei_hello@126.com> Closes #2199 from scwf/SparkContextSchedulerCreationSuite and squashes the following commits: ef1de22 [scwf] fix code fomate 19d26f3 [scwf] fix conflict d9a8a60 [wangfei] fix SparkContextSchedulerCreationSuite test error
This commit is contained in:
parent
a522407928
commit
e08ea7393d
|
@ -71,11 +71,6 @@ private[spark] class CoarseMesosSchedulerBackend(
|
|||
val taskIdToSlaveId = new HashMap[Int, String]
|
||||
val failuresBySlaveId = new HashMap[String, Int] // How many times tasks on each slave failed
|
||||
|
||||
val executorSparkHome = conf.getOption("spark.mesos.executor.home")
|
||||
.orElse(sc.getSparkHome())
|
||||
.getOrElse {
|
||||
throw new SparkException("Executor Spark home `spark.mesos.executor.home` is not set!")
|
||||
}
|
||||
|
||||
val extraCoresPerSlave = conf.getInt("spark.mesos.extra.cores", 0)
|
||||
|
||||
|
@ -112,6 +107,11 @@ private[spark] class CoarseMesosSchedulerBackend(
|
|||
}
|
||||
|
||||
def createCommand(offer: Offer, numCores: Int): CommandInfo = {
|
||||
val executorSparkHome = conf.getOption("spark.mesos.executor.home")
|
||||
.orElse(sc.getSparkHome())
|
||||
.getOrElse {
|
||||
throw new SparkException("Executor Spark home `spark.mesos.executor.home` is not set!")
|
||||
}
|
||||
val environment = Environment.newBuilder()
|
||||
val extraClassPath = conf.getOption("spark.executor.extraClassPath")
|
||||
extraClassPath.foreach { cp =>
|
||||
|
|
Loading…
Reference in a new issue