From 2c0e15e1d0c774d57a801038de4fc826702a7c5c Mon Sep 17 00:00:00 2001 From: ulysses Date: Wed, 1 Apr 2020 11:46:41 -0700 Subject: [PATCH] [SPARK-31285][CORE] uppercase schedule mode string at config ### What changes were proposed in this pull request? In `TaskSchedulerImpl`, Spark uppercases the schedule mode via `SchedulingMode.withName(schedulingModeConf.toUpperCase(Locale.ROOT))`. But in other places, Spark does not, such as [AllJobsPage](https://github.com/apache/spark/blob/5945d46c11a86fd85f9e65f24c2e88f368eee01f/core/src/main/scala/org/apache/spark/ui/jobs/AllJobsPage.scala#L304). We should have the same behavior and uppercase the schedule mode string at config. ### Why are the changes needed? Before this PR, it is logically OK to set `spark.scheduler.mode=fair`, but Spark will log a warning: ``` java.util.NoSuchElementException: No value found for 'fair' at scala.Enumeration.withName(Enumeration.scala:124) at org.apache.spark.ui.jobs.AllJobsPage$$anonfun$22.apply(AllJobsPage.scala:314) at org.apache.spark.ui.jobs.AllJobsPage$$anonfun$22.apply(AllJobsPage.scala:314) at scala.Option.map(Option.scala:146) at org.apache.spark.ui.jobs.AllJobsPage.render(AllJobsPage.scala:314) at org.apache.spark.ui.WebUI$$anonfun$2.apply(WebUI.scala:90) at org.apache.spark.ui.WebUI$$anonfun$2.apply(WebUI.scala:90) at org.apache.spark.ui.JettyUtils$$anon$3.doGet(JettyUtils.scala:90) ``` ### Does this PR introduce any user-facing change? Almost no. ### How was this patch tested? Existing tests. Closes #28049 from ulysses-you/SPARK-31285. 
Authored-by: ulysses Signed-off-by: Dongjoon Hyun --- .../main/scala/org/apache/spark/internal/config/package.scala | 2 ++ .../scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala b/core/src/main/scala/org/apache/spark/internal/config/package.scala index f70ee2e5c7..8f8b6ad1e5 100644 --- a/core/src/main/scala/org/apache/spark/internal/config/package.scala +++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala @@ -17,6 +17,7 @@ package org.apache.spark.internal +import java.util.Locale import java.util.concurrent.TimeUnit import org.apache.spark.launcher.SparkLauncher @@ -1756,6 +1757,7 @@ package object config { ConfigBuilder("spark.scheduler.mode") .version("0.8.0") .stringConf + .transform(_.toUpperCase(Locale.ROOT)) .createWithDefault(SchedulingMode.FIFO.toString) private[spark] val SCHEDULER_REVIVE_INTERVAL = diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala index f0f84fe63d..718c571952 100644 --- a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala +++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala @@ -141,7 +141,7 @@ private[spark] class TaskSchedulerImpl( private val schedulingModeConf = conf.get(SCHEDULER_MODE) val schedulingMode: SchedulingMode = try { - SchedulingMode.withName(schedulingModeConf.toUpperCase(Locale.ROOT)) + SchedulingMode.withName(schedulingModeConf) } catch { case e: java.util.NoSuchElementException => throw new SparkException(s"Unrecognized $SCHEDULER_MODE_PROPERTY: $schedulingModeConf")