Posted to commits@spark.apache.org by do...@apache.org on 2020/04/01 18:47:23 UTC

[spark] branch master updated: [SPARK-31285][CORE] uppercase schedule mode string at config

This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 2c0e15e  [SPARK-31285][CORE] uppercase schedule mode string at config
2c0e15e is described below

commit 2c0e15e1d0c774d57a801038de4fc826702a7c5c
Author: ulysses <yo...@weidian.com>
AuthorDate: Wed Apr 1 11:46:41 2020 -0700

    [SPARK-31285][CORE] uppercase schedule mode string at config
    
    ### What changes were proposed in this pull request?
    
    In `TaskSchedulerImpl`, Spark uppercases the schedule mode string with `SchedulingMode.withName(schedulingModeConf.toUpperCase(Locale.ROOT))`.
    But other places, such as [AllJobsPage](https://github.com/apache/spark/blob/5945d46c11a86fd85f9e65f24c2e88f368eee01f/core/src/main/scala/org/apache/spark/ui/jobs/AllJobsPage.scala#L304), do not.
    We should have the same behavior everywhere and uppercase the schedule mode string at the config level.
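    
    For illustration, here is a minimal, self-contained sketch (not the real Spark `ConfigBuilder`; the `ConfEntry` type and names below are made up for this example) of normalizing a config value to uppercase at the config layer, so every consumer sees the canonical form:
    
    ```scala
    import java.util.Locale
    
    // Hypothetical stand-in for a config entry that normalizes its value on read,
    // mirroring ConfigBuilder's transform step added by this patch.
    final case class ConfEntry(key: String, default: String, transform: String => String) {
      def get(settings: Map[String, String]): String =
        transform(settings.getOrElse(key, default))
    }
    
    object SchedulerModeSketch {
      val SCHEDULER_MODE: ConfEntry =
        ConfEntry("spark.scheduler.mode", "FIFO", _.toUpperCase(Locale.ROOT))
    
      def main(args: Array[String]): Unit = {
        // A lowercase user setting is normalized before any consumer reads it.
        println(SCHEDULER_MODE.get(Map("spark.scheduler.mode" -> "fair"))) // prints FAIR
      }
    }
    ```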
    
    ### Why are the changes needed?
    
    Before this PR, it is logically fine to set `spark.scheduler.mode=fair`,
    but Spark logs a warning with the following stack trace (a sketch after the log shows why the lookup is case-sensitive):
    ```
    java.util.NoSuchElementException: No value found for 'fair'
    	at scala.Enumeration.withName(Enumeration.scala:124)
    	at org.apache.spark.ui.jobs.AllJobsPage$$anonfun$22.apply(AllJobsPage.scala:314)
    	at org.apache.spark.ui.jobs.AllJobsPage$$anonfun$22.apply(AllJobsPage.scala:314)
    	at scala.Option.map(Option.scala:146)
    	at org.apache.spark.ui.jobs.AllJobsPage.render(AllJobsPage.scala:314)
    	at org.apache.spark.ui.WebUI$$anonfun$2.apply(WebUI.scala:90)
    	at org.apache.spark.ui.WebUI$$anonfun$2.apply(WebUI.scala:90)
    	at org.apache.spark.ui.JettyUtils$$anon$3.doGet(JettyUtils.scala:90)
    ```
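    
    The exception comes from `scala.Enumeration.withName`, which does an exact, case-sensitive lookup. A minimal sketch (a stand-in for `org.apache.spark.scheduler.SchedulingMode`, not the real object) illustrates the failure and the uppercased lookup:
    
    ```scala
    import java.util.Locale
    
    // Minimal stand-in for org.apache.spark.scheduler.SchedulingMode.
    object SchedulingModeSketch extends Enumeration {
      val FAIR, FIFO, NONE = Value
    }
    
    object WithNameExample {
      def main(args: Array[String]): Unit = {
        // Exact-case lookup succeeds.
        println(SchedulingModeSketch.withName("FAIR"))
        // Lowercase input succeeds only after normalization...
        println(SchedulingModeSketch.withName("fair".toUpperCase(Locale.ROOT)))
        // ...and throws java.util.NoSuchElementException without it.
        try SchedulingModeSketch.withName("fair")
        catch { case e: NoSuchElementException => println(s"lookup failed: ${e.getMessage}") }
      }
    }
    ```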
    
    ### Does this PR introduce any user-facing change?
    
    Almost none.
    
    ### How was this patch tested?
    
    Existing tests.
    
    Closes #28049 from ulysses-you/SPARK-31285.
    
    Authored-by: ulysses <yo...@weidian.com>
    Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
 core/src/main/scala/org/apache/spark/internal/config/package.scala     | 2 ++
 core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/core/src/main/scala/org/apache/spark/internal/config/package.scala b/core/src/main/scala/org/apache/spark/internal/config/package.scala
index f70ee2e..8f8b6ad 100644
--- a/core/src/main/scala/org/apache/spark/internal/config/package.scala
+++ b/core/src/main/scala/org/apache/spark/internal/config/package.scala
@@ -17,6 +17,7 @@
 
 package org.apache.spark.internal
 
+import java.util.Locale
 import java.util.concurrent.TimeUnit
 
 import org.apache.spark.launcher.SparkLauncher
@@ -1756,6 +1757,7 @@ package object config {
     ConfigBuilder("spark.scheduler.mode")
       .version("0.8.0")
       .stringConf
+      .transform(_.toUpperCase(Locale.ROOT))
       .createWithDefault(SchedulingMode.FIFO.toString)
 
   private[spark] val SCHEDULER_REVIVE_INTERVAL =
diff --git a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
index f0f84fe..718c571 100644
--- a/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
+++ b/core/src/main/scala/org/apache/spark/scheduler/TaskSchedulerImpl.scala
@@ -141,7 +141,7 @@ private[spark] class TaskSchedulerImpl(
   private val schedulingModeConf = conf.get(SCHEDULER_MODE)
   val schedulingMode: SchedulingMode =
     try {
-      SchedulingMode.withName(schedulingModeConf.toUpperCase(Locale.ROOT))
+      SchedulingMode.withName(schedulingModeConf)
     } catch {
       case e: java.util.NoSuchElementException =>
         throw new SparkException(s"Unrecognized $SCHEDULER_MODE_PROPERTY: $schedulingModeConf")


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org