Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2022/02/08 09:04:48 UTC

[GitHub] [spark] martin-g commented on a change in pull request #35436: [SPARK-38135][K8S] Introduce job scheduling related configurations

martin-g commented on a change in pull request #35436:
URL: https://github.com/apache/spark/pull/35436#discussion_r801402311



##########
File path: resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
##########
@@ -675,6 +676,42 @@ private[spark] object Config extends Logging {
       .checkValue(value => value > 0, "Maximum number of pending pods should be a positive integer")
       .createWithDefault(Int.MaxValue)
 
+  val KUBERNETES_JOB_QUEUE = ConfigBuilder("spark.kubernetes.job.queue")
+    .doc(s"The name of the queue to which the job is submitted. This info " +
+      "will be stored in configuration and passed to specified feature step.")
+    .version("3.3.0")
+    .stringConf
+    .createWithDefault("default")
+
+  val KUBERNETES_JOB_MIN_CPU = ConfigBuilder("spark.kubernetes.job.minCPU")
+    .doc(s"The minimum CPU for running the job. This info " +

Review comment:
       Same as the string-interpolation comment below - the `s` prefix is unnecessary here.

##########
File path: resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
##########
@@ -675,6 +676,42 @@ private[spark] object Config extends Logging {
       .checkValue(value => value > 0, "Maximum number of pending pods should be a positive integer")
       .createWithDefault(Int.MaxValue)
 
+  val KUBERNETES_JOB_QUEUE = ConfigBuilder("spark.kubernetes.job.queue")
+    .doc(s"The name of the queue to which the job is submitted. This info " +

Review comment:
       No need for string interpolation here - there is nothing to interpolate, so the `s` prefix can be dropped.
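
       For reference, a minimal sketch of the suggested change: the only edit is
       dropping the unused `s` prefix, everything else is copied verbatim from the
       diff above.

           // Sketch: same definition as in the diff, minus the `s` interpolator,
           // which does nothing here because the doc string interpolates no values.
           val KUBERNETES_JOB_QUEUE = ConfigBuilder("spark.kubernetes.job.queue")
             .doc("The name of the queue to which the job is submitted. This info " +
               "will be stored in configuration and passed to specified feature step.")
             .version("3.3.0")
             .stringConf
             .createWithDefault("default")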

##########
File path: resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
##########
@@ -675,6 +676,42 @@ private[spark] object Config extends Logging {
       .checkValue(value => value > 0, "Maximum number of pending pods should be a positive integer")
       .createWithDefault(Int.MaxValue)
 
+  val KUBERNETES_JOB_QUEUE = ConfigBuilder("spark.kubernetes.job.queue")
+    .doc(s"The name of the queue to which the job is submitted. This info " +
+      "will be stored in configuration and passed to specified feature step.")
+    .version("3.3.0")
+    .stringConf
+    .createWithDefault("default")
+
+  val KUBERNETES_JOB_MIN_CPU = ConfigBuilder("spark.kubernetes.job.minCPU")
+    .doc(s"The minimum CPU for running the job. This info " +
+      "will be stored in configuration and passed to specified feature step.")
+    .version("3.3.0")
+    .doubleConf
+    .createWithDefault(2.0)
+
+  val KUBERNETES_JOB_MIN_MEMORY = ConfigBuilder("spark.kubernetes.job.minMemory")
+    .doc("The minimum memory for running the job, in MiB unless otherwise specified. This info " +
+      "will be stored in configuration and passed to specified feature step.")
+    .version("3.3.0")
+    .bytesConf(ByteUnit.MiB)
+    .createWithDefaultString("3g")
+
+  val KUBERNETES_JOB_MIN_MEMBER = ConfigBuilder("spark.kubernetes.job.minMember")
+    .doc(s"The minimum number of pods running in a job. This info " +
+      "will be stored in configuration and passed to specified feature step.")
+    .version("3.3.0")
+    .intConf
+    .checkValue(value => value > 0, "The minimum number should be a positive integer")
+    .createWithDefault(1)
+
+  val KUBERNETES_JOB_PRIORITY_CLASS_NAME = ConfigBuilder("spark.kubernetes.job.priorityClassName")
+    .doc(s"The priority of the running job. This info " +

Review comment:
       Same - the `s` prefix is not needed here either.

##########
File path: resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/Config.scala
##########
@@ -675,6 +676,42 @@ private[spark] object Config extends Logging {
       .checkValue(value => value > 0, "Maximum number of pending pods should be a positive integer")
       .createWithDefault(Int.MaxValue)
 
+  val KUBERNETES_JOB_QUEUE = ConfigBuilder("spark.kubernetes.job.queue")
+    .doc(s"The name of the queue to which the job is submitted. This info " +
+      "will be stored in configuration and passed to specified feature step.")
+    .version("3.3.0")
+    .stringConf
+    .createWithDefault("default")
+
+  val KUBERNETES_JOB_MIN_CPU = ConfigBuilder("spark.kubernetes.job.minCPU")
+    .doc(s"The minimum CPU for running the job. This info " +
+      "will be stored in configuration and passed to specified feature step.")
+    .version("3.3.0")
+    .doubleConf
+    .createWithDefault(2.0)
+
+  val KUBERNETES_JOB_MIN_MEMORY = ConfigBuilder("spark.kubernetes.job.minMemory")
+    .doc("The minimum memory for running the job, in MiB unless otherwise specified. This info " +
+      "will be stored in configuration and passed to specified feature step.")
+    .version("3.3.0")
+    .bytesConf(ByteUnit.MiB)
+    .createWithDefaultString("3g")
+
+  val KUBERNETES_JOB_MIN_MEMBER = ConfigBuilder("spark.kubernetes.job.minMember")
+    .doc(s"The minimum number of pods running in a job. This info " +

Review comment:
       Same - the unused `s` interpolator can be dropped from this doc string as well.
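
       Taken together, the keys proposed in this diff (spark.kubernetes.job.queue,
       .minCPU, .minMemory, .minMember, .priorityClassName) are ordinary Spark
       configuration entries. A hypothetical usage sketch, assuming the PR is merged
       with these names and defaults unchanged; the values below are made up:

           // Hypothetical sketch only: the keys come from this diff and may still
           // change during review; the values are illustrative.
           import org.apache.spark.SparkConf

           val conf = new SparkConf()
             .set("spark.kubernetes.job.queue", "batch")                     // default: "default"
             .set("spark.kubernetes.job.minCPU", "4.0")                      // default: 2.0
             .set("spark.kubernetes.job.minMemory", "6g")                    // MiB-based, default: "3g"
             .set("spark.kubernetes.job.minMember", "3")                     // must be > 0, default: 1
             .set("spark.kubernetes.job.priorityClassName", "high-priority")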




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


