Posted to commits@spark.apache.org by do...@apache.org on 2022/03/03 08:42:58 UTC

[spark] branch master updated: [SPARK-38398][K8S][TESTS] Add `priorityClassName` integration test case

This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 86e0903  [SPARK-38398][K8S][TESTS] Add `priorityClassName` integration test case
86e0903 is described below

commit 86e0903bfe8cbe5d451471f902d9727ffc16a5ab
Author: Dongjoon Hyun <do...@apache.org>
AuthorDate: Thu Mar 3 00:41:17 2022 -0800

    [SPARK-38398][K8S][TESTS] Add `priorityClassName` integration test case
    
    ### What changes were proposed in this pull request?
    
    Apache Spark supports many K8s features in an extensible way via `spark.kubernetes.driver.podTemplateFile` and `spark.kubernetes.executor.podTemplateFile`. This PR aims to add an integration test case for the `priorityClassName` pod spec field.
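
    For context, pod templates are wired in purely through configuration. A minimal sketch in Scala, assuming placeholder template paths (the two config keys are the real ones named above):
    ```
    import org.apache.spark.SparkConf

    // The template paths below are placeholders; Spark merges the specs they
    // contain (e.g. priorityClassName) into the driver and executor pods.
    val conf = new SparkConf()
      .set("spark.kubernetes.driver.podTemplateFile", "/path/to/driver-template.yml")
      .set("spark.kubernetes.executor.podTemplateFile", "/path/to/executor-template.yml")
    ```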
    
    In this test case, we use one of the K8s built-in priority classes because we want to run this test on heterogeneous K8s environments. In addition, a `schedule` test tag is added so that the test can be skipped on esoteric K8s environments that lack the `system-node-critical` priority class or define it with a different value (see the example command after the listing below).
    - https://kubernetes.io/docs/tasks/administer-cluster/guaranteed-scheduling-critical-addon-pods/#marking-pod-as-critical
    ```
    $ k get priorityclass
    NAME                      VALUE        GLOBAL-DEFAULT   AGE
    system-cluster-critical   2000000000   false            4h19m
    system-node-critical      2000001000   false            4h19m
    ```
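
    On a cluster where that priority class is missing or redefined, the new test can be skipped by excluding its tag; a hypothetical invocation following the pattern of the command in the testing section below:
    ```
    $ build/sbt -Psparkr -Pkubernetes -Pkubernetes-integration-tests \
        -Dtest.exclude.tags=minikube,schedule \
        -Dspark.kubernetes.test.deployMode=docker-for-desktop \
        "kubernetes-integration-tests/test"
    ```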
    
    ### Why are the changes needed?
    
    We don't need to enumerate every K8s spec field via `spark.kubernetes.xxx` configurations; a pod template can express many of them.
    This example will also help future work on custom schedulers.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Pass the K8s IT. This was tested as follows.
    
    ```
    $ build/sbt -Psparkr -Pkubernetes -Pkubernetes-integration-tests -Dtest.exclude.tags=minikube -Dspark.kubernetes.test.deployMode=docker-for-desktop "kubernetes-integration-tests/test"
    ...
    [info] KubernetesSuite:
    [info] - Run SparkPi with no resources (8 seconds, 866 milliseconds)
    [info] - Run SparkPi with no resources & statefulset allocation (10 seconds, 700 milliseconds)
    [info] - Run SparkPi with a very long application name. (8 seconds, 634 milliseconds)
    [info] - Use SparkLauncher.NO_RESOURCE (8 seconds, 628 milliseconds)
    [info] - Run SparkPi with a master URL without a scheme. (8 seconds, 626 milliseconds)
    [info] - Run SparkPi with an argument. (8 seconds, 821 milliseconds)
    [info] - Run SparkPi with custom labels, annotations, and environment variables. (9 seconds, 675 milliseconds)
    [info] - All pods have the same service account by default (8 seconds, 692 milliseconds)
    [info] - Run extraJVMOptions check on driver (4 seconds, 599 milliseconds)
    [info] - Run SparkRemoteFileTest using a remote data file (8 seconds, 767 milliseconds)
    [info] - Verify logging configuration is picked from the provided SPARK_CONF_DIR/log4j2.properties (14 seconds, 140 milliseconds)
    [info] - Run SparkPi with env and mount secrets. (19 seconds, 62 milliseconds)
    [info] - Run PySpark on simple pi.py example (9 seconds, 821 milliseconds)
    [info] - Run PySpark to test a pyfiles example (11 seconds, 713 milliseconds)
    [info] - Run PySpark with memory customization (9 seconds, 630 milliseconds)
    [info] - Run in client mode. (7 seconds, 289 milliseconds)
    [info] - Start pod creation from template (8 seconds, 720 milliseconds)
    [info] - SPARK-38398: Schedule pod creation from template (8 seconds, 728 milliseconds)
    ...
    ```
    
    Closes #35716 from dongjoon-hyun/SPARK-38398.
    
    Authored-by: Dongjoon Hyun <do...@apache.org>
    Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
 .../test/resources/driver-schedule-template.yml    | 27 +++++++++++++++++++++
 .../k8s/integrationtest/KubernetesSuite.scala      |  1 +
 .../k8s/integrationtest/PodTemplateSuite.scala     | 28 +++++++++++++++++++++-
 3 files changed, 55 insertions(+), 1 deletion(-)

diff --git a/resource-managers/kubernetes/integration-tests/src/test/resources/driver-schedule-template.yml b/resource-managers/kubernetes/integration-tests/src/test/resources/driver-schedule-template.yml
new file mode 100644
index 0000000..22eaa6c
--- /dev/null
+++ b/resource-managers/kubernetes/integration-tests/src/test/resources/driver-schedule-template.yml
@@ -0,0 +1,27 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+apiVersion: v1
+kind: Pod
+metadata:
+  labels:
+    template-label-key: driver-template-label-value
+spec:
+  priorityClassName: system-node-critical
+  containers:
+  - name: test-driver-container
+    image: will-be-overwritten
+
diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesSuite.scala
index 9faf73f..685149f 100644
--- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesSuite.scala
+++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/KubernetesSuite.scala
@@ -611,6 +611,7 @@ class KubernetesSuite extends SparkFunSuite
 private[spark] object KubernetesSuite {
   val k8sTestTag = Tag("k8s")
   val localTestTag = Tag("local")
+  val schedulingTestTag = Tag("schedule")
   val rTestTag = Tag("r")
   val MinikubeTag = Tag("minikube")
   val SPARK_PI_MAIN_CLASS: String = "org.apache.spark.examples.SparkPi"
diff --git a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PodTemplateSuite.scala b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PodTemplateSuite.scala
index e5a847e..2cd3bb4 100644
--- a/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PodTemplateSuite.scala
+++ b/resource-managers/kubernetes/integration-tests/src/test/scala/org/apache/spark/deploy/k8s/integrationtest/PodTemplateSuite.scala
@@ -20,7 +20,7 @@ import java.io.File
 
 import io.fabric8.kubernetes.api.model.Pod
 
-import org.apache.spark.deploy.k8s.integrationtest.KubernetesSuite.k8sTestTag
+import org.apache.spark.deploy.k8s.integrationtest.KubernetesSuite.{k8sTestTag, schedulingTestTag}
 
 private[spark] trait PodTemplateSuite { k8sSuite: KubernetesSuite =>
 
@@ -46,10 +46,36 @@ private[spark] trait PodTemplateSuite { k8sSuite: KubernetesSuite =>
       }
     )
   }
+
+  test("SPARK-38398: Schedule pod creation from template", k8sTestTag, schedulingTestTag) {
+    sparkAppConf
+      .set("spark.kubernetes.driver.podTemplateFile",
+        DRIVER_SCHEDULE_TEMPLATE_FILE.getAbsolutePath)
+      .set("spark.kubernetes.executor.podTemplateFile", EXECUTOR_TEMPLATE_FILE.getAbsolutePath)
+    runSparkPiAndVerifyCompletion(
+      driverPodChecker = (driverPod: Pod) => {
+        assert(driverPod.getMetadata.getName === driverPodName)
+        assert(driverPod.getSpec.getContainers.get(0).getImage === image)
+        assert(driverPod.getSpec.getContainers.get(0).getName === "test-driver-container")
+        assert(driverPod.getMetadata.getLabels.containsKey(LABEL_KEY))
+        assert(driverPod.getMetadata.getLabels.get(LABEL_KEY) === "driver-template-label-value")
+        assert(driverPod.getSpec.getPriority() === 2000001000)
+      },
+      executorPodChecker = (executorPod: Pod) => {
+        assert(executorPod.getSpec.getContainers.get(0).getImage === image)
+        assert(executorPod.getSpec.getContainers.get(0).getName === "test-executor-container")
+        assert(executorPod.getMetadata.getLabels.containsKey(LABEL_KEY))
+        assert(executorPod.getMetadata.getLabels.get(LABEL_KEY) === "executor-template-label-value")
+        assert(executorPod.getSpec.getPriority() === 0) // When there is no default, 0 is used.
+      }
+    )
+  }
 }
 
 private[spark] object PodTemplateSuite {
   val LABEL_KEY = "template-label-key"
   val DRIVER_TEMPLATE_FILE = new File(getClass.getResource("/driver-template.yml").getFile)
+  val DRIVER_SCHEDULE_TEMPLATE_FILE =
+    new File(getClass.getResource("/driver-schedule-template.yml").getFile)
   val EXECUTOR_TEMPLATE_FILE = new File(getClass.getResource("/executor-template.yml").getFile)
 }
