Posted to commits@spark.apache.org by do...@apache.org on 2022/03/02 18:38:21 UTC

[spark] branch branch-3.2 updated: [SPARK-38029][K8S][TESTS] Support K8S integration test in SBT

This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.2 by this push:
     new 6f3ea09  [SPARK-38029][K8S][TESTS] Support K8S integration test in SBT
6f3ea09 is described below

commit 6f3ea094177fd1759a367085b2e94ccb48dc9d24
Author: William Hyun <wi...@apache.org>
AuthorDate: Tue Jan 25 18:37:05 2022 -0800

    [SPARK-38029][K8S][TESTS] Support K8S integration test in SBT
    
    ### What changes were proposed in this pull request?
    This PR aims to support K8S integration test in SBT.

    ### Why are the changes needed?
    Currently, SBT only supports `minikube`, in a hard-coded way.

    ### Does this PR introduce any user-facing change?
    No.

    ### How was this patch tested?
    Manually, because this is an integration test.
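
    As a hedged usage sketch (the profile and module names below are
    assumptions based on Spark's developer docs, not taken from this
    commit), the deploy mode can now be chosen per run through a JVM
    system property instead of being fixed to `minikube`:

        build/sbt -Pkubernetes -Pkubernetes-integration-tests \
          -Dspark.kubernetes.test.deployMode=docker-for-desktop \
          "kubernetes-integration-tests/test"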
    
    Closes #35327 from williamhyun/sbt_k8s.
    
    Authored-by: William Hyun <wi...@apache.org>
    Signed-off-by: Dongjoon Hyun <do...@apache.org>
    (cherry picked from commit 69c213d3568d665ce239a4aa20568e89081d2419)
    Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
 project/SparkBuild.scala | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/project/SparkBuild.scala b/project/SparkBuild.scala
index 14af38f..39b7d18 100644
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@ -595,8 +595,8 @@ object DockerIntegrationTests {
 }
 
 /**
- * These settings run a hardcoded configuration of the Kubernetes integration tests using
- * minikube. Docker images will have the "dev" tag, and will be overwritten every time the
+ * These settings run the Kubernetes integration tests.
+ * Docker images will have the "dev" tag, and will be overwritten every time the
  * integration tests are run. The integration tests are actually bound to the "test" phase,
  * so running "test" on this module will run the integration tests.
  *
@@ -616,6 +616,7 @@ object KubernetesIntegrationTests {
   val runITs = TaskKey[Unit]("run-its", "Only run ITs, skip image build.")
   val imageTag = settingKey[String]("Tag to use for images built during the test.")
   val namespace = settingKey[String]("Namespace where to run pods.")
+  val deployMode = sys.props.get("spark.kubernetes.test.deployMode")
 
   // Hack: this variable is used to control whether to build docker images. It's updated by
   // the tasks below in a non-obvious way, so that you get the functionality described in
@@ -629,12 +630,12 @@ object KubernetesIntegrationTests {
       if (shouldBuildImage) {
         val dockerTool = s"$sparkHome/bin/docker-image-tool.sh"
         val bindingsDir = s"$sparkHome/resource-managers/kubernetes/docker/src/main/dockerfiles/spark/bindings"
-        val cmd = Seq(dockerTool, "-m",
+        val cmd = Seq(dockerTool,
           "-t", imageTag.value,
           "-p", s"$bindingsDir/python/Dockerfile",
-          "-R", s"$bindingsDir/R/Dockerfile",
+          "-R", s"$bindingsDir/R/Dockerfile") ++
+          (if (deployMode == Some("docker-for-desktop")) Seq.empty else Seq("-m")) :+
           "build"
-        )
         val ec = Process(cmd).!
         if (ec != 0) {
           throw new IllegalStateException(s"Process '${cmd.mkString(" ")}' exited with $ec.")
@@ -650,7 +651,7 @@ object KubernetesIntegrationTests {
     }.value,
     (Test / test) := (Test / test).dependsOn(dockerBuild).value,
     (Test / javaOptions) ++= Seq(
-      "-Dspark.kubernetes.test.deployMode=minikube",
+      s"-Dspark.kubernetes.test.deployMode=${deployMode.getOrElse("minikube")}",
       s"-Dspark.kubernetes.test.imageTag=${imageTag.value}",
       s"-Dspark.kubernetes.test.namespace=${namespace.value}",
       s"-Dspark.kubernetes.test.unpackSparkDir=$sparkHome"
