You are viewing a plain-text version of this content. The canonical link to the original is available in the HTML version of this message.
Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2021/09/16 17:08:47 UTC

[GitHub] [spark] mridulm commented on a change in pull request #34013: [SPARK-36774][CORE][TESTS] Move SparkSubmitTestUtils to core module and use it in SparkSubmitSuite

mridulm commented on a change in pull request #34013:
URL: https://github.com/apache/spark/pull/34013#discussion_r710316154



##########
File path: core/src/test/scala/org/apache/spark/deploy/SparkSubmitTestUtils.scala
##########
@@ -25,33 +25,36 @@ import scala.collection.mutable.ArrayBuffer
 
 import org.scalatest.concurrent.{Signaler, ThreadSignaler, TimeLimits}
 import org.scalatest.exceptions.TestFailedDueToTimeoutException
+import org.scalatest.time.Span
 import org.scalatest.time.SpanSugar._
 
+import org.apache.spark.ProcessTestUtils.ProcessOutputCapturer
 import org.apache.spark.SparkFunSuite
-import org.apache.spark.sql.test.ProcessTestUtils.ProcessOutputCapturer
 import org.apache.spark.util.Utils
 
 trait SparkSubmitTestUtils extends SparkFunSuite with TimeLimits {
 
+  protected val defaultSparkSubmitTimeout: Span = 1.minute
+
   // Necessary to make ScalaTest 3.x interrupt a thread on the JVM like ScalaTest 2.2.x
   implicit val defaultSignaler: Signaler = ThreadSignaler
 
   // NOTE: This is an expensive operation in terms of time (10 seconds+). Use sparingly.
-  // This is copied from org.apache.spark.deploy.SparkSubmitSuite
   protected def runSparkSubmit(
       args: Seq[String],
       sparkHomeOpt: Option[String] = None,
+      timeout: Span = defaultSparkSubmitTimeout,
       isSparkTesting: Boolean = true): Unit = {
     val sparkHome = sparkHomeOpt.getOrElse(
       sys.props.getOrElse("spark.test.home", fail("spark.test.home is not set!")))
     val history = ArrayBuffer.empty[String]
     val sparkSubmit = if (Utils.isWindows) {
       // On Windows, `ProcessBuilder.directory` does not change the current working directory.
-      new File("..\\..\\bin\\spark-submit.cmd").getAbsolutePath
+      new File(new File(sparkHome, "bin"), "spark-submit.cmd")
     } else {
-      "./bin/spark-submit"
+      new File(new File(sparkHome, "bin"), "spark-submit")
     }
-    val commands = Seq(sparkSubmit) ++ args
+    val commands = Seq(sparkSubmit.getCanonicalPath) ++ args

Review comment:
       Yes, this change looks good to me - it was a cleaner way to handle the paths.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org