Posted to commits@kyuubi.apache.org by ch...@apache.org on 2023/04/17 01:44:57 UTC

[kyuubi] branch master updated: [KYUUBI #4713][TEST] Fix false positive result in SchedulerPoolSuite

This is an automated email from the ASF dual-hosted git repository.

chengpan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/kyuubi.git


The following commit(s) were added to refs/heads/master by this push:
     new 57b061126 [KYUUBI #4713][TEST] Fix false positive result in SchedulerPoolSuite
57b061126 is described below

commit 57b06112657f1fd87447098e166205074426a908
Author: huangzhir <30...@qq.com>
AuthorDate: Mon Apr 17 09:44:47 2023 +0800

    [KYUUBI #4713][TEST] Fix false positive result in SchedulerPoolSuite
    
    ### _Why are the changes needed?_
    
    Fix issue https://github.com/apache/kyuubi/issues/4713: the `eventually`-retried assertions in SchedulerPoolSuite could report a false positive, so this patch makes the job submission order deterministic and waits for both jobs to finish before asserting.
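    
    A minimal sketch of the new waiting pattern (runJob1/runJob2 are hypothetical
    placeholders for the suite's two JDBC tasks; note the real patch issues the
    1s head-start sleep inside the job1 task rather than between the submissions):
    
        import java.util.concurrent.{Executors, TimeUnit}
    
        // hypothetical stand-ins for the two JDBC query tasks in the suite
        def runJob1(): Unit = ???
        def runJob2(): Unit = ???
    
        val pool = Executors.newFixedThreadPool(2)
        pool.execute(() => runJob1()) // the pool-p1 query, expected to become job1
        Thread.sleep(1000)            // head start so job1 registers before job2
        pool.execute(() => runJob2()) // the pool-p0 query, expected to become job2
        pool.shutdown()
        // Wait (up to 20s) for both tasks to finish, then assert exactly once,
        // instead of retrying the assertions inside eventually.
        assert(pool.awaitTermination(20, TimeUnit.SECONDS))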
    
    ### _How was this patch tested?_
    - [ ] Add some test cases that check the changes thoroughly, including negative and positive cases if possible
    
    - [ ] Add screenshots for manual tests if appropriate
    
    - [X] [Run test](https://kyuubi.readthedocs.io/en/master/develop_tools/testing.html#running-tests) locally before making a pull request (see the sample command below)
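    
    For reference, running this single suite locally with the ScalaTest Maven
    plugin looks roughly like the following (flags per the testing docs linked
    above; treat the exact invocation as an approximation):
    
        build/mvn test -pl externals/kyuubi-spark-sql-engine \
          -Dtest=none -DwildcardSuites=org.apache.kyuubi.engine.spark.SchedulerPoolSuite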
    
    Closes #4714 from huangzhir/fixtest-schedulerpool.
    
    Closes #4713
    
    e66ede214 [huangzhir] fix a false positive test result in SchedulerPoolSuite
    
    Authored-by: huangzhir <30...@qq.com>
    Signed-off-by: Cheng Pan <ch...@apache.org>
---
 .../kyuubi/engine/spark/SchedulerPoolSuite.scala       | 18 +++++++++++-------
 1 file changed, 11 insertions(+), 7 deletions(-)

diff --git a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/SchedulerPoolSuite.scala b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/SchedulerPoolSuite.scala
index af8c90cf2..d42b7f4d5 100644
--- a/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/SchedulerPoolSuite.scala
+++ b/externals/kyuubi-spark-sql-engine/src/test/scala/org/apache/kyuubi/engine/spark/SchedulerPoolSuite.scala
@@ -19,6 +19,8 @@ package org.apache.kyuubi.engine.spark
 
 import java.util.concurrent.Executors
 
+import scala.concurrent.duration.SECONDS
+
 import org.apache.spark.scheduler.{SparkListener, SparkListenerJobEnd, SparkListenerJobStart}
 import org.scalatest.concurrent.PatienceConfiguration.Timeout
 import org.scalatest.time.SpanSugar.convertIntToGrainOfTime
@@ -80,6 +82,7 @@ class SchedulerPoolSuite extends WithSparkSQLEngine with HiveJDBCTestHelper {
         threads.execute(() => {
           priority match {
             case 0 =>
+              // this query is expected to run as job2
               withJdbcStatement() { statement =>
                 statement.execute("SET kyuubi.operation.scheduler.pool=p0")
                 statement.execute("SELECT java_method('java.lang.Thread', 'sleep', 1500l)" +
@@ -92,17 +95,18 @@ class SchedulerPoolSuite extends WithSparkSQLEngine with HiveJDBCTestHelper {
                 statement.execute("SELECT java_method('java.lang.Thread', 'sleep', 1500l)" +
                   " FROM range(1, 3, 1, 2)")
               }
+              // make sure this job is named job1
+              Thread.sleep(1000)
           }
         })
       }
       threads.shutdown()
-      eventually(Timeout(20.seconds)) {
-        // We can not ensure that job1 is started before job2 so here using abs.
-        assert(Math.abs(job1StartTime - job2StartTime) < 1000)
-        // Job1 minShare is 2(total resource) so that job2 should be allocated tasks after
-        // job1 finished.
-        assert(job2FinishTime - job1FinishTime >= 1000)
-      }
+      threads.awaitTermination(20, SECONDS)
+      // since we sleep 1s after submitting job1, job1 should start before job2
+      assert(job1StartTime < job2StartTime)
+      // job2's minShare is 2 (the total resource), so job1 should only be allocated
+      // tasks after job2 finishes.
+      assert(job2FinishTime < job1FinishTime)
     } finally {
       spark.sparkContext.removeSparkListener(listener)
     }