Posted to commits@spark.apache.org by sr...@apache.org on 2022/03/22 23:12:40 UTC

[spark] branch master updated: [SPARK-38619][TESTS] Clean up JUnit API usage in ScalaTest

This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 7373cd2  [SPARK-38619][TESTS] Clean up JUnit API usage in ScalaTest
7373cd2 is described below

commit 7373cd22210ab9ef865740059ba265f781978469
Author: yangjie01 <ya...@baidu.com>
AuthorDate: Tue Mar 22 18:09:40 2022 -0500

    [SPARK-38619][TESTS] Clean up JUnit API usage in ScalaTest
    
    ### What changes were proposed in this pull request?
    This PR cleans up JUnit API usage in ScalaTest suites and replaces it with the native ScalaTest API.
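
    A minimal sketch of the substitution pattern (the suite name here is
    hypothetical; the assertion mappings mirror the diff below):

    ```scala
    import org.apache.spark.SparkFunSuite

    // Hypothetical suite illustrating the JUnit -> ScalaTest assertion mappings.
    class AssertionStyleSuite extends SparkFunSuite {
      test("ScalaTest-native assertions replace JUnit ones") {
        val conf = Map("spark.app.name" -> "s3a-test")

        // assertEquals(expected, actual) -> assert(expected == actual)
        assert("s3a-test" == conf("spark.app.name"))

        // assertFalse(cond) -> assert(!cond)
        assert(!conf.contains("spark.missing.key"))

        // assertEquals("message", a, b) -> assert(a == b, "message")
        assert("s3a-test" == conf("spark.app.name"), "unexpected app name")

        // assertArrayEquals(a, b) -> assert(a sameElements b)
        assert(Array[Byte](1, 2) sameElements Array[Byte](1, 2))
      }
    }
    ```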
    
    ### Why are the changes needed?
    ScalaTest suites should use the ScalaTest API; the JUnit API should only be used in Java tests.
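
    For methods returning Option, assertEquals(Some(x), opt) and
    assertEquals(None, opt) become asserts over .contains / .isEmpty,
    as in this sketch (the values are illustrative):

    ```scala
    val chosen: Option[String] = Some("1")

    // assertEquals(Some("1"), chosen) -> assert(chosen.contains("1"))
    assert(chosen.contains("1"))

    // assertEquals(None, chosen) -> assert(chosen.isEmpty)
    assert(Option.empty[String].isEmpty)
    ```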
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Passed existing GitHub Actions (GA) checks.
    
    Closes #35929 from LuciferYang/remove-junit-in-scalatest.
    
    Authored-by: yangjie01 <ya...@baidu.com>
    Signed-off-by: Sean Owen <sr...@gmail.com>
---
 .../scala/org/apache/spark/SparkContextSuite.scala | 11 ++--
 .../spark/api/python/PythonHadoopUtilSuite.scala   |  9 ++-
 .../spark/storage/BlockManagerMasterSuite.scala    |  6 +-
 .../cluster/k8s/ExecutorRollPluginSuite.scala      | 71 +++++++++++-----------
 .../scala/org/apache/spark/sql/QueryTest.scala     |  3 +-
 5 files changed, 46 insertions(+), 54 deletions(-)

diff --git a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
index 411a3b1..8671180 100644
--- a/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkContextSuite.scala
@@ -33,7 +33,6 @@ import org.apache.hadoop.mapred.TextInputFormat
 import org.apache.hadoop.mapreduce.lib.input.{TextInputFormat => NewTextInputFormat}
 import org.apache.logging.log4j.{Level, LogManager}
 import org.json4s.{DefaultFormats, Extraction}
-import org.junit.Assert.{assertEquals, assertFalse}
 import org.scalatest.concurrent.Eventually
 import org.scalatest.matchers.must.Matchers._
 
@@ -1257,12 +1256,12 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
   test("SPARK-35383: Fill missing S3A magic committer configs if needed") {
     val c1 = new SparkConf().setAppName("s3a-test").setMaster("local")
     sc = new SparkContext(c1)
-    assertFalse(sc.getConf.contains("spark.hadoop.fs.s3a.committer.name"))
+    assert(!sc.getConf.contains("spark.hadoop.fs.s3a.committer.name"))
 
     resetSparkContext()
     val c2 = c1.clone.set("spark.hadoop.fs.s3a.bucket.mybucket.committer.magic.enabled", "false")
     sc = new SparkContext(c2)
-    assertFalse(sc.getConf.contains("spark.hadoop.fs.s3a.committer.name"))
+    assert(!sc.getConf.contains("spark.hadoop.fs.s3a.committer.name"))
 
     resetSparkContext()
     val c3 = c1.clone.set("spark.hadoop.fs.s3a.bucket.mybucket.committer.magic.enabled", "true")
@@ -1277,7 +1276,7 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
       "spark.sql.sources.commitProtocolClass" ->
         "org.apache.spark.internal.io.cloud.PathOutputCommitProtocol"
     ).foreach { case (k, v) =>
-      assertEquals(v, sc.getConf.get(k))
+      assert(v == sc.getConf.get(k))
     }
 
     // Respect a user configuration
@@ -1294,9 +1293,9 @@ class SparkContextSuite extends SparkFunSuite with LocalSparkContext with Eventu
       "spark.sql.sources.commitProtocolClass" -> null
     ).foreach { case (k, v) =>
       if (v == null) {
-        assertFalse(sc.getConf.contains(k))
+        assert(!sc.getConf.contains(k))
       } else {
-        assertEquals(v, sc.getConf.get(k))
+        assert(v == sc.getConf.get(k))
       }
     }
   }
diff --git a/core/src/test/scala/org/apache/spark/api/python/PythonHadoopUtilSuite.scala b/core/src/test/scala/org/apache/spark/api/python/PythonHadoopUtilSuite.scala
index 039d49d..b4f7f1d 100644
--- a/core/src/test/scala/org/apache/spark/api/python/PythonHadoopUtilSuite.scala
+++ b/core/src/test/scala/org/apache/spark/api/python/PythonHadoopUtilSuite.scala
@@ -21,7 +21,6 @@ import java.util.HashMap
 
 import org.apache.hadoop.io.{BooleanWritable, BytesWritable, ByteWritable, DoubleWritable, FloatWritable, IntWritable, LongWritable,
   MapWritable, NullWritable, ShortWritable, Text, Writable}
-import org.junit.Assert
 import org.mockito.Mockito.mock
 
 import org.apache.spark.SparkFunSuite
@@ -34,13 +33,13 @@ class PythonHadoopUtilSuite extends SparkFunSuite {
     val writableToJavaConverter = new WritableToJavaConverter(broadcast)
     val result = writableToJavaConverter.convert(input)
     expected match {
-      case _: Array[Byte] => Assert.assertArrayEquals(
-        expected.asInstanceOf[Array[Byte]], result.asInstanceOf[Array[Byte]])
-      case _ => Assert.assertEquals(expected, result)
+      case _: Array[Byte] => assert(expected.asInstanceOf[Array[Byte]]
+        sameElements result.asInstanceOf[Array[Byte]])
+      case _ => assert(expected == result)
     }
     val javaToWritableConverter = new JavaToWritableConverter()
     val reConverted = javaToWritableConverter.convert(result)
-    Assert.assertEquals("Round trip conversion failed", input, reConverted)
+    assert(input == reConverted, "Round trip conversion failed")
   }
 
   test("Testing roundtrip conversion of various types") {
diff --git a/core/src/test/scala/org/apache/spark/storage/BlockManagerMasterSuite.scala b/core/src/test/scala/org/apache/spark/storage/BlockManagerMasterSuite.scala
index 0d54726..2457aef 100644
--- a/core/src/test/scala/org/apache/spark/storage/BlockManagerMasterSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/BlockManagerMasterSuite.scala
@@ -17,19 +17,17 @@
 
 package org.apache.spark.storage
 
-import org.junit.Assert.assertTrue
-
 import org.apache.spark.{SparkConf, SparkFunSuite}
 
 class BlockManagerMasterSuite extends SparkFunSuite {
 
   test("SPARK-31422: getMemoryStatus should not fail after BlockManagerMaster stops") {
     val bmm = new BlockManagerMaster(null, null, new SparkConf, true)
-    assertTrue(bmm.getMemoryStatus.isEmpty)
+    assert(bmm.getMemoryStatus.isEmpty)
   }
 
   test("SPARK-31422: getStorageStatus should not fail after BlockManagerMaster stops") {
     val bmm = new BlockManagerMaster(null, null, new SparkConf, true)
-    assertTrue(bmm.getStorageStatus.isEmpty)
+    assert(bmm.getStorageStatus.isEmpty)
   }
 }
diff --git a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorRollPluginSuite.scala b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorRollPluginSuite.scala
index 886abc0..a9e9b66 100644
--- a/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorRollPluginSuite.scala
+++ b/resource-managers/kubernetes/core/src/test/scala/org/apache/spark/scheduler/cluster/k8s/ExecutorRollPluginSuite.scala
@@ -18,7 +18,6 @@ package org.apache.spark.scheduler.cluster.k8s
 
 import java.util.Date
 
-import org.junit.Assert.assertEquals
 import org.scalatest.PrivateMethodTester
 
 import org.apache.spark.SparkFunSuite
@@ -121,61 +120,59 @@ class ExecutorRollPluginSuite extends SparkFunSuite with PrivateMethodTester {
 
   test("Empty executor list") {
     ExecutorRollPolicy.values.foreach { value =>
-      assertEquals(None, plugin.invokePrivate[Option[String]](_choose(Seq.empty, value)))
+      assert(plugin.invokePrivate[Option[String]](_choose(Seq.empty, value)).isEmpty)
     }
   }
 
   test("Driver summary should be ignored") {
     ExecutorRollPolicy.values.foreach { value =>
-      assertEquals(plugin.invokePrivate(_choose(Seq(driverSummary), value)), None)
+      assert(plugin.invokePrivate(_choose(Seq(driverSummary), value)).isEmpty)
     }
   }
 
   test("A one-item executor list") {
     ExecutorRollPolicy.values.filter(_ != ExecutorRollPolicy.OUTLIER_NO_FALLBACK).foreach { value =>
-      assertEquals(
-        Some(execWithSmallestID.id),
-        plugin.invokePrivate(_choose(Seq(execWithSmallestID), value)))
+      assert(
+        plugin.invokePrivate(_choose(Seq(execWithSmallestID), value))
+          .contains(execWithSmallestID.id))
     }
   }
 
   test("SPARK-37806: All policy should ignore executor if totalTasks < minTasks") {
     plugin.asInstanceOf[ExecutorRollDriverPlugin].minTasks = 1000
     ExecutorRollPolicy.values.foreach { value =>
-      assertEquals(None, plugin.invokePrivate(_choose(list, value)))
+      assert(plugin.invokePrivate(_choose(list, value)).isEmpty)
     }
   }
 
   test("Policy: ID") {
-    assertEquals(Some("1"), plugin.invokePrivate(_choose(list, ExecutorRollPolicy.ID)))
+    assert(plugin.invokePrivate(_choose(list, ExecutorRollPolicy.ID)).contains("1"))
   }
 
   test("Policy: ADD_TIME") {
-    assertEquals(Some("2"), plugin.invokePrivate(_choose(list, ExecutorRollPolicy.ADD_TIME)))
+    assert(plugin.invokePrivate(_choose(list, ExecutorRollPolicy.ADD_TIME)).contains("2"))
   }
 
   test("Policy: TOTAL_GC_TIME") {
-    assertEquals(Some("3"), plugin.invokePrivate(_choose(list, ExecutorRollPolicy.TOTAL_GC_TIME)))
+    assert(plugin.invokePrivate(_choose(list, ExecutorRollPolicy.TOTAL_GC_TIME)).contains("3"))
   }
 
   test("Policy: TOTAL_DURATION") {
-    assertEquals(Some("4"), plugin.invokePrivate(_choose(list, ExecutorRollPolicy.TOTAL_DURATION)))
+    assert(plugin.invokePrivate(_choose(list, ExecutorRollPolicy.TOTAL_DURATION)).contains("4"))
   }
 
   test("Policy: FAILED_TASKS") {
-    assertEquals(Some("5"), plugin.invokePrivate(_choose(list, ExecutorRollPolicy.FAILED_TASKS)))
+    assert(plugin.invokePrivate(_choose(list, ExecutorRollPolicy.FAILED_TASKS)).contains("5"))
   }
 
   test("Policy: AVERAGE_DURATION") {
-    assertEquals(
-      Some("6"),
-      plugin.invokePrivate(_choose(list, ExecutorRollPolicy.AVERAGE_DURATION)))
+    assert(plugin.invokePrivate(_choose(list, ExecutorRollPolicy.AVERAGE_DURATION)).contains("6"))
   }
 
   test("Policy: OUTLIER - Work like TOTAL_DURATION if there is no outlier") {
-    assertEquals(
-      plugin.invokePrivate(_choose(list, ExecutorRollPolicy.TOTAL_DURATION)),
-      plugin.invokePrivate(_choose(list, ExecutorRollPolicy.OUTLIER)))
+    assert(
+      plugin.invokePrivate(_choose(list, ExecutorRollPolicy.TOTAL_DURATION)) ==
+        plugin.invokePrivate(_choose(list, ExecutorRollPolicy.OUTLIER)))
   }
 
   test("Policy: OUTLIER - Detect an average task duration outlier") {
@@ -186,9 +183,9 @@ class ExecutorRollPluginSuite extends SparkFunSuite with PrivateMethodTester {
       0, false, 0, new Date(1639300001000L),
       Option.empty, Option.empty, Map(), Option.empty, Set(), Option.empty, Map(), Map(), 1,
       false, Set())
-    assertEquals(
-      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.AVERAGE_DURATION)),
-      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.OUTLIER)))
+    assert(
+      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.AVERAGE_DURATION)) ==
+        plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.OUTLIER)))
   }
 
   test("Policy: OUTLIER - Detect a total task duration outlier") {
@@ -199,9 +196,9 @@ class ExecutorRollPluginSuite extends SparkFunSuite with PrivateMethodTester {
       0, false, 0, new Date(1639300001000L),
       Option.empty, Option.empty, Map(), Option.empty, Set(), Option.empty, Map(), Map(), 1,
       false, Set())
-    assertEquals(
-      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.TOTAL_DURATION)),
-      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.OUTLIER)))
+    assert(
+      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.TOTAL_DURATION)) ==
+        plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.OUTLIER)))
   }
 
   test("Policy: OUTLIER - Detect a total GC time outlier") {
@@ -212,13 +209,13 @@ class ExecutorRollPluginSuite extends SparkFunSuite with PrivateMethodTester {
       0, false, 0, new Date(1639300001000L),
       Option.empty, Option.empty, Map(), Option.empty, Set(), Option.empty, Map(), Map(), 1,
       false, Set())
-    assertEquals(
-      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.TOTAL_GC_TIME)),
-      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.OUTLIER)))
+    assert(
+      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.TOTAL_GC_TIME)) ==
+        plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.OUTLIER)))
   }
 
   test("Policy: OUTLIER_NO_FALLBACK - Return None if there are no outliers") {
-    assertEquals(None, plugin.invokePrivate(_choose(list, ExecutorRollPolicy.OUTLIER_NO_FALLBACK)))
+    assert(plugin.invokePrivate(_choose(list, ExecutorRollPolicy.OUTLIER_NO_FALLBACK)).isEmpty)
   }
 
   test("Policy: OUTLIER_NO_FALLBACK - Detect an average task duration outlier") {
@@ -229,9 +226,9 @@ class ExecutorRollPluginSuite extends SparkFunSuite with PrivateMethodTester {
       0, false, 0, new Date(1639300001000L),
       Option.empty, Option.empty, Map(), Option.empty, Set(), Option.empty, Map(), Map(), 1,
       false, Set())
-    assertEquals(
-      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.AVERAGE_DURATION)),
-      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.OUTLIER_NO_FALLBACK)))
+    assert(
+      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.AVERAGE_DURATION)) ==
+        plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.OUTLIER_NO_FALLBACK)))
   }
 
   test("Policy: OUTLIER_NO_FALLBACK - Detect a total task duration outlier") {
@@ -242,9 +239,9 @@ class ExecutorRollPluginSuite extends SparkFunSuite with PrivateMethodTester {
       0, false, 0, new Date(1639300001000L),
       Option.empty, Option.empty, Map(), Option.empty, Set(), Option.empty, Map(), Map(), 1,
       false, Set())
-    assertEquals(
-      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.TOTAL_DURATION)),
-      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.OUTLIER_NO_FALLBACK)))
+    assert(
+      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.TOTAL_DURATION)) ==
+        plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.OUTLIER_NO_FALLBACK)))
   }
 
   test("Policy: OUTLIER_NO_FALLBACK - Detect a total GC time outlier") {
@@ -255,8 +252,8 @@ class ExecutorRollPluginSuite extends SparkFunSuite with PrivateMethodTester {
       0, false, 0, new Date(1639300001000L),
       Option.empty, Option.empty, Map(), Option.empty, Set(), Option.empty, Map(), Map(), 1,
       false, Set())
-    assertEquals(
-      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.TOTAL_GC_TIME)),
-      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.OUTLIER_NO_FALLBACK)))
+    assert(
+      plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.TOTAL_GC_TIME)) ==
+        plugin.invokePrivate(_choose(list :+ outlier, ExecutorRollPolicy.OUTLIER_NO_FALLBACK)))
   }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala b/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
index 06f94c6..0bb5e52 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
@@ -21,7 +21,6 @@ import java.util.TimeZone
 
 import scala.collection.JavaConverters._
 
-import org.junit.Assert
 import org.scalatest.Assertions
 
 import org.apache.spark.sql.catalyst.plans._
@@ -420,7 +419,7 @@ object QueryTest extends Assertions {
 
   def checkAnswer(df: DataFrame, expectedAnswer: java.util.List[Row]): Unit = {
     getErrorMessageInCheckAnswer(df, expectedAnswer.asScala.toSeq) match {
-      case Some(errorMessage) => Assert.fail(errorMessage)
+      case Some(errorMessage) => fail(errorMessage)
       case None =>
     }
   }
