Posted to issues@flink.apache.org by GitBox <gi...@apache.org> on 2022/05/05 08:33:15 UTC

[GitHub] [flink] zentol commented on a diff in pull request #18983: [FLINK-25543][flink-yarn] [JUnit5 Migration] Module: flink-yarn

zentol commented on code in PR #18983:
URL: https://github.com/apache/flink/pull/18983#discussion_r865647104


##########
flink-runtime/src/test/java/org/apache/flink/runtime/resourcemanager/active/ResourceManagerDriverTestBase.java:
##########
@@ -39,12 +38,10 @@
 import java.util.concurrent.TimeUnit;
 import java.util.function.Supplier;
 
-import static org.hamcrest.Matchers.is;
-import static org.junit.Assert.assertThat;
+import static org.assertj.core.api.Assertions.assertThat;
 
 /** Common test cases for implementations of {@link ResourceManagerDriver}. */
-public abstract class ResourceManagerDriverTestBase<WorkerType extends ResourceIDRetrievable>
-        extends TestLogger {
+public abstract class ResourceManagerDriverTestBase<WorkerType extends ResourceIDRetrievable> {

Review Comment:
   Let's annotate this test instead of adding the TestLoggerExtension service entry. As is, we'd also load the test logger for all JUnit 4 tests in flink-runtime, which, as far as I was told, causes issues.
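   For illustration, a minimal sketch of the annotation-based approach (class and imports taken from the diff above):
   ```
   import org.apache.flink.util.TestLoggerExtension;

   import org.junit.jupiter.api.extension.ExtendWith;

   /** Common test cases for implementations of {@link ResourceManagerDriver}. */
   @ExtendWith(TestLoggerExtension.class)
   public abstract class ResourceManagerDriverTestBase<WorkerType extends ResourceIDRetrievable> {
       // test cases unchanged
   }
   ```
   Scoping the extension via the annotation keeps it away from the remaining JUnit 4 tests in flink-runtime.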



##########
flink-yarn/src/test/java/org/apache/flink/yarn/RegisterApplicationMasterResponseReflectorTest.java:
##########
@@ -81,28 +75,28 @@ public void testDoesntCallGetContainersFromPreviousAttemptsMethodIfAbsent() {
                 registerApplicationMasterResponseReflector.getContainersFromPreviousAttemptsUnsafe(
                         new Object());
 
-        assertThat(containersFromPreviousAttemptsUnsafe, empty());
+        assertThat(containersFromPreviousAttemptsUnsafe).isEmpty();
     }
 
     @Test
-    public void testGetContainersFromPreviousAttemptsMethodReflectiveHadoop22() {
+    void testGetContainersFromPreviousAttemptsMethodReflectiveHadoop22() {
         assumeTrue(
+                isHadoopVersionGreaterThanOrEquals(2, 2),

Review Comment:
   This assumption could be removed, since we use a newer Hadoop version nowadays.



##########
flink-yarn/src/test/java/org/apache/flink/yarn/RegisterApplicationMasterResponseReflectorTest.java:
##########
@@ -125,24 +119,22 @@ public void testDoesntCallGetSchedulerResourceTypesMethodIfAbsent() {
                 registerApplicationMasterResponseReflector.getSchedulerResourceTypeNamesUnsafe(
                         new Object());
 
-        assertFalse(schedulerResourceTypeNames.isPresent());
+        assertThat(schedulerResourceTypeNames).isNotPresent();
     }
 
     @Test
-    public void testGetSchedulerResourceTypesMethodReflectiveHadoop26() {
+    void testGetSchedulerResourceTypesMethodReflectiveHadoop26() {
         assumeTrue(
+                isHadoopVersionGreaterThanOrEquals(2, 6),

Review Comment:
   This assumption could be removed, since we use a newer Hadoop version nowadays.



##########
flink-yarn/src/test/java/org/apache/flink/yarn/TaskExecutorProcessSpecContainerResourcePriorityAdapterTest.java:
##########
@@ -206,40 +199,49 @@ public void testExternalResource() {
 
         final Map<String, Long> resultExternalResources =
                 ResourceInformationReflector.INSTANCE.getExternalResources(resource);
-        assertThat(resultExternalResources.size(), is(1));
-        assertThat(
-                resultExternalResources.get(SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY),
-                is(SUPPORTED_EXTERNAL_RESOURCE_MAX));
+        assertThat(resultExternalResources).hasSize(1);
+        assertThat(resultExternalResources.get(SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY))
+                .isEqualTo(SUPPORTED_EXTERNAL_RESOURCE_MAX);
     }
 
-    @Test(expected = IllegalStateException.class)
-    public void testExternalResourceFailExceedMax() {
+    @Test
+    void testExternalResourceFailExceedMax() {
         assumeTrue(isExternalResourceSupported());
-
-        getAdapterWithExternalResources(
-                        SUPPORTED_EXTERNAL_RESOURCE_NAME, SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY)
-                .getPriorityAndResource(
-                        TASK_EXECUTOR_PROCESS_SPEC_WITH_EXTERNAL_RESOURCE_EXCEED_MAX);
+        assertThatThrownBy(
+                        () ->
+                                getAdapterWithExternalResources(
+                                                SUPPORTED_EXTERNAL_RESOURCE_NAME,
+                                                SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY)

Review Comment:
   I would prefer if this were called outside the lambda, to clarify which call throws the exception.
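   For example, something along these lines (assuming getAdapterWithExternalResources returns a TaskExecutorProcessSpecContainerResourcePriorityAdapter; the local variable name is illustrative):
   ```
   // build the adapter first so that only the call expected to throw sits inside the lambda
   final TaskExecutorProcessSpecContainerResourcePriorityAdapter adapter =
           getAdapterWithExternalResources(
                   SUPPORTED_EXTERNAL_RESOURCE_NAME, SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY);
   assertThatThrownBy(
                   () ->
                           adapter.getPriorityAndResource(
                                   TASK_EXECUTOR_PROCESS_SPEC_WITH_EXTERNAL_RESOURCE_EXCEED_MAX))
           .isInstanceOf(IllegalStateException.class);
   ```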



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTest.java:
##########
@@ -81,21 +73,19 @@ public class YarnFileStageTest extends TestLogger {
     //  Test setup and shutdown
     // ------------------------------------------------------------------------
 
-    @BeforeClass
-    public static void createHDFS() throws Exception {
-        Assume.assumeTrue(!OperatingSystem.isWindows());
-
-        final File tempDir = CLASS_TEMP_DIR.newFolder();
+    @BeforeAll
+    public static void createHDFS(@TempDir File hdfsTempDir) throws Exception {
+        assumeTrue(!OperatingSystem.isWindows());
 
         org.apache.hadoop.conf.Configuration hdConf = new org.apache.hadoop.conf.Configuration();
-        hdConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, tempDir.getAbsolutePath());
+        hdConf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsTempDir.getAbsolutePath());
 
         MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(hdConf);
         hdfsCluster = builder.build();
         hdfsRootPath = new Path(hdfsCluster.getURI());
     }
 
-    @AfterClass
+    @AfterAll
     public static void destroyHDFS() {

Review Comment:
   ```suggestion
       static void destroyHDFS() {
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnApplicationFileUploaderTest.java:
##########
@@ -73,15 +69,18 @@ public void testRegisterProvidedLocalResources() throws IOException {
             final Set<String> registeredResources =
                     yarnApplicationFileUploader.getRegisteredLocalResources().keySet();
 
-            assertThat(
-                    registeredResources, Matchers.containsInAnyOrder(libJars.keySet().toArray()));
+            assertThat(registeredResources)
+                    .containsExactlyInAnyOrder(libJars.keySet().toArray(new String[0]));

Review Comment:
   ```suggestion
                       .containsExactlyInAnyOrderElementsOf(libJars.keySet());
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java:
##########
@@ -173,36 +175,44 @@ private void testRecursiveUploadForYarn(String scheme, String pathSuffix) throws
         }
     }
 
-    @Test
-    @RetryOnFailure(times = 3)
-    public void testRecursiveUploadForYarnS3n() throws Exception {
+    @TestTemplate
+    @RetryOnException(times = 3, exception = Exception.class)
+    public void testRecursiveUploadForYarnS3n(@TempDir File tempFolder) throws Exception {
         // skip test on Hadoop 3: https://issues.apache.org/jira/browse/HADOOP-14738
-        Assume.assumeTrue(
-                "This test is skipped for Hadoop versions above 3",
-                VersionUtil.compareVersions(System.getProperty("hadoop.version"), "3.0.0") < 0);
+        assumeTrue(
+                VersionUtil.compareVersions(System.getProperty("hadoop.version"), "3.0.0") < 0,
+                "This test is skipped for Hadoop versions above 3");
 
         try {
             Class.forName("org.apache.hadoop.fs.s3native.NativeS3FileSystem");
         } catch (ClassNotFoundException e) {
             // not in the classpath, cannot run this test
             String msg = "Skipping test because NativeS3FileSystem is not in the class path";
             log.info(msg);
-            assumeNoException(msg, e);
+            assumingThat(
+                    true,
+                    () -> {
+                        throw e;
+                    });
         }

Review Comment:
   ```
   assumeThatThrownBy(() -> Class.forName("org.apache.hadoop.fs.s3native.NativeS3FileSystem"))
       .as("Skipping test because NativeS3FileSystem is not in the class path")
       .isNull();
   ```
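   An alternative sketch with the same effect, assuming AssertJ's Assumptions API is on the test classpath:
   ```
   assumeThatCode(() -> Class.forName("org.apache.hadoop.fs.s3native.NativeS3FileSystem"))
       .as("Skipping test because NativeS3FileSystem is not in the class path")
       .doesNotThrowAnyException();
   ```
   With assumeThatCode the test is only skipped when Class.forName actually throws, i.e. when the class is missing.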



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java:
##########
@@ -173,36 +175,44 @@ private void testRecursiveUploadForYarn(String scheme, String pathSuffix) throws
         }
     }
 
-    @Test
-    @RetryOnFailure(times = 3)
-    public void testRecursiveUploadForYarnS3n() throws Exception {
+    @TestTemplate
+    @RetryOnException(times = 3, exception = Exception.class)
+    public void testRecursiveUploadForYarnS3n(@TempDir File tempFolder) throws Exception {
         // skip test on Hadoop 3: https://issues.apache.org/jira/browse/HADOOP-14738
-        Assume.assumeTrue(
-                "This test is skipped for Hadoop versions above 3",
-                VersionUtil.compareVersions(System.getProperty("hadoop.version"), "3.0.0") < 0);
+        assumeTrue(
+                VersionUtil.compareVersions(System.getProperty("hadoop.version"), "3.0.0") < 0,
+                "This test is skipped for Hadoop versions above 3");
 
         try {
             Class.forName("org.apache.hadoop.fs.s3native.NativeS3FileSystem");
         } catch (ClassNotFoundException e) {
             // not in the classpath, cannot run this test
             String msg = "Skipping test because NativeS3FileSystem is not in the class path";
             log.info(msg);
-            assumeNoException(msg, e);
+            assumingThat(
+                    true,
+                    () -> {
+                        throw e;
+                    });
         }
-        testRecursiveUploadForYarn("s3n", "testYarn-s3n");
+        testRecursiveUploadForYarn("s3n", "testYarn-s3n", tempFolder);
     }
 
-    @Test
-    @RetryOnFailure(times = 3)
-    public void testRecursiveUploadForYarnS3a() throws Exception {
+    @TestTemplate
+    @RetryOnException(times = 3, exception = Exception.class)
+    public void testRecursiveUploadForYarnS3a(@TempDir File tempFolder) throws Exception {
         try {
             Class.forName("org.apache.hadoop.fs.s3a.S3AFileSystem");
         } catch (ClassNotFoundException e) {
             // not in the classpath, cannot run this test
             String msg = "Skipping test because S3AFileSystem is not in the class path";
             log.info(msg);
-            assumeNoException(msg, e);
+            assumingThat(

Review Comment:
   see above



##########
flink-yarn/src/test/java/org/apache/flink/yarn/UtilsTest.java:
##########
@@ -19,59 +19,50 @@
 package org.apache.flink.yarn;
 
 import org.apache.flink.configuration.Configuration;
-import org.apache.flink.core.testutils.FlinkMatchers;
 import org.apache.flink.util.FlinkException;
-import org.apache.flink.util.TestLogger;
 import org.apache.flink.yarn.configuration.YarnConfigOptions;
 
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
 
-import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Collections;
 import java.util.List;
 import java.util.stream.Stream;
 
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.is;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.assertj.core.api.Assertions.fail;
 
 /** Tests for {@link Utils}. */
-public class UtilsTest extends TestLogger {
+class UtilsTest {
 
     private static final String YARN_RM_ARBITRARY_SCHEDULER_CLAZZ =
             "org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler";
 
-    @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder();
-
     @Test
-    public void testDeleteApplicationFiles() throws Exception {
-        final Path applicationFilesDir = temporaryFolder.newFolder(".flink").toPath();
-        Files.createFile(applicationFilesDir.resolve("flink.jar"));
-        try (Stream<Path> files = Files.list(temporaryFolder.getRoot().toPath())) {
-            assertThat(files.count(), equalTo(1L));
+    void testDeleteApplicationFiles(@TempDir Path tempDir) throws Exception {
+        final Path applicationFilesDir = Files.createTempDirectory(tempDir, ".flink");
+        Files.createTempFile(applicationFilesDir, "flink", ".jar");
+        try (Stream<Path> files = Files.list(tempDir)) {
+            assertThat(files.count()).isEqualTo(1L);
         }
         try (Stream<Path> files = Files.list(applicationFilesDir)) {
-            assertThat(files.count(), equalTo(1L));
+            assertThat(files.count()).isEqualTo(1L);

Review Comment:
   ```suggestion
            assertThat(files).hasSize(1);
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/FlinkYarnSessionCliTest.java:
##########
@@ -574,12 +580,13 @@ public void testMissingShipFiles() throws Exception {
 
         final CommandLine commandLine = flinkYarnSessionCli.parseCommandLineOptions(args, false);
 
-        try {
-            flinkYarnSessionCli.toConfiguration(commandLine);
-            fail("Expected error for missing file");
-        } catch (ConfigurationException ce) {
-            assertEquals("Ship file missing.file does not exist", ce.getMessage());
-        }
+        assertThatThrownBy(
+                        () -> {
+                            flinkYarnSessionCli.toConfiguration(commandLine);
+                            fail("Expected error for missing file");

Review Comment:
   ```suggestion
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/RegisterApplicationMasterResponseReflectorTest.java:
##########
@@ -111,12 +105,12 @@ public void testCallsGetSchedulerResourceTypesMethodIfPresent() {
                 registerApplicationMasterResponseReflector.getSchedulerResourceTypeNamesUnsafe(
                         new HasMethod());
 
-        assertTrue(schedulerResourceTypeNames.isPresent());
-        assertThat(schedulerResourceTypeNames.get(), containsInAnyOrder("MEMORY", "CPU"));
+        assertThat(schedulerResourceTypeNames).isPresent();
+        assertThat(schedulerResourceTypeNames.get()).containsAll(Arrays.asList("MEMORY", "CPU"));

Review Comment:
   ```suggestion
           assertThat(schedulerResourceTypeNames.get()).contains("MEMORY", "CPU");
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/TaskExecutorProcessSpecContainerResourcePriorityAdapterTest.java:
##########
@@ -206,40 +199,49 @@ public void testExternalResource() {
 
         final Map<String, Long> resultExternalResources =
                 ResourceInformationReflector.INSTANCE.getExternalResources(resource);
-        assertThat(resultExternalResources.size(), is(1));
-        assertThat(
-                resultExternalResources.get(SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY),
-                is(SUPPORTED_EXTERNAL_RESOURCE_MAX));
+        assertThat(resultExternalResources).hasSize(1);
+        assertThat(resultExternalResources.get(SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY))
+                .isEqualTo(SUPPORTED_EXTERNAL_RESOURCE_MAX);

Review Comment:
   ```suggestion
           assertThat(resultExternalResources).containsExactly(
                   entry(
                           SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY,
                           SUPPORTED_EXTERNAL_RESOURCE_MAX));
   ```
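   (entry here refers to the statically imported org.assertj.core.api.Assertions.entry.)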



##########
flink-yarn/src/test/java/org/apache/flink/yarn/TaskExecutorProcessSpecContainerResourcePriorityAdapterTest.java:
##########
@@ -206,40 +199,49 @@ public void testExternalResource() {
 
         final Map<String, Long> resultExternalResources =
                 ResourceInformationReflector.INSTANCE.getExternalResources(resource);
-        assertThat(resultExternalResources.size(), is(1));
-        assertThat(
-                resultExternalResources.get(SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY),
-                is(SUPPORTED_EXTERNAL_RESOURCE_MAX));
+        assertThat(resultExternalResources).hasSize(1);
+        assertThat(resultExternalResources.get(SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY))
+                .isEqualTo(SUPPORTED_EXTERNAL_RESOURCE_MAX);
     }
 
-    @Test(expected = IllegalStateException.class)
-    public void testExternalResourceFailExceedMax() {
+    @Test
+    void testExternalResourceFailExceedMax() {
         assumeTrue(isExternalResourceSupported());
-
-        getAdapterWithExternalResources(
-                        SUPPORTED_EXTERNAL_RESOURCE_NAME, SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY)
-                .getPriorityAndResource(
-                        TASK_EXECUTOR_PROCESS_SPEC_WITH_EXTERNAL_RESOURCE_EXCEED_MAX);
+        assertThatThrownBy(
+                        () ->
+                                getAdapterWithExternalResources(
+                                                SUPPORTED_EXTERNAL_RESOURCE_NAME,
+                                                SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY)
+                                        .getPriorityAndResource(
+                                                TASK_EXECUTOR_PROCESS_SPEC_WITH_EXTERNAL_RESOURCE_EXCEED_MAX))
+                .isInstanceOf(IllegalStateException.class);
     }
 
-    @Test(expected = IllegalStateException.class)
-    public void testExternalResourceFailResourceTypeNotSupported() {
+    @Test
+    void testExternalResourceFailResourceTypeNotSupported() {
         assumeTrue(isExternalResourceSupported());
-
-        getAdapterWithExternalResources(
-                        UNSUPPORTED_EXTERNAL_RESOURCE_NAME,
-                        UNSUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY)
-                .getPriorityAndResource(
-                        TASK_EXECUTOR_PROCESS_SPEC_WITH_UNSUPPORTED_EXTERNAL_RESOURCE);
+        assertThatThrownBy(
+                        () ->
+                                getAdapterWithExternalResources(
+                                                UNSUPPORTED_EXTERNAL_RESOURCE_NAME,
+                                                UNSUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY)
+                                        .getPriorityAndResource(
+                                                TASK_EXECUTOR_PROCESS_SPEC_WITH_UNSUPPORTED_EXTERNAL_RESOURCE))
+                .isInstanceOf(IllegalStateException.class);
     }
 
-    @Test(expected = IllegalStateException.class)
-    public void testExternalResourceFailHadoopVersionNotSupported() {
-        assumeFalse(isExternalResourceSupported());
+    @Test
+    void testExternalResourceFailHadoopVersionNotSupported() {
 
-        getAdapterWithExternalResources(
-                        SUPPORTED_EXTERNAL_RESOURCE_NAME, SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY)
-                .getPriorityAndResource(TASK_EXECUTOR_PROCESS_SPEC_WITH_EXTERNAL_RESOURCE);
+        assumeFalse(isExternalResourceSupported());
+        assertThatThrownBy(
+                        () ->
+                                getAdapterWithExternalResources(

Review Comment:
   see above



##########
flink-yarn/src/test/java/org/apache/flink/yarn/UtilsTest.java:
##########
@@ -109,31 +100,34 @@ public void testSharedLibWithNonQualifiedPath() throws Exception {
 
         final List<org.apache.hadoop.fs.Path> sharedLibs =
                 Utils.getQualifiedRemoteSharedPaths(flinkConfig, yarnConfig);
-        assertThat(sharedLibs.size(), is(1));
-        assertThat(sharedLibs.get(0).toUri().toString(), is(qualifiedPath));
+        assertThat(sharedLibs).hasSize(1);
+        assertThat(sharedLibs.get(0).toUri().toString()).isEqualTo(qualifiedPath);
     }
 
     @Test
-    public void testSharedLibIsNotRemotePathShouldThrowException() throws IOException {
+    void testSharedLibIsNotRemotePathShouldThrowException() {
         final String localLib = "file:///flink/sharedLib";
         final Configuration flinkConfig = new Configuration();
         flinkConfig.set(YarnConfigOptions.PROVIDED_LIB_DIRS, Collections.singletonList(localLib));
 
-        try {
-            Utils.getQualifiedRemoteSharedPaths(flinkConfig, new YarnConfiguration());
-            fail("We should throw an exception when the shared lib is set to local path.");
-        } catch (FlinkException ex) {
-            final String msg =
-                    "The \""
-                            + YarnConfigOptions.PROVIDED_LIB_DIRS.key()
-                            + "\" should only "
-                            + "contain dirs accessible from all worker nodes";
-            assertThat(ex, FlinkMatchers.containsMessage(msg));
-        }
+        final String msg =
+                "The \""
+                        + YarnConfigOptions.PROVIDED_LIB_DIRS.key()
+                        + "\" should only "
+                        + "contain dirs accessible from all worker nodes";
+        assertThatThrownBy(
+                        () -> {
+                            Utils.getQualifiedRemoteSharedPaths(
+                                    flinkConfig, new YarnConfiguration());
+                            fail(
+                                    "We should throw an exception when the shared lib is set to local path.");

Review Comment:
   ```suggestion
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTest.java:
##########
@@ -320,9 +302,9 @@ private static void testCopySingleFileFromLocal(
                             localResourceDirectory,
                             LocalResourceType.FILE);
 
-            assertThat(
-                    classpath,
-                    containsInAnyOrder(new Path(localResourceDirectory, localFile).toString()));
+            assertThat(classpath)
+                    .containsExactlyInAnyOrder(

Review Comment:
   ```suggestion
                       .containsExactly(
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/TaskExecutorProcessSpecContainerResourcePriorityAdapterTest.java:
##########
@@ -206,40 +199,49 @@ public void testExternalResource() {
 
         final Map<String, Long> resultExternalResources =
                 ResourceInformationReflector.INSTANCE.getExternalResources(resource);
-        assertThat(resultExternalResources.size(), is(1));
-        assertThat(
-                resultExternalResources.get(SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY),
-                is(SUPPORTED_EXTERNAL_RESOURCE_MAX));
+        assertThat(resultExternalResources).hasSize(1);
+        assertThat(resultExternalResources.get(SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY))
+                .isEqualTo(SUPPORTED_EXTERNAL_RESOURCE_MAX);
     }
 
-    @Test(expected = IllegalStateException.class)
-    public void testExternalResourceFailExceedMax() {
+    @Test
+    void testExternalResourceFailExceedMax() {
         assumeTrue(isExternalResourceSupported());
-
-        getAdapterWithExternalResources(
-                        SUPPORTED_EXTERNAL_RESOURCE_NAME, SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY)
-                .getPriorityAndResource(
-                        TASK_EXECUTOR_PROCESS_SPEC_WITH_EXTERNAL_RESOURCE_EXCEED_MAX);
+        assertThatThrownBy(
+                        () ->
+                                getAdapterWithExternalResources(
+                                                SUPPORTED_EXTERNAL_RESOURCE_NAME,
+                                                SUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY)
+                                        .getPriorityAndResource(
+                                                TASK_EXECUTOR_PROCESS_SPEC_WITH_EXTERNAL_RESOURCE_EXCEED_MAX))
+                .isInstanceOf(IllegalStateException.class);
     }
 
-    @Test(expected = IllegalStateException.class)
-    public void testExternalResourceFailResourceTypeNotSupported() {
+    @Test
+    void testExternalResourceFailResourceTypeNotSupported() {
         assumeTrue(isExternalResourceSupported());
-
-        getAdapterWithExternalResources(
-                        UNSUPPORTED_EXTERNAL_RESOURCE_NAME,
-                        UNSUPPORTED_EXTERNAL_RESOURCE_CONFIG_KEY)
-                .getPriorityAndResource(
-                        TASK_EXECUTOR_PROCESS_SPEC_WITH_UNSUPPORTED_EXTERNAL_RESOURCE);
+        assertThatThrownBy(
+                        () ->
+                                getAdapterWithExternalResources(

Review Comment:
   see above



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTest.java:
##########
@@ -443,7 +419,9 @@ private static void verifyDirectoryRecursive(
                     String relativePath = absolutePathString.substring(workDirPrefixLength);
                     targetFiles.put(relativePath, in.readUTF());
 
-                    assertEquals("extraneous data in file " + relativePath, -1, in.read());
+                    if (in.read() != -1) {
+                        fail("extraneous data in file " + relativePath);
+                    }

Review Comment:
   ```suggestion
                    assertThat(in.read())
                            .as("extraneous data in file " + relativePath)
                            .isEqualTo(-1);
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTest.java:
##########
@@ -81,21 +73,19 @@ public class YarnFileStageTest extends TestLogger {
     //  Test setup and shutdown
     // ------------------------------------------------------------------------
 
-    @BeforeClass
-    public static void createHDFS() throws Exception {
-        Assume.assumeTrue(!OperatingSystem.isWindows());
-
-        final File tempDir = CLASS_TEMP_DIR.newFolder();
+    @BeforeAll
+    public static void createHDFS(@TempDir File hdfsTempDir) throws Exception {

Review Comment:
   ```suggestion
       static void createHDFS(@TempDir File hdfsTempDir) throws Exception {
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java:
##########
@@ -24,71 +24,70 @@
 import org.apache.flink.core.fs.FileSystem;
 import org.apache.flink.core.fs.Path;
 import org.apache.flink.runtime.fs.hdfs.HadoopFileSystem;
-import org.apache.flink.testutils.junit.RetryOnFailure;
-import org.apache.flink.testutils.junit.RetryRule;
+import org.apache.flink.testutils.junit.RetryOnException;
+import org.apache.flink.testutils.junit.extensions.retry.RetryExtension;
 import org.apache.flink.testutils.s3.S3TestCredentials;
-import org.apache.flink.util.TestLogger;
+import org.apache.flink.util.TestLoggerExtension;
 
 import org.apache.hadoop.util.VersionUtil;
-import org.junit.AfterClass;
-import org.junit.Assume;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.io.TempDir;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.nio.file.Files;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.UUID;
 
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.greaterThan;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assume.assumeFalse;
-import static org.junit.Assume.assumeNoException;
+import static org.junit.jupiter.api.Assumptions.assumeFalse;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+import static org.junit.jupiter.api.Assumptions.assumingThat;
 
 /**
  * Tests for verifying file staging during submission to YARN works with the S3A file system.
  *
  * <p>Note that the setup is similar to
  * <tt>org.apache.flink.fs.s3hadoop.HadoopS3FileSystemITCase</tt>.
  */
-public class YarnFileStageTestS3ITCase extends TestLogger {
+@ExtendWith({TestLoggerExtension.class, RetryExtension.class})

Review Comment:
   ```suggestion
   @ExtendWith(RetryExtension.class)
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTest.java:
##########
@@ -104,7 +94,7 @@ public static void destroyHDFS() {
         hdfsRootPath = null;
     }
 
-    @Before
+    @BeforeEach
     public void initConfig() {

Review Comment:
   ```suggestion
       void initConfig() {
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/UtilsTest.java:
##########
@@ -19,59 +19,50 @@
 package org.apache.flink.yarn;
 
 import org.apache.flink.configuration.Configuration;
-import org.apache.flink.core.testutils.FlinkMatchers;
 import org.apache.flink.util.FlinkException;
-import org.apache.flink.util.TestLogger;
 import org.apache.flink.yarn.configuration.YarnConfigOptions;
 
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
 
-import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Collections;
 import java.util.List;
 import java.util.stream.Stream;
 
-import static org.hamcrest.Matchers.equalTo;
-import static org.hamcrest.Matchers.is;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.assertj.core.api.Assertions.assertThatThrownBy;
+import static org.assertj.core.api.Assertions.fail;
 
 /** Tests for {@link Utils}. */
-public class UtilsTest extends TestLogger {
+class UtilsTest {
 
     private static final String YARN_RM_ARBITRARY_SCHEDULER_CLAZZ =
             "org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler";
 
-    @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder();
-
     @Test
-    public void testDeleteApplicationFiles() throws Exception {
-        final Path applicationFilesDir = temporaryFolder.newFolder(".flink").toPath();
-        Files.createFile(applicationFilesDir.resolve("flink.jar"));
-        try (Stream<Path> files = Files.list(temporaryFolder.getRoot().toPath())) {
-            assertThat(files.count(), equalTo(1L));
+    void testDeleteApplicationFiles(@TempDir Path tempDir) throws Exception {
+        final Path applicationFilesDir = Files.createTempDirectory(tempDir, ".flink");
+        Files.createTempFile(applicationFilesDir, "flink", ".jar");
+        try (Stream<Path> files = Files.list(tempDir)) {
+            assertThat(files.count()).isEqualTo(1L);
         }
         try (Stream<Path> files = Files.list(applicationFilesDir)) {
-            assertThat(files.count(), equalTo(1L));
+            assertThat(files.count()).isEqualTo(1L);
         }
 
         Utils.deleteApplicationFiles(applicationFilesDir.toString());
-        try (Stream<Path> files = Files.list(temporaryFolder.getRoot().toPath())) {
-            assertThat(files.count(), equalTo(0L));
+        try (Stream<Path> files = Files.list(tempDir.toFile().toPath())) {
+            assertThat(files.count()).isEqualTo(0L);

Review Comment:
   ```suggestion
               assertThat(files).isEmpty();
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java:
##########
@@ -24,71 +24,70 @@
 import org.apache.flink.core.fs.FileSystem;
 import org.apache.flink.core.fs.Path;
 import org.apache.flink.runtime.fs.hdfs.HadoopFileSystem;
-import org.apache.flink.testutils.junit.RetryOnFailure;
-import org.apache.flink.testutils.junit.RetryRule;
+import org.apache.flink.testutils.junit.RetryOnException;
+import org.apache.flink.testutils.junit.extensions.retry.RetryExtension;
 import org.apache.flink.testutils.s3.S3TestCredentials;
-import org.apache.flink.util.TestLogger;
+import org.apache.flink.util.TestLoggerExtension;
 
 import org.apache.hadoop.util.VersionUtil;
-import org.junit.AfterClass;
-import org.junit.Assume;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.io.TempDir;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.nio.file.Files;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.UUID;
 
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.greaterThan;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assume.assumeFalse;
-import static org.junit.Assume.assumeNoException;
+import static org.junit.jupiter.api.Assumptions.assumeFalse;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+import static org.junit.jupiter.api.Assumptions.assumingThat;
 
 /**
  * Tests for verifying file staging during submission to YARN works with the S3A file system.
  *
  * <p>Note that the setup is similar to
  * <tt>org.apache.flink.fs.s3hadoop.HadoopS3FileSystemITCase</tt>.
  */
-public class YarnFileStageTestS3ITCase extends TestLogger {
+@ExtendWith({TestLoggerExtension.class, RetryExtension.class})
+public class YarnFileStageTestS3ITCase {
 
-    private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();
-
-    @ClassRule public static final TemporaryFolder TEMP_FOLDER = new TemporaryFolder();
+    private static final Logger log = LoggerFactory.getLogger(YarnFileStageTestS3ITCase.class);
 
-    @Rule public final TemporaryFolder tempFolder = new TemporaryFolder();
-
-    @Rule public final RetryRule retryRule = new RetryRule();
+    private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();

Review Comment:
   is this unused?



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnResourceManagerDriverTest.java:
##########
@@ -175,15 +175,13 @@ public void testShutdownRequestCausesFatalError() throws Exception {
                             Throwable throwable =
                                     throwableCompletableFuture.get(TIMEOUT_SEC, TimeUnit.SECONDS);
                             assertThat(
-                                    ExceptionUtils.findThrowable(
-                                                    throwable, ResourceManagerException.class)
-                                            .isPresent(),
-                                    is(true));
+                                            ExceptionUtils.findThrowable(
+                                                    throwable, ResourceManagerException.class))
+                                    .isPresent();
                             assertThat(
-                                    ExceptionUtils.findThrowableWithMessage(
-                                                    throwable, ERROR_MESSAGE_ON_SHUTDOWN_REQUEST)
-                                            .isPresent(),
-                                    is(true));
+                                            ExceptionUtils.findThrowableWithMessage(
+                                                    throwable, ERROR_MESSAGE_ON_SHUTDOWN_REQUEST))
+                                    .isPresent();

Review Comment:
   ```
   assertThat(throwable)
       .satisfies(anyCauseMatches(ResourceManagerException.class))
       .satisfies(anyCauseMatches(ERROR_MESSAGE_ON_SHUTDOWN_REQUEST));
   ```
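   (anyCauseMatches refers to the static imports from org.apache.flink.core.testutils.FlinkAssertions, if I recall the package correctly.)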



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnFileStageTestS3ITCase.java:
##########
@@ -24,71 +24,70 @@
 import org.apache.flink.core.fs.FileSystem;
 import org.apache.flink.core.fs.Path;
 import org.apache.flink.runtime.fs.hdfs.HadoopFileSystem;
-import org.apache.flink.testutils.junit.RetryOnFailure;
-import org.apache.flink.testutils.junit.RetryRule;
+import org.apache.flink.testutils.junit.RetryOnException;
+import org.apache.flink.testutils.junit.extensions.retry.RetryExtension;
 import org.apache.flink.testutils.s3.S3TestCredentials;
-import org.apache.flink.util.TestLogger;
+import org.apache.flink.util.TestLoggerExtension;
 
 import org.apache.hadoop.util.VersionUtil;
-import org.junit.AfterClass;
-import org.junit.Assume;
-import org.junit.BeforeClass;
-import org.junit.ClassRule;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.TestTemplate;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.junit.jupiter.api.io.TempDir;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.PrintStream;
+import java.nio.file.Files;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.UUID;
 
+import static org.hamcrest.MatcherAssert.assertThat;
 import static org.hamcrest.Matchers.greaterThan;
-import static org.junit.Assert.assertThat;
-import static org.junit.Assume.assumeFalse;
-import static org.junit.Assume.assumeNoException;
+import static org.junit.jupiter.api.Assumptions.assumeFalse;
+import static org.junit.jupiter.api.Assumptions.assumeTrue;
+import static org.junit.jupiter.api.Assumptions.assumingThat;
 
 /**
  * Tests for verifying file staging during submission to YARN works with the S3A file system.
  *
  * <p>Note that the setup is similar to
  * <tt>org.apache.flink.fs.s3hadoop.HadoopS3FileSystemITCase</tt>.
  */
-public class YarnFileStageTestS3ITCase extends TestLogger {
+@ExtendWith({TestLoggerExtension.class, RetryExtension.class})
+public class YarnFileStageTestS3ITCase {
 
-    private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();
-
-    @ClassRule public static final TemporaryFolder TEMP_FOLDER = new TemporaryFolder();
+    private static final Logger log = LoggerFactory.getLogger(YarnFileStageTestS3ITCase.class);
 
-    @Rule public final TemporaryFolder tempFolder = new TemporaryFolder();
-
-    @Rule public final RetryRule retryRule = new RetryRule();
+    private static final String TEST_DATA_DIR = "tests-" + UUID.randomUUID();
 
     /** Number of tests executed. */
     private static int numRecursiveUploadTests = 0;
 
     /** Will be updated by {@link #checkCredentialsAndSetup()} if the test is not skipped. */
     private static boolean skipTest = true;
 
-    @BeforeClass
-    public static void checkCredentialsAndSetup() throws IOException {
+    @BeforeAll
+    public static void checkCredentialsAndSetup(@TempDir File tempFolder) throws IOException {

Review Comment:
   ```suggestion
       static void checkCredentialsAndSetup(@TempDir File tempFolder) throws IOException {
   ```



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnResourceManagerDriverTest.java:
##########
@@ -293,9 +304,22 @@ public void testTerminationWaitsOnContainerStopError() throws Exception {
                                                 getDriver().terminate();
                                             });
 
-                            assertThat(
-                                    driverHasTerminatedFuture,
-                                    FlinkMatchers.willNotComplete(Duration.ofMillis(20L)));
+                            assertThat(driverHasTerminatedFuture)
+                                    .satisfies(
+                                            new Condition<CompletableFuture<?>>() {

Review Comment:
   see above



##########
flink-yarn/src/test/java/org/apache/flink/yarn/entrypoint/YarnEntrypointUtilsTest.java:
##########
@@ -132,37 +130,36 @@ public void testDynamicParameterOverloading() throws IOException {
         Configuration overloadedConfiguration =
                 loadConfiguration(initialConfiguration, dynamicParameters);
 
-        assertThat(
-                overloadedConfiguration.get(JobManagerOptions.JVM_METASPACE),
-                is(MemorySize.MAX_VALUE));
+        assertThat(overloadedConfiguration.get(JobManagerOptions.JVM_METASPACE))
+                .isEqualTo(MemorySize.MAX_VALUE);
     }
 
     @Nonnull
-    private static Configuration loadConfiguration(Configuration initialConfiguration)
-            throws IOException {
+    private Configuration loadConfiguration(Configuration initialConfiguration) throws IOException {
         return loadConfiguration(initialConfiguration, new HashMap<>());
     }
 
     @Nonnull
-    private static Configuration loadConfiguration(
+    private Configuration loadConfiguration(
             Configuration initialConfiguration, Configuration dynamicParameters)
             throws IOException {
         return loadConfiguration(initialConfiguration, dynamicParameters, new HashMap<>());
     }
 
     @Nonnull
-    private static Configuration loadConfiguration(
+    private Configuration loadConfiguration(
             Configuration initialConfiguration, Map<String, String> env) throws IOException {
         return loadConfiguration(initialConfiguration, new Configuration(), env);
     }
 
     @Nonnull
-    private static Configuration loadConfiguration(

Review Comment:
   why are these no longer static? The tempBaseDir could be static as well.
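   For illustration, a sketch of how the helpers could stay static (assuming tempBaseDir is a File; JUnit 5 injects static @TempDir fields before @BeforeAll):
   ```
   @TempDir static File tempBaseDir;

   @Nonnull
   private static Configuration loadConfiguration(
           Configuration initialConfiguration, Configuration dynamicParameters)
           throws IOException {
       // delegates as before; no instance state is needed
       return loadConfiguration(initialConfiguration, dynamicParameters, new HashMap<>());
   }
   ```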



##########
flink-yarn/src/test/java/org/apache/flink/yarn/YarnResourceManagerDriverTest.java:
##########
@@ -242,9 +240,22 @@ public void testTerminationWaitsOnContainerStopSuccess() throws Exception {
                                                 getDriver().terminate();
                                             });
 
-                            assertThat(
-                                    driverHasTerminatedFuture,
-                                    FlinkMatchers.willNotComplete(Duration.ofMillis(20L)));
+                            assertThat(driverHasTerminatedFuture)
+                                    .satisfies(
+                                            new Condition<CompletableFuture<?>>() {
+                                                @Override
+                                                public boolean matches(CompletableFuture<?> item) {
+                                                    try {
+                                                        item.get(20, TimeUnit.MILLISECONDS);
+                                                    } catch (TimeoutException timeoutException) {
+                                                        return true;
+                                                    } catch (InterruptedException
+                                                            | ExecutionException e) {
+                                                        return false;
+                                                    }
+                                                    return false;
+                                                }
+                                            });

Review Comment:
   ```suggestion
                            assertThatThrownBy(() -> driverHasTerminatedFuture.get(20, TimeUnit.MILLISECONDS))
                                    .isInstanceOf(TimeoutException.class);
   ```
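   That keeps the intent of the removed FlinkMatchers.willNotComplete(Duration.ofMillis(20L)) check: the future must still be pending after 20 ms.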


