Posted to issues@flink.apache.org by GitBox <gi...@apache.org> on 2022/04/29 11:23:22 UTC

[GitHub] [flink] zentol commented on a diff in pull request #19590: [FLINK-27352][tests] [JUnit5 Migration] Module: flink-json

zentol commented on code in PR #19590:
URL: https://github.com/apache/flink/pull/19590#discussion_r861704021


##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/ogg/OggJsonSerDeSchemaTest.java:
##########
@@ -242,7 +240,7 @@ private void testSerializationDeserialization(String resourceFile) throws Except
                         "{\"before\":{\"id\":111,\"name\":\"scooter\",\"description\":\"Big 2-wheel scooter \",\"weight\":5.18},\"after\":null,\"op_type\":\"D\"}",
                         "{\"before\":null,\"after\":{\"id\":111,\"name\":\"scooter\",\"description\":\"Big 2-wheel scooter \",\"weight\":5.17},\"op_type\":\"I\"}",
                         "{\"before\":{\"id\":111,\"name\":\"scooter\",\"description\":\"Big 2-wheel scooter \",\"weight\":5.17},\"after\":null,\"op_type\":\"D\"}");
-        assertEquals(expected, actual);
+        assertThat(actual).isEqualTo(expected);

Review Comment:
   containsExactly
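   For illustration, a minimal sketch of the suggested form (assuming `expected` and `actual` are the `List<String>` values built above):
   ```
   assertThat(actual).containsExactlyElementsOf(expected);
   ```
   Alternatively, the `expected` list could be dropped and the JSON strings passed directly to `containsExactly(...)`.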



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/ogg/OggJsonSerDeSchemaTest.java:
##########
@@ -119,18 +116,19 @@ public void testDeserializationWithMetadata(String resourceFile) throws Exceptio
 
         final SimpleCollector collector = new SimpleCollector();
         deserializationSchema.deserialize(firstLine.getBytes(StandardCharsets.UTF_8), collector);
-        assertEquals(1, collector.list.size());
+        assertThat(collector.list.size()).isEqualTo(1);

Review Comment:
   hasSize
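   i.e., a sketch that asserts on the list itself rather than on its size:
   ```
   assertThat(collector.list).hasSize(1);
   ```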



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/debezium/DebeziumJsonFormatFactoryTest.java:
##########
@@ -96,24 +93,39 @@ public void testSeDeSchema() {
                 sinkMock.valueFormat.createRuntimeEncoder(
                         new SinkRuntimeProviderContext(false), PHYSICAL_DATA_TYPE);
 
-        assertEquals(expectedSer, actualSer);
+        assertThat(actualSer).isEqualTo(expectedSer);
     }
 
     @Test
-    public void testInvalidIgnoreParseError() {
-        thrown.expect(
-                containsCause(
-                        new IllegalArgumentException(
-                                "Unrecognized option for boolean: abc. Expected either true or false(case insensitive)")));
-
+    void testInvalidIgnoreParseError() {
         final Map<String, String> options =
                 getModifiedOptions(opts -> opts.put("debezium-json.ignore-parse-errors", "abc"));
 
-        createTableSource(SCHEMA, options);
+        assertThatThrownBy(() -> createTableSource(SCHEMA, options))
+                .satisfies(
+                        anyCauseMatches(
+                                IllegalArgumentException.class,
+                                "Unrecognized option for boolean: abc. "
+                                        + "Expected either true or false(case insensitive)"));
+    }
+
+    @Test
+    void testInvalidOptionForTimestampFormat() {

Review Comment:
   please don't change the order of tests; it makes the review unnecessarily difficult



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/ogg/OggJsonFileSystemITCase.java:
##########
@@ -36,9 +36,14 @@
 import java.util.stream.Collectors;
 
 import static java.lang.String.format;
+import static org.assertj.core.api.Assertions.assertThat;
 
 /** Test Filesystem connector with OGG Json. */
-public class OggJsonFileSystemITCase extends StreamingTestBase {
+class OggJsonFileSystemITCase extends StreamingTestBase {
+
+    @TempDir private java.nio.file.Path tempSourceDir;
+
+    @TempDir private java.nio.file.Path tempSinkDir;

Review Comment:
   do not use qualified imports
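   For example (a sketch, assuming `java.nio.file.Path` does not clash with another `Path` import in this file):
   ```
   import java.nio.file.Path;

   @TempDir private Path tempSourceDir;

   @TempDir private Path tempSinkDir;
   ```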



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/canal/CanalJsonSerDeSchemaTest.java:
##########
@@ -82,55 +76,55 @@ public void testFilteringTables() throws Exception {
     }
 
     @Test
-    public void testDeserializeNullRow() throws Exception {
+    void testDeserializeNullRow() throws Exception {
         final List<ReadableMetadata> requestedMetadata = Arrays.asList(ReadableMetadata.values());
         final CanalJsonDeserializationSchema deserializationSchema =
                 createCanalJsonDeserializationSchema(null, null, requestedMetadata);
         final SimpleCollector collector = new SimpleCollector();
 
         deserializationSchema.deserialize(null, collector);
         deserializationSchema.deserialize(new byte[0], collector);
-        assertEquals(0, collector.list.size());
+        assertThat(0).isEqualTo(collector.list.size());

Review Comment:
   hasSize, invert argument order
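   i.e., something like the following sketch, asserting on the actual list (an `isEmpty()` check would also work here):
   ```
   assertThat(collector.list).hasSize(0);
   ```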



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/canal/CanalJsonSerDeSchemaTest.java:
##########
@@ -271,7 +265,7 @@ private void testDeserializationWithMetadata(
         final SimpleCollector collector = new SimpleCollector();
 
         deserializationSchema.deserialize(firstLine.getBytes(StandardCharsets.UTF_8), collector);
-        assertEquals(9, collector.list.size());
+        assertThat(collector.list.size()).isEqualTo(9);

Review Comment:
   hasSize



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/ogg/OggJsonSerDeSchemaTest.java:
##########
@@ -204,7 +202,7 @@ private void testSerializationDeserialization(String resourceFile) throws Except
                         "-D(111,scooter,Big 2-wheel scooter ,5.17)");
         List<String> actual =
                 collector.list.stream().map(Object::toString).collect(Collectors.toList());
-        assertEquals(expected, actual);
+        assertThat(actual).isEqualTo(expected);

Review Comment:
   containsExactly



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/ogg/OggJsonFileSystemITCase.java:
##########
@@ -36,9 +36,14 @@
 import java.util.stream.Collectors;
 
 import static java.lang.String.format;
+import static org.assertj.core.api.Assertions.assertThat;
 
 /** Test Filesystem connector with OGG Json. */
-public class OggJsonFileSystemITCase extends StreamingTestBase {
+class OggJsonFileSystemITCase extends StreamingTestBase {
+
+    @TempDir private java.nio.file.Path tempSourceDir;

Review Comment:
   do not use qualified imports



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/maxwell/MaxwellJsonSerDerTest.java:
##########
@@ -85,23 +83,23 @@ public void testDeserializationWithMetadata() throws Exception {
                         TimestampFormat.ISO_8601);
         final SimpleCollector collector = new SimpleCollector();
         deserializationSchema.deserialize(firstLine.getBytes(StandardCharsets.UTF_8), collector);
-        assertEquals(1, collector.list.size());
+        assertThat(collector.list.size()).isEqualTo(1);
         Consumer<RowData> consumer =

Review Comment:
   this can easily be rewritten to .satisfies(...)
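   A possible shape (a sketch, assuming the `consumer` defined below is moved above the assertion and is the check for the single collected row):
   ```
   assertThat(collector.list)
           .hasSize(1)
           .first()
           .satisfies(consumer);
   ```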



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/debezium/DebeziumJsonSerDeSchemaTest.java:
##########
@@ -318,7 +307,7 @@ private void testDeserializationWithMetadata(
         final SimpleCollector collector = new SimpleCollector();
         deserializationSchema.deserialize(firstLine.getBytes(StandardCharsets.UTF_8), collector);
 
-        assertEquals(1, collector.list.size());
+        assertThat(collector.list.size()).isEqualTo(1);

Review Comment:
   hasSize



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/debezium/DebeziumJsonFormatFactoryTest.java:
##########
@@ -131,49 +143,37 @@ public void testSchemaIncludeOption() {
         DeserializationSchema<RowData> actualDeser =
                 scanSourceMock.valueFormat.createRuntimeDecoder(
                         ScanRuntimeProviderContext.INSTANCE, PHYSICAL_DATA_TYPE);
-        assertEquals(expectedDeser, actualDeser);
-
-        try {
-            final DynamicTableSink actualSink = createTableSink(SCHEMA, options);
-            TestDynamicTableFactory.DynamicTableSinkMock sinkMock =
-                    (TestDynamicTableFactory.DynamicTableSinkMock) actualSink;
-            // should fail
-            sinkMock.valueFormat.createRuntimeEncoder(
-                    new SinkRuntimeProviderContext(false), PHYSICAL_DATA_TYPE);
-            fail();
-        } catch (Exception e) {
-            assertEquals(
-                    e.getCause().getCause().getMessage(),
-                    "Debezium JSON serialization doesn't support "
-                            + "'debezium-json.schema-include' option been set to true.");
-        }
-    }
-
-    @Test
-    public void testInvalidOptionForTimestampFormat() {
-        final Map<String, String> tableOptions =
-                getModifiedOptions(
-                        opts -> opts.put("debezium-json.timestamp-format.standard", "test"));
-
-        thrown.expect(ValidationException.class);
-        thrown.expect(
-                containsCause(
-                        new ValidationException(
-                                "Unsupported value 'test' for timestamp-format.standard. Supported values are [SQL, ISO-8601].")));
-        createTableSource(SCHEMA, tableOptions);
+        assertThat(actualDeser).isEqualTo(expectedDeser);
+
+        assertThatThrownBy(
+                        () -> {
+                            final DynamicTableSink actualSink = createTableSink(SCHEMA, options);
+                            TestDynamicTableFactory.DynamicTableSinkMock sinkMock =
+                                    (TestDynamicTableFactory.DynamicTableSinkMock) actualSink;
+                            // should fail
+                            sinkMock.valueFormat.createRuntimeEncoder(
+                                    new SinkRuntimeProviderContext(false), PHYSICAL_DATA_TYPE);
+                            fail();

Review Comment:
   ```suggestion
   ```



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/maxwell/MaxwellJsonSerDerTest.java:
##########
@@ -85,23 +83,23 @@ public void testDeserializationWithMetadata() throws Exception {
                         TimestampFormat.ISO_8601);
         final SimpleCollector collector = new SimpleCollector();
         deserializationSchema.deserialize(firstLine.getBytes(StandardCharsets.UTF_8), collector);
-        assertEquals(1, collector.list.size());
+        assertThat(collector.list.size()).isEqualTo(1);

Review Comment:
   hasSize



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonRowDataSerDeSchemaTest.java:
##########
@@ -330,12 +328,12 @@ public void testSerDeMultiRows() throws Exception {
             byte[] serializedJson = objectMapper.writeValueAsBytes(root);
             RowData rowData = deserializationSchema.deserialize(serializedJson);
             byte[] actual = serializationSchema.serialize(rowData);
-            assertEquals(new String(serializedJson), new String(actual));
+            assertThat(new String(actual)).isEqualTo(new String(serializedJson));

Review Comment:
   see above



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/debezium/DebeziumJsonFileSystemITCase.java:
##########
@@ -36,9 +36,14 @@
 import java.util.stream.Collectors;
 
 import static java.lang.String.format;
+import static org.assertj.core.api.Assertions.assertThat;
 
 /** Test Filesystem connector with DebeziumJson. */
-public class DebeziumJsonFileSystemITCase extends StreamingTestBase {
+class DebeziumJsonFileSystemITCase extends StreamingTestBase {
+
+    @TempDir private java.nio.file.Path tempSourceDir;
+
+    @TempDir private java.nio.file.Path tempSinkDir;

Review Comment:
   no qualified imports



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/debezium/DebeziumJsonFormatFactoryTest.java:
##########
@@ -131,49 +143,37 @@ public void testSchemaIncludeOption() {
         DeserializationSchema<RowData> actualDeser =
                 scanSourceMock.valueFormat.createRuntimeDecoder(
                         ScanRuntimeProviderContext.INSTANCE, PHYSICAL_DATA_TYPE);
-        assertEquals(expectedDeser, actualDeser);
-
-        try {
-            final DynamicTableSink actualSink = createTableSink(SCHEMA, options);
-            TestDynamicTableFactory.DynamicTableSinkMock sinkMock =
-                    (TestDynamicTableFactory.DynamicTableSinkMock) actualSink;
-            // should fail
-            sinkMock.valueFormat.createRuntimeEncoder(
-                    new SinkRuntimeProviderContext(false), PHYSICAL_DATA_TYPE);
-            fail();
-        } catch (Exception e) {
-            assertEquals(
-                    e.getCause().getCause().getMessage(),
-                    "Debezium JSON serialization doesn't support "
-                            + "'debezium-json.schema-include' option been set to true.");
-        }
-    }
-
-    @Test
-    public void testInvalidOptionForTimestampFormat() {
-        final Map<String, String> tableOptions =
-                getModifiedOptions(
-                        opts -> opts.put("debezium-json.timestamp-format.standard", "test"));
-
-        thrown.expect(ValidationException.class);
-        thrown.expect(
-                containsCause(
-                        new ValidationException(
-                                "Unsupported value 'test' for timestamp-format.standard. Supported values are [SQL, ISO-8601].")));
-        createTableSource(SCHEMA, tableOptions);
+        assertThat(actualDeser).isEqualTo(expectedDeser);
+
+        assertThatThrownBy(
+                        () -> {
+                            final DynamicTableSink actualSink = createTableSink(SCHEMA, options);
+                            TestDynamicTableFactory.DynamicTableSinkMock sinkMock =
+                                    (TestDynamicTableFactory.DynamicTableSinkMock) actualSink;

Review Comment:
   move these lines out, then the next comment line is unnecessary
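   e.g. a sketch of that shape (the expected message is the one from the replaced try/catch above):
   ```
   final DynamicTableSink actualSink = createTableSink(SCHEMA, options);
   final TestDynamicTableFactory.DynamicTableSinkMock sinkMock =
           (TestDynamicTableFactory.DynamicTableSinkMock) actualSink;

   assertThatThrownBy(
                   () ->
                           sinkMock.valueFormat.createRuntimeEncoder(
                                   new SinkRuntimeProviderContext(false), PHYSICAL_DATA_TYPE))
           .satisfies(
                   anyCauseMatches(
                           "Debezium JSON serialization doesn't support "
                                   + "'debezium-json.schema-include' option been set to true."));
   ```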



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonRowDataSerDeSchemaTest.java:
##########
@@ -202,15 +200,15 @@ public void testSerDe() throws Exception {
                         true);
 
         byte[] actualBytes = serializationSchema.serialize(rowData);
-        assertEquals(new String(serializedJson), new String(actualBytes));
+        assertThat(new String(actualBytes)).isEqualTo(new String(serializedJson));

Review Comment:
   could compare the byte arrays instead I suppose
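   e.g.:
   ```
   assertThat(actualBytes).isEqualTo(serializedJson);
   ```
   (AssertJ's `isEqualTo` compares byte arrays by content, so the `new String(...)` round-trip would not be needed.)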



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/debezium/DebeziumJsonSerDeSchemaTest.java:
##########
@@ -119,63 +109,62 @@ public void testTombstoneMessages() throws Exception {
         SimpleCollector collector = new SimpleCollector();
         deserializationSchema.deserialize(null, collector);
         deserializationSchema.deserialize(new byte[] {}, collector);
-        assertTrue(collector.list.isEmpty());
+        assertThat(collector.list).isNullOrEmpty();

Review Comment:
   the list is never null; why do we allow it?
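   If it can never be null, the stricter form would simply be:
   ```
   assertThat(collector.list).isEmpty();
   ```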



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonBatchFileSystemITCase.java:
##########
@@ -36,10 +36,14 @@
 import java.util.Arrays;
 import java.util.List;
 
+import static org.assertj.core.api.Assertions.assertThat;
+
 /** ITCase to test json format for {@link JsonFormatFactory}. */
-public class JsonBatchFileSystemITCase extends BatchFileSystemITCaseBase {
+class JsonBatchFileSystemITCase extends BatchFileSystemITCaseBase {

Review Comment:
   given that the BatchFileSystemITCaseBase hasn't been migrated to JUnit 5 (and thus a lot of things from it won't work), does this test actually behave as expected?



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/debezium/DebeziumJsonSerDeSchemaTest.java:
##########
@@ -318,7 +307,7 @@ private void testDeserializationWithMetadata(
         final SimpleCollector collector = new SimpleCollector();
         deserializationSchema.deserialize(firstLine.getBytes(StandardCharsets.UTF_8), collector);
 
-        assertEquals(1, collector.list.size());
+        assertThat(collector.list.size()).isEqualTo(1);
         testConsumer.accept(collector.list.get(0));

Review Comment:
   rewrite to .satisfies()



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/canal/CanalJsonSerDeSchemaTest.java:
##########
@@ -271,7 +265,7 @@ private void testDeserializationWithMetadata(
         final SimpleCollector collector = new SimpleCollector();
 
         deserializationSchema.deserialize(firstLine.getBytes(StandardCharsets.UTF_8), collector);
-        assertEquals(9, collector.list.size());
+        assertThat(collector.list.size()).isEqualTo(9);
         testConsumer.accept(collector.list.get(0));

Review Comment:
   .satisfies()



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonRowDataSerDeSchemaTest.java:
##########
@@ -645,28 +637,22 @@ public void testSerializationWithTypesMismatch() {
                         "null",
                         true);
         String errorMessage = "Fail to serialize at field: f1.";
-        try {
-            serializationSchema.serialize(genericRowData);
-            fail("expecting exception message: " + errorMessage);
-        } catch (Throwable t) {
-            assertThat(t, FlinkMatchers.containsMessage(errorMessage));
-        }
+
+        assertThatThrownBy(() -> serializationSchema.serialize(genericRowData))
+                .satisfies(anyCauseMatches(RuntimeException.class, errorMessage));
     }
 
     @Test
-    public void testDeserializationWithTypesMismatch() {
+    void testDeserializationWithTypesMismatch() {
         RowType rowType = (RowType) ROW(FIELD("f0", STRING()), FIELD("f1", INT())).getLogicalType();
         String json = "{\"f0\":\"abc\", \"f1\": \"abc\"}";
         JsonRowDataDeserializationSchema deserializationSchema =
                 new JsonRowDataDeserializationSchema(
                         rowType, InternalTypeInfo.of(rowType), false, false, TimestampFormat.SQL);
         String errorMessage = "Fail to deserialize at field: f1.";
-        try {
-            deserializationSchema.deserialize(json.getBytes());
-            fail("expecting exception message: " + errorMessage);
-        } catch (Throwable t) {
-            assertThat(t, FlinkMatchers.containsMessage(errorMessage));
-        }
+
+        assertThatThrownBy(() -> deserializationSchema.deserialize(json.getBytes()))
+                .satisfies(anyCauseMatches(RuntimeException.class, errorMessage));

Review Comment:
   ```suggestion
                   .satisfies(anyCauseMatches(errorMessage));
   ```



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonRowDataSerDeSchemaTest.java:
##########
@@ -431,42 +429,41 @@ public void testDeserializationMissingField() throws Exception {
 
         Row expected = new Row(1);
         Row actual = convertToExternal(deserializationSchema.deserialize(serializedJson), dataType);
-        assertEquals(expected, actual);
+        assertThat(actual).isEqualTo(expected);
 
         // fail on missing field
         deserializationSchema =
                 new JsonRowDataDeserializationSchema(
                         schema, InternalTypeInfo.of(schema), true, false, TimestampFormat.ISO_8601);
 
         String errorMessage = "Failed to deserialize JSON '{\"id\":123123123}'.";
-        try {
-            deserializationSchema.deserialize(serializedJson);
-            fail("expecting exception message: " + errorMessage);
-        } catch (Throwable t) {
-            assertEquals(errorMessage, t.getMessage());
-        }
+
+        JsonRowDataDeserializationSchema finalDeserializationSchema = deserializationSchema;
+        assertThatThrownBy(() -> finalDeserializationSchema.deserialize(serializedJson))
+                .hasMessageContaining(errorMessage);
 
         // ignore on parse error
         deserializationSchema =
                 new JsonRowDataDeserializationSchema(
                         schema, InternalTypeInfo.of(schema), false, true, TimestampFormat.ISO_8601);
         actual = convertToExternal(deserializationSchema.deserialize(serializedJson), dataType);
-        assertEquals(expected, actual);
+        assertThat(actual).isEqualTo(expected);
 
         errorMessage =
                 "JSON format doesn't support failOnMissingField and ignoreParseErrors are both enabled.";
-        try {
-            // failOnMissingField and ignoreParseErrors both enabled
-            new JsonRowDataDeserializationSchema(
-                    schema, InternalTypeInfo.of(schema), true, true, TimestampFormat.ISO_8601);
-            Assert.fail("expecting exception message: " + errorMessage);
-        } catch (Throwable t) {
-            assertEquals(errorMessage, t.getMessage());
-        }
+        assertThatThrownBy(
+                        () ->
+                                new JsonRowDataDeserializationSchema(
+                                        schema,
+                                        InternalTypeInfo.of(schema),
+                                        true,
+                                        true,
+                                        TimestampFormat.ISO_8601))
+                .hasMessageContaining(errorMessage);

Review Comment:
   ```suggestion
                   .hasMessage(errorMessage);
   ```



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonBatchFileSystemITCase.java:
##########
@@ -97,11 +117,11 @@ public void bigDataTest() throws IOException {
         }
         expected.sort(String::compareTo);
 
-        Assert.assertEquals(expected, elements);
+        assertThat(elements).isEqualTo(expected);
     }
 
     private static File generateTestData(int numRecords) throws IOException {
-        File tempDir = TEMPORARY_FOLDER.newFolder();
+        File tempDir = temporaryFolder;
 
         File root = new File(tempDir, "id=0");

Review Comment:
   ```suggestion
           File root = new File(temporaryFolder, "id=0");
   ```
   inline the variable



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonBatchFileSystemITCase.java:
##########
@@ -88,7 +108,7 @@ public void bigDataTest() throws IOException {
         TableResult result = tEnv().executeSql("select * from bigdata_source");
         List<String> elements = new ArrayList<>();
         result.collect().forEachRemaining(r -> elements.add((String) r.getField(1)));
-        Assert.assertEquals(numRecords, elements.size());
+        assertThat(elements.size()).isEqualTo(numRecords);

Review Comment:
   hasSize



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonBatchFileSystemITCase.java:
##########
@@ -50,11 +54,27 @@ public String[] formatProperties() {
     }
 
     @Test
-    public void testParseError() throws Exception {
-        String path = new URI(resultPath()).getPath();
+    void testParseError(@TempDir java.nio.file.Path temporaryFolder) throws Exception {
+        String sql =
+                String.format(
+                        "create table nonPartitionedTable ("
+                                + "x string,"
+                                + "y int,"
+                                + "a int,"
+                                + "b bigint"
+                                + ") with ("
+                                + "'connector' = 'filesystem',"
+                                + "'path' = '%s',"
+                                + "%s)",
+                        "file://" + temporaryFolder.toString(),
+                        String.join(",\n", formatProperties()));
+        tEnv().executeSql(sql);
+
+        String path = new URI(temporaryFolder.toString()).getPath();
         new File(path).mkdirs();
         File file = new File(path, "temp_file");
         file.createNewFile();
+

Review Comment:
   revert



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonBatchFileSystemITCase.java:
##########
@@ -50,11 +54,27 @@ public String[] formatProperties() {
     }
 
     @Test
-    public void testParseError() throws Exception {
-        String path = new URI(resultPath()).getPath();
+    void testParseError(@TempDir java.nio.file.Path temporaryFolder) throws Exception {
+        String sql =

Review Comment:
   where does this come from?



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonBatchFileSystemITCase.java:
##########
@@ -50,11 +54,27 @@ public String[] formatProperties() {
     }
 
     @Test
-    public void testParseError() throws Exception {
-        String path = new URI(resultPath()).getPath();
+    void testParseError(@TempDir java.nio.file.Path temporaryFolder) throws Exception {

Review Comment:
   no qualified imports



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonRowDataSerDeSchemaTest.java:
##########
@@ -431,42 +429,41 @@ public void testDeserializationMissingField() throws Exception {
 
         Row expected = new Row(1);
         Row actual = convertToExternal(deserializationSchema.deserialize(serializedJson), dataType);
-        assertEquals(expected, actual);
+        assertThat(actual).isEqualTo(expected);
 
         // fail on missing field
         deserializationSchema =
                 new JsonRowDataDeserializationSchema(
                         schema, InternalTypeInfo.of(schema), true, false, TimestampFormat.ISO_8601);
 
         String errorMessage = "Failed to deserialize JSON '{\"id\":123123123}'.";
-        try {
-            deserializationSchema.deserialize(serializedJson);
-            fail("expecting exception message: " + errorMessage);
-        } catch (Throwable t) {
-            assertEquals(errorMessage, t.getMessage());
-        }
+
+        JsonRowDataDeserializationSchema finalDeserializationSchema = deserializationSchema;
+        assertThatThrownBy(() -> finalDeserializationSchema.deserialize(serializedJson))
+                .hasMessageContaining(errorMessage);

Review Comment:
   ```suggestion
                   .hasMessage(errorMessage);
   ```



##########
flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonRowDataSerDeSchemaTest.java:
##########
@@ -310,7 +308,7 @@ public void testSerDeMultiRows() throws Exception {
             byte[] serializedJson = objectMapper.writeValueAsBytes(root);
             RowData rowData = deserializationSchema.deserialize(serializedJson);
             byte[] actual = serializationSchema.serialize(rowData);
-            assertEquals(new String(serializedJson), new String(actual));
+            assertThat(new String(actual)).isEqualTo(new String(serializedJson));

Review Comment:
   see above



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: issues-unsubscribe@flink.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org