You are viewing a plain text version of this content. The canonical link for it was provided in the original HTML version but is not preserved in this plain-text extraction.
Posted to commits@flink.apache.org by ch...@apache.org on 2022/12/12 16:04:50 UTC

[flink] branch master updated: [FLINK-29846][build] Upgrade ArchUnit to 1.0.0

This is an automated email from the ASF dual-hosted git repository.

chesnay pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
     new ba6c0c7da89 [FLINK-29846][build] Upgrade ArchUnit to 1.0.0
ba6c0c7da89 is described below

commit ba6c0c7da89746511c567bb12ff62b9315ecfd9b
Author: Sergey Nuyanzin <sn...@gmail.com>
AuthorDate: Mon Dec 12 17:04:40 2022 +0100

    [FLINK-29846][build] Upgrade ArchUnit to 1.0.0
---
 .../flink/architecture/common/Conditions.java      |  8 +-
 .../flink/architecture/common/Predicates.java      | 25 ++----
 .../architecture/common/SourcePredicates.java      |  2 +-
 .../5b9eed8a-5fb6-4373-98ac-3be2a71941b8           |  2 +-
 .../b8900323-6aab-4e7e-9b17-f53b3c3dca46           | 92 +++++++++++-----------
 .../e5126cae-f3fe-48aa-b6fb-60ae6cc3fcd5           | 36 ++++-----
 .../architecture/rules/ApiAnnotationRules.java     |  2 +-
 .../flink/architecture/rules/ITCaseRules.java      |  5 ++
 .../83371291-f688-4eaf-a207-24981f1067f3           |  3 -
 ...va => HadoopPathBasedPartFileWriterITCase.java} |  2 +-
 ...t.java => HadoopRenameCommitterHDFSITCase.java} |  4 +-
 ...ava => HadoopRenameCommitterLocalFSITCase.java} |  4 +-
 pom.xml                                            |  2 +-
 13 files changed, 89 insertions(+), 98 deletions(-)

diff --git a/flink-architecture-tests/flink-architecture-tests-base/src/main/java/org/apache/flink/architecture/common/Conditions.java b/flink-architecture-tests/flink-architecture-tests-base/src/main/java/org/apache/flink/architecture/common/Conditions.java
index d3da232d7de..53ac1ef8122 100644
--- a/flink-architecture-tests/flink-architecture-tests-base/src/main/java/org/apache/flink/architecture/common/Conditions.java
+++ b/flink-architecture-tests/flink-architecture-tests-base/src/main/java/org/apache/flink/architecture/common/Conditions.java
@@ -43,7 +43,7 @@ public class Conditions {
         return new ArchCondition<T>(predicate.getDescription()) {
             @Override
             public void check(T item, ConditionEvents events) {
-                if (!predicate.apply(item)) {
+                if (!predicate.test(item)) {
                     final String message =
                             String.format(
                                     "%s does not satisfy: %s",
@@ -89,7 +89,7 @@ public class Conditions {
                         continue;
                     }
 
-                    if (!typePredicate.apply(leafType)) {
+                    if (!typePredicate.test(leafType)) {
                         final String message =
                                 String.format(
                                         "%s: Returned leaf type %s does not satisfy: %s",
@@ -125,7 +125,7 @@ public class Conditions {
                         continue;
                     }
 
-                    if (!typePredicate.apply(leafType)) {
+                    if (!typePredicate.test(leafType)) {
                         final String message =
                                 String.format(
                                         "%s: Argument leaf type %s does not satisfy: %s",
@@ -161,7 +161,7 @@ public class Conditions {
                         continue;
                     }
 
-                    if (!typePredicate.apply(leafType)) {
+                    if (!typePredicate.test(leafType)) {
                         final String message =
                                 String.format(
                                         "%s: Exception leaf type %s does not satisfy: %s",
diff --git a/flink-architecture-tests/flink-architecture-tests-base/src/main/java/org/apache/flink/architecture/common/Predicates.java b/flink-architecture-tests/flink-architecture-tests-base/src/main/java/org/apache/flink/architecture/common/Predicates.java
index d45cb56b5a3..1c5bd6de533 100644
--- a/flink-architecture-tests/flink-architecture-tests-base/src/main/java/org/apache/flink/architecture/common/Predicates.java
+++ b/flink-architecture-tests/flink-architecture-tests-base/src/main/java/org/apache/flink/architecture/common/Predicates.java
@@ -18,9 +18,7 @@
 
 package org.apache.flink.architecture.common;
 
-import com.tngtech.archunit.base.ChainableFunction;
 import com.tngtech.archunit.base.DescribedPredicate;
-import com.tngtech.archunit.base.Function;
 import com.tngtech.archunit.core.domain.JavaClass;
 import com.tngtech.archunit.core.domain.JavaField;
 import com.tngtech.archunit.core.domain.JavaModifier;
@@ -29,6 +27,7 @@ import com.tngtech.archunit.core.domain.properties.CanBeAnnotated;
 import java.lang.annotation.Annotation;
 import java.util.Arrays;
 import java.util.Set;
+import java.util.function.Function;
 import java.util.stream.Collectors;
 
 import static com.tngtech.archunit.lang.conditions.ArchPredicates.is;
@@ -48,7 +47,7 @@ public class Predicates {
             Class<? extends Annotation>... annotations) {
         return Arrays.stream(annotations)
                 .map(CanBeAnnotated.Predicates::annotatedWith)
-                .reduce(DescribedPredicate::or)
+                .reduce((p, pOther) -> p.or(pOther))
                 .orElseThrow(IllegalArgumentException::new)
                 .forSubtype();
     }
@@ -59,16 +58,7 @@ public class Predicates {
      */
     public static DescribedPredicate<JavaClass> containAnyFieldsInClassHierarchyThat(
             DescribedPredicate<? super JavaField> predicate) {
-        return new ContainAnyFieldsThatPredicate<>(
-                "fields",
-                new ChainableFunction<JavaClass, Set<JavaField>>() {
-                    @Override
-                    public Set<JavaField> apply(JavaClass input) {
-                        // need to get all fields with the inheritance hierarchy
-                        return input.getAllFields();
-                    }
-                },
-                predicate);
+        return new ContainAnyFieldsThatPredicate<>("fields", JavaClass::getAllFields, predicate);
     }
 
     /**
@@ -164,10 +154,7 @@ public class Predicates {
                         + Arrays.stream(other)
                                 .map(dp -> "* " + dp + "\n")
                                 .collect(Collectors.joining()),
-                t ->
-                        Arrays.stream(other)
-                                .map(dp -> dp.apply(t))
-                                .reduce(false, Boolean::logicalXor));
+                t -> Arrays.stream(other).map(dp -> dp.test(t)).reduce(false, Boolean::logicalXor));
     }
 
     private Predicates() {}
@@ -191,9 +178,9 @@ public class Predicates {
         }
 
         @Override
-        public boolean apply(JavaClass input) {
+        public boolean test(JavaClass input) {
             for (T member : getFields.apply(input)) {
-                if (predicate.apply(member)) {
+                if (predicate.test(member)) {
                     return true;
                 }
             }
diff --git a/flink-architecture-tests/flink-architecture-tests-base/src/main/java/org/apache/flink/architecture/common/SourcePredicates.java b/flink-architecture-tests/flink-architecture-tests-base/src/main/java/org/apache/flink/architecture/common/SourcePredicates.java
index c7382c16d0a..61e7b0e94e6 100644
--- a/flink-architecture-tests/flink-architecture-tests-base/src/main/java/org/apache/flink/architecture/common/SourcePredicates.java
+++ b/flink-architecture-tests/flink-architecture-tests-base/src/main/java/org/apache/flink/architecture/common/SourcePredicates.java
@@ -34,7 +34,7 @@ public class SourcePredicates {
     public static DescribedPredicate<JavaClass> areJavaClasses() {
         return new DescribedPredicate<JavaClass>("are Java classes") {
             @Override
-            public boolean apply(JavaClass clazz) {
+            public boolean test(JavaClass clazz) {
                 return isJavaClass(clazz);
             }
         };
diff --git a/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/5b9eed8a-5fb6-4373-98ac-3be2a71941b8 b/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/5b9eed8a-5fb6-4373-98ac-3be2a71941b8
index 02c2583ee73..fdcd5bd1d0e 100644
--- a/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/5b9eed8a-5fb6-4373-98ac-3be2a71941b8
+++ b/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/5b9eed8a-5fb6-4373-98ac-3be2a71941b8
@@ -384,4 +384,4 @@ org.apache.flink.streaming.runtime.operators.windowing.TimestampedValue.getStrea
 org.apache.flink.streaming.runtime.streamrecord.LatencyMarker.getOperatorId(): Returned leaf type org.apache.flink.runtime.jobgraph.OperatorID does not satisfy: reside outside of package 'org.apache.flink..' or reside in any package ['..shaded..'] or annotated with @Public or annotated with @PublicEvolving or annotated with @Deprecated
 org.apache.flink.streaming.runtime.tasks.ProcessingTimeServiceAware.setProcessingTimeService(org.apache.flink.streaming.runtime.tasks.ProcessingTimeService): Argument leaf type org.apache.flink.streaming.runtime.tasks.ProcessingTimeService does not satisfy: reside outside of package 'org.apache.flink..' or reside in any package ['..shaded..'] or annotated with @Public or annotated with @PublicEvolving or annotated with @Deprecated
 org.apache.flink.table.operations.QueryOperation.accept(org.apache.flink.table.operations.QueryOperationVisitor): Argument leaf type org.apache.flink.table.operations.QueryOperationVisitor does not satisfy: reside outside of package 'org.apache.flink..' or reside in any package ['..shaded..'] or annotated with @Public or annotated with @PublicEvolving or annotated with @Deprecated
-org.apache.flink.types.parser.FieldParser.getErrorState(): Returned leaf type org.apache.flink.types.parser.FieldParser$ParseErrorState does not satisfy: reside outside of package 'org.apache.flink..' or reside in any package ['..shaded..'] or annotated with @Public or annotated with @PublicEvolving or annotated with @Deprecated
+org.apache.flink.types.parser.FieldParser.getErrorState(): Returned leaf type org.apache.flink.types.parser.FieldParser$ParseErrorState does not satisfy: reside outside of package 'org.apache.flink..' or reside in any package ['..shaded..'] or annotated with @Public or annotated with @PublicEvolving or annotated with @Deprecated
\ No newline at end of file
diff --git a/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/b8900323-6aab-4e7e-9b17-f53b3c3dca46 b/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/b8900323-6aab-4e7e-9b17-f53b3c3dca46
index f26ab80c5b2..c4427924b39 100644
--- a/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/b8900323-6aab-4e7e-9b17-f53b3c3dca46
+++ b/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/b8900323-6aab-4e7e-9b17-f53b3c3dca46
@@ -15,13 +15,12 @@ Field <org.apache.flink.connector.hbase1.sink.HBaseDynamicTableSink.hbaseConf> h
 Field <org.apache.flink.connector.hbase2.sink.HBaseDynamicTableSink.hbaseConf> has type <org.apache.hadoop.conf.Configuration> in (HBaseDynamicTableSink.java:0)
 Method <org.apache.flink.connector.hbase.sink.HBaseSinkFunction.prepareRuntimeConfiguration()> calls method <org.apache.hadoop.conf.Configuration.get(java.lang.String)> in (HBaseSinkFunction.java:169)
 Method <org.apache.flink.connector.hbase.sink.HBaseSinkFunction.prepareRuntimeConfiguration()> has return type <org.apache.hadoop.conf.Configuration> in (HBaseSinkFunction.java:0)
-Method <org.apache.flink.connector.hbase.source.HBaseRowDataLookupFunction.prepareRuntimeConfiguration()> calls method <org.apache.hadoop.conf.Configuration.get(java.lang.String)> in (HBaseRowDataLookupFunction.java:147)
+Method <org.apache.flink.connector.hbase.source.HBaseRowDataLookupFunction.prepareRuntimeConfiguration()> calls method <org.apache.hadoop.conf.Configuration.get(java.lang.String)> in (HBaseRowDataLookupFunction.java:130)
 Method <org.apache.flink.connector.hbase.source.HBaseRowDataLookupFunction.prepareRuntimeConfiguration()> has return type <org.apache.hadoop.conf.Configuration> in (HBaseRowDataLookupFunction.java:0)
-Method <org.apache.flink.connector.hbase.table.HBaseConnectorOptionsUtil.getHBaseConfiguration(org.apache.flink.configuration.ReadableConfig)> calls method <org.apache.hadoop.conf.Configuration.set(java.lang.String, java.lang.String)> in (HBaseConnectorOptionsUtil.java:113)
-Method <org.apache.flink.connector.hbase.table.HBaseConnectorOptionsUtil.getHBaseConfiguration(org.apache.flink.configuration.ReadableConfig)> calls method <org.apache.hadoop.conf.Configuration.set(java.lang.String, java.lang.String)> in (HBaseConnectorOptionsUtil.java:114)
+Method <org.apache.flink.connector.hbase.table.HBaseConnectorOptionsUtil.getHBaseConfiguration(org.apache.flink.configuration.ReadableConfig)> calls method <org.apache.hadoop.conf.Configuration.set(java.lang.String, java.lang.String)> in (HBaseConnectorOptionsUtil.java:101)
+Method <org.apache.flink.connector.hbase.table.HBaseConnectorOptionsUtil.getHBaseConfiguration(org.apache.flink.configuration.ReadableConfig)> calls method <org.apache.hadoop.conf.Configuration.set(java.lang.String, java.lang.String)> in (HBaseConnectorOptionsUtil.java:102)
+Method <org.apache.flink.connector.hbase.table.HBaseConnectorOptionsUtil.getHBaseConfiguration(org.apache.flink.configuration.ReadableConfig)> calls method <org.apache.hadoop.conf.Configuration.set(java.lang.String, java.lang.String)> in (HBaseConnectorOptionsUtil.java:108)
 Method <org.apache.flink.connector.hbase.table.HBaseConnectorOptionsUtil.getHBaseConfiguration(org.apache.flink.configuration.ReadableConfig)> has return type <org.apache.hadoop.conf.Configuration> in (HBaseConnectorOptionsUtil.java:0)
-Method <org.apache.flink.connector.hbase.table.HBaseConnectorOptionsUtil.lambda$getHBaseConfiguration$0(org.apache.hadoop.conf.Configuration, java.lang.Object, java.lang.Object)> calls method <org.apache.hadoop.conf.Configuration.set(java.lang.String, java.lang.String)> in (HBaseConnectorOptionsUtil.java:120)
-Method <org.apache.flink.connector.hbase.table.HBaseConnectorOptionsUtil.lambda$getHBaseConfiguration$0(org.apache.hadoop.conf.Configuration, java.lang.Object, java.lang.Object)> has parameter of type <org.apache.hadoop.conf.Configuration> in (HBaseConnectorOptionsUtil.java:0)
 Method <org.apache.flink.connector.hbase.util.HBaseConfigurationUtil.addHBaseConfIfFound(org.apache.hadoop.conf.Configuration, java.lang.String)> calls constructor <org.apache.hadoop.fs.Path.<init>(java.lang.String)> in (HBaseConfigurationUtil.java:106)
 Method <org.apache.flink.connector.hbase.util.HBaseConfigurationUtil.addHBaseConfIfFound(org.apache.hadoop.conf.Configuration, java.lang.String)> calls constructor <org.apache.hadoop.fs.Path.<init>(java.lang.String)> in (HBaseConfigurationUtil.java:96)
 Method <org.apache.flink.connector.hbase.util.HBaseConfigurationUtil.addHBaseConfIfFound(org.apache.hadoop.conf.Configuration, java.lang.String)> calls method <org.apache.hadoop.conf.Configuration.addResource(org.apache.hadoop.fs.Path)> in (HBaseConfigurationUtil.java:106)
@@ -49,44 +48,47 @@ Method <org.apache.flink.connector.hbase1.sink.HBaseDynamicTableSink.getConfigur
 Method <org.apache.flink.connector.hbase1.source.AbstractTableInputFormat.getHadoopConfiguration()> has return type <org.apache.hadoop.conf.Configuration> in (AbstractTableInputFormat.java:0)
 Method <org.apache.flink.connector.hbase2.sink.HBaseDynamicTableSink.getConfiguration()> has return type <org.apache.hadoop.conf.Configuration> in (HBaseDynamicTableSink.java:0)
 Method <org.apache.flink.connector.hbase2.source.AbstractTableInputFormat.getHadoopConfiguration()> has return type <org.apache.hadoop.conf.Configuration> in (AbstractTableInputFormat.java:0)
-Method <org.apache.flink.connector.hbase2.source.HBaseRowDataAsyncLookupFunction.prepareRuntimeConfiguration()> calls method <org.apache.hadoop.conf.Configuration.get(java.lang.String)> in (HBaseRowDataAsyncLookupFunction.java:230)
+Method <org.apache.flink.connector.hbase2.source.HBaseRowDataAsyncLookupFunction.prepareRuntimeConfiguration()> calls method <org.apache.hadoop.conf.Configuration.get(java.lang.String)> in (HBaseRowDataAsyncLookupFunction.java:188)
 Method <org.apache.flink.connector.hbase2.source.HBaseRowDataAsyncLookupFunction.prepareRuntimeConfiguration()> has return type <org.apache.hadoop.conf.Configuration> in (HBaseRowDataAsyncLookupFunction.java:0)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$10(java.lang.Object)> calls method <oracle.jdbc.internal.OracleBlob.getBytes(long, int)> in (OracleRowConverter.java:128)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$10(java.lang.Object)> calls method <oracle.jdbc.internal.OracleBlob.length()> in (OracleRowConverter.java:128)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$10(java.lang.Object)> calls method <oracle.sql.RAW.getBytes()> in (OracleRowConverter.java:125)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$10(java.lang.Object)> checks instanceof <oracle.jdbc.internal.OracleBlob> in (OracleRowConverter.java:125)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$10(java.lang.Object)> checks instanceof <oracle.sql.RAW> in (OracleRowConverter.java:124)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$11(java.lang.Object)> calls method <oracle.sql.NUMBER.intValue()> in (OracleRowConverter.java:133)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$11(java.lang.Object)> checks instanceof <oracle.sql.NUMBER> in (OracleRowConverter.java:133)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$12(java.lang.Object)> calls method <oracle.sql.DATE.dateValue()> in (OracleRowConverter.java:137)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$12(java.lang.Object)> checks instanceof <oracle.sql.DATE> in (OracleRowConverter.java:136)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$13(java.lang.Object)> calls method <oracle.sql.DATE.timeValue()> in (OracleRowConverter.java:149)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$13(java.lang.Object)> checks instanceof <oracle.sql.DATE> in (OracleRowConverter.java:147)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$14(java.lang.Object)> calls method <oracle.sql.TIMESTAMP.timestampValue()> in (OracleRowConverter.java:155)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$14(java.lang.Object)> checks instanceof <oracle.sql.TIMESTAMP> in (OracleRowConverter.java:154)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$15(java.lang.Object)> calls method <oracle.sql.TIMESTAMPTZ.getTimeZone()> in (OracleRowConverter.java:164)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$15(java.lang.Object)> calls method <oracle.sql.TIMESTAMPTZ.timestampValue()> in (OracleRowConverter.java:163)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$15(java.lang.Object)> checks instanceof <oracle.sql.TIMESTAMPTZ> in (OracleRowConverter.java:159)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$2(java.lang.Object)> calls method <oracle.sql.NUMBER.booleanValue()> in (OracleRowConverter.java:65)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$2(java.lang.Object)> checks instanceof <oracle.sql.NUMBER> in (OracleRowConverter.java:65)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$3(java.lang.Object)> calls method <oracle.sql.BINARY_FLOAT.floatValue()> in (OracleRowConverter.java:71)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$3(java.lang.Object)> calls method <oracle.sql.NUMBER.floatValue()> in (OracleRowConverter.java:69)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$3(java.lang.Object)> checks instanceof <oracle.sql.BINARY_FLOAT> in (OracleRowConverter.java:69)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$3(java.lang.Object)> checks instanceof <oracle.sql.NUMBER> in (OracleRowConverter.java:68)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$4(java.lang.Object)> calls method <oracle.sql.BINARY_DOUBLE.doubleValue()> in (OracleRowConverter.java:80)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$4(java.lang.Object)> calls method <oracle.sql.NUMBER.doubleValue()> in (OracleRowConverter.java:78)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$4(java.lang.Object)> checks instanceof <oracle.sql.BINARY_DOUBLE> in (OracleRowConverter.java:78)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$4(java.lang.Object)> checks instanceof <oracle.sql.NUMBER> in (OracleRowConverter.java:77)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$5(java.lang.Object)> calls method <oracle.sql.NUMBER.byteValue()> in (OracleRowConverter.java:87)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$5(java.lang.Object)> checks instanceof <oracle.sql.NUMBER> in (OracleRowConverter.java:86)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$6(java.lang.Object)> calls method <oracle.sql.NUMBER.shortValue()> in (OracleRowConverter.java:92)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$6(java.lang.Object)> checks instanceof <oracle.sql.NUMBER> in (OracleRowConverter.java:91)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$7(java.lang.Object)> calls method <oracle.sql.NUMBER.intValue()> in (OracleRowConverter.java:97)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$7(java.lang.Object)> checks instanceof <oracle.sql.NUMBER> in (OracleRowConverter.java:96)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$8(java.lang.Object)> calls method <oracle.sql.NUMBER.longValue()> in (OracleRowConverter.java:102)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$8(java.lang.Object)> checks instanceof <oracle.sql.NUMBER> in (OracleRowConverter.java:101)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$9(java.lang.Object)> calls method <oracle.jdbc.internal.OracleClob.stringValue()> in (OracleRowConverter.java:118)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$9(java.lang.Object)> calls method <oracle.sql.CHAR.getString()> in (OracleRowConverter.java:116)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$9(java.lang.Object)> checks instanceof <oracle.jdbc.internal.OracleClob> in (OracleRowConverter.java:116)
-Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.lambda$createInternalConverter$224afae6$9(java.lang.Object)> checks instanceof <oracle.sql.CHAR> in (OracleRowConverter.java:115)
-Method <org.apache.flink.connector.jdbc.internal.converter.PostgresRowConverter.lambda$createPostgresArrayConverter$4f4cdb95$1(java.lang.Class, org.apache.flink.connector.jdbc.converter.AbstractJdbcRowConverter$JdbcDeserializationConverter, java.lang.Object)> calls method <org.postgresql.jdbc.PgArray.getArray()> in (PostgresRowConverter.java:90)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.jdbc.internal.OracleBlob.getBytes(long, int)> in (OracleRowConverter.java:128)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.jdbc.internal.OracleBlob.length()> in (OracleRowConverter.java:128)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.jdbc.internal.OracleClob.stringValue()> in (OracleRowConverter.java:118)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.sql.BINARY_DOUBLE.doubleValue()> in (OracleRowConverter.java:80)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.sql.BINARY_FLOAT.floatValue()> in (OracleRowConverter.java:71)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.sql.CHAR.getString()> in (OracleRowConverter.java:116)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.sql.DATE.dateValue()> in (OracleRowConverter.java:137)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.sql.DATE.timeValue()> in (OracleRowConverter.java:149)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.sql.NUMBER.booleanValue()> in (OracleRowConverter.java:65)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.sql.NUMBER.byteValue()> in (OracleRowConverter.java:87)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.sql.NUMBER.doubleValue()> in (OracleRowConverter.java:78)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.sql.NUMBER.floatValue()> in (OracleRowConverter.java:69)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.sql.NUMBER.intValue()> in (OracleRowConverter.java:133)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.sql.NUMBER.intValue()> in (OracleRowConverter.java:97)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.sql.NUMBER.longValue()> in (OracleRowConverter.java:102)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.sql.NUMBER.shortValue()> in (OracleRowConverter.java:92)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.sql.RAW.getBytes()> in (OracleRowConverter.java:125)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.sql.TIMESTAMP.timestampValue()> in (OracleRowConverter.java:155)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.sql.TIMESTAMPTZ.getTimeZone()> in (OracleRowConverter.java:164)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <oracle.sql.TIMESTAMPTZ.timestampValue()> in (OracleRowConverter.java:163)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.jdbc.internal.OracleBlob.getBytes(long, int)> in (OracleRowConverter.java:128)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.jdbc.internal.OracleBlob.length()> in (OracleRowConverter.java:128)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.jdbc.internal.OracleClob.stringValue()> in (OracleRowConverter.java:118)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.sql.BINARY_DOUBLE.doubleValue()> in (OracleRowConverter.java:80)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.sql.BINARY_FLOAT.floatValue()> in (OracleRowConverter.java:71)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.sql.CHAR.getString()> in (OracleRowConverter.java:116)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.sql.DATE.dateValue()> in (OracleRowConverter.java:137)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.sql.DATE.timeValue()> in (OracleRowConverter.java:149)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.sql.NUMBER.booleanValue()> in (OracleRowConverter.java:65)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.sql.NUMBER.byteValue()> in (OracleRowConverter.java:87)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.sql.NUMBER.doubleValue()> in (OracleRowConverter.java:78)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.sql.NUMBER.floatValue()> in (OracleRowConverter.java:69)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.sql.NUMBER.intValue()> in (OracleRowConverter.java:133)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.sql.NUMBER.intValue()> in (OracleRowConverter.java:97)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.sql.NUMBER.longValue()> in (OracleRowConverter.java:102)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.sql.NUMBER.shortValue()> in (OracleRowConverter.java:92)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.sql.RAW.getBytes()> in (OracleRowConverter.java:125)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.sql.TIMESTAMP.timestampValue()> in (OracleRowConverter.java:155)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.sql.TIMESTAMPTZ.getTimeZone()> in (OracleRowConverter.java:164)
+Method <org.apache.flink.connector.jdbc.internal.converter.OracleRowConverter.createInternalConverter(org.apache.flink.table.types.logical.LogicalType)> calls method <oracle.sql.TIMESTAMPTZ.timestampValue()> in (OracleRowConverter.java:163)
+Method <org.apache.flink.connector.jdbc.internal.converter.PostgresRowConverter.$deserializeLambda$(java.lang.invoke.SerializedLambda)> calls method <org.postgresql.jdbc.PgArray.getArray()> in (PostgresRowConverter.java:87)
+Method <org.apache.flink.connector.jdbc.internal.converter.PostgresRowConverter.createPostgresArrayConverter(org.apache.flink.table.types.logical.ArrayType)> calls method <org.postgresql.jdbc.PgArray.getArray()> in (PostgresRowConverter.java:87)
\ No newline at end of file
diff --git a/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/e5126cae-f3fe-48aa-b6fb-60ae6cc3fcd5 b/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/e5126cae-f3fe-48aa-b6fb-60ae6cc3fcd5
index 52bf0eb6772..7d502f7a21d 100644
--- a/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/e5126cae-f3fe-48aa-b6fb-60ae6cc3fcd5
+++ b/flink-architecture-tests/flink-architecture-tests-production/archunit-violations/e5126cae-f3fe-48aa-b6fb-60ae6cc3fcd5
@@ -1,5 +1,5 @@
 Constructor <org.apache.flink.runtime.state.heap.CopyOnWriteStateMapSnapshot.<init>(org.apache.flink.runtime.state.heap.CopyOnWriteStateMap)> calls method <org.apache.flink.runtime.state.heap.CopyOnWriteStateMap.snapshotMapArrays()> in (CopyOnWriteStateMapSnapshot.java:86)
-Constructor <org.apache.flink.streaming.runtime.io.StreamTaskExternallyInducedSourceInput.<init>(org.apache.flink.streaming.api.operators.SourceOperator, java.util.function.Consumer, int, int)> calls method <org.apache.flink.streaming.api.operators.SourceOperator.getSourceReader()> in (StreamTaskExternallyInducedSourceInput.java:39)
+Constructor <org.apache.flink.streaming.runtime.io.StreamTaskExternallyInducedSourceInput.<init>(org.apache.flink.streaming.api.operators.SourceOperator, java.util.function.Consumer, int, int)> calls method <org.apache.flink.streaming.api.operators.SourceOperator.getSourceReader()> in (StreamTaskExternallyInducedSourceInput.java:41)
 Method <org.apache.flink.api.java.typeutils.runtime.TupleSerializerSnapshot.getNestedSerializers(org.apache.flink.api.java.typeutils.runtime.TupleSerializer)> calls method <org.apache.flink.api.java.typeutils.runtime.TupleSerializer.getFieldSerializers()> in (TupleSerializerSnapshot.java:70)
 Method <org.apache.flink.cep.nfa.sharedbuffer.LockableTypeSerializerSnapshot.getNestedSerializers(org.apache.flink.cep.nfa.sharedbuffer.Lockable$LockableTypeSerializer)> calls method <org.apache.flink.cep.nfa.sharedbuffer.Lockable$LockableTypeSerializer.getElementSerializer()> in (LockableTypeSerializerSnapshot.java:60)
 Method <org.apache.flink.orc.nohive.shim.OrcNoHiveShim.createRecordReader(org.apache.hadoop.conf.Configuration, org.apache.orc.TypeDescription, [I, java.util.List, org.apache.flink.core.fs.Path, long, long)> calls method <org.apache.flink.orc.shim.OrcShimV200.getOffsetAndLengthForSplit(long, long, java.util.List)> in (OrcNoHiveShim.java:62)
@@ -8,21 +8,21 @@ Method <org.apache.flink.runtime.blob.BlobInputStream.read([B, int, int)> calls
 Method <org.apache.flink.runtime.blob.BlobOutputStream.receiveAndCheckPutResponse(java.io.InputStream, java.security.MessageDigest, org.apache.flink.runtime.blob.BlobKey$BlobType)> calls method <org.apache.flink.runtime.blob.BlobKey.getHash()> in (BlobOutputStream.java:155)
 Method <org.apache.flink.runtime.blob.BlobUtils.checkAndDeleteCorruptedBlobs(java.nio.file.Path, org.slf4j.Logger)> calls method <org.apache.flink.runtime.blob.BlobKey.getHash()> in (BlobUtils.java:514)
 Method <org.apache.flink.runtime.blob.FileSystemBlobStore.get(java.lang.String, java.io.File, org.apache.flink.runtime.blob.BlobKey)> calls method <org.apache.flink.runtime.blob.BlobKey.getHash()> in (FileSystemBlobStore.java:124)
-Method <org.apache.flink.runtime.executiongraph.Execution.finishPartitionsAndUpdateConsumers()> calls method <org.apache.flink.runtime.executiongraph.ExecutionVertex.finishAllBlockingPartitions()> in (Execution.java:955)
-Method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedHaServicesWithLeadershipControl.grantDispatcherLeadership()> calls method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedLeaderService.grantLeadership()> in (EmbeddedHaServicesWithLeadershipControl.java:78)
-Method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedHaServicesWithLeadershipControl.grantJobMasterLeadership(org.apache.flink.api.common.JobID)> calls method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedLeaderService.grantLeadership()> in (EmbeddedHaServicesWithLeadershipControl.java:90)
-Method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedHaServicesWithLeadershipControl.grantResourceManagerLeadership()> calls method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedLeaderService.grantLeadership()> in (EmbeddedHaServicesWithLeadershipControl.java:104)
-Method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedHaServicesWithLeadershipControl.revokeDispatcherLeadership()> calls method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedLeaderService.revokeLeadership()> in (EmbeddedHaServicesWithLeadershipControl.java:72)
-Method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedHaServicesWithLeadershipControl.revokeJobMasterLeadership(org.apache.flink.api.common.JobID)> calls method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedLeaderService.revokeLeadership()> in (EmbeddedHaServicesWithLeadershipControl.java:84)
-Method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedHaServicesWithLeadershipControl.revokeResourceManagerLeadership()> calls method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedLeaderService.revokeLeadership()> in (EmbeddedHaServicesWithLeadershipControl.java:97)
-Method <org.apache.flink.runtime.io.network.partition.ResourceManagerPartitionTrackerImpl.lambda$listDataSets$10(java.util.Map$Entry)> calls method <org.apache.flink.runtime.io.network.partition.DataSetMetaInfo.withNumRegisteredPartitions(int, int)> in (ResourceManagerPartitionTrackerImpl.java:269)
-Method <org.apache.flink.runtime.operators.coordination.RecreateOnResetOperatorCoordinator$DeferrableCoordinator.closeAsync(long)> calls method <org.apache.flink.runtime.operators.coordination.RecreateOnResetOperatorCoordinator$QuiesceableContext.quiesce()> in (RecreateOnResetOperatorCoordinator.java:332)
-Method <org.apache.flink.runtime.taskexecutor.TaskManagerConfiguration.fromConfiguration(org.apache.flink.configuration.Configuration, org.apache.flink.runtime.taskexecutor.TaskExecutorResourceSpec, java.lang.String, java.io.File)> calls method <org.apache.flink.runtime.taskexecutor.TaskExecutorResourceUtils.generateDefaultSlotResourceProfile(org.apache.flink.runtime.taskexecutor.TaskExecutorResourceSpec, int)> in (TaskManagerConfiguration.java:246)
-Method <org.apache.flink.runtime.taskexecutor.TaskManagerConfiguration.fromConfiguration(org.apache.flink.configuration.Configuration, org.apache.flink.runtime.taskexecutor.TaskExecutorResourceSpec, java.lang.String, java.io.File)> calls method <org.apache.flink.runtime.taskexecutor.TaskExecutorResourceUtils.generateTotalAvailableResourceProfile(org.apache.flink.runtime.taskexecutor.TaskExecutorResourceSpec)> in (TaskManagerConfiguration.java:248)
-Method <org.apache.flink.runtime.taskexecutor.TaskManagerServices.createTaskSlotTable(int, org.apache.flink.runtime.taskexecutor.TaskExecutorResourceSpec, long, int, java.util.concurrent.Executor)> calls method <org.apache.flink.runtime.taskexecutor.TaskExecutorResourceUtils.generateDefaultSlotResourceProfile(org.apache.flink.runtime.taskexecutor.TaskExecutorResourceSpec, int)> in (TaskManagerServices.java:395)
-Method <org.apache.flink.runtime.taskexecutor.TaskManagerServices.createTaskSlotTable(int, org.apache.flink.runtime.taskexecutor.TaskExecutorResourceSpec, long, int, java.util.concurrent.Executor)> calls method <org.apache.flink.runtime.taskexecutor.TaskExecutorResourceUtils.generateTotalAvailableResourceProfile(org.apache.flink.runtime.taskexecutor.TaskExecutorResourceSpec)> in (TaskManagerServices.java:393)
-Method <org.apache.flink.streaming.api.operators.SourceOperator$1$1.asClassLoader()> calls method <org.apache.flink.streaming.api.operators.SourceOperator.getRuntimeContext()> in (SourceOperator.java:269)
-Method <org.apache.flink.streaming.api.operators.SourceOperator$1$1.registerReleaseHookIfAbsent(java.lang.String, java.lang.Runnable)> calls method <org.apache.flink.streaming.api.operators.SourceOperator.getRuntimeContext()> in (SourceOperator.java:275)
+Method <org.apache.flink.runtime.executiongraph.Execution.finishPartitionsAndUpdateConsumers()> calls method <org.apache.flink.runtime.executiongraph.ExecutionVertex.finishAllBlockingPartitions()> in (Execution.java:978)
+Method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedHaServicesWithLeadershipControl.grantDispatcherLeadership()> calls method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedLeaderService.grantLeadership()> in (EmbeddedHaServicesWithLeadershipControl.java:83)
+Method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedHaServicesWithLeadershipControl.grantJobMasterLeadership(org.apache.flink.api.common.JobID)> calls method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedLeaderService.grantLeadership()> in (EmbeddedHaServicesWithLeadershipControl.java:95)
+Method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedHaServicesWithLeadershipControl.grantResourceManagerLeadership()> calls method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedLeaderService.grantLeadership()> in (EmbeddedHaServicesWithLeadershipControl.java:109)
+Method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedHaServicesWithLeadershipControl.revokeDispatcherLeadership()> calls method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedLeaderService.revokeLeadership()> in (EmbeddedHaServicesWithLeadershipControl.java:77)
+Method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedHaServicesWithLeadershipControl.revokeJobMasterLeadership(org.apache.flink.api.common.JobID)> calls method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedLeaderService.revokeLeadership()> in (EmbeddedHaServicesWithLeadershipControl.java:89)
+Method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedHaServicesWithLeadershipControl.revokeResourceManagerLeadership()> calls method <org.apache.flink.runtime.highavailability.nonha.embedded.EmbeddedLeaderService.revokeLeadership()> in (EmbeddedHaServicesWithLeadershipControl.java:102)
+Method <org.apache.flink.runtime.io.network.partition.ResourceManagerPartitionTrackerImpl.listDataSets()> calls method <org.apache.flink.runtime.io.network.partition.DataSetMetaInfo.withNumRegisteredPartitions(int, int)> in (ResourceManagerPartitionTrackerImpl.java:286)
+Method <org.apache.flink.runtime.operators.coordination.RecreateOnResetOperatorCoordinator$DeferrableCoordinator.closeAsync(long)> calls method <org.apache.flink.runtime.operators.coordination.RecreateOnResetOperatorCoordinator$QuiesceableContext.quiesce()> in (RecreateOnResetOperatorCoordinator.java:343)
+Method <org.apache.flink.runtime.taskexecutor.TaskManagerConfiguration.fromConfiguration(org.apache.flink.configuration.Configuration, org.apache.flink.runtime.taskexecutor.TaskExecutorResourceSpec, java.lang.String, java.io.File)> calls method <org.apache.flink.runtime.taskexecutor.TaskExecutorResourceUtils.generateDefaultSlotResourceProfile(org.apache.flink.runtime.taskexecutor.TaskExecutorResourceSpec, int)> in (TaskManagerConfiguration.java:244)
+Method <org.apache.flink.runtime.taskexecutor.TaskManagerConfiguration.fromConfiguration(org.apache.flink.configuration.Configuration, org.apache.flink.runtime.taskexecutor.TaskExecutorResourceSpec, java.lang.String, java.io.File)> calls method <org.apache.flink.runtime.taskexecutor.TaskExecutorResourceUtils.generateTotalAvailableResourceProfile(org.apache.flink.runtime.taskexecutor.TaskExecutorResourceSpec)> in (TaskManagerConfiguration.java:246)
+Method <org.apache.flink.runtime.taskexecutor.TaskManagerServices.createTaskSlotTable(int, org.apache.flink.runtime.taskexecutor.TaskExecutorResourceSpec, long, int, java.util.concurrent.Executor)> calls method <org.apache.flink.runtime.taskexecutor.TaskExecutorResourceUtils.generateDefaultSlotResourceProfile(org.apache.flink.runtime.taskexecutor.TaskExecutorResourceSpec, int)> in (TaskManagerServices.java:405)
+Method <org.apache.flink.runtime.taskexecutor.TaskManagerServices.createTaskSlotTable(int, org.apache.flink.runtime.taskexecutor.TaskExecutorResourceSpec, long, int, java.util.concurrent.Executor)> calls method <org.apache.flink.runtime.taskexecutor.TaskExecutorResourceUtils.generateTotalAvailableResourceProfile(org.apache.flink.runtime.taskexecutor.TaskExecutorResourceSpec)> in (TaskManagerServices.java:403)
+Method <org.apache.flink.streaming.api.operators.SourceOperator$1$1.asClassLoader()> calls method <org.apache.flink.streaming.api.operators.SourceOperator.getRuntimeContext()> in (SourceOperator.java:288)
+Method <org.apache.flink.streaming.api.operators.SourceOperator$1$1.registerReleaseHookIfAbsent(java.lang.String, java.lang.Runnable)> calls method <org.apache.flink.streaming.api.operators.SourceOperator.getRuntimeContext()> in (SourceOperator.java:294)
 Method <org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer.getTransactionCoordinatorId()> calls method <org.apache.flink.streaming.connectors.kafka.internals.FlinkKafkaInternalProducer.getTransactionCoordinatorId()> in (FlinkKafkaProducer.java:1327)
-Method <org.apache.flink.streaming.runtime.tasks.SourceOperatorStreamTask.init()> calls method <org.apache.flink.streaming.api.operators.SourceOperator.getSourceReader()> in (SourceOperatorStreamTask.java:75)
-Method <org.apache.flink.streaming.runtime.tasks.mailbox.MailboxExecutorImpl.isIdle()> calls method <org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.isDefaultActionAvailable()> in (MailboxExecutorImpl.java:63)
+Method <org.apache.flink.streaming.runtime.tasks.SourceOperatorStreamTask.init()> calls method <org.apache.flink.streaming.api.operators.SourceOperator.getSourceReader()> in (SourceOperatorStreamTask.java:96)
+Method <org.apache.flink.streaming.runtime.tasks.mailbox.MailboxExecutorImpl.isIdle()> calls method <org.apache.flink.streaming.runtime.tasks.mailbox.MailboxProcessor.isDefaultActionAvailable()> in (MailboxExecutorImpl.java:63)
\ No newline at end of file
diff --git a/flink-architecture-tests/flink-architecture-tests-production/src/main/java/org/apache/flink/architecture/rules/ApiAnnotationRules.java b/flink-architecture-tests/flink-architecture-tests-production/src/main/java/org/apache/flink/architecture/rules/ApiAnnotationRules.java
index deade017a7b..230f146665e 100644
--- a/flink-architecture-tests/flink-architecture-tests-production/src/main/java/org/apache/flink/architecture/rules/ApiAnnotationRules.java
+++ b/flink-architecture-tests/flink-architecture-tests-production/src/main/java/org/apache/flink/architecture/rules/ApiAnnotationRules.java
@@ -138,7 +138,7 @@ public class ApiAnnotationRules {
                                             "the target is annotated @"
                                                     + VisibleForTesting.class.getSimpleName()) {
                                         @Override
-                                        public boolean apply(JavaMethodCall call) {
+                                        public boolean test(JavaMethodCall call) {
                                             final JavaClass targetOwner = call.getTargetOwner();
                                             final JavaClass originOwner = call.getOriginOwner();
 
diff --git a/flink-architecture-tests/flink-architecture-tests-test/src/main/java/org/apache/flink/architecture/rules/ITCaseRules.java b/flink-architecture-tests/flink-architecture-tests-test/src/main/java/org/apache/flink/architecture/rules/ITCaseRules.java
index e679f5cce0a..30809e779ef 100644
--- a/flink-architecture-tests/flink-architecture-tests-test/src/main/java/org/apache/flink/architecture/rules/ITCaseRules.java
+++ b/flink-architecture-tests/flink-architecture-tests-test/src/main/java/org/apache/flink/architecture/rules/ITCaseRules.java
@@ -57,6 +57,9 @@ public class ITCaseRules {
                                     .doNotHaveModifier(ABSTRACT)
                                     .should()
                                     .haveSimpleNameEndingWith("ITCase"))
+                    // FALSE by default since 0.23.0 however not every module has inheritors of
+                    // AbstractTestBase
+                    .allowEmptyShould(true)
                     .as(
                             "Tests inheriting from AbstractTestBase should have name ending with ITCase");
 
@@ -128,6 +131,8 @@ public class ITCaseRules {
                                                                     miniClusterWithClientResourceClassRule())
                                                             .or(
                                                                     miniClusterWithClientResourceRule()))))
+                    // FALSE by default since 0.23.0 however not every module has *ITCase tests
+                    .allowEmptyShould(true)
                     .as("ITCASE tests should use a MiniCluster resource or extension");
 
     private static DescribedPredicate<JavaClass> miniClusterWithClientResourceClassRule() {
diff --git a/flink-formats/flink-hadoop-bulk/archunit-violations/83371291-f688-4eaf-a207-24981f1067f3 b/flink-formats/flink-hadoop-bulk/archunit-violations/83371291-f688-4eaf-a207-24981f1067f3
index 478fde0985b..e69de29bb2d 100644
--- a/flink-formats/flink-hadoop-bulk/archunit-violations/83371291-f688-4eaf-a207-24981f1067f3
+++ b/flink-formats/flink-hadoop-bulk/archunit-violations/83371291-f688-4eaf-a207-24981f1067f3
@@ -1,3 +0,0 @@
-simple name of org.apache.flink.formats.hadoop.bulk.HadoopPathBasedPartFileWriterTest does not end with 'ITCase' in (HadoopPathBasedPartFileWriterTest.java:0)
-simple name of org.apache.flink.formats.hadoop.bulk.committer.HadoopRenameCommitterHDFSTest does not end with 'ITCase' in (HadoopRenameCommitterHDFSTest.java:0)
-simple name of org.apache.flink.formats.hadoop.bulk.committer.HadoopRenameCommitterLocalFSTest does not end with 'ITCase' in (HadoopRenameCommitterLocalFSTest.java:0)
\ No newline at end of file
diff --git a/flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/HadoopPathBasedPartFileWriterTest.java b/flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/HadoopPathBasedPartFileWriterITCase.java
similarity index 98%
rename from flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/HadoopPathBasedPartFileWriterTest.java
rename to flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/HadoopPathBasedPartFileWriterITCase.java
index 80ee85cd40c..ee476fa1c7f 100644
--- a/flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/HadoopPathBasedPartFileWriterTest.java
+++ b/flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/HadoopPathBasedPartFileWriterITCase.java
@@ -49,7 +49,7 @@ import static org.apache.flink.formats.hadoop.bulk.HadoopPathBasedPartFileWriter
 import static org.assertj.core.api.Assertions.assertThat;
 
 /** Base class for testing writing data to the hadoop file system with different configurations. */
-public class HadoopPathBasedPartFileWriterTest extends AbstractTestBase {
+public class HadoopPathBasedPartFileWriterITCase extends AbstractTestBase {
     @Rule public final Timeout timeoutPerTest = Timeout.seconds(2000);
 
     @Test
diff --git a/flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/committer/HadoopRenameCommitterHDFSTest.java b/flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/committer/HadoopRenameCommitterHDFSITCase.java
similarity index 94%
rename from flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/committer/HadoopRenameCommitterHDFSTest.java
rename to flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/committer/HadoopRenameCommitterHDFSITCase.java
index de6991e0c62..2b3eddc82e9 100644
--- a/flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/committer/HadoopRenameCommitterHDFSTest.java
+++ b/flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/committer/HadoopRenameCommitterHDFSITCase.java
@@ -34,7 +34,7 @@ import org.junit.rules.TemporaryFolder;
 import java.io.IOException;
 
 /** Tests the behaviors of {@link HadoopRenameFileCommitter} with HDFS file system. */
-public class HadoopRenameCommitterHDFSTest extends AbstractFileCommitterTest {
+public class HadoopRenameCommitterHDFSITCase extends AbstractFileCommitterTest {
 
     @ClassRule public static final TemporaryFolder CLASS_TEMPORARY_FOLDER = new TemporaryFolder();
 
@@ -56,7 +56,7 @@ public class HadoopRenameCommitterHDFSTest extends AbstractFileCommitterTest {
         hdfsCluster = null;
     }
 
-    public HadoopRenameCommitterHDFSTest(boolean override) throws IOException {
+    public HadoopRenameCommitterHDFSITCase(boolean override) throws IOException {
         super(override);
     }
 
diff --git a/flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/committer/HadoopRenameCommitterLocalFSTest.java b/flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/committer/HadoopRenameCommitterLocalFSITCase.java
similarity index 92%
rename from flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/committer/HadoopRenameCommitterLocalFSTest.java
rename to flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/committer/HadoopRenameCommitterLocalFSITCase.java
index da93a5f1ff8..8e579864fb0 100644
--- a/flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/committer/HadoopRenameCommitterLocalFSTest.java
+++ b/flink-formats/flink-hadoop-bulk/src/test/java/org/apache/flink/formats/hadoop/bulk/committer/HadoopRenameCommitterLocalFSITCase.java
@@ -27,9 +27,9 @@ import org.apache.hadoop.fs.Path;
 import java.io.IOException;
 
 /** Tests the behaviors of {@link HadoopRenameFileCommitter} with local file system. */
-public class HadoopRenameCommitterLocalFSTest extends AbstractFileCommitterTest {
+public class HadoopRenameCommitterLocalFSITCase extends AbstractFileCommitterTest {
 
-    public HadoopRenameCommitterLocalFSTest(boolean override) throws IOException {
+    public HadoopRenameCommitterLocalFSITCase(boolean override) throws IOException {
         super(override);
     }
 
diff --git a/pom.xml b/pom.xml
index f1b7ebcc9d8..785d3a7b5aa 100644
--- a/pom.xml
+++ b/pom.xml
@@ -148,7 +148,7 @@ under the License.
 		<jaxb.api.version>2.3.1</jaxb.api.version>
 		<junit4.version>4.13.2</junit4.version>
 		<junit5.version>5.9.1</junit5.version>
-		<archunit.version>0.22.0</archunit.version>
+		<archunit.version>1.0.0</archunit.version>
 		<mockito.version>3.4.6</mockito.version>
 		<powermock.version>2.0.9</powermock.version>
 		<hamcrest.version>1.3</hamcrest.version>