Posted to commits@flink.apache.org by tw...@apache.org on 2022/02/02 17:01:29 UTC

[flink] 02/02: [hotfix][table-planner] Remove org.reflections usage and dependency

This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit d9d72ef142a2343f37b8b10ca6e04dc7f6ca086e
Author: Marios Trivyzas <ma...@gmail.com>
AuthorDate: Mon Jan 31 12:55:58 2022 +0200

    [hotfix][table-planner] Remove org.reflections usage and dependency
---
 .../flink/table/planner/loader/PlannerModule.java  |  4 +-
 flink-table/flink-table-planner/pom.xml            | 26 ------
 .../table/planner/plan/utils/ReflectionsUtil.java  | 56 -------------
 .../nodes/exec/stream/JsonSerdeCoverageTest.java   | 93 ----------------------
 .../plan/utils/ExecNodeMetadataUtilTest.java       | 44 +++++++++-
 .../planner/plan/utils/ReflectionsUtilTest.java    | 87 --------------------
 .../metadata/MetadataHandlerConsistencyTest.scala  | 36 +++++----
 7 files changed, 65 insertions(+), 281 deletions(-)
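
Context for the diff below: the commit swaps runtime classpath scanning via org.reflections for explicitly maintained class lists. A minimal sketch of the two styles, in Java; the scan() helper and the class name are hypothetical, the Reflections call mirrors the deleted ReflectionsUtil further down, and the sketch assumes the org.reflections dependency that this commit removes:

    import org.reflections.Reflections;

    import java.util.Set;

    /** Sketch only: the discovery style being removed vs. the style kept. */
    public final class DiscoveryStylesSketch {

        /**
         * Old style: discover subtypes by scanning the classpath at runtime.
         * This is the same org.reflections call the deleted ReflectionsUtil wrapped.
         */
        static <T> Set<Class<? extends T>> scan(String packageName, Class<T> base) {
            return new Reflections(packageName).getSubTypesOf(base);
        }

        // New style: no scanning at all; callers keep an explicit list, e.g.
        // ExecNodeMetadataUtil.execNodes() in the updated test or the hardcoded
        // MetadataHandler list in MetadataHandlerConsistencyTest below.

        private DiscoveryStylesSketch() {}
    }
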

diff --git a/flink-table/flink-table-planner-loader/src/main/java/org/apache/flink/table/planner/loader/PlannerModule.java b/flink-table/flink-table-planner-loader/src/main/java/org/apache/flink/table/planner/loader/PlannerModule.java
index 7d72fdc..0a698b2 100644
--- a/flink-table/flink-table-planner-loader/src/main/java/org/apache/flink/table/planner/loader/PlannerModule.java
+++ b/flink-table/flink-table-planner-loader/src/main/java/org/apache/flink/table/planner/loader/PlannerModule.java
@@ -68,9 +68,7 @@ class PlannerModule {
                                     // flink-table-runtime or flink-dist itself
                                     "org.codehaus.janino",
                                     "org.codehaus.commons",
-                                    "org.apache.commons.lang3",
-                                    // Used by org.reflections
-                                    "javassist"))
+                                    "org.apache.commons.lang3"))
                     .toArray(String[]::new);
 
     private static final String[] COMPONENT_CLASSPATH = new String[] {"org.apache.flink"};
diff --git a/flink-table/flink-table-planner/pom.xml b/flink-table/flink-table-planner/pom.xml
index 3d986ef..ce559ac 100644
--- a/flink-table/flink-table-planner/pom.xml
+++ b/flink-table/flink-table-planner/pom.xml
@@ -189,24 +189,6 @@ under the License.
 			<artifactId>scala-parser-combinators_${scala.binary.version}</artifactId>
 		</dependency>
 
-		<dependency>
-			<!-- Utility to scan classpaths -->
-			<groupId>org.reflections</groupId>
-			<artifactId>reflections</artifactId>
-			<version>0.9.10</version>
-			<scope>compile</scope>
-			<exclusions>
-				<exclusion>
-					<groupId>com.google.code.findbugs</groupId>
-					<artifactId>annotations</artifactId>
-				</exclusion>
-				<exclusion>
-					<groupId>com.google.guava</groupId>
-					<artifactId>guava</artifactId>
-				</exclusion>
-			</exclusions>
-		</dependency>
-
 		<!-- Test dependencies -->
 
 		<dependency>
@@ -372,9 +354,6 @@ under the License.
 
 							<!-- For legacy string expressions in Table API -->
 							<include>org.scala-lang.modules:scala-parser-combinators_${scala.binary.version}</include>
-
-							<!-- ReflectionsUtil -->
-							<include>org.reflections:reflections</include>
 						</includes>
 					</artifactSet>
 					<relocations>
@@ -410,11 +389,6 @@ under the License.
 							<shadedPattern>org.apache.flink.table.shaded.com.jayway</shadedPattern>
 						</relocation>
 
-						<!-- Other table dependencies -->
-						<relocation>
-							<pattern>org.reflections</pattern>
-							<shadedPattern>org.apache.flink.table.shaded.org.reflections</shadedPattern>
-						</relocation>
 						<relocation>
 							<!-- icu4j's dependencies -->
 							<pattern>com.ibm.icu</pattern>
diff --git a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/ReflectionsUtil.java b/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/ReflectionsUtil.java
deleted file mode 100644
index 7ea1409..0000000
--- a/flink-table/flink-table-planner/src/main/java/org/apache/flink/table/planner/plan/utils/ReflectionsUtil.java
+++ /dev/null
@@ -1,56 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.plan.utils;
-
-import org.reflections.Reflections;
-
-import java.lang.reflect.Modifier;
-import java.util.Set;
-import java.util.stream.Collectors;
-
-/** An utility class for reflection operations on classes. */
-public class ReflectionsUtil {
-
-    public static <T> Set<Class<? extends T>> scanSubClasses(
-            String packageName, Class<T> targetClass) {
-        return scanSubClasses(packageName, targetClass, false, false);
-    }
-
-    public static <T> Set<Class<? extends T>> scanSubClasses(
-            String packageName,
-            Class<T> targetClass,
-            boolean includingInterface,
-            boolean includingAbstractClass) {
-        Reflections reflections = new Reflections(packageName);
-        return reflections.getSubTypesOf(targetClass).stream()
-                .filter(
-                        c -> {
-                            if (c.isInterface()) {
-                                return includingInterface;
-                            } else if (Modifier.isAbstract(c.getModifiers())) {
-                                return includingAbstractClass;
-                            } else {
-                                return true;
-                            }
-                        })
-                .collect(Collectors.toSet());
-    }
-
-    private ReflectionsUtil() {}
-}
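
For reference, a usage sketch of the utility deleted above, grounded in the calls made by the deleted tests further below; MarkerInterface is a hypothetical stand-in for a type such as StreamExecNode:

    import org.apache.flink.table.planner.plan.utils.ReflectionsUtil;

    import java.util.Set;

    /** Sketch only: pre-removal usage of ReflectionsUtil. */
    final class ReflectionsUtilUsageSketch {

        /** Hypothetical marker type standing in for e.g. StreamExecNode. */
        interface MarkerInterface {}

        static void usage() {
            // Default overload: concrete subclasses only.
            Set<Class<? extends MarkerInterface>> concrete =
                    ReflectionsUtil.scanSubClasses("org.apache.flink", MarkerInterface.class);

            // Four-argument overload: also include interfaces and abstract classes.
            Set<Class<? extends MarkerInterface>> everything =
                    ReflectionsUtil.scanSubClasses(
                            "org.apache.flink", MarkerInterface.class, true, true);
        }

        private ReflectionsUtilUsageSketch() {}
    }
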
diff --git a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/JsonSerdeCoverageTest.java b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/JsonSerdeCoverageTest.java
deleted file mode 100644
index d87ecb0..0000000
--- a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/nodes/exec/stream/JsonSerdeCoverageTest.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.plan.nodes.exec.stream;
-
-import org.apache.flink.table.planner.plan.nodes.exec.serde.JsonSerdeUtil;
-import org.apache.flink.table.planner.plan.utils.ReflectionsUtil;
-
-import org.junit.Test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Set;
-import java.util.stream.Collectors;
-
-import static org.junit.Assert.assertTrue;
-
-/**
- * Test to check whether all {@link StreamExecNode}s have been implemented json
- * serialization/deserialization.
- */
-public class JsonSerdeCoverageTest {
-
-    private static final List<String> UNSUPPORTED_JSON_SERDE_CLASSES =
-            Arrays.asList(
-                    "StreamExecDataStreamScan",
-                    "StreamExecLegacyTableSourceScan",
-                    "StreamExecLegacySink",
-                    "StreamExecGroupTableAggregate",
-                    "StreamExecPythonGroupTableAggregate",
-                    "StreamExecSort",
-                    "StreamExecMultipleInput");
-
-    @SuppressWarnings({"rawtypes"})
-    @Test
-    public void testStreamExecNodeJsonSerdeCoverage() {
-        Set<Class<? extends StreamExecNode>> subClasses =
-                ReflectionsUtil.scanSubClasses("org.apache.flink", StreamExecNode.class);
-
-        List<String> classes = new ArrayList<>();
-        List<String> classesWithoutJsonCreator = new ArrayList<>();
-        List<String> classesWithJsonCreatorInUnsupportedList = new ArrayList<>();
-        for (Class<? extends StreamExecNode> clazz : subClasses) {
-            String className = clazz.getSimpleName();
-            classes.add(className);
-            boolean hasJsonCreator = JsonSerdeUtil.hasJsonCreatorAnnotation(clazz);
-            if (hasJsonCreator && UNSUPPORTED_JSON_SERDE_CLASSES.contains(className)) {
-                classesWithJsonCreatorInUnsupportedList.add(className);
-            }
-            if (!hasJsonCreator && !UNSUPPORTED_JSON_SERDE_CLASSES.contains(className)) {
-                classesWithoutJsonCreator.add(className);
-            }
-        }
-        assertTrue(
-                String.format(
-                        "%s do not support json serialization/deserialization, "
-                                + "please refer the implementation of the other StreamExecNodes.",
-                        String.join(",", classesWithoutJsonCreator)),
-                classesWithoutJsonCreator.isEmpty());
-        assertTrue(
-                String.format(
-                        "%s have support json serialization/deserialization, "
-                                + "but still in UNSUPPORTED_JSON_SERDE_CLASSES list. "
-                                + "please move them from UNSUPPORTED_JSON_SERDE_CLASSES.",
-                        String.join(",", classesWithJsonCreatorInUnsupportedList)),
-                classesWithJsonCreatorInUnsupportedList.isEmpty());
-        List<String> notExistingClasses =
-                UNSUPPORTED_JSON_SERDE_CLASSES.stream()
-                        .filter(c -> !classes.contains(c))
-                        .collect(Collectors.toList());
-        assertTrue(
-                String.format(
-                        "%s do not exist any more, please remove them from UNSUPPORTED_JSON_SERDE_CLASSES.",
-                        String.join(",", notExistingClasses)),
-                notExistingClasses.isEmpty());
-    }
-}
diff --git a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtilTest.java b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtilTest.java
index 7fd6a00..a6b8642 100644
--- a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtilTest.java
+++ b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/utils/ExecNodeMetadataUtilTest.java
@@ -22,11 +22,13 @@ import org.apache.flink.FlinkVersion;
 import org.apache.flink.api.dag.Transformation;
 import org.apache.flink.table.data.RowData;
 import org.apache.flink.table.planner.delegation.PlannerBase;
+import org.apache.flink.table.planner.plan.nodes.exec.ExecNode;
 import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeBase;
 import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeContext;
 import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeMetadata;
 import org.apache.flink.table.planner.plan.nodes.exec.InputProperty;
 import org.apache.flink.table.planner.plan.nodes.exec.MultipleExecNodeMetadata;
+import org.apache.flink.table.planner.plan.nodes.exec.serde.JsonSerdeUtil;
 import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecNode;
 import org.apache.flink.table.types.logical.LogicalType;
 
@@ -35,8 +37,12 @@ import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.annotation.JsonCre
 import org.assertj.core.api.Condition;
 import org.junit.Test;
 
+import java.util.ArrayList;
 import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
 
+import static org.apache.flink.table.planner.plan.utils.ExecNodeMetadataUtil.UNSUPPORTED_JSON_SERDE_CLASSES;
 import static org.assertj.core.api.Assertions.assertThat;
 import static org.assertj.core.api.Assertions.assertThatThrownBy;
 
@@ -60,8 +66,7 @@ public class ExecNodeMetadataUtilTest {
                 .hasMessage(
                         "ExecNode: org.apache.flink.table.planner.plan.utils."
                                 + "ExecNodeMetadataUtilTest.DummyNodeNoAnnotation is missing "
-                                + "ExecNodeMetadata annotation. This is a bug, please contact "
-                                + "developers.");
+                                + "ExecNodeMetadata annotation.");
     }
 
     @Test
@@ -133,6 +138,41 @@ public class ExecNodeMetadataUtilTest {
                                 + "classes and yet is annotated with: ExecNodeMetadata.");
     }
 
+    @Test
+    public void testStreamExecNodeJsonSerdeCoverage() {
+        Set<Class<? extends ExecNode<?>>> subClasses = ExecNodeMetadataUtil.execNodes();
+        List<Class<? extends ExecNode<?>>> classesWithoutJsonCreator = new ArrayList<>();
+        List<Class<? extends ExecNode<?>>> classesWithJsonCreatorInUnsupportedList =
+                new ArrayList<>();
+        for (Class<? extends ExecNode<?>> clazz : subClasses) {
+            boolean hasJsonCreator = JsonSerdeUtil.hasJsonCreatorAnnotation(clazz);
+            if (hasJsonCreator && UNSUPPORTED_JSON_SERDE_CLASSES.contains(clazz)) {
+                classesWithJsonCreatorInUnsupportedList.add(clazz);
+            }
+            if (!hasJsonCreator && !UNSUPPORTED_JSON_SERDE_CLASSES.contains(clazz)) {
+                classesWithoutJsonCreator.add(clazz);
+            }
+        }
+
+        assertThat(classesWithoutJsonCreator)
+                .as(
+                        "%s do not support json serialization/deserialization, "
+                                + "please refer to the implementation of the other StreamExecNodes.",
+                        classesWithoutJsonCreator.stream()
+                                .map(Class::getSimpleName)
+                                .collect(Collectors.joining(",")))
+                .isEmpty();
+        assertThat(classesWithJsonCreatorInUnsupportedList)
+                .as(
+                        "%s have support for json serialization/deserialization, "
+                                + "but are still in the UNSUPPORTED_JSON_SERDE_CLASSES list. "
+                                + "Please remove them from UNSUPPORTED_JSON_SERDE_CLASSES.",
+                        classesWithJsonCreatorInUnsupportedList.stream()
+                                .map(Class::getSimpleName)
+                                .collect(Collectors.joining(",")))
+                .isEmpty();
+    }
+
     @MultipleExecNodeMetadata({
         @ExecNodeMetadata(
                 name = "dummy-node",
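
One helper the new test relies on, JsonSerdeUtil.hasJsonCreatorAnnotation, is Flink's own and not shown in this diff. A plausible, simplified shape of such a check (an assumption for illustration, not the actual Flink implementation):

    import com.fasterxml.jackson.annotation.JsonCreator;

    import java.util.Arrays;

    /** Sketch only: approximates what a @JsonCreator presence check can look like. */
    final class JsonCreatorCheckSketch {

        /**
         * True if any declared constructor carries @JsonCreator; the real helper
         * may also inspect static factory methods (assumption). Flink uses a shaded
         * jackson package, so the import would differ there.
         */
        static boolean hasJsonCreator(Class<?> clazz) {
            return Arrays.stream(clazz.getDeclaredConstructors())
                    .anyMatch(ctor -> ctor.isAnnotationPresent(JsonCreator.class));
        }

        private JsonCreatorCheckSketch() {}
    }
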
diff --git a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/utils/ReflectionsUtilTest.java b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/utils/ReflectionsUtilTest.java
deleted file mode 100644
index 822442f..0000000
--- a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/plan/utils/ReflectionsUtilTest.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.flink.table.planner.plan.utils;
-
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
-
-import java.util.HashSet;
-import java.util.Set;
-
-import static org.junit.Assert.assertEquals;
-
-/** Test for {@link ReflectionsUtil}. */
-@RunWith(Parameterized.class)
-public class ReflectionsUtilTest {
-
-    @Parameterized.Parameter public boolean includingInterface;
-
-    @Parameterized.Parameter(1)
-    public boolean includingAbstractClass;
-
-    @Test
-    public void testScanSubClasses() {
-        Set<Class<? extends TestInterface>> actual =
-                ReflectionsUtil.scanSubClasses(
-                        ReflectionsUtilTest.class.getPackage().getName(),
-                        TestInterface.class,
-                        includingInterface,
-                        includingAbstractClass);
-        Set<Class<? extends TestInterface>> expected = new HashSet<>();
-        expected.add(TestClass1.class);
-        expected.add(TestClass2.class);
-        expected.add(TestClass3.class);
-        if (includingInterface) {
-            expected.add(TestSubInterface.class);
-        }
-        if (includingAbstractClass) {
-            expected.add(TestAbstractClass.class);
-        }
-        assertEquals(expected, actual);
-    }
-
-    @Parameterized.Parameters(name = "includingInterface={0}, includingAbstractClass={1}")
-    public static Object[][] testData() {
-        return new Object[][] {
-            new Object[] {false, false},
-            new Object[] {true, false},
-            new Object[] {false, true},
-            new Object[] {true, true}
-        };
-    }
-
-    /** Testing interface. */
-    public interface TestInterface {}
-
-    /** Testing interface. */
-    public interface TestSubInterface extends TestInterface {}
-
-    /** Testing abstract class. */
-    public abstract static class TestAbstractClass implements TestSubInterface {}
-
-    /** Testing class. */
-    public static class TestClass1 implements TestInterface {}
-
-    /** Testing class. */
-    public static class TestClass2 implements TestSubInterface {}
-
-    /** Testing class. */
-    public static class TestClass3 extends TestAbstractClass {}
-}
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/metadata/MetadataHandlerConsistencyTest.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/metadata/MetadataHandlerConsistencyTest.scala
index b1e48d9..ebed9ff 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/metadata/MetadataHandlerConsistencyTest.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/metadata/MetadataHandlerConsistencyTest.scala
@@ -19,7 +19,6 @@
 package org.apache.flink.table.planner.plan.metadata
 
 import org.apache.flink.table.planner.plan.nodes.physical.batch.{BatchPhysicalCorrelate, BatchPhysicalGroupAggregateBase}
-import org.apache.flink.table.planner.plan.utils.ReflectionsUtil
 
 import org.apache.calcite.rel.RelNode
 import org.apache.calcite.rel.core.{Aggregate, Correlate}
@@ -56,8 +55,28 @@ class MetadataHandlerConsistencyTest(
     logicalNodeClass: Class[_ <: RelNode],
     physicalNodeClass: Class[_ <: RelNode]) {
 
-  // get all subclasses of [[MetadataHandler]]
-  private val allMdHandlerClazz = fetchAllExtendedMetadataHandlers
+  // all subclasses of [[MetadataHandler]]
+  private val allMdHandlerClazz = List(
+    classOf[FlinkRelMdPercentageOriginalRows],
+    classOf[FlinkRelMdColumnNullCount],
+    classOf[FlinkRelMdColumnInterval],
+    classOf[FlinkRelMdCumulativeCost],
+    classOf[FlinkRelMdModifiedMonotonicity],
+    classOf[FlinkRelMdWindowProperties],
+    classOf[FlinkRelMdUniqueKeys],
+    classOf[FlinkRelMdDistribution],
+    classOf[FlinkRelMdSize],
+    classOf[FlinkRelMdCollation],
+    classOf[FlinkRelMdUniqueGroups],
+    classOf[FlinkRelMdPopulationSize],
+    classOf[FlinkRelMdFilteredColumnInterval],
+    classOf[FlinkRelMdUpsertKeys],
+    classOf[FlinkRelMdColumnUniqueness],
+    classOf[FlinkRelMdColumnOriginNullCount],
+    classOf[FlinkRelMdRowCount],
+    classOf[FlinkRelMdSelectivity],
+    classOf[FlinkRelMdNonCumulativeCost],
+    classOf[FlinkRelMdDistinctRowCount])
 
   // initiate each subclasses of [[MetadataHandler]]
   private val mdHandlerInstances = allMdHandlerClazz map { mdhClass =>
@@ -99,17 +118,6 @@ class MetadataHandlerConsistencyTest(
   }
 
   /**
-    * Scan packages to find out all subclasses of [[MetadataHandler]] in flink.
-    *
-    * @return A list contains all subclasses of [[MetadataHandler]] in flink.
-    */
-  private def fetchAllExtendedMetadataHandlers: Seq[Class[_ <: MetadataHandler[_]]] = {
-    ReflectionsUtil.scanSubClasses(
-      "org.apache.flink.table.planner.plan.cost",
-      classOf[MetadataHandler[_]]).toSeq
-  }
-
-  /**
     * Gets whether the given metadataHandler contains explicit metadata estimation for the given
     * RelNode class.
     *