Posted to commits@flink.apache.org by ja...@apache.org on 2022/08/09 16:04:27 UTC

[flink] 02/02: [FLINK-28797][hive] Simplify useModules for the parquet complex type tests

This is an automated email from the ASF dual-hosted git repository.

jark pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 9237e63bd8a671cbf3bf133b89d6a037d198de1b
Author: Jark Wu <ja...@apache.org>
AuthorDate: Tue Aug 9 15:27:45 2022 +0800

    [FLINK-28797][hive] Simplify useModules for the parquet complex type tests
---
 .../org/apache/flink/connectors/hive/HiveTableSourceITCase.java     | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveTableSourceITCase.java b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveTableSourceITCase.java
index dcce413340b..d32e6694169 100644
--- a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveTableSourceITCase.java
+++ b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveTableSourceITCase.java
@@ -45,6 +45,7 @@ import org.apache.flink.table.connector.ProviderContext;
 import org.apache.flink.table.data.RowData;
 import org.apache.flink.table.factories.DynamicTableFactory;
 import org.apache.flink.table.factories.TableSourceFactory;
+import org.apache.flink.table.module.CoreModuleFactory;
 import org.apache.flink.table.module.hive.HiveModule;
 import org.apache.flink.table.planner.delegation.PlannerBase;
 import org.apache.flink.table.planner.plan.nodes.exec.ExecNode;
@@ -166,10 +167,7 @@ public class HiveTableSourceITCase extends BatchAbstractTestBase {
         // load hive module so that we can use array,map, named_struct function
         // for convenient writing complex data
         batchTableEnv.loadModule("hive", new HiveModule());
-        String[] newModules = new String[modules.length + 1];
-        newModules[0] = "hive";
-        System.arraycopy(modules, 0, newModules, 1, modules.length);
-        batchTableEnv.useModules(newModules);
+        batchTableEnv.useModules("hive", CoreModuleFactory.IDENTIFIER);
 
         batchTableEnv
                 .executeSql(
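
For readers less familiar with the table module API, the sketch below illustrates the pattern this commit adopts: load the Hive module, then enable it ahead of the built-in core module with the varargs useModules call, instead of hand-copying the current module names into a new String[]. The class name and the standalone batch TableEnvironment are illustrative only and are not part of the commit; the test itself operates on its existing batchTableEnv.

    import org.apache.flink.table.api.EnvironmentSettings;
    import org.apache.flink.table.api.TableEnvironment;
    import org.apache.flink.table.module.CoreModuleFactory;
    import org.apache.flink.table.module.hive.HiveModule;

    // Illustrative sketch, not code from the commit.
    public class UseModulesSketch {
        public static void main(String[] args) {
            TableEnvironment tEnv =
                    TableEnvironment.create(EnvironmentSettings.inBatchMode());

            // Register the Hive module so Hive built-ins such as array, map
            // and named_struct are available for writing complex test data.
            tEnv.loadModule("hive", new HiveModule());

            // Enable "hive" ahead of the core module. This single call replaces
            // the manual array construction removed by the commit.
            tEnv.useModules("hive", CoreModuleFactory.IDENTIFIER);

            // Hive built-in functions can now be used in SQL, e.g.:
            // tEnv.executeSql("SELECT array(1, 2, 3)");
        }
    }

useModules only reorders and filters modules that are already loaded, so listing "hive" first gives Hive built-ins precedence during function resolution while the core module stays enabled, which is exactly what the removed String[] juggling was doing by hand.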