Posted to commits@flink.apache.org by ji...@apache.org on 2023/01/16 09:27:54 UTC

[flink] branch master updated: [FLINK-30617][hive] Fix wrong data type for cast null literal with Hive dialect

This is an automated email from the ASF dual-hosted git repository.

jingzhang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/master by this push:
     new 1c524794362 [FLINK-30617][hive] Fix wrong data type for cast null literal with Hive dialect
1c524794362 is described below

commit 1c5247943627dbf4ad64de89fe5b936ddb638368
Author: luoyuxia <lu...@alumni.sjtu.edu.cn>
AuthorDate: Wed Jan 11 10:13:39 2023 +0800

    [FLINK-30617][hive] Fix wrong data type for cast null literal with Hive dialect
    
    This closes #21642
---
 .../planner/delegation/hive/HiveParserRexNodeConverter.java      | 4 ++--
 .../org/apache/flink/connectors/hive/HiveDialectQueryITCase.java | 9 +++++++++
 2 files changed, 11 insertions(+), 2 deletions(-)
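
For context, the wrong type surfaced through queries of the shape the new test exercises. Below is a minimal illustrative sketch, assuming a TableEnvironment already switched to the Hive dialect; it is not part of the commit itself:

    // Illustrative only. Before this fix, cast(null as bigint) was typed as
    // Hive VOID, so the element type of array(...) no longer matched the
    // array<bigint> column, surfacing as the wrong-data-type problem this
    // commit fixes.
    tableEnv.executeSql("create table t_array(a_t array<bigint>)");
    tableEnv.executeSql("insert into t_array select array(cast(null as bigint))").await();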

diff --git a/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/HiveParserRexNodeConverter.java b/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/HiveParserRexNodeConverter.java
index d8f850eb264..a12570268a2 100644
--- a/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/HiveParserRexNodeConverter.java
+++ b/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/HiveParserRexNodeConverter.java
@@ -319,9 +319,9 @@ public class HiveParserRexNodeConverter {
 
         RexNode calciteLiteral;
         HiveShim hiveShim = HiveParserUtils.getSessionHiveShim();
-        // If value is null, the type should also be VOID.
+        // If value is null, return a null literal of the target Calcite type directly.
         if (value == null) {
-            hiveTypeCategory = PrimitiveObjectInspector.PrimitiveCategory.VOID;
+            return rexBuilder.makeNullLiteral(calciteDataType);
         }
         // TODO: Verify if we need to use ConstantObjectInspector to unwrap data
         switch (hiveTypeCategory) {
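
The new code path relies on Calcite's RexBuilder.makeNullLiteral(RelDataType), which produces a NULL literal that keeps the requested type (made nullable) instead of collapsing to Hive's VOID. A self-contained sketch of that behavior against Calcite alone; the class and variable names here are invented for illustration:

    import org.apache.calcite.rel.type.RelDataType;
    import org.apache.calcite.rel.type.RelDataTypeSystem;
    import org.apache.calcite.rex.RexBuilder;
    import org.apache.calcite.rex.RexNode;
    import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
    import org.apache.calcite.sql.type.SqlTypeName;

    public class NullLiteralSketch {
        public static void main(String[] args) {
            RexBuilder rexBuilder =
                    new RexBuilder(new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT));
            RelDataType bigintType =
                    rexBuilder.getTypeFactory().createSqlType(SqlTypeName.BIGINT);
            // makeNullLiteral keeps the requested type (made nullable), so the
            // literal for cast(null as bigint) is a BIGINT null, not VOID.
            RexNode nullLiteral = rexBuilder.makeNullLiteral(bigintType);
            System.out.println(nullLiteral.getType().getSqlTypeName()); // BIGINT
        }
    }

With the old VOID path, the literal carried no information about the CAST target, which is what broke expressions such as array(cast(null as bigint)).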
diff --git a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveDialectQueryITCase.java b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveDialectQueryITCase.java
index 8d1d276364a..2ffa4a7d44e 100644
--- a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveDialectQueryITCase.java
+++ b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveDialectQueryITCase.java
@@ -945,6 +945,7 @@ public class HiveDialectQueryITCase {
     public void testNullLiteralAsArgument() throws Exception {
         tableEnv.executeSql("create table test_ts(ts timestamp)");
         tableEnv.executeSql("create table t_bigint(ts bigint)");
+        tableEnv.executeSql("create table t_array(a_t array<bigint>)");
         Long testTimestamp = 1671058803926L;
         // timestamp's behavior is different between hive2 and hive3, so
         // use HiveShim in this test to hide such difference
@@ -971,9 +972,17 @@ public class HiveDialectQueryITCase {
             // verify it can cast to timestamp value correctly
             assertThat(result.toString())
                     .isEqualTo(String.format("[+I[%s], +I[null]]", expectDateTime));
+
+            // test cast null as bigint inside an array constructor
+            tableEnv.executeSql("insert into t_array select array(cast(null as bigint))").await();
+            result =
+                    CollectionUtil.iteratorToList(
+                            tableEnv.executeSql("select * from t_array").collect());
+            assertThat(result.toString()).isEqualTo("[+I[null]]");
         } finally {
             tableEnv.executeSql("drop table test_ts");
             tableEnv.executeSql("drop table t_bigint");
+            tableEnv.executeSql("drop table t_array");
         }
     }