You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@flink.apache.org by ja...@apache.org on 2022/09/05 04:27:30 UTC

[flink] branch release-1.16 updated: [FLINK-27017][hive] Fix divide by zero exception with Hive dialect (#19216)

This is an automated email from the ASF dual-hosted git repository.

jark pushed a commit to branch release-1.16
in repository https://gitbox.apache.org/repos/asf/flink.git


The following commit(s) were added to refs/heads/release-1.16 by this push:
     new c049171c6ec [FLINK-27017][hive] Fix divide by zero exception with Hive dialect (#19216)
c049171c6ec is described below

commit c049171c6ec610fe1dc72b7ccb83981ce58bbf36
Author: yuxia Luo <lu...@alumni.sjtu.edu.cn>
AuthorDate: Mon Sep 5 12:22:31 2022 +0800

    [FLINK-27017][hive] Fix divide by zero exception with Hive dialect (#19216)
---
 .../table/planner/delegation/hive/HiveParserRexNodeConverter.java     | 1 +
 .../planner/delegation/hive/copy/HiveParserSqlFunctionConverter.java  | 4 ++++
 .../java/org/apache/flink/connectors/hive/HiveDialectQueryITCase.java | 3 ++-
 3 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/HiveParserRexNodeConverter.java b/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/HiveParserRexNodeConverter.java
index 54775ed6c55..d8f850eb264 100644
--- a/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/HiveParserRexNodeConverter.java
+++ b/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/HiveParserRexNodeConverter.java
@@ -594,6 +594,7 @@ public class HiveParserRexNodeConverter {
         // process the function
         RelDataType retType =
                 HiveParserTypeConverter.convert(func.getTypeInfo(), cluster.getTypeFactory());
+
         SqlOperator calciteOp =
                 HiveParserSqlFunctionConverter.getCalciteOperator(
                         func.getFuncText(), func.getGenericUDF(), argTypes, retType, funcConverter);
diff --git a/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/copy/HiveParserSqlFunctionConverter.java b/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/copy/HiveParserSqlFunctionConverter.java
index 8f22c4e9a84..094b5899091 100644
--- a/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/copy/HiveParserSqlFunctionConverter.java
+++ b/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/planner/delegation/hive/copy/HiveParserSqlFunctionConverter.java
@@ -468,6 +468,10 @@ public class HiveParserSqlFunctionConverter {
             return operandTypes.contains(SqlTypeFamily.BOOLEAN)
                     && (operandTypes.contains(SqlTypeFamily.NUMERIC)
                             || operandTypes.contains(SqlTypeFamily.CHARACTER));
+        } else if (udfName.equals("/")) {
+            // if the operation is division "/", it should be handled by Hive's corresponding udf,
+            // because in Hive the divisor can be 0, whereas Flink would throw an exception directly.
+            return true;
         }
         return false;
     }
diff --git a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveDialectQueryITCase.java b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveDialectQueryITCase.java
index 293492d4b2d..1c8c7c6990e 100644
--- a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveDialectQueryITCase.java
+++ b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/connectors/hive/HiveDialectQueryITCase.java
@@ -182,7 +182,8 @@ public class HiveDialectQueryITCase {
                                 "select a, one from binary_t lateral view explode(ab) abs as one where a > 0",
                                 "select /*+ mapjoin(dest) */ foo.x from foo join dest on foo.x = dest.x union"
                                         + " all select /*+ mapjoin(dest) */ foo.x from foo join dest on foo.y = dest.y",
-                                "with cte as (select * from src) select * from cte"));
+                                "with cte as (select * from src) select * from cte",
+                                "select 1 / 0"));
         if (HiveVersionTestUtil.HIVE_230_OR_LATER) {
             toRun.add(
                     "select weekofyear(current_timestamp()), dayofweek(current_timestamp()) from src limit 1");