Posted to commits@flink.apache.org by ji...@apache.org on 2022/06/28 07:48:18 UTC
[flink] branch master updated: [FLINK-28036][hive] HiveInspectors should use correct writable type to create ConstantObjectInspector
This is an automated email from the ASF dual-hosted git repository.
jingzhang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git
The following commit(s) were added to refs/heads/master by this push:
new 66d78848404 [FLINK-28036][hive] HiveInspectors should use correct writable type to create ConstantObjectInspector
66d78848404 is described below
commit 66d788484046c85d1d57a70c218120f8eb7c4a54
Author: luoyuxia <lu...@alumni.sjtu.edu.cn>
AuthorDate: Tue Jun 14 12:02:09 2022 +0800
[FLINK-28036][hive] HiveInspectors should use correct writable type to create ConstantObjectInspector
This closes #19949
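
The root cause is a class-name collision: Hadoop and Hive both ship writables
named ByteWritable, ShortWritable, and DoubleWritable, but Hive's primitive
object inspectors expect the variants from org.apache.hadoop.hive.serde2.io,
not the same-named classes in org.apache.hadoop.io. Separately, a constant
BINARY value must be wrapped in org.apache.hadoop.io.BytesWritable (bytes,
plural), not ByteWritable. A minimal sketch of the distinction (illustrative
only, not part of the patch):

    // Same simple name, different and incompatible classes:
    org.apache.hadoop.hive.serde2.io.ByteWritable hiveByte =
            new org.apache.hadoop.hive.serde2.io.ByteWritable((byte) 1); // what Hive's inspectors expect
    org.apache.hadoop.io.ByteWritable hadoopByte =
            new org.apache.hadoop.io.ByteWritable((byte) 1);             // Hadoop's unrelated class

    // BINARY constants wrap the raw bytes, so BytesWritable is required:
    org.apache.hadoop.io.BytesWritable binary =
            new org.apache.hadoop.io.BytesWritable(new byte[] {1, 2});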
---
.../functions/hive/conversion/HiveInspectors.java | 9 +++--
.../table/functions/hive/HiveGenericUDFTest.java | 47 ++++++++++++++++++++++
2 files changed, 52 insertions(+), 4 deletions(-)
diff --git a/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/functions/hive/conversion/HiveInspectors.java b/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/functions/hive/conversion/HiveInspectors.java
index 0dfd2908a5b..afa3b963fbc 100644
--- a/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/functions/hive/conversion/HiveInspectors.java
+++ b/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/functions/hive/conversion/HiveInspectors.java
@@ -37,9 +37,12 @@ import org.apache.flink.types.Row;
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.serde2.io.ByteWritable;
+import org.apache.hadoop.hive.serde2.io.DoubleWritable;
import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
import org.apache.hadoop.hive.serde2.io.HiveDecimalWritable;
import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
+import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -88,12 +91,10 @@ import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
import org.apache.hadoop.io.BooleanWritable;
-import org.apache.hadoop.io.ByteWritable;
-import org.apache.hadoop.io.DoubleWritable;
+import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.ShortWritable;
import org.apache.hadoop.io.Text;
import javax.annotation.Nullable;
@@ -511,7 +512,7 @@ public class HiveInspectors {
case BINARY:
className = WritableConstantBinaryObjectInspector.class.getName();
return HiveReflectionUtils.createConstantObjectInspector(
- className, ByteWritable.class, value);
+ className, BytesWritable.class, value);
case UNKNOWN:
case VOID:
// If type is null, we use the Constant String to replace
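
The fix matters because HiveReflectionUtils.createConstantObjectInspector
locates the inspector's constructor reflectively by the writable parameter
type, so passing the wrong class makes the lookup fail before the inspector
can be built. A rough sketch of the assumed lookup (the real helper lives in
the Flink Hive connector and may differ in detail):

    import java.lang.reflect.Constructor;

    // Assumed shape of the reflective helper: resolve the inspector class,
    // find the constructor taking exactly the given writable type, and
    // invoke it with the constant value.
    static Object createConstantObjectInspector(
            String className, Class<?> valueClass, Object value) throws Exception {
        Constructor<?> ctor = Class.forName(className).getDeclaredConstructor(valueClass);
        // some Hive versions declare these constructors package-private
        ctor.setAccessible(true);
        return ctor.newInstance(value);
    }

Under this assumed implementation, passing ByteWritable.class for BINARY
would fail with NoSuchMethodException, since
WritableConstantBinaryObjectInspector takes a BytesWritable.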
diff --git a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/functions/hive/HiveGenericUDFTest.java b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/functions/hive/HiveGenericUDFTest.java
index 1b1dd03c884..7ded892f942 100644
--- a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/functions/hive/HiveGenericUDFTest.java
+++ b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/functions/hive/HiveGenericUDFTest.java
@@ -286,6 +286,53 @@ public class HiveGenericUDFTest {
assertThat(udf.eval(result)).isEqualTo(3);
}
+ @Test
+ public void testInitUDFWithConstantArguments() {
+ // test initializing the UDF with constants of different types as arguments
+ // to make sure the ConstantObjectInspector can be created normally
+
+ // test with byte type as constant argument
+ init(GenericUDFCoalesce.class, new Object[] {1}, new DataType[] {DataTypes.TINYINT()});
+ // test with short type as constant argument
+ init(GenericUDFCoalesce.class, new Object[] {1}, new DataType[] {DataTypes.SMALLINT()});
+ // test with int type as constant argument
+ init(GenericUDFCoalesce.class, new Object[] {1}, new DataType[] {DataTypes.INT()});
+ // test with long type as constant argument
+ init(GenericUDFCoalesce.class, new Object[] {1}, new DataType[] {DataTypes.BIGINT()});
+ // test with float type as constant argument
+ init(GenericUDFCoalesce.class, new Object[] {1}, new DataType[] {DataTypes.FLOAT()});
+ // test with double type as constant argument
+ init(GenericUDFCoalesce.class, new Object[] {1}, new DataType[] {DataTypes.DOUBLE()});
+ // test with string type as constant argument
+ init(GenericUDFCoalesce.class, new Object[] {"test"}, new DataType[] {DataTypes.STRING()});
+ // test with char type as constant argument
+ init(GenericUDFCoalesce.class, new Object[] {"tes"}, new DataType[] {DataTypes.CHAR(7)});
+ // test with varchar type as constant argument
+ init(GenericUDFCoalesce.class, new Object[] {"tes"}, new DataType[] {DataTypes.VARCHAR(7)});
+ // test with date type as constant argument
+ init(
+ GenericUDFCoalesce.class,
+ new Object[] {new Date(10000)},
+ new DataType[] {DataTypes.DATE()});
+ // test with timestamp type as constant argument
+ init(
+ GenericUDFCoalesce.class,
+ new Object[] {new Timestamp(10000)},
+ new DataType[] {DataTypes.TIMESTAMP()});
+
+ // test with decimal type as constant argument
+ init(
+ GenericUDFCoalesce.class,
+ new Object[] {new BigDecimal("23.45")},
+ new DataType[] {DataTypes.DECIMAL(10, 3)});
+
+ // test with binary type as constant argument
+ init(
+ GenericUDFCoalesce.class,
+ new Object[] {new byte[] {1, 2}},
+ new DataType[] {DataTypes.BYTES()});
+ }
+
private static HiveGenericUDF init(
Class hiveUdfClass, Object[] constantArgs, DataType[] argTypes) {
HiveGenericUDF udf =
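
For reference, the corrected behavior can also be reproduced through Hive's
own factory (a sketch against Hive's public API as assumed here; the
connector goes through reflection instead, presumably for cross-version
compatibility):

    import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
    import org.apache.hadoop.io.BytesWritable;

    // A constant BINARY argument is carried as a BytesWritable; the returned
    // inspector exposes it via getWritableConstantValue().
    ConstantObjectInspector oi =
            PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
                    TypeInfoFactory.binaryTypeInfo, new BytesWritable(new byte[] {1, 2}));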