Posted to commits@flink.apache.org by lz...@apache.org on 2020/03/24 12:44:00 UTC

[flink] 02/02: [FLINK-16732][hive] Failed to call Hive UDF with constant return value

This is an automated email from the ASF dual-hosted git repository.

lzljs3620320 pushed a commit to branch release-1.10
in repository https://gitbox.apache.org/repos/asf/flink.git

commit f9f63a4c11cf13f56ad06d9da6231d711477f28e
Author: Rui Li <li...@apache.org>
AuthorDate: Tue Mar 24 20:40:03 2020 +0800

    [FLINK-16732][hive] Failed to call Hive UDF with constant return value
    
    
    This closes #11494
---
 .../apache/flink/table/functions/hive/HiveGenericUDF.java    |  6 +++++-
 .../org/apache/flink/table/module/hive/HiveModuleTest.java   | 12 ++++++++++++
 2 files changed, 17 insertions(+), 1 deletion(-)

diff --git a/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/functions/hive/HiveGenericUDF.java b/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/functions/hive/HiveGenericUDF.java
index 9adf5ff..1c3649a 100644
--- a/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/functions/hive/HiveGenericUDF.java
+++ b/flink-connectors/flink-connector-hive/src/main/java/org/apache/flink/table/functions/hive/HiveGenericUDF.java
@@ -27,6 +27,7 @@ import org.apache.flink.table.types.DataType;
 import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 import org.slf4j.Logger;
@@ -83,7 +84,10 @@ public class HiveGenericUDF extends HiveScalarFunction<GenericUDF> {
 		}
 
 		try {
-			return HiveInspectors.toFlinkObject(returnInspector, function.evaluate(deferredObjects), hiveShim);
+			Object result = returnInspector instanceof ConstantObjectInspector ?
+					((ConstantObjectInspector) returnInspector).getWritableConstantValue() :
+					function.evaluate(deferredObjects);
+			return HiveInspectors.toFlinkObject(returnInspector, result, hiveShim);
 		} catch (HiveException e) {
 			throw new FlinkHiveUDFException(e);
 		}
diff --git a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/module/hive/HiveModuleTest.java b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/module/hive/HiveModuleTest.java
index 4acc616..04cf165 100644
--- a/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/module/hive/HiveModuleTest.java
+++ b/flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/table/module/hive/HiveModuleTest.java
@@ -149,4 +149,16 @@ public class HiveModuleTest {
 			assertFalse(hiveModule.getFunctionDefinition(banned).isPresent());
 		}
 	}
+
+	@Test
+	public void testConstantReturnValue() throws Exception {
+		TableEnvironment tableEnv = HiveTestUtils.createTableEnvWithBlinkPlannerBatchMode();
+
+		tableEnv.unloadModule("core");
+		tableEnv.loadModule("hive", new HiveModule());
+
+		List<Row> results = TableUtils.collectToList(tableEnv.sqlQuery("select str_to_map('a:1,b:2,c:3',',',':')"));
+
+		assertEquals("[{a=1, b=2, c=3}]", results.toString());
+	}
 }
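
For reference, here is a minimal standalone sketch of the pattern the patch applies in
HiveGenericUDF.evaluate(): when Hive has already folded the UDF result into a
ConstantObjectInspector during initialization (as the new str_to_map test suggests happens
for constant arguments), the constant is read directly instead of invoking evaluate() on the
deferred arguments. The ConstantAwareEvaluation class and evaluateRaw helper below are
hypothetical names used only for illustration; just the Hive API calls
(ConstantObjectInspector.getWritableConstantValue() and GenericUDF.evaluate()) mirror the
patched code, and the Flink-side conversion via HiveInspectors.toFlinkObject is omitted.

    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
    import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

    public class ConstantAwareEvaluation {

        /**
         * Returns the raw (Writable) result of a GenericUDF call, short-circuiting to the
         * pre-computed constant when the return inspector is a ConstantObjectInspector.
         */
        static Object evaluateRaw(
                GenericUDF function,
                ObjectInspector returnInspector,
                GenericUDF.DeferredObject[] deferredObjects) throws HiveException {

            if (returnInspector instanceof ConstantObjectInspector) {
                // Hive folded the expression to a constant at initialization time, so the
                // value is taken from the inspector rather than from evaluate().
                return ((ConstantObjectInspector) returnInspector).getWritableConstantValue();
            }
            // Non-constant case: evaluate against the deferred arguments as before.
            return function.evaluate(deferredObjects);
        }
    }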