You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@flink.apache.org by tw...@apache.org on 2020/07/29 15:40:42 UTC

[flink] branch master updated (61d06ac -> b7e1574)

This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git.


    from 61d06ac  [FLINK-15467][task] Wait for sourceTaskThread to finish before exiting from invoke
     new 82f3094  [hotfix][table] Rename UserDefinedAggregateFunction to ImperativeAggregateFunction
     new 9eda9f3  [hotfix][table-api-java] Remove @Experimental annotation for function methods in TableEnvironment
     new ff70cc8  [hotfix][table-api-java] Fix JavaDocs for TableEnvironment.fromValues
     new 9aed3a0  [hotfix][table] Remove deprecated AggregateFunction.requiresOver()
     new 5bc3c76  [hotfix][table-common] Add CallExpression.getFunctionName for easier printing
     new b7e1574  [FLINK-15803][table] Update AggregateFunction and TableAggregateFunction to the new type system

The 6 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 flink-python/pyflink/table/table_environment.py    |   2 +-
 .../api/bridge/java/StreamTableEnvironment.java    |  12 ++
 .../apache/flink/table/api/TableEnvironment.java   |  13 +-
 .../flink/table/catalog/FunctionCatalog.java       |  12 +-
 .../utils/AggregateOperationFactory.java           |  60 ++++---
 .../operations/utils/CalculatedTableFactory.java   |   5 +-
 .../flink/table/typeutils/FieldInfoUtils.java      |   8 +-
 .../table/api/ImplicitExpressionConversions.scala  |   6 +-
 .../api/ExpressionsConsistencyCheckTest.scala      |   2 +-
 .../flink/table/expressions/CallExpression.java    |  20 ++-
 .../flink/table/functions/AggregateFunction.java   | 179 +++++++++++++-------
 .../functions/ImperativeAggregateFunction.java     |  84 ++++++++++
 .../flink/table/functions/ScalarFunction.java      |   2 +-
 .../table/functions/TableAggregateFunction.java    |  84 ++++++----
 .../flink/table/functions/TableFunction.java       |  14 +-
 .../functions/UserDefinedAggregateFunction.java    |  58 -------
 .../table/functions/UserDefinedFunctionHelper.java |  12 +-
 .../types/logical/utils/LogicalTypeChecks.java     |  11 +-
 .../types/logical/utils/LogicalTypeUtils.java      |  47 ++++++
 .../flink/table/types/utils/DataTypeUtils.java     |  31 ++++
 .../flink/table/types/utils/DataTypeUtilsTest.java |  28 ++++
 .../catalog/FunctionCatalogOperatorTable.java      |  19 ++-
 .../planner/expressions/SqlAggFunctionVisitor.java |  99 ++++++++---
 .../expressions/converter/OverConvertRule.java     |   2 +-
 .../planner/plan/QueryOperationConverter.java      |   4 +-
 .../flink/table/planner/utils/ShortcutUtils.java   |   4 +
 .../table/planner/codegen/ExprCodeGenerator.scala  |  21 ++-
 .../codegen/agg/AggsHandlerCodeGenerator.scala     |   9 +-
 .../planner/codegen/agg/ImperativeAggCodeGen.scala |   4 +-
 .../codegen/calls/TableFunctionCallGen.scala       |   3 +-
 .../table/planner/dataview/DataViewUtils.scala     |   7 +-
 .../table/planner/expressions/aggregations.scala   |   4 +-
 .../planner/functions/utils/AggSqlFunction.scala   |  25 ++-
 .../functions/utils/UserDefinedFunctionUtils.scala |  54 +++---
 .../plan/nodes/calcite/TableAggregate.scala        |  17 +-
 .../planner/plan/utils/AggFunctionFactory.scala    |   6 +
 .../table/planner/plan/utils/AggregateUtil.scala   | 185 ++++++++++++++++++---
 .../plan/utils/JavaUserDefinedAggFunctions.java    |   6 +-
 .../runtime/utils/JavaUserDefinedAggFunctions.java |  93 ++++++++++-
 .../utils/JavaUserDefinedScalarFunctions.java      |  31 ----
 .../planner/plan/batch/table/GroupWindowTest.xml   |  14 +-
 .../plan/stream/sql/agg/WindowAggregateTest.xml    | 160 +++++++++---------
 .../planner/plan/stream/table/AggregateTest.xml    |  10 +-
 .../plan/stream/table/ColumnFunctionsTest.xml      |   7 +-
 .../planner/plan/stream/table/GroupWindowTest.xml  |  90 +++++-----
 .../planner/plan/stream/table/OverWindowTest.xml   |  54 +++---
 .../plan/stream/table/TableAggregateTest.xml       |   8 +-
 .../sql/validation/OverWindowValidationTest.scala  |   2 +-
 .../plan/batch/sql/agg/OverAggregateTest.scala     |   2 +-
 .../planner/plan/batch/table/GroupWindowTest.scala |   4 +-
 .../stringexpr/AggregateStringExpressionTest.scala |  35 ++--
 .../validation/OverWindowValidationTest.scala      |   2 +-
 .../plan/stream/sql/ModifiedMonotonicityTest.scala |   2 +-
 .../plan/stream/sql/agg/OverAggregateTest.scala    |   2 +-
 .../plan/stream/sql/agg/WindowAggregateTest.scala  |   2 +-
 .../planner/plan/stream/table/AggregateTest.scala  |   2 +-
 .../plan/stream/table/ColumnFunctionsTest.scala    |   2 +-
 .../plan/stream/table/GroupWindowTest.scala        |   8 +-
 .../planner/plan/stream/table/OverWindowTest.scala |  26 +--
 .../plan/stream/table/TableAggregateTest.scala     |   4 +-
 .../stringexpr/AggregateStringExpressionTest.scala |  28 ++--
 .../GroupWindowStringExpressionTest.scala          |  42 ++---
 ...pWindowTableAggregateStringExpressionTest.scala |  30 ++--
 .../OverWindowStringExpressionTest.scala           |  30 ++--
 .../TableAggregateStringExpressionTest.scala       |  10 +-
 .../GroupWindowTableAggregateValidationTest.scala  |   5 +-
 .../validation/GroupWindowValidationTest.scala     |  18 +-
 .../validation/OverWindowValidationTest.scala      |   2 +-
 .../validation/TableAggregateValidationTest.scala  |  27 ++-
 .../runtime/batch/sql/agg/SortAggITCase.scala      |  51 ++----
 .../runtime/batch/table/AggregationITCase.scala    |   8 +-
 .../runtime/stream/sql/AggregateITCase.scala       |  52 +++++-
 .../runtime/stream/sql/MatchRecognizeITCase.scala  |   2 +-
 .../runtime/stream/sql/WindowAggregateITCase.scala |   2 +-
 .../table/GroupWindowTableAggregateITCase.scala    |   2 +-
 .../runtime/stream/table/OverWindowITCase.scala    |   4 +-
 .../stream/table/TableAggregateITCase.scala        |  52 +++++-
 .../planner/runtime/utils/BatchTestBase.scala      |  21 ++-
 .../utils/UserDefinedFunctionTestUtils.scala       |  11 +-
 .../table/planner/utils/CountAggFunction.scala     |   2 +-
 .../flink/table/planner/utils/TableTestBase.scala  |  23 +--
 .../utils/UserDefinedTableAggFunctions.scala       |  33 ++--
 .../flink/table/api/internal/TableEnvImpl.scala    |   4 +-
 .../table/codegen/AggregationCodeGenerator.scala   |  59 +++----
 .../flink/table/codegen/MatchCodeGenerator.scala   |  19 ++-
 .../flink/table/expressions/aggregations.scala     |  20 +--
 .../table/functions/utils/AggSqlFunction.scala     |  34 ++--
 .../functions/utils/UserDefinedFunctionUtils.scala |  10 +-
 .../table/runtime/aggregate/AggregateUtil.scala    |  40 ++---
 .../runtime/utils/JavaUserDefinedAggFunctions.java |  11 +-
 .../table/api/stream/table/AggregateTest.scala     |   5 +-
 .../table/runtime/types/PlannerTypeUtils.java      |   8 -
 92 files changed, 1478 insertions(+), 906 deletions(-)
 create mode 100644 flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ImperativeAggregateFunction.java
 delete mode 100644 flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedAggregateFunction.java


[flink] 01/06: [hotfix][table] Rename UserDefinedAggregateFunction to ImperativeAggregateFunction

Posted by tw...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 82f3094e19ea1db6ee94b129d9b21852b9c366dc
Author: Timo Walther <tw...@apache.org>
AuthorDate: Mon Jul 27 15:44:18 2020 +0200

    [hotfix][table] Rename UserDefinedAggregateFunction to ImperativeAggregateFunction
    
    Renames this base class because users can easily mix up AggregateFunction and
    UserDefinedAggregateFunction. The new naming also prepares for the future
    DeclarativeAggregateFunction. ImperativeAggregateFunction has already been introduced
    as a concept in the Blink planner.
---
 flink-python/pyflink/table/table_environment.py    |  2 +-
 .../flink/table/catalog/FunctionCatalog.java       |  4 +-
 .../table/api/ImplicitExpressionConversions.scala  |  6 +--
 .../api/ExpressionsConsistencyCheckTest.scala      |  2 +-
 .../flink/table/functions/AggregateFunction.java   |  4 +-
 ...ction.java => ImperativeAggregateFunction.java} | 12 ++---
 .../table/functions/TableAggregateFunction.java    |  4 +-
 .../table/functions/UserDefinedFunctionHelper.java | 12 ++---
 .../codegen/agg/AggsHandlerCodeGenerator.scala     |  4 +-
 .../planner/codegen/agg/ImperativeAggCodeGen.scala |  4 +-
 .../table/planner/dataview/DataViewUtils.scala     |  7 +--
 .../table/planner/expressions/aggregations.scala   |  4 +-
 .../planner/functions/utils/AggSqlFunction.scala   | 21 ++++----
 .../functions/utils/UserDefinedFunctionUtils.scala | 54 ++++++++++----------
 .../table/planner/plan/utils/AggregateUtil.scala   |  4 +-
 .../flink/table/planner/utils/TableTestBase.scala  |  8 +--
 .../flink/table/api/internal/TableEnvImpl.scala    |  4 +-
 .../table/codegen/AggregationCodeGenerator.scala   | 59 +++++++++++-----------
 .../flink/table/codegen/MatchCodeGenerator.scala   | 19 +++----
 .../flink/table/expressions/aggregations.scala     | 20 ++++----
 .../table/functions/utils/AggSqlFunction.scala     | 34 +++++++------
 .../functions/utils/UserDefinedFunctionUtils.scala | 10 ++--
 .../table/runtime/aggregate/AggregateUtil.scala    | 40 ++++++++-------
 23 files changed, 175 insertions(+), 163 deletions(-)

diff --git a/flink-python/pyflink/table/table_environment.py b/flink-python/pyflink/table/table_environment.py
index aa05b13..d080b93 100644
--- a/flink-python/pyflink/table/table_environment.py
+++ b/flink-python/pyflink/table/table_environment.py
@@ -1571,7 +1571,7 @@ class TableEnvironment(object):
     def _is_aggregate_function(java_function):
         java_function_class = java_function.getClass()
         j_aggregate_function_class = get_java_class(
-            get_gateway().jvm.org.apache.flink.table.functions.UserDefinedAggregateFunction)
+            get_gateway().jvm.org.apache.flink.table.functions.ImperativeAggregateFunction)
         return j_aggregate_function_class.isAssignableFrom(java_function_class)
 
     def _register_table_function(self, name, table_function):
diff --git a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java
index 38c4094..d0843d4 100644
--- a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java
+++ b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java
@@ -32,13 +32,13 @@ import org.apache.flink.table.functions.AggregateFunctionDefinition;
 import org.apache.flink.table.functions.FunctionDefinition;
 import org.apache.flink.table.functions.FunctionDefinitionUtil;
 import org.apache.flink.table.functions.FunctionIdentifier;
+import org.apache.flink.table.functions.ImperativeAggregateFunction;
 import org.apache.flink.table.functions.ScalarFunction;
 import org.apache.flink.table.functions.ScalarFunctionDefinition;
 import org.apache.flink.table.functions.TableAggregateFunction;
 import org.apache.flink.table.functions.TableAggregateFunctionDefinition;
 import org.apache.flink.table.functions.TableFunction;
 import org.apache.flink.table.functions.TableFunctionDefinition;
-import org.apache.flink.table.functions.UserDefinedAggregateFunction;
 import org.apache.flink.table.functions.UserDefinedFunction;
 import org.apache.flink.table.functions.UserDefinedFunctionHelper;
 import org.apache.flink.table.module.ModuleManager;
@@ -409,7 +409,7 @@ public final class FunctionCatalog {
 
 	public <T, ACC> void registerTempSystemAggregateFunction(
 			String name,
-			UserDefinedAggregateFunction<T, ACC> function,
+			ImperativeAggregateFunction<T, ACC> function,
 			TypeInformation<T> resultType,
 			TypeInformation<ACC> accType) {
 		UserDefinedFunctionHelper.prepareInstance(config, function);
diff --git a/flink-table/flink-table-api-scala/src/main/scala/org/apache/flink/table/api/ImplicitExpressionConversions.scala b/flink-table/flink-table-api-scala/src/main/scala/org/apache/flink/table/api/ImplicitExpressionConversions.scala
index 23e45a3..2e63b68 100644
--- a/flink-table/flink-table-api-scala/src/main/scala/org/apache/flink/table/api/ImplicitExpressionConversions.scala
+++ b/flink-table/flink-table-api-scala/src/main/scala/org/apache/flink/table/api/ImplicitExpressionConversions.scala
@@ -23,7 +23,7 @@ import org.apache.flink.api.common.typeinfo.TypeInformation
 import org.apache.flink.table.expressions.ApiExpressionUtils.{unresolvedCall, unresolvedRef, valueLiteral}
 import org.apache.flink.table.expressions.{ApiExpressionUtils, Expression, TableSymbol, TimePointUnit}
 import org.apache.flink.table.functions.BuiltInFunctionDefinitions.{DISTINCT, RANGE_TO}
-import org.apache.flink.table.functions.{ScalarFunction, TableFunction, UserDefinedAggregateFunction, UserDefinedFunctionHelper, _}
+import org.apache.flink.table.functions.{ScalarFunction, TableFunction, ImperativeAggregateFunction, UserDefinedFunctionHelper, _}
 import org.apache.flink.table.types.DataType
 import org.apache.flink.types.Row
 
@@ -173,8 +173,8 @@ trait ImplicitExpressionConversions {
     }
   }
 
-  implicit class UserDefinedAggregateFunctionCall[T: TypeInformation, ACC: TypeInformation]
-      (val a: UserDefinedAggregateFunction[T, ACC]) {
+  implicit class ImperativeAggregateFunctionCall[T: TypeInformation, ACC: TypeInformation]
+      (val a: ImperativeAggregateFunction[T, ACC]) {
 
     private def createFunctionDefinition(): FunctionDefinition = {
       val resultTypeInfo: TypeInformation[T] = UserDefinedFunctionHelper
diff --git a/flink-table/flink-table-api-scala/src/test/scala/org/apache/flink/table/api/ExpressionsConsistencyCheckTest.scala b/flink-table/flink-table-api-scala/src/test/scala/org/apache/flink/table/api/ExpressionsConsistencyCheckTest.scala
index e9ef414..26c30d5 100644
--- a/flink-table/flink-table-api-scala/src/test/scala/org/apache/flink/table/api/ExpressionsConsistencyCheckTest.scala
+++ b/flink-table/flink-table-api-scala/src/test/scala/org/apache/flink/table/api/ExpressionsConsistencyCheckTest.scala
@@ -55,7 +55,7 @@ class ExpressionsConsistencyCheckTest {
   val explicitScalaToJavaStaticMethodsMapping = Map(
     "FieldExpression" -> "$",
     "UnresolvedFieldExpression" -> "$",
-    "UserDefinedAggregateFunctionCall" -> "call",
+    "ImperativeAggregateFunctionCall" -> "call",
     "ScalarFunctionCall" -> "call",
     "TableFunctionCall" -> "call",
     "concat_ws" -> "concatWs"
diff --git a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/AggregateFunction.java b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/AggregateFunction.java
index 6d0f30a..13c2745 100644
--- a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/AggregateFunction.java
+++ b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/AggregateFunction.java
@@ -47,7 +47,7 @@ import java.util.Set;
  *
  * <p>All these methods must be declared publicly, not static, and named exactly as the names
  * mentioned above. The method {@link #createAccumulator()} is defined in the
- * {@link UserDefinedAggregateFunction} function, and method {@link #getValue} is defined in
+ * {@link ImperativeAggregateFunction} function, and method {@link #getValue} is defined in
  * the {@link AggregateFunction} while other methods are explained below.
  *
  * <pre>
@@ -114,7 +114,7 @@ import java.util.Set;
  *              AggregateFunction must be put into the accumulator.
  */
 @PublicEvolving
-public abstract class AggregateFunction<T, ACC> extends UserDefinedAggregateFunction<T, ACC> {
+public abstract class AggregateFunction<T, ACC> extends ImperativeAggregateFunction<T, ACC> {
 
 	/**
 	 * Called every time when an aggregation result should be materialized.
diff --git a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedAggregateFunction.java b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ImperativeAggregateFunction.java
similarity index 73%
rename from flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedAggregateFunction.java
rename to flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ImperativeAggregateFunction.java
index e03910d..e39a9d0 100644
--- a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedAggregateFunction.java
+++ b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ImperativeAggregateFunction.java
@@ -25,10 +25,10 @@ import org.apache.flink.api.common.typeinfo.TypeInformation;
  * Base class for user-defined aggregates and table aggregates.
  */
 @PublicEvolving
-public abstract class UserDefinedAggregateFunction<T, ACC> extends UserDefinedFunction {
+public abstract class ImperativeAggregateFunction<T, ACC> extends UserDefinedFunction {
 
 	/**
-	 * Creates and initializes the accumulator for this {@link UserDefinedAggregateFunction}. The
+	 * Creates and initializes the accumulator for this {@link ImperativeAggregateFunction}. The
 	 * accumulator is used to keep the aggregated values which are needed to compute an aggregation
 	 * result.
 	 *
@@ -37,9 +37,9 @@ public abstract class UserDefinedAggregateFunction<T, ACC> extends UserDefinedFu
 	public abstract ACC createAccumulator();
 
 	/**
-	 * Returns the {@link TypeInformation} of the {@link UserDefinedAggregateFunction}'s result.
+	 * Returns the {@link TypeInformation} of the {@link ImperativeAggregateFunction}'s result.
 	 *
-	 * @return The {@link TypeInformation} of the {@link UserDefinedAggregateFunction}'s result or
+	 * @return The {@link TypeInformation} of the {@link ImperativeAggregateFunction}'s result or
 	 *         <code>null</code> if the result type should be automatically inferred.
 	 */
 	public TypeInformation<T> getResultType() {
@@ -47,9 +47,9 @@ public abstract class UserDefinedAggregateFunction<T, ACC> extends UserDefinedFu
 	}
 
 	/**
-	 * Returns the {@link TypeInformation} of the {@link UserDefinedAggregateFunction}'s accumulator.
+	 * Returns the {@link TypeInformation} of the {@link ImperativeAggregateFunction}'s accumulator.
 	 *
-	 * @return The {@link TypeInformation} of the {@link UserDefinedAggregateFunction}'s accumulator
+	 * @return The {@link TypeInformation} of the {@link ImperativeAggregateFunction}'s accumulator
 	 *         or <code>null</code> if the accumulator type should be automatically inferred.
 	 */
 	public TypeInformation<ACC> getAccumulatorType() {
diff --git a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/TableAggregateFunction.java b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/TableAggregateFunction.java
index c8ee292..3941eaa 100644
--- a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/TableAggregateFunction.java
+++ b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/TableAggregateFunction.java
@@ -42,7 +42,7 @@ import org.apache.flink.util.Collector;
  *
  * <p>All these methods must be declared publicly, not static, and named exactly as the names
  * mentioned above. The method {@link #createAccumulator()} is defined in
- * the {@link UserDefinedAggregateFunction} functions, while other methods are explained below.
+ * the {@link ImperativeAggregateFunction} functions, while other methods are explained below.
  *
  * <pre>
  * {@code
@@ -112,7 +112,7 @@ import org.apache.flink.util.Collector;
  *              the TableAggregateFunction must be put into the accumulator.
  */
 @PublicEvolving
-public abstract class TableAggregateFunction<T, ACC> extends UserDefinedAggregateFunction<T, ACC> {
+public abstract class TableAggregateFunction<T, ACC> extends ImperativeAggregateFunction<T, ACC> {
 
 	/**
 	 * Collects a record and forwards it. The collector can output retract messages with the retract
diff --git a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
index e22fd7c..17af912 100644
--- a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
+++ b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/UserDefinedFunctionHelper.java
@@ -81,7 +81,7 @@ public final class UserDefinedFunctionHelper {
 	 * @return The inferred accumulator type of the AggregateFunction.
 	 */
 	public static <T, ACC> TypeInformation<T> getReturnTypeOfAggregateFunction(
-			UserDefinedAggregateFunction<T, ACC> aggregateFunction) {
+			ImperativeAggregateFunction<T, ACC> aggregateFunction) {
 		return getReturnTypeOfAggregateFunction(aggregateFunction, null);
 	}
 
@@ -94,7 +94,7 @@ public final class UserDefinedFunctionHelper {
 	 * @return The inferred accumulator type of the AggregateFunction.
 	 */
 	public static <T, ACC> TypeInformation<T> getReturnTypeOfAggregateFunction(
-			UserDefinedAggregateFunction<T, ACC> aggregateFunction,
+			ImperativeAggregateFunction<T, ACC> aggregateFunction,
 			TypeInformation<T> scalaType) {
 
 		TypeInformation<T> userProvidedType = aggregateFunction.getResultType();
@@ -105,7 +105,7 @@ public final class UserDefinedFunctionHelper {
 		} else {
 			return TypeExtractor.createTypeInfo(
 				aggregateFunction,
-				UserDefinedAggregateFunction.class,
+				ImperativeAggregateFunction.class,
 				aggregateFunction.getClass(),
 				0);
 		}
@@ -118,7 +118,7 @@ public final class UserDefinedFunctionHelper {
 	 * @return The inferred accumulator type of the AggregateFunction.
 	 */
 	public static <T, ACC> TypeInformation<ACC> getAccumulatorTypeOfAggregateFunction(
-			UserDefinedAggregateFunction<T, ACC> aggregateFunction) {
+			ImperativeAggregateFunction<T, ACC> aggregateFunction) {
 		return getAccumulatorTypeOfAggregateFunction(aggregateFunction, null);
 	}
 
@@ -131,7 +131,7 @@ public final class UserDefinedFunctionHelper {
 	 * @return The inferred accumulator type of the AggregateFunction.
 	 */
 	public static <T, ACC> TypeInformation<ACC> getAccumulatorTypeOfAggregateFunction(
-			UserDefinedAggregateFunction<T, ACC> aggregateFunction,
+			ImperativeAggregateFunction<T, ACC> aggregateFunction,
 			TypeInformation<ACC> scalaType) {
 
 		TypeInformation<ACC> userProvidedType = aggregateFunction.getAccumulatorType();
@@ -142,7 +142,7 @@ public final class UserDefinedFunctionHelper {
 		} else {
 			return TypeExtractor.createTypeInfo(
 				aggregateFunction,
-				UserDefinedAggregateFunction.class,
+				ImperativeAggregateFunction.class,
 				aggregateFunction.getClass(),
 				1);
 		}
diff --git a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/agg/AggsHandlerCodeGenerator.scala b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/agg/AggsHandlerCodeGenerator.scala
index dfccc46..39f4420 100644
--- a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/agg/AggsHandlerCodeGenerator.scala
+++ b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/agg/AggsHandlerCodeGenerator.scala
@@ -21,7 +21,7 @@ import org.apache.flink.api.common.functions.RuntimeContext
 import org.apache.flink.table.api.TableException
 import org.apache.flink.table.data.GenericRowData
 import org.apache.flink.table.expressions._
-import org.apache.flink.table.functions.UserDefinedAggregateFunction
+import org.apache.flink.table.functions.ImperativeAggregateFunction
 import org.apache.flink.table.planner.codegen.CodeGenUtils.{ROW_DATA, _}
 import org.apache.flink.table.planner.codegen.Indenter.toISC
 import org.apache.flink.table.planner.codegen._
@@ -227,7 +227,7 @@ class AggsHandlerCodeGenerator(
             inputFieldTypes,
             constants,
             relBuilder)
-        case _: UserDefinedAggregateFunction[_, _] =>
+        case _: ImperativeAggregateFunction[_, _] =>
           new ImperativeAggCodeGen(
             ctx,
             aggInfo,
diff --git a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/agg/ImperativeAggCodeGen.scala b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/agg/ImperativeAggCodeGen.scala
index 36935f9..c59acf7 100644
--- a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/agg/ImperativeAggCodeGen.scala
+++ b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/agg/ImperativeAggCodeGen.scala
@@ -19,7 +19,7 @@ package org.apache.flink.table.planner.codegen.agg
 
 import org.apache.flink.table.data.{GenericRowData, RowData, UpdatableRowData}
 import org.apache.flink.table.expressions.Expression
-import org.apache.flink.table.functions.{UserDefinedAggregateFunction, UserDefinedFunctionHelper}
+import org.apache.flink.table.functions.{ImperativeAggregateFunction, UserDefinedFunctionHelper}
 import org.apache.flink.table.planner.codegen.CodeGenUtils._
 import org.apache.flink.table.planner.codegen.GenerateUtils.generateFieldAccess
 import org.apache.flink.table.planner.codegen.agg.AggsHandlerCodeGenerator._
@@ -79,7 +79,7 @@ class ImperativeAggCodeGen(
   private val SINGLE_ITERABLE = className[SingleElementIterator[_]]
   private val UPDATABLE_ROW = className[UpdatableRowData]
 
-  val function = aggInfo.function.asInstanceOf[UserDefinedAggregateFunction[_, _]]
+  val function = aggInfo.function.asInstanceOf[ImperativeAggregateFunction[_, _]]
   val functionTerm: String = ctx.addReusableFunction(
     function,
     contextTerm = s"$STORE_TERM.getRuntimeContext()")
diff --git a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/dataview/DataViewUtils.scala b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/dataview/DataViewUtils.scala
index 6cb96e3..c028d9a 100644
--- a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/dataview/DataViewUtils.scala
+++ b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/dataview/DataViewUtils.scala
@@ -25,7 +25,7 @@ import org.apache.flink.table.api.dataview._
 import org.apache.flink.table.data.binary.BinaryRawValueData
 import org.apache.flink.table.data.{GenericRowData, RowData}
 import org.apache.flink.table.dataview.{ListViewTypeInfo, MapViewTypeInfo}
-import org.apache.flink.table.functions.UserDefinedAggregateFunction
+import org.apache.flink.table.functions.ImperativeAggregateFunction
 import org.apache.flink.table.runtime.types.TypeInfoLogicalTypeConverter.{fromLogicalTypeToTypeInfo, fromTypeInfoToLogicalType}
 import org.apache.flink.table.runtime.typeutils.InternalTypeInfo
 import org.apache.flink.table.types.DataType
@@ -50,9 +50,10 @@ object DataViewUtils {
     */
   def useNullSerializerForStateViewFieldsFromAccType(
       index: Int,
-      aggFun: UserDefinedAggregateFunction[_, _],
+      aggFun: ImperativeAggregateFunction[_, _],
       externalAccType: DataType,
-      isStateBackedDataViews: Boolean): (DataType, Array[DataViewSpec]) = {
+      isStateBackedDataViews: Boolean)
+    : (DataType, Array[DataViewSpec]) = {
 
     val acc = aggFun.createAccumulator()
     val accumulatorSpecs = new mutable.ArrayBuffer[DataViewSpec]
diff --git a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/expressions/aggregations.scala b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/expressions/aggregations.scala
index 7249c34..7e903c5 100644
--- a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/expressions/aggregations.scala
+++ b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/expressions/aggregations.scala
@@ -20,7 +20,7 @@ package org.apache.flink.table.planner.expressions
 import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, TypeInformation}
 import org.apache.flink.api.java.typeutils.MultisetTypeInfo
 import org.apache.flink.table.expressions.CallExpression
-import org.apache.flink.table.functions.{AggregateFunction, TableAggregateFunction, UserDefinedAggregateFunction}
+import org.apache.flink.table.functions.ImperativeAggregateFunction
 import org.apache.flink.table.planner.calcite.FlinkTypeSystem
 import org.apache.flink.table.planner.functions.utils.UserDefinedFunctionUtils._
 import org.apache.flink.table.planner.typeutils.TypeInfoCheckUtils
@@ -208,7 +208,7 @@ case class VarSamp(child: PlannerExpression) extends Aggregation {
   * Expression for calling a user-defined (table)aggregate function.
   */
 case class AggFunctionCall(
-    aggregateFunction: UserDefinedAggregateFunction[_, _],
+    aggregateFunction: ImperativeAggregateFunction[_, _],
     resultTypeInfo: TypeInformation[_],
     accTypeInfo: TypeInformation[_],
     args: Seq[PlannerExpression])
diff --git a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/functions/utils/AggSqlFunction.scala b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/functions/utils/AggSqlFunction.scala
index 0b467be..301f4d4 100644
--- a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/functions/utils/AggSqlFunction.scala
+++ b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/functions/utils/AggSqlFunction.scala
@@ -19,7 +19,7 @@
 package org.apache.flink.table.planner.functions.utils
 
 import org.apache.flink.table.api.ValidationException
-import org.apache.flink.table.functions.{AggregateFunction, FunctionIdentifier, TableAggregateFunction, UserDefinedAggregateFunction}
+import org.apache.flink.table.functions.{AggregateFunction, FunctionIdentifier, TableAggregateFunction, ImperativeAggregateFunction}
 import org.apache.flink.table.planner.calcite.FlinkTypeFactory
 import org.apache.flink.table.planner.functions.utils.AggSqlFunction.{createOperandTypeChecker, createOperandTypeInference, createReturnTypeInference}
 import org.apache.flink.table.planner.functions.utils.UserDefinedFunctionUtils._
@@ -50,7 +50,7 @@ import java.util
 class AggSqlFunction(
     identifier: FunctionIdentifier,
     displayName: String,
-    val aggregateFunction: UserDefinedAggregateFunction[_, _],
+    val aggregateFunction: ImperativeAggregateFunction[_, _],
     val externalResultType: DataType,
     val externalAccType: DataType,
     typeFactory: FlinkTypeFactory,
@@ -77,7 +77,7 @@ class AggSqlFunction(
     */
   def makeFunction(
       constants: Array[AnyRef],
-      argTypes: Array[LogicalType]): UserDefinedAggregateFunction[_, _] = aggregateFunction
+      argTypes: Array[LogicalType]): ImperativeAggregateFunction[_, _] = aggregateFunction
 
   override def isDeterministic: Boolean = aggregateFunction.isDeterministic
 
@@ -91,11 +91,12 @@ object AggSqlFunction {
   def apply(
       identifier: FunctionIdentifier,
       displayName: String,
-      aggregateFunction: UserDefinedAggregateFunction[_, _],
+      aggregateFunction: ImperativeAggregateFunction[_, _],
       externalResultType: DataType,
       externalAccType: DataType,
       typeFactory: FlinkTypeFactory,
-      requiresOver: Boolean): AggSqlFunction = {
+      requiresOver: Boolean)
+    : AggSqlFunction = {
 
     new AggSqlFunction(
       identifier,
@@ -109,9 +110,10 @@ object AggSqlFunction {
 
   private[flink] def createOperandTypeInference(
       name: String,
-      aggregateFunction: UserDefinedAggregateFunction[_, _],
+      aggregateFunction: ImperativeAggregateFunction[_, _],
       typeFactory: FlinkTypeFactory,
-      externalAccType: DataType): SqlOperandTypeInference = {
+      externalAccType: DataType)
+    : SqlOperandTypeInference = {
     /**
       * Operand type inference based on [[AggregateFunction]] given information.
       */
@@ -161,8 +163,9 @@ object AggSqlFunction {
 
   private[flink] def createOperandTypeChecker(
       name: String,
-      aggregateFunction: UserDefinedAggregateFunction[_, _],
-      externalAccType: DataType): SqlOperandTypeChecker = {
+      aggregateFunction: ImperativeAggregateFunction[_, _],
+      externalAccType: DataType)
+    : SqlOperandTypeChecker = {
 
     val methods = checkAndExtractMethods(aggregateFunction, "accumulate")
 
diff --git a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/functions/utils/UserDefinedFunctionUtils.scala b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/functions/utils/UserDefinedFunctionUtils.scala
index 5f099a8..dc02dc1 100644
--- a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/functions/utils/UserDefinedFunctionUtils.scala
+++ b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/functions/utils/UserDefinedFunctionUtils.scala
@@ -151,9 +151,10 @@ object UserDefinedFunctionUtils {
   }
 
   def getAggUserDefinedInputTypes(
-      func: UserDefinedAggregateFunction[_, _],
+      func: ImperativeAggregateFunction[_, _],
       externalAccType: DataType,
-      expectedTypes: Array[LogicalType]): Array[DataType] = {
+      expectedTypes: Array[LogicalType])
+    : Array[DataType] = {
     val accMethod = getAggFunctionUDIMethod(
       func, "accumulate", externalAccType, expectedTypes).getOrElse(
       throwValidationException(func.getClass.getCanonicalName, func, expectedTypes)
@@ -191,9 +192,9 @@ object UserDefinedFunctionUtils {
     * Elements of the signature can be null (act as a wildcard).
     */
   def getAccumulateMethodSignature(
-      function: UserDefinedAggregateFunction[_, _],
+      function: ImperativeAggregateFunction[_, _],
       expectedTypes: Seq[LogicalType])
-  : Option[Array[Class[_]]] = {
+    : Option[Array[Class[_]]] = {
     getAggFunctionUDIMethod(
       function,
       "accumulate",
@@ -242,11 +243,11 @@ object UserDefinedFunctionUtils {
   }
 
   def getAggFunctionUDIMethod(
-      function: UserDefinedAggregateFunction[_, _],
+      function: ImperativeAggregateFunction[_, _],
       methodName: String,
       accType: DataType,
       expectedTypes: Seq[LogicalType])
-  : Option[Method] = {
+    : Option[Method] = {
     val input = (Array(fromDataTypeToLogicalType(accType)) ++ expectedTypes).toSeq
     val possibleSignatures = Seq(
       logicalTypesToInternalClasses(input),
@@ -539,16 +540,17 @@ object UserDefinedFunctionUtils {
   // ----------------------------------------------------------------------------------------------
 
   /**
-    * Tries to infer the DataType of a [[UserDefinedAggregateFunction]]'s return type.
+    * Tries to infer the DataType of an [[ImperativeAggregateFunction]]'s return type.
     *
-    * @param userDefinedAggregateFunction The [[UserDefinedAggregateFunction]] for which the return
+    * @param userDefinedAggregateFunction The [[ImperativeAggregateFunction]] for which the return
     *                                     type is inferred.
     * @param extractedType                The implicitly inferred type of the result type.
-    * @return The inferred result type of the [[UserDefinedAggregateFunction]].
+    * @return The inferred result type of the [[ImperativeAggregateFunction]].
     */
   def getResultTypeOfAggregateFunction(
-      userDefinedAggregateFunction: UserDefinedAggregateFunction[_, _],
-      extractedType: DataType = null): DataType = {
+      userDefinedAggregateFunction: ImperativeAggregateFunction[_, _],
+      extractedType: DataType = null)
+    : DataType = {
 
     val resultType = userDefinedAggregateFunction.getResultType
     if (resultType != null) {
@@ -562,7 +564,7 @@ object UserDefinedFunctionUtils {
         case ite: InvalidTypesException =>
           throw new TableException(
             "Cannot infer generic type of ${aggregateFunction.getClass}. " +
-                "You can override UserDefinedAggregateFunction.getResultType() to " +
+                "You can override ImperativeAggregateFunction.getResultType() to " +
                 "specify the type.",
             ite
           )
@@ -571,17 +573,17 @@ object UserDefinedFunctionUtils {
   }
 
   /**
-    * Tries to infer the Type of a [[UserDefinedAggregateFunction]]'s accumulator type.
+    * Tries to infer the Type of an [[ImperativeAggregateFunction]]'s accumulator type.
     *
-    * @param userDefinedAggregateFunction The [[UserDefinedAggregateFunction]] for which the
+    * @param userDefinedAggregateFunction The [[ImperativeAggregateFunction]] for which the
     *                                     accumulator type is inferred.
     * @param extractedType                The implicitly inferred type of the accumulator type.
-    * @return The inferred accumulator type of the [[UserDefinedAggregateFunction]].
+    * @return The inferred accumulator type of the [[ImperativeAggregateFunction]].
     */
   def getAccumulatorTypeOfAggregateFunction(
-      userDefinedAggregateFunction: UserDefinedAggregateFunction[_, _],
-      extractedType: DataType = null): DataType = {
-
+      userDefinedAggregateFunction: ImperativeAggregateFunction[_, _],
+      extractedType: DataType = null)
+    : DataType = {
     val accType = userDefinedAggregateFunction.getAccumulatorType
     if (accType != null) {
       fromLegacyInfoToDataType(accType)
@@ -594,7 +596,7 @@ object UserDefinedFunctionUtils {
         case ite: InvalidTypesException =>
           throw new TableException(
             "Cannot infer generic type of ${aggregateFunction.getClass}. " +
-                "You can override UserDefinedAggregateFunction.getAccumulatorType() to specify " +
+                "You can override ImperativeAggregateFunction.getAccumulatorType() to specify " +
                 "the type.",
             ite
           )
@@ -604,21 +606,21 @@ object UserDefinedFunctionUtils {
   }
 
   /**
-    * Internal method to extract a type from a [[UserDefinedAggregateFunction]]'s type parameters.
-    *
-    * @param aggregateFunction The [[UserDefinedAggregateFunction]] for which the type is extracted.
-    * @param parameterTypePos The position of the type parameter for which the type is extracted.
+    * Internal method to extract a type from an [[ImperativeAggregateFunction]]'s type parameters.
     *
+    * @param aggregateFunction The [[ImperativeAggregateFunction]] for which the type is extracted.
+    * @param parameterTypePos  The position of the type parameter for which the type is extracted.
     * @return The extracted type.
     */
   @throws(classOf[InvalidTypesException])
   private def extractTypeFromAggregateFunction(
-      aggregateFunction: UserDefinedAggregateFunction[_, _],
-      parameterTypePos: Int): DataType = {
+      aggregateFunction: ImperativeAggregateFunction[_, _],
+      parameterTypePos: Int)
+    : DataType = {
 
     fromLegacyInfoToDataType(TypeExtractor.createTypeInfo(
       aggregateFunction,
-      classOf[UserDefinedAggregateFunction[_, _]],
+      classOf[ImperativeAggregateFunction[_, _]],
       aggregateFunction.getClass,
       parameterTypePos))
   }
diff --git a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/utils/AggregateUtil.scala b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/utils/AggregateUtil.scala
index 9145f4d..f24706d 100644
--- a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/utils/AggregateUtil.scala
+++ b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/utils/AggregateUtil.scala
@@ -24,7 +24,7 @@ import org.apache.flink.table.data.{DecimalData, RowData, StringData, TimestampD
 import org.apache.flink.table.dataview.MapViewTypeInfo
 import org.apache.flink.table.expressions.ExpressionUtils.extractValue
 import org.apache.flink.table.expressions._
-import org.apache.flink.table.functions.{AggregateFunction, TableAggregateFunction, UserDefinedAggregateFunction, UserDefinedFunction}
+import org.apache.flink.table.functions.{AggregateFunction, ImperativeAggregateFunction, TableAggregateFunction, UserDefinedFunction}
 import org.apache.flink.table.planner.JLong
 import org.apache.flink.table.planner.calcite.FlinkRelBuilder.PlannerNamedWindowProperty
 import org.apache.flink.table.planner.calcite.{FlinkTypeFactory, FlinkTypeSystem}
@@ -314,7 +314,7 @@ object AggregateUtil extends Enumeration {
             Array.empty[DataViewSpec],
             fromLogicalTypeToDataType(a.getResultType.getLogicalType)
           )
-        case a: UserDefinedAggregateFunction[_, _] =>
+        case a: ImperativeAggregateFunction[_, _] =>
           val (implicitAccType, implicitResultType) = call.getAggregation match {
             case aggSqlFun: AggSqlFunction =>
               (aggSqlFun.externalAccType, aggSqlFun.externalResultType)
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/TableTestBase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/TableTestBase.scala
index c942911..9a8dd36 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/TableTestBase.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/TableTestBase.scala
@@ -990,7 +990,7 @@ class TestingTableEnvironment private(
   def registerFunction[T: TypeInformation, ACC: TypeInformation](
       name: String,
       f: AggregateFunction[T, ACC]): Unit = {
-    registerUserDefinedAggregateFunction(name, f)
+    registerImperativeAggregateFunction(name, f)
   }
 
   // just for testing, remove this method while
@@ -999,12 +999,12 @@ class TestingTableEnvironment private(
   def registerFunction[T: TypeInformation, ACC: TypeInformation](
       name: String,
       f: TableAggregateFunction[T, ACC]): Unit = {
-    registerUserDefinedAggregateFunction(name, f)
+    registerImperativeAggregateFunction(name, f)
   }
 
-  private def registerUserDefinedAggregateFunction[T: TypeInformation, ACC: TypeInformation](
+  private def registerImperativeAggregateFunction[T: TypeInformation, ACC: TypeInformation](
       name: String,
-      f: UserDefinedAggregateFunction[T, ACC]): Unit = {
+      f: ImperativeAggregateFunction[T, ACC]): Unit = {
     val typeInfo = UserDefinedFunctionHelper
       .getReturnTypeOfAggregateFunction(f, implicitly[TypeInformation[T]])
     val accTypeInfo = UserDefinedFunctionHelper
diff --git a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/api/internal/TableEnvImpl.scala b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/api/internal/TableEnvImpl.scala
index 226b620..2ac2749 100644
--- a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/api/internal/TableEnvImpl.scala
+++ b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/api/internal/TableEnvImpl.scala
@@ -32,7 +32,7 @@ import org.apache.flink.table.delegation.Parser
 import org.apache.flink.table.expressions._
 import org.apache.flink.table.expressions.resolver.lookups.TableReferenceLookup
 import org.apache.flink.table.factories.{TableFactoryUtil, TableSinkFactoryContextImpl}
-import org.apache.flink.table.functions.{AggregateFunction, ScalarFunction, TableFunction, UserDefinedAggregateFunction, _}
+import org.apache.flink.table.functions.{AggregateFunction, ImperativeAggregateFunction, ScalarFunction, TableFunction, _}
 import org.apache.flink.table.module.{Module, ModuleManager}
 import org.apache.flink.table.operations.ddl._
 import org.apache.flink.table.operations.utils.OperationTreeBuilder
@@ -244,7 +244,7 @@ abstract class TableEnvImpl(
     */
   private[flink] def registerAggregateFunctionInternal[T: TypeInformation, ACC: TypeInformation](
       name: String,
-      function: UserDefinedAggregateFunction[T, ACC])
+      function: ImperativeAggregateFunction[T, ACC])
     : Unit = {
     val resultTypeInfo: TypeInformation[T] = UserDefinedFunctionHelper
       .getReturnTypeOfAggregateFunction(
diff --git a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/codegen/AggregationCodeGenerator.scala b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/codegen/AggregationCodeGenerator.scala
index 2de3f0f..426cdc5 100644
--- a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/codegen/AggregationCodeGenerator.scala
+++ b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/codegen/AggregationCodeGenerator.scala
@@ -17,27 +17,20 @@
  */
 package org.apache.flink.table.codegen
 
-import java.lang.reflect.Modifier
-import java.lang.{Iterable => JIterable}
-import java.util.{List => JList}
-
-import org.apache.calcite.rel.`type`.RelDataType
-import org.apache.calcite.rel.core.AggregateCall
-import org.apache.calcite.rex.RexLiteral
 import org.apache.flink.api.common.state.{ListStateDescriptor, MapStateDescriptor, State, StateDescriptor}
 import org.apache.flink.api.common.typeinfo.TypeInformation
 import org.apache.flink.api.java.typeutils.RowTypeInfo
 import org.apache.flink.api.java.typeutils.TypeExtractionUtils.{extractTypeArgument, getRawClass}
-import org.apache.flink.table.api.{TableConfig, ValidationException}
 import org.apache.flink.table.api.dataview._
+import org.apache.flink.table.api.{TableConfig, ValidationException}
 import org.apache.flink.table.calcite.FlinkTypeFactory
 import org.apache.flink.table.codegen.CodeGenUtils.{newName, reflectiveFieldWriteAccess}
 import org.apache.flink.table.codegen.Indenter.toISC
 import org.apache.flink.table.dataview.{StateListView, StateMapView}
-import org.apache.flink.table.functions.{TableAggregateFunction, UserDefinedAggregateFunction, UserDefinedFunction}
 import org.apache.flink.table.functions.aggfunctions.DistinctAccumulator
-import org.apache.flink.table.functions.utils.{AggSqlFunction, UserDefinedFunctionUtils}
 import org.apache.flink.table.functions.utils.UserDefinedFunctionUtils.{getUserDefinedMethod, signatureToString}
+import org.apache.flink.table.functions.utils.{AggSqlFunction, UserDefinedFunctionUtils}
+import org.apache.flink.table.functions.{ImperativeAggregateFunction, TableAggregateFunction, UserDefinedFunction}
 import org.apache.flink.table.runtime.CRowWrappingCollector
 import org.apache.flink.table.runtime.aggregate.AggregateUtil.CalcitePair
 import org.apache.flink.table.runtime.aggregate.{AggregateUtil, GeneratedAggregations, GeneratedTableAggregations, SingleElementIterable}
@@ -45,6 +38,14 @@ import org.apache.flink.table.utils.EncodingUtils
 import org.apache.flink.types.Row
 import org.apache.flink.util.Collector
 
+import org.apache.calcite.rel.`type`.RelDataType
+import org.apache.calcite.rel.core.AggregateCall
+import org.apache.calcite.rex.RexLiteral
+
+import java.lang.reflect.Modifier
+import java.lang.{Iterable => JIterable}
+import java.util.{List => JList}
+
 import scala.collection.JavaConversions._
 import scala.collection.mutable
 
@@ -82,25 +83,25 @@ import scala.collection.mutable
   * @param accConfig              Data view specification for accumulators
   */
 class AggregationCodeGenerator(
-  config: TableConfig,
-  nullableInput: Boolean,
-  inputTypeInfo: TypeInformation[_ <: Any],
-  constants: Option[Seq[RexLiteral]],
-  classNamePrefix: String,
-  physicalInputTypes: Seq[TypeInformation[_]],
-  aggregates: Array[UserDefinedAggregateFunction[_ <: Any, _ <: Any]],
-  aggFields: Array[Array[Int]],
-  aggMapping: Array[Int],
-  distinctAccMapping: Array[(Integer, JList[Integer])],
-  isStateBackedDataViews: Boolean,
-  partialResults: Boolean,
-  fwdMapping: Array[Int],
-  mergeMapping: Option[Array[Int]],
-  outputArity: Int,
-  needRetract: Boolean,
-  needMerge: Boolean,
-  needReset: Boolean,
-  accConfig: Option[Array[Seq[DataViewSpec[_]]]])
+    config: TableConfig,
+    nullableInput: Boolean,
+    inputTypeInfo: TypeInformation[_ <: Any],
+    constants: Option[Seq[RexLiteral]],
+    classNamePrefix: String,
+    physicalInputTypes: Seq[TypeInformation[_]],
+    aggregates: Array[ImperativeAggregateFunction[_ <: Any, _ <: Any]],
+    aggFields: Array[Array[Int]],
+    aggMapping: Array[Int],
+    distinctAccMapping: Array[(Integer, JList[Integer])],
+    isStateBackedDataViews: Boolean,
+    partialResults: Boolean,
+    fwdMapping: Array[Int],
+    mergeMapping: Option[Array[Int]],
+    outputArity: Int,
+    needRetract: Boolean,
+    needMerge: Boolean,
+    needReset: Boolean,
+    accConfig: Option[Array[Seq[DataViewSpec[_]]]])
   extends CodeGenerator(config, nullableInput, inputTypeInfo) {
 
   // set of statements for cleanup dataview that will be added only once
diff --git a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/codegen/MatchCodeGenerator.scala b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/codegen/MatchCodeGenerator.scala
index f248188..569a7e7 100644
--- a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/codegen/MatchCodeGenerator.scala
+++ b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/codegen/MatchCodeGenerator.scala
@@ -18,12 +18,6 @@
 
 package org.apache.flink.table.codegen
 
-import java.lang.{Long => JLong}
-import java.util
-import org.apache.calcite.rel.`type`.RelDataType
-import org.apache.calcite.rex._
-import org.apache.calcite.sql.SqlAggFunction
-import org.apache.calcite.sql.fun.SqlStdOperatorTable._
 import org.apache.flink.api.common.functions._
 import org.apache.flink.api.common.typeinfo.{SqlTimeTypeInfo, TypeInformation}
 import org.apache.flink.cep.functions.PatternProcessFunction
@@ -32,23 +26,30 @@ import org.apache.flink.configuration.Configuration
 import org.apache.flink.table.api.dataview.DataViewSpec
 import org.apache.flink.table.api.{TableConfig, TableException}
 import org.apache.flink.table.calcite.FlinkTypeFactory
+import org.apache.flink.table.catalog.BasicOperatorTable.{MATCH_PROCTIME, MATCH_ROWTIME}
 import org.apache.flink.table.codegen.CodeGenUtils.{boxedTypeTermForTypeInfo, newName, primitiveDefaultValue, primitiveTypeTermForTypeInfo}
 import org.apache.flink.table.codegen.GeneratedExpression.{NEVER_NULL, NO_CODE}
 import org.apache.flink.table.codegen.Indenter.toISC
-import org.apache.flink.table.functions.UserDefinedAggregateFunction
+import org.apache.flink.table.functions.ImperativeAggregateFunction
 import org.apache.flink.table.plan.schema.RowSchema
 import org.apache.flink.table.runtime.`match`.{IterativeConditionRunner, PatternProcessFunctionRunner}
 import org.apache.flink.table.runtime.aggregate.AggregateUtil
 import org.apache.flink.table.typeutils.TimeIndicatorTypeInfo
 import org.apache.flink.table.util.MatchUtil.{ALL_PATTERN_VARIABLE, AggregationPatternVariableFinder}
 import org.apache.flink.table.utils.EncodingUtils
-import org.apache.flink.table.catalog.BasicOperatorTable.{MATCH_PROCTIME, MATCH_ROWTIME}
 import org.apache.flink.types.Row
 import org.apache.flink.util.Collector
 import org.apache.flink.util.MathUtils.checkedDownCast
 
+import org.apache.calcite.rel.`type`.RelDataType
+import org.apache.calcite.rex._
+import org.apache.calcite.sql.SqlAggFunction
+import org.apache.calcite.sql.fun.SqlStdOperatorTable._
 import org.apache.calcite.util.ImmutableBitSet
 
+import java.lang.{Long => JLong}
+import java.util
+
 import scala.collection.JavaConverters._
 import scala.collection.mutable
 
@@ -890,7 +891,7 @@ class MatchCodeGenerator(
     )
 
     private case class SingleAggCall(
-      aggFunction: UserDefinedAggregateFunction[_, _],
+      aggFunction: ImperativeAggregateFunction[_, _],
       inputIndices: Array[Int],
       dataViews: Seq[DataViewSpec[_]],
       distinctAccIndex: Int
diff --git a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/expressions/aggregations.scala b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/expressions/aggregations.scala
index f6d0725..af02d6f 100644
--- a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/expressions/aggregations.scala
+++ b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/expressions/aggregations.scala
@@ -17,20 +17,20 @@
  */
 package org.apache.flink.table.expressions
 
+import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, TypeInformation}
+import org.apache.flink.api.java.typeutils.MultisetTypeInfo
+import org.apache.flink.table.calcite.FlinkTypeFactory
+import org.apache.flink.table.functions.ImperativeAggregateFunction
+import org.apache.flink.table.functions.utils.AggSqlFunction
+import org.apache.flink.table.functions.utils.UserDefinedFunctionUtils._
+import org.apache.flink.table.typeutils.TypeCheckUtils
+import org.apache.flink.table.validate.{ValidationFailure, ValidationResult, ValidationSuccess}
+
 import org.apache.calcite.rex.RexNode
 import org.apache.calcite.sql.SqlAggFunction
 import org.apache.calcite.sql.fun._
 import org.apache.calcite.tools.RelBuilder
 import org.apache.calcite.tools.RelBuilder.AggCall
-import org.apache.flink.api.common.typeinfo.TypeInformation
-import org.apache.flink.table.functions.UserDefinedAggregateFunction
-import org.apache.flink.table.functions.utils.AggSqlFunction
-import org.apache.flink.table.typeutils.TypeCheckUtils
-import org.apache.flink.api.common.typeinfo.BasicTypeInfo
-import org.apache.flink.api.java.typeutils.MultisetTypeInfo
-import org.apache.flink.table.calcite.FlinkTypeFactory
-import org.apache.flink.table.functions.utils.UserDefinedFunctionUtils._
-import org.apache.flink.table.validate.{ValidationFailure, ValidationResult, ValidationSuccess}
 
 abstract sealed class Aggregation extends PlannerExpression {
 
@@ -361,7 +361,7 @@ case class VarSamp(child: PlannerExpression) extends Aggregation {
   * Expression for calling a user-defined (table)aggregate function.
   */
 case class AggFunctionCall(
-    aggregateFunction: UserDefinedAggregateFunction[_, _],
+    aggregateFunction: ImperativeAggregateFunction[_, _],
     resultTypeInfo: TypeInformation[_],
     accTypeInfo: TypeInformation[_],
     args: Seq[PlannerExpression])
diff --git a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/functions/utils/AggSqlFunction.scala b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/functions/utils/AggSqlFunction.scala
index de71825..ce80a4f 100644
--- a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/functions/utils/AggSqlFunction.scala
+++ b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/functions/utils/AggSqlFunction.scala
@@ -18,21 +18,22 @@
 
 package org.apache.flink.table.functions.utils
 
-import java.util
+import org.apache.flink.api.common.typeinfo._
+import org.apache.flink.table.api.ValidationException
+import org.apache.flink.table.calcite.FlinkTypeFactory
+import org.apache.flink.table.functions.utils.AggSqlFunction.{createOperandTypeChecker, createOperandTypeInference, createReturnTypeInference}
+import org.apache.flink.table.functions.utils.UserDefinedFunctionUtils._
+import org.apache.flink.table.functions.{AggregateFunction, FunctionRequirement, ImperativeAggregateFunction, TableAggregateFunction}
 
 import org.apache.calcite.rel.`type`.RelDataType
 import org.apache.calcite.sql._
-import org.apache.calcite.sql.`type`._
 import org.apache.calcite.sql.`type`.SqlOperandTypeChecker.Consistency
+import org.apache.calcite.sql.`type`._
 import org.apache.calcite.sql.parser.SqlParserPos
 import org.apache.calcite.sql.validate.SqlUserDefinedAggFunction
 import org.apache.calcite.util.Optionality
-import org.apache.flink.api.common.typeinfo._
-import org.apache.flink.table.api.ValidationException
-import org.apache.flink.table.calcite.FlinkTypeFactory
-import org.apache.flink.table.functions.{AggregateFunction, FunctionRequirement, TableAggregateFunction, UserDefinedAggregateFunction}
-import org.apache.flink.table.functions.utils.AggSqlFunction.{createOperandTypeChecker, createOperandTypeInference, createReturnTypeInference}
-import org.apache.flink.table.functions.utils.UserDefinedFunctionUtils._
+
+import java.util
 
 /**
   * Calcite wrapper for user-defined aggregate functions. Currently, the aggregate function can be
@@ -48,7 +49,7 @@ import org.apache.flink.table.functions.utils.UserDefinedFunctionUtils._
 class AggSqlFunction(
     name: String,
     displayName: String,
-    aggregateFunction: UserDefinedAggregateFunction[_, _],
+    aggregateFunction: ImperativeAggregateFunction[_, _],
     val returnType: TypeInformation[_],
     val accType: TypeInformation[_],
     typeFactory: FlinkTypeFactory,
@@ -67,7 +68,7 @@ class AggSqlFunction(
     typeFactory
   ) {
 
-  def getFunction: UserDefinedAggregateFunction[_, _] = aggregateFunction
+  def getFunction: ImperativeAggregateFunction[_, _] = aggregateFunction
 
   override def isDeterministic: Boolean = aggregateFunction.isDeterministic
 
@@ -81,10 +82,11 @@ object AggSqlFunction {
   def apply(
       name: String,
       displayName: String,
-      aggregateFunction: UserDefinedAggregateFunction[_, _],
+      aggregateFunction: ImperativeAggregateFunction[_, _],
       returnType: TypeInformation[_],
       accType: TypeInformation[_],
-      typeFactory: FlinkTypeFactory): AggSqlFunction = {
+      typeFactory: FlinkTypeFactory)
+    : AggSqlFunction = {
 
     val requiresOver = aggregateFunction match {
       case a: AggregateFunction[_, _] =>
@@ -103,9 +105,9 @@ object AggSqlFunction {
   }
 
   private[flink] def createOperandTypeInference(
-      aggregateFunction: UserDefinedAggregateFunction[_, _],
-      typeFactory: FlinkTypeFactory,
-      accType: TypeInformation[_])
+    aggregateFunction: ImperativeAggregateFunction[_, _],
+    typeFactory: FlinkTypeFactory,
+    accType: TypeInformation[_])
   : SqlOperandTypeInference = {
     /**
       * Operand type inference based on [[AggregateFunction]] given information.
@@ -157,7 +159,7 @@ object AggSqlFunction {
   }
 
   private[flink] def createOperandTypeChecker(
-      aggregateFunction: UserDefinedAggregateFunction[_, _],
+      aggregateFunction: ImperativeAggregateFunction[_, _],
       accType: TypeInformation[_])
     : SqlOperandTypeChecker = {
 
diff --git a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/functions/utils/UserDefinedFunctionUtils.scala b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/functions/utils/UserDefinedFunctionUtils.scala
index cdb2479..20137db 100644
--- a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/functions/utils/UserDefinedFunctionUtils.scala
+++ b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/functions/utils/UserDefinedFunctionUtils.scala
@@ -67,11 +67,11 @@ object UserDefinedFunctionUtils {
     * of [[TypeInformation]]. Elements of the signature can be null (act as a wildcard).
     */
   def getAccumulateMethodSignature(
-      function: UserDefinedAggregateFunction[_, _],
-      signature: Seq[TypeInformation[_]])
+    function: ImperativeAggregateFunction[_, _],
+    signature: Seq[TypeInformation[_]])
   : Option[Array[Class[_]]] = {
     val accType = TypeExtractor.createTypeInfo(
-      function, classOf[UserDefinedAggregateFunction[_, _]], function.getClass, 1)
+      function, classOf[ImperativeAggregateFunction[_, _]], function.getClass, 1)
     val input = (Array(accType) ++ signature).toSeq
     getUserDefinedMethod(
       function,
@@ -292,11 +292,11 @@ object UserDefinedFunctionUtils {
   def createAggregateSqlFunction(
       name: String,
       displayName: String,
-      aggFunction: UserDefinedAggregateFunction[_, _],
+      aggFunction: ImperativeAggregateFunction[_, _],
       resultType: TypeInformation[_],
       accTypeInfo: TypeInformation[_],
       typeFactory: FlinkTypeFactory)
-  : SqlFunction = {
+    : SqlFunction = {
     //check if a qualified accumulate method exists before create Sql function
     checkAndExtractMethods(aggFunction, "accumulate")
 
diff --git a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/runtime/aggregate/AggregateUtil.scala b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/runtime/aggregate/AggregateUtil.scala
index 21625cb..0d47ca9 100644
--- a/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/runtime/aggregate/AggregateUtil.scala
+++ b/flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/runtime/aggregate/AggregateUtil.scala
@@ -17,16 +17,6 @@
  */
 package org.apache.flink.table.runtime.aggregate
 
-import java.util
-import java.util.{ArrayList => JArrayList, List => JList}
-
-import org.apache.calcite.rel.`type`._
-import org.apache.calcite.rel.core.AggregateCall
-import org.apache.calcite.rex.RexLiteral
-import org.apache.calcite.sql.`type`.SqlTypeName
-import org.apache.calcite.sql.`type`.SqlTypeName._
-import org.apache.calcite.sql.fun._
-import org.apache.calcite.sql.{SqlAggFunction, SqlKind}
 import org.apache.flink.api.common.functions.{MapFunction, RichGroupReduceFunction, AggregateFunction => DataStreamAggFunction, _}
 import org.apache.flink.api.common.typeinfo.{BasicTypeInfo, TypeInformation, Types}
 import org.apache.flink.api.java.typeutils.RowTypeInfo
@@ -43,13 +33,24 @@ import org.apache.flink.table.expressions._
 import org.apache.flink.table.functions.aggfunctions._
 import org.apache.flink.table.functions.utils.AggSqlFunction
 import org.apache.flink.table.functions.utils.UserDefinedFunctionUtils._
-import org.apache.flink.table.functions.{AggregateFunction, TableAggregateFunction, UserDefinedAggregateFunction, UserDefinedFunctionHelper}
+import org.apache.flink.table.functions.{AggregateFunction, ImperativeAggregateFunction, TableAggregateFunction, UserDefinedFunctionHelper}
 import org.apache.flink.table.plan.logical._
 import org.apache.flink.table.runtime.types.{CRow, CRowTypeInfo}
 import org.apache.flink.table.typeutils.TimeIntervalTypeInfo
 import org.apache.flink.table.typeutils.TypeCheckUtils._
 import org.apache.flink.types.Row
 
+import org.apache.calcite.rel.`type`._
+import org.apache.calcite.rel.core.AggregateCall
+import org.apache.calcite.rex.RexLiteral
+import org.apache.calcite.sql.`type`.SqlTypeName
+import org.apache.calcite.sql.`type`.SqlTypeName._
+import org.apache.calcite.sql.fun._
+import org.apache.calcite.sql.{SqlAggFunction, SqlKind}
+
+import java.util
+import java.util.{ArrayList => JArrayList, List => JList}
+
 import scala.collection.JavaConversions._
 import scala.collection.JavaConverters._
 import scala.collection.mutable
@@ -1246,7 +1247,7 @@ object AggregateUtil {
     * Return true if all aggregates can be partially merged. False otherwise.
     */
   private[flink] def doAllSupportPartialMerge(
-      aggregateList: Array[UserDefinedAggregateFunction[_ <: Any, _ <: Any]]): Boolean = {
+      aggregateList: Array[ImperativeAggregateFunction[_ <: Any, _ <: Any]]): Boolean = {
     aggregateList.forall(ifMethodExistInFunction("merge", _))
   }
 
@@ -1324,7 +1325,7 @@ object AggregateUtil {
     private val aggregates: Seq[(AggregateCallMetadata, Array[Int])],
     private val distinctAccTypesWithSpecs: Seq[(TypeInformation[_], Seq[DataViewSpec[_]])]) {
 
-    def getAggregateFunctions: Array[UserDefinedAggregateFunction[_, _]] = {
+    def getAggregateFunctions: Array[ImperativeAggregateFunction[_, _]] = {
       aggregates.map(_._1.aggregateFunction).toArray
     }
 
@@ -1370,7 +1371,7 @@ object AggregateUtil {
     * function.
     */
   private[flink] case class AggregateCallMetadata(
-    aggregateFunction: UserDefinedAggregateFunction[_, _],
+    aggregateFunction: ImperativeAggregateFunction[_, _],
     accumulatorType: TypeInformation[_],
     accumulatorSpecs: Seq[DataViewSpec[_]],
     distinctAccIndex: Int
@@ -1414,7 +1415,7 @@ object AggregateUtil {
     // store the aggregate fields of each aggregate function, by the same order of aggregates.
     // create aggregate function instances by function type and aggregate field data type.
 
-    val aggregate: UserDefinedAggregateFunction[_, _] = createFlinkAggFunction(
+    val aggregate: ImperativeAggregateFunction[_, _] = createFlinkAggFunction(
       aggregateFunction,
       needRetraction,
       aggregateInputTypes,
@@ -1542,7 +1543,7 @@ object AggregateUtil {
       needRetraction: Boolean,
       inputDataType: Seq[RelDataType],
       tableConfig: TableConfig)
-    : UserDefinedAggregateFunction[_ <: Any, _ <: Any] = {
+    : ImperativeAggregateFunction[_ <: Any, _ <: Any] = {
 
     lazy val outputType = inputDataType.get(0)
     lazy val outputTypeName = if (inputDataType.isEmpty) {
@@ -1796,10 +1797,11 @@ object AggregateUtil {
 
   private def createRowTypeForKeysAndAggregates(
       groupings: Array[Int],
-      aggregates: Array[UserDefinedAggregateFunction[_, _]],
+      aggregates: Array[ImperativeAggregateFunction[_, _]],
       aggTypes: Array[TypeInformation[_]],
       inputType: RelDataType,
-      windowKeyTypes: Option[Array[TypeInformation[_]]] = None): RowTypeInfo = {
+      windowKeyTypes: Option[Array[TypeInformation[_]]] = None)
+    : RowTypeInfo = {
 
     // get the field data types of group keys.
     val groupingTypes: Seq[TypeInformation[_]] =
@@ -1864,7 +1866,7 @@ object AggregateUtil {
   }
 
   private[flink] def containsTableAggregateFunction(
-      aggregates: Seq[UserDefinedAggregateFunction[_, _]])
+      aggregates: Seq[ImperativeAggregateFunction[_, _]])
     : Boolean = {
     aggregates.exists(_.isInstanceOf[TableAggregateFunction[_, _]])
   }


[flink] 04/06: [hotfix][table] Remove deprecated AggregateFunction.requiresOver()

Posted by tw...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 9aed3a0c2e7e2eed68bec52028ec95b8a17860ea
Author: Timo Walther <tw...@apache.org>
AuthorDate: Mon Jul 27 16:25:43 2020 +0200

    [hotfix][table] Remove deprecated AggregateFunction.requiresOver()
---
 .../flink/table/functions/AggregateFunction.java   | 27 -------------------
 .../runtime/utils/JavaUserDefinedAggFunctions.java | 11 +++++---
 .../utils/JavaUserDefinedScalarFunctions.java      | 31 ----------------------
 .../planner/plan/stream/table/OverWindowTest.xml   | 18 ++++++-------
 .../sql/validation/OverWindowValidationTest.scala  |  2 +-
 .../plan/batch/sql/agg/OverAggregateTest.scala     |  2 +-
 .../validation/OverWindowValidationTest.scala      |  2 +-
 .../plan/stream/sql/agg/OverAggregateTest.scala    |  2 +-
 .../validation/OverWindowValidationTest.scala      |  2 +-
 .../runtime/utils/JavaUserDefinedAggFunctions.java | 10 +++----
 10 files changed, 24 insertions(+), 83 deletions(-)

diff --git a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/AggregateFunction.java b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/AggregateFunction.java
index 13c2745..8abd44c3 100644
--- a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/AggregateFunction.java
+++ b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/AggregateFunction.java
@@ -23,10 +23,6 @@ import org.apache.flink.table.api.TableException;
 import org.apache.flink.table.catalog.DataTypeFactory;
 import org.apache.flink.table.types.inference.TypeInference;
 
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
-
 /**
  * Base class for user-defined aggregates.
  *
@@ -128,20 +124,6 @@ public abstract class AggregateFunction<T, ACC> extends ImperativeAggregateFunct
 	 */
 	public abstract T getValue(ACC accumulator);
 
-	/**
-	 * Returns <code>true</code> if this {@link AggregateFunction} can only be applied in an
-	 * OVER window.
-	 *
-	 * @return <code>true</code> if the {@link AggregateFunction} requires an OVER window,
-	 *         <code>false</code> otherwise.
-	 *
-	 * @deprecated Use {@link #getRequirements()} instead.
-	 */
-	@Deprecated
-	public boolean requiresOver() {
-		return false;
-	}
-
 	@Override
 	public final FunctionKind getKind() {
 		return FunctionKind.AGGREGATE;
@@ -151,13 +133,4 @@ public abstract class AggregateFunction<T, ACC> extends ImperativeAggregateFunct
 	public TypeInference getTypeInference(DataTypeFactory typeFactory) {
 		throw new TableException("Aggregate functions are not updated to the new type system yet.");
 	}
-
-	@Override
-	public Set<FunctionRequirement> getRequirements() {
-		final HashSet<FunctionRequirement> requirements = new HashSet<>();
-		if (requiresOver()) {
-			requirements.add(FunctionRequirement.OVER_WINDOW_ONLY);
-		}
-		return Collections.unmodifiableSet(requirements);
-	}
 }
diff --git a/flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/runtime/utils/JavaUserDefinedAggFunctions.java b/flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/runtime/utils/JavaUserDefinedAggFunctions.java
index de8a573..3d74f98 100644
--- a/flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/runtime/utils/JavaUserDefinedAggFunctions.java
+++ b/flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/runtime/utils/JavaUserDefinedAggFunctions.java
@@ -23,21 +23,24 @@ import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.table.api.dataview.ListView;
 import org.apache.flink.table.api.dataview.MapView;
 import org.apache.flink.table.functions.AggregateFunction;
+import org.apache.flink.table.functions.FunctionRequirement;
 
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.Map;
+import java.util.Set;
 
 /**
  * Test aggregator functions.
  */
 public class JavaUserDefinedAggFunctions {
 	/**
-	 * Accumulator for test requiresOver.
+	 * Accumulator for test {@link FunctionRequirement#OVER_WINDOW_ONLY}.
  	 */
 	public static class Accumulator0 extends Tuple2<Long, Integer>{}
 
 	/**
-	 * Test for requiresOver.
+	 * Test for {@link FunctionRequirement#OVER_WINDOW_ONLY}.
 	 */
 	public static class OverAgg0 extends AggregateFunction<Long, Accumulator0> {
 		@Override
@@ -55,8 +58,8 @@ public class JavaUserDefinedAggFunctions {
 		}
 
 		@Override
-		public boolean requiresOver() {
-			return true;
+		public Set<FunctionRequirement> getRequirements() {
+			return Collections.singleton(FunctionRequirement.OVER_WINDOW_ONLY);
 		}
 	}
 
diff --git a/flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/runtime/utils/JavaUserDefinedScalarFunctions.java b/flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/runtime/utils/JavaUserDefinedScalarFunctions.java
index 1c86fc4..e1a0640 100644
--- a/flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/runtime/utils/JavaUserDefinedScalarFunctions.java
+++ b/flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/runtime/utils/JavaUserDefinedScalarFunctions.java
@@ -20,10 +20,8 @@ package org.apache.flink.table.planner.runtime.utils;
 
 import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
-import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.table.annotation.DataTypeHint;
 import org.apache.flink.table.data.TimestampData;
-import org.apache.flink.table.functions.AggregateFunction;
 import org.apache.flink.table.functions.FunctionContext;
 import org.apache.flink.table.functions.ScalarFunction;
 import org.apache.flink.table.functions.python.PythonEnv;
@@ -43,35 +41,6 @@ import static org.junit.Assert.fail;
 public class JavaUserDefinedScalarFunctions {
 
 	/**
-	 * Accumulator for test requiresOver.
-	 */
-	public static class AccumulatorOver extends Tuple2<Long, Integer> {}
-
-	/**
-	 * Test for requiresOver.
-	 */
-	public static class OverAgg0 extends AggregateFunction<Long, AccumulatorOver> {
-		@Override
-		public AccumulatorOver createAccumulator() {
-			return new AccumulatorOver();
-		}
-
-		@Override
-		public Long getValue(AccumulatorOver accumulator) {
-			return 1L;
-		}
-
-		//Overloaded accumulate method
-		public void accumulate(AccumulatorOver accumulator, long iValue, int iWeight) {
-		}
-
-		@Override
-		public boolean requiresOver() {
-			return true;
-		}
-	}
-
-	/**
 	 * Increment input.
 	 */
 	public static class JavaFunc0 extends ScalarFunction {
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/OverWindowTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/OverWindowTest.xml
index 6a903ff..844e238 100644
--- a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/OverWindowTest.xml
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/OverWindowTest.xml
@@ -53,7 +53,7 @@ Calc(select=[c, w0$o0 AS _c1])
   <TestCase name="testProcTimeBoundedPartitionedRangeOver">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(a=[$0], myAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$d1aa8f47e869d02a75edf3bb0ed00059($2, $0) OVER (PARTITION BY $0 ORDER BY $3 NULLS FIRST RANGE BETWEEN 7200000 PRECEDING AND CURRENT ROW), _UTF-16LE'myAvg')])
+LogicalProject(a=[$0], myAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$4060051ba256b2d83fccd580b20a09be($2, $0) OVER (PARTITION BY $0 ORDER BY $3 NULLS FIRST RANGE BETWEEN 7200000 PRECEDING AND CURRENT ROW), _UTF-16LE'myAvg')])
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
@@ -70,7 +70,7 @@ Calc(select=[a, w0$o0 AS myAvg])
   <TestCase name="testProcTimeBoundedPartitionedRowsOver">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(c=[$2], _c1=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$d1aa8f47e869d02a75edf3bb0ed00059($2, $0) OVER (PARTITION BY $1 ORDER BY $3 NULLS FIRST ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')])
+LogicalProject(c=[$2], _c1=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$4060051ba256b2d83fccd580b20a09be($2, $0) OVER (PARTITION BY $1 ORDER BY $3 NULLS FIRST ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')])
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
@@ -121,7 +121,7 @@ Calc(select=[c, w0$o0 AS _c1])
   <TestCase name="testProcTimeUnboundedPartitionedRangeOver">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(a=[$0], c=[$2], _c2=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c2')], _c3=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$d1aa8f47e869d02a75edf3bb0ed00059($2, $0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c3')])
+LogicalProject(a=[$0], c=[$2], _c2=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c2')], _c3=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$4060051ba256b2d83fccd580b20a09be($2, $0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c3')])
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
@@ -138,7 +138,7 @@ Calc(select=[a, c, w0$o0 AS _c2, w0$o1 AS _c3])
   <TestCase name="testProcTimeUnboundedPartitionedRowsOver">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(c=[$2], _c1=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], _c2=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$d1aa8f47e869d02a75edf3bb0ed00059($2, $0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c2')])
+LogicalProject(c=[$2], _c1=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], _c2=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$4060051ba256b2d83fccd580b20a09be($2, $0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c2')])
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
@@ -172,7 +172,7 @@ Calc(select=[a, w0$o0 AS _c1])
   <TestCase name="testRowTimeBoundedPartitionedRangeOver">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(a=[$0], _c1=[AS(AVG($2) OVER (PARTITION BY $0 ORDER BY $4 NULLS FIRST RANGE BETWEEN 7200000 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], wAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$d1aa8f47e869d02a75edf3bb0ed00059($2, $0) OVER (PARTITION BY $0 ORDER BY $4 NULLS FIRST RANGE BETWEEN 7200000 PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
+LogicalProject(a=[$0], _c1=[AS(AVG($2) OVER (PARTITION BY $0 ORDER BY $4 NULLS FIRST RANGE BETWEEN 7200000 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], wAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$4060051ba256b2d83fccd580b20a09be($2, $0) OVER (PARTITION BY $0 ORDER BY $4 NULLS FIRST RANGE BETWEEN 7200000 PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
@@ -206,7 +206,7 @@ Calc(select=[c, w0$o0 AS _c1])
   <TestCase name="testRowTimeBoundedPartitionedRowsOver">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(c=[$2], _c1=[AS(COUNT($1) OVER (PARTITION BY $1 ORDER BY $4 NULLS FIRST ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], wAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$d1aa8f47e869d02a75edf3bb0ed00059($2, $0) OVER (PARTITION BY $1 ORDER BY $4 NULLS FIRST ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
+LogicalProject(c=[$2], _c1=[AS(COUNT($1) OVER (PARTITION BY $1 ORDER BY $4 NULLS FIRST ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], wAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$4060051ba256b2d83fccd580b20a09be($2, $0) OVER (PARTITION BY $1 ORDER BY $4 NULLS FIRST ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
@@ -257,7 +257,7 @@ Calc(select=[c, w0$o0 AS _c1])
   <TestCase name="testRowTimeUnboundedPartitionedRowsOver">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(c=[$2], _c1=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], wAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$d1aa8f47e869d02a75edf3bb0ed00059($2, $0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
+LogicalProject(c=[$2], _c1=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], wAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$4060051ba256b2d83fccd580b20a09be($2, $0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
@@ -274,7 +274,7 @@ Calc(select=[c, w0$o0 AS _c1, w0$o1 AS wAvg])
   <TestCase name="testRowTimeUnboundedPartitionedRangeOver">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(a=[$0], c=[$2], _c2=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c2')], wAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$d1aa8f47e869d02a75edf3bb0ed00059($2, $0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
+LogicalProject(a=[$0], c=[$2], _c2=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c2')], wAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$4060051ba256b2d83fccd580b20a09be($2, $0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
@@ -291,7 +291,7 @@ Calc(select=[a, c, w0$o0 AS _c2, w0$o1 AS wAvg])
   <TestCase name="testScalarFunctionsOnOverWindow">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(d=[AS(org$apache$flink$table$planner$expressions$utils$Func1$$879c8537562dbe74f3349fa0e6502755(AS(SUM($0) OVER (PARTITION BY $1 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wsum')), _UTF-16LE'd')], _c1=[AS(EXP(COUNT($0) OVER (PARTITION BY $1 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)), _UTF-16LE'_c1')], _c2=[AS(+(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetrac [...]
+LogicalProject(d=[AS(org$apache$flink$table$planner$expressions$utils$Func1$$879c8537562dbe74f3349fa0e6502755(AS(SUM($0) OVER (PARTITION BY $1 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wsum')), _UTF-16LE'd')], _c1=[AS(EXP(COUNT($0) OVER (PARTITION BY $1 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)), _UTF-16LE'_c1')], _c2=[AS(+(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetrac [...]
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/stream/sql/validation/OverWindowValidationTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/stream/sql/validation/OverWindowValidationTest.scala
index fc68e5a..3da22d6 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/stream/sql/validation/OverWindowValidationTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/stream/sql/validation/OverWindowValidationTest.scala
@@ -21,7 +21,7 @@ package org.apache.flink.table.api.stream.sql.validation
 import org.apache.flink.api.scala._
 import org.apache.flink.table.api._
 import org.apache.flink.table.api.bridge.scala._
-import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions.OverAgg0
+import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedAggFunctions.OverAgg0
 import org.apache.flink.table.planner.utils.TableTestBase
 import org.apache.flink.types.Row
 
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/agg/OverAggregateTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/agg/OverAggregateTest.scala
index 6e10685..b5632de 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/agg/OverAggregateTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/agg/OverAggregateTest.scala
@@ -20,7 +20,7 @@ package org.apache.flink.table.planner.plan.batch.sql.agg
 
 import org.apache.flink.api.scala._
 import org.apache.flink.table.api._
-import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions.OverAgg0
+import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedAggFunctions.OverAgg0
 import org.apache.flink.table.planner.utils.TableTestBase
 
 import org.junit.Test
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/table/validation/OverWindowValidationTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/table/validation/OverWindowValidationTest.scala
index a2a6741..374c1ac 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/table/validation/OverWindowValidationTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/table/validation/OverWindowValidationTest.scala
@@ -20,7 +20,7 @@ package org.apache.flink.table.planner.plan.batch.table.validation
 
 import org.apache.flink.api.scala._
 import org.apache.flink.table.api.{Tumble, ValidationException, _}
-import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions.OverAgg0
+import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedAggFunctions.OverAgg0
 import org.apache.flink.table.planner.utils.TableTestBase
 
 import org.junit._
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/agg/OverAggregateTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/agg/OverAggregateTest.scala
index 89a3316..d241fa2 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/agg/OverAggregateTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/agg/OverAggregateTest.scala
@@ -21,7 +21,7 @@ package org.apache.flink.table.planner.plan.stream.sql.agg
 import org.apache.flink.api.scala._
 import org.apache.flink.table.api._
 import org.apache.flink.table.planner.plan.utils.FlinkRelOptUtil
-import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions.OverAgg0
+import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedAggFunctions.OverAgg0
 import org.apache.flink.table.planner.utils.{TableTestBase, TableTestUtil}
 
 import org.junit.Assert.assertEquals
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/OverWindowValidationTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/OverWindowValidationTest.scala
index 6905cb0..b9f8995 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/OverWindowValidationTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/OverWindowValidationTest.scala
@@ -23,7 +23,7 @@ import org.apache.flink.table.api._
 import org.apache.flink.table.api.internal.TableEnvironmentImpl
 import org.apache.flink.table.planner.delegation.PlannerBase
 import org.apache.flink.table.planner.plan.utils.JavaUserDefinedAggFunctions.WeightedAvgWithRetract
-import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions.OverAgg0
+import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedAggFunctions.OverAgg0
 import org.apache.flink.table.planner.utils.{StreamTableTestUtil, TableTestBase, TableTestUtil}
 
 import org.apache.calcite.rel.RelNode
diff --git a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/runtime/utils/JavaUserDefinedAggFunctions.java b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/runtime/utils/JavaUserDefinedAggFunctions.java
index d22fc18..1f059a9 100644
--- a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/runtime/utils/JavaUserDefinedAggFunctions.java
+++ b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/runtime/utils/JavaUserDefinedAggFunctions.java
@@ -23,6 +23,7 @@ import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.table.api.dataview.ListView;
 import org.apache.flink.table.api.dataview.MapView;
 import org.apache.flink.table.functions.AggregateFunction;
+import org.apache.flink.table.functions.FunctionRequirement;
 
 import java.util.Iterator;
 import java.util.Map;
@@ -32,12 +33,12 @@ import java.util.Map;
  */
 public class JavaUserDefinedAggFunctions {
 	/**
-	 * Accumulator for test requiresOver.
+	 * Accumulator for test {@link FunctionRequirement#OVER_WINDOW_ONLY}.
  	 */
 	public static class Accumulator0 extends Tuple2<Long, Integer>{}
 
 	/**
-	 * Test for requiresOver.
+	 * Test for {@link FunctionRequirement#OVER_WINDOW_ONLY}.
 	 */
 	public static class OverAgg0 extends AggregateFunction<Long, Accumulator0> {
 		@Override
@@ -53,11 +54,6 @@ public class JavaUserDefinedAggFunctions {
 		//Overloaded accumulate method
 		public void accumulate(Accumulator0 accumulator, long iValue, int iWeight) {
 		}
-
-		@Override
-		public boolean requiresOver() {
-			return true;
-		}
 	}
 
 	/**


[flink] 02/06: [hotfix][table-api-java] Remove @Experimental annotation for function methods in TableEnvironment

Posted by tw...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 9eda9f39d81f371e8da12726d4a68a96860bd852
Author: Timo Walther <tw...@apache.org>
AuthorDate: Wed Jul 29 09:43:08 2020 +0200

    [hotfix][table-api-java] Remove @Experimental annotation for function methods in TableEnvironment
---
 .../main/java/org/apache/flink/table/api/TableEnvironment.java | 10 ----------
 1 file changed, 10 deletions(-)

diff --git a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableEnvironment.java b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableEnvironment.java
index e6779b7..d5fa61f 100644
--- a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableEnvironment.java
+++ b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableEnvironment.java
@@ -18,7 +18,6 @@
 
 package org.apache.flink.table.api;
 
-import org.apache.flink.annotation.Experimental;
 import org.apache.flink.annotation.PublicEvolving;
 import org.apache.flink.api.common.JobExecutionResult;
 import org.apache.flink.table.api.internal.TableEnvironmentImpl;
@@ -389,7 +388,6 @@ public interface TableEnvironment {
 	 * @param name The name under which the function will be registered globally.
 	 * @param functionClass The function class containing the implementation.
 	 */
-	@Experimental
 	void createTemporarySystemFunction(String name, Class<? extends UserDefinedFunction> functionClass);
 
 	/**
@@ -410,7 +408,6 @@ public interface TableEnvironment {
 	 * @param name The name under which the function will be registered globally.
 	 * @param functionInstance The (possibly pre-configured) function instance containing the implementation.
 	 */
-	@Experimental
 	void createTemporarySystemFunction(String name, UserDefinedFunction functionInstance);
 
 	/**
@@ -422,7 +419,6 @@ public interface TableEnvironment {
 	 * @param name The name under which the function has been registered globally.
 	 * @return true if a function existed under the given name and was removed
 	 */
-	@Experimental
 	boolean dropTemporarySystemFunction(String name);
 
 	/**
@@ -437,7 +433,6 @@ public interface TableEnvironment {
 	 *             See also the {@link TableEnvironment} class description for the format of the path.
 	 * @param functionClass The function class containing the implementation.
 	 */
-	@Experimental
 	void createFunction(String path, Class<? extends UserDefinedFunction> functionClass);
 
 	/**
@@ -452,7 +447,6 @@ public interface TableEnvironment {
 	 * @param ignoreIfExists If a function exists under the given path and this flag is set, no operation
 	 *                       is executed. An exception is thrown otherwise.
 	 */
-	@Experimental
 	void createFunction(String path, Class<? extends UserDefinedFunction> functionClass, boolean ignoreIfExists);
 
 	/**
@@ -462,7 +456,6 @@ public interface TableEnvironment {
 	 *             See also the {@link TableEnvironment} class description for the format of the path.
 	 * @return true if a function existed in the given path and was removed
 	 */
-	@Experimental
 	boolean dropFunction(String path);
 
 	/**
@@ -479,7 +472,6 @@ public interface TableEnvironment {
 	 *             See also the {@link TableEnvironment} class description for the format of the path.
 	 * @param functionClass The function class containing the implementation.
 	 */
-	@Experimental
 	void createTemporaryFunction(String path, Class<? extends UserDefinedFunction> functionClass);
 
 	/**
@@ -500,7 +492,6 @@ public interface TableEnvironment {
 	 *             See also the {@link TableEnvironment} class description for the format of the path.
 	 * @param functionInstance The (possibly pre-configured) function instance containing the implementation.
 	 */
-	@Experimental
 	void createTemporaryFunction(String path, UserDefinedFunction functionInstance);
 
 	/**
@@ -513,7 +504,6 @@ public interface TableEnvironment {
 	 *             See also the {@link TableEnvironment} class description for the format of the path.
 	 * @return true if a function existed in the given path and was removed
 	 */
-	@Experimental
 	boolean dropTemporaryFunction(String path);
 
 	/**


[flink] 06/06: [FLINK-15803][table] Update AggregateFunction and TableAggregateFunction to the new type system

Posted by tw...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit b7e1574164209cc6deee4304b23854f3bc24825c
Author: Timo Walther <tw...@apache.org>
AuthorDate: Fri Jul 24 16:51:29 2020 +0200

    [FLINK-15803][table] Update AggregateFunction and TableAggregateFunction to the new type system
    
    This updates imperative aggregate functions AggregateFunction and TableAggregateFunction
    to the new type system with new type inference. The new stack is activated when using
    TableEnvironment.createTemporarySystemFunction or call(Class, ...) in Table API. Other
    locations (SQL DDL, Table API Scala implicits) will be updated once we support the
    concept of DataView as well. Currently, DataViews are not supported.
    
    This closes #13007.
---
 .../api/bridge/java/StreamTableEnvironment.java    |  12 ++
 .../flink/table/catalog/FunctionCatalog.java       |   8 +
 .../utils/AggregateOperationFactory.java           |  60 ++++---
 .../flink/table/typeutils/FieldInfoUtils.java      |   8 +-
 .../flink/table/functions/AggregateFunction.java   | 150 +++++++++++++----
 .../functions/ImperativeAggregateFunction.java     |  34 +++-
 .../flink/table/functions/ScalarFunction.java      |   2 +-
 .../table/functions/TableAggregateFunction.java    |  82 +++++----
 .../flink/table/functions/TableFunction.java       |  14 +-
 .../types/logical/utils/LogicalTypeChecks.java     |  11 +-
 .../types/logical/utils/LogicalTypeUtils.java      |  47 ++++++
 .../flink/table/types/utils/DataTypeUtils.java     |  31 ++++
 .../flink/table/types/utils/DataTypeUtilsTest.java |  28 ++++
 .../catalog/FunctionCatalogOperatorTable.java      |  19 ++-
 .../planner/expressions/SqlAggFunctionVisitor.java |  99 ++++++++---
 .../expressions/converter/OverConvertRule.java     |   2 +-
 .../planner/plan/QueryOperationConverter.java      |   4 +-
 .../flink/table/planner/utils/ShortcutUtils.java   |   4 +
 .../table/planner/codegen/ExprCodeGenerator.scala  |  21 ++-
 .../codegen/agg/AggsHandlerCodeGenerator.scala     |   5 +-
 .../codegen/calls/TableFunctionCallGen.scala       |   3 +-
 .../planner/functions/utils/AggSqlFunction.scala   |   4 +
 .../plan/nodes/calcite/TableAggregate.scala        |  17 +-
 .../planner/plan/utils/AggFunctionFactory.scala    |   6 +
 .../table/planner/plan/utils/AggregateUtil.scala   | 183 ++++++++++++++++++---
 .../plan/utils/JavaUserDefinedAggFunctions.java    |   6 +-
 .../runtime/utils/JavaUserDefinedAggFunctions.java |  82 +++++++++
 .../planner/plan/batch/table/GroupWindowTest.xml   |  14 +-
 .../plan/stream/sql/agg/WindowAggregateTest.xml    | 160 +++++++++---------
 .../planner/plan/stream/table/AggregateTest.xml    |  10 +-
 .../plan/stream/table/ColumnFunctionsTest.xml      |   7 +-
 .../planner/plan/stream/table/GroupWindowTest.xml  |  90 +++++-----
 .../planner/plan/stream/table/OverWindowTest.xml   |  54 +++---
 .../plan/stream/table/TableAggregateTest.xml       |   8 +-
 .../planner/plan/batch/table/GroupWindowTest.scala |   4 +-
 .../stringexpr/AggregateStringExpressionTest.scala |  35 ++--
 .../plan/stream/sql/ModifiedMonotonicityTest.scala |   2 +-
 .../plan/stream/sql/agg/WindowAggregateTest.scala  |   2 +-
 .../planner/plan/stream/table/AggregateTest.scala  |   2 +-
 .../plan/stream/table/ColumnFunctionsTest.scala    |   2 +-
 .../plan/stream/table/GroupWindowTest.scala        |   8 +-
 .../planner/plan/stream/table/OverWindowTest.scala |  26 +--
 .../plan/stream/table/TableAggregateTest.scala     |   4 +-
 .../stringexpr/AggregateStringExpressionTest.scala |  28 ++--
 .../GroupWindowStringExpressionTest.scala          |  42 ++---
 ...pWindowTableAggregateStringExpressionTest.scala |  30 ++--
 .../OverWindowStringExpressionTest.scala           |  30 ++--
 .../TableAggregateStringExpressionTest.scala       |  10 +-
 .../GroupWindowTableAggregateValidationTest.scala  |   5 +-
 .../validation/GroupWindowValidationTest.scala     |  18 +-
 .../validation/TableAggregateValidationTest.scala  |  27 ++-
 .../runtime/batch/sql/agg/SortAggITCase.scala      |  51 ++----
 .../runtime/batch/table/AggregationITCase.scala    |   8 +-
 .../runtime/stream/sql/AggregateITCase.scala       |  52 +++++-
 .../runtime/stream/sql/MatchRecognizeITCase.scala  |   2 +-
 .../runtime/stream/sql/WindowAggregateITCase.scala |   2 +-
 .../table/GroupWindowTableAggregateITCase.scala    |   2 +-
 .../runtime/stream/table/OverWindowITCase.scala    |   4 +-
 .../stream/table/TableAggregateITCase.scala        |  52 +++++-
 .../planner/runtime/utils/BatchTestBase.scala      |  21 ++-
 .../utils/UserDefinedFunctionTestUtils.scala       |  11 +-
 .../table/planner/utils/CountAggFunction.scala     |   2 +-
 .../flink/table/planner/utils/TableTestBase.scala  |  15 +-
 .../utils/UserDefinedTableAggFunctions.scala       |  33 ++--
 .../runtime/utils/JavaUserDefinedAggFunctions.java |   7 +
 .../table/api/stream/table/AggregateTest.scala     |   5 +-
 .../table/runtime/types/PlannerTypeUtils.java      |   8 -
 67 files changed, 1232 insertions(+), 603 deletions(-)

diff --git a/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/api/bridge/java/StreamTableEnvironment.java b/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/api/bridge/java/StreamTableEnvironment.java
index a287c9d..aad80dc 100644
--- a/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/api/bridge/java/StreamTableEnvironment.java
+++ b/flink-table/flink-table-api-java-bridge/src/main/java/org/apache/flink/table/api/bridge/java/StreamTableEnvironment.java
@@ -170,7 +170,13 @@ public interface StreamTableEnvironment extends TableEnvironment {
 	 * @param aggregateFunction The AggregateFunction to register.
 	 * @param <T> The type of the output value.
 	 * @param <ACC> The type of aggregate accumulator.
+	 *
+	 * @deprecated Use {@link #createTemporarySystemFunction(String, UserDefinedFunction)} instead. Please
+	 *             note that the new method also uses the new type system and reflective extraction logic. It
+	 *             might be necessary to update the function implementation as well. See the documentation of
+	 *             {@link AggregateFunction} for more information on the new function design.
 	 */
+	@Deprecated
 	<T, ACC> void registerFunction(String name, AggregateFunction<T, ACC> aggregateFunction);
 
 	/**
@@ -181,7 +187,13 @@ public interface StreamTableEnvironment extends TableEnvironment {
 	 * @param tableAggregateFunction The TableAggregateFunction to register.
 	 * @param <T> The type of the output value.
 	 * @param <ACC> The type of aggregate accumulator.
+	 *
+	 * @deprecated Use {@link #createTemporarySystemFunction(String, UserDefinedFunction)} instead. Please
+	 *             note that the new method also uses the new type system and reflective extraction logic. It
+	 *             might be necessary to update the function implementation as well. See the documentation of
+	 *             {@link TableAggregateFunction} for more information on the new function design.
 	 */
+	@Deprecated
 	<T, ACC> void registerFunction(String name, TableAggregateFunction<T, ACC> tableAggregateFunction);
 
 	/**
diff --git a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java
index d0843d4..5338c83 100644
--- a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java
+++ b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/catalog/FunctionCatalog.java
@@ -407,6 +407,10 @@ public final class FunctionCatalog {
 		);
 	}
 
+	/**
+	 * @deprecated Use {@link #registerTemporarySystemFunction(String, FunctionDefinition, boolean)} instead.
+	 */
+	@Deprecated
 	public <T, ACC> void registerTempSystemAggregateFunction(
 			String name,
 			ImperativeAggregateFunction<T, ACC> function,
@@ -437,6 +441,10 @@ public final class FunctionCatalog {
 		);
 	}
 
+	/**
+	 * @deprecated Use {@link #registerTemporaryCatalogFunction(UnresolvedIdentifier, FunctionDefinition, boolean)} instead.
+	 */
+	@Deprecated
 	public void registerTempCatalogScalarFunction(ObjectIdentifier oi, ScalarFunction function) {
 		UserDefinedFunctionHelper.prepareInstance(config, function);
 
diff --git a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/AggregateOperationFactory.java b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/AggregateOperationFactory.java
index f4d6a3c..548a7c3 100644
--- a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/AggregateOperationFactory.java
+++ b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/AggregateOperationFactory.java
@@ -22,7 +22,6 @@ import org.apache.flink.annotation.Internal;
 import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
 import org.apache.flink.api.common.typeinfo.SqlTimeTypeInfo;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
-import org.apache.flink.api.common.typeutils.CompositeType;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.table.api.GroupWindow;
 import org.apache.flink.table.api.SessionWithGapOnTimeWithAlias;
@@ -31,7 +30,6 @@ import org.apache.flink.table.api.TableException;
 import org.apache.flink.table.api.TableSchema;
 import org.apache.flink.table.api.TumbleWithSizeOnTimeWithAlias;
 import org.apache.flink.table.api.ValidationException;
-import org.apache.flink.table.expressions.ApiExpressionUtils;
 import org.apache.flink.table.expressions.CallExpression;
 import org.apache.flink.table.expressions.Expression;
 import org.apache.flink.table.expressions.ExpressionUtils;
@@ -44,7 +42,6 @@ import org.apache.flink.table.expressions.utils.ResolvedExpressionDefaultVisitor
 import org.apache.flink.table.functions.BuiltInFunctionDefinitions;
 import org.apache.flink.table.functions.FunctionDefinition;
 import org.apache.flink.table.functions.FunctionRequirement;
-import org.apache.flink.table.functions.TableAggregateFunctionDefinition;
 import org.apache.flink.table.operations.AggregateQueryOperation;
 import org.apache.flink.table.operations.QueryOperation;
 import org.apache.flink.table.operations.WindowAggregateQueryOperation;
@@ -56,6 +53,7 @@ import org.apache.flink.table.types.logical.LogicalTypeRoot;
 import org.apache.flink.table.types.logical.StructuredType;
 import org.apache.flink.table.types.logical.utils.LogicalTypeChecks;
 import org.apache.flink.table.types.logical.utils.LogicalTypeDefaultVisitor;
+import org.apache.flink.table.types.utils.DataTypeUtils;
 import org.apache.flink.table.types.utils.TypeConversions;
 import org.apache.flink.table.typeutils.FieldInfoUtils;
 import org.apache.flink.table.typeutils.TimeIndicatorTypeInfo;
@@ -79,6 +77,7 @@ import static org.apache.flink.table.operations.utils.OperationExpressionsUtils.
 import static org.apache.flink.table.types.logical.LogicalTypeRoot.BIGINT;
 import static org.apache.flink.table.types.logical.LogicalTypeRoot.INTERVAL_DAY_TIME;
 import static org.apache.flink.table.types.logical.LogicalTypeRoot.TIMESTAMP_WITHOUT_TIME_ZONE;
+import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getFieldCount;
 import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.hasRoot;
 import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.isRowtimeAttribute;
 import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.isTimeAttribute;
@@ -116,7 +115,7 @@ final class AggregateOperationFactory {
 
 		DataType[] fieldTypes = Stream.concat(
 			groupings.stream().map(ResolvedExpression::getOutputDataType),
-			aggregates.stream().flatMap(this::extractAggregateResultTypes)
+			aggregates.stream().flatMap(this::extractAggregateResultDataTypes)
 		).toArray(DataType[]::new);
 
 		String[] groupNames = groupings.stream()
@@ -153,7 +152,7 @@ final class AggregateOperationFactory {
 
 		DataType[] fieldTypes = concat(
 			groupings.stream().map(ResolvedExpression::getOutputDataType),
-			aggregates.stream().flatMap(this::extractAggregateResultTypes),
+			aggregates.stream().flatMap(this::extractAggregateResultDataTypes),
 			windowProperties.stream().map(ResolvedExpression::getOutputDataType)
 		).toArray(DataType[]::new);
 
@@ -180,11 +179,17 @@ final class AggregateOperationFactory {
 	 * Extract result types for the aggregate or the table aggregate expression. For a table aggregate,
 	 * it may return multi result types when the composite return type is flattened.
 	 */
-	private Stream<DataType> extractAggregateResultTypes(ResolvedExpression expression) {
-		if (ApiExpressionUtils.isFunctionOfKind(expression, TABLE_AGGREGATE)) {
-			TypeInformation<?> legacyInfo = TypeConversions.fromDataTypeToLegacyInfo(expression.getOutputDataType());
-			return Stream.of(FieldInfoUtils.getFieldTypes(legacyInfo))
-				.map(TypeConversions::fromLegacyInfoToDataType);
+	private Stream<DataType> extractAggregateResultDataTypes(ResolvedExpression expression) {
+		if (isFunctionOfKind(expression, TABLE_AGGREGATE)) {
+			final DataType outputDataType = expression.getOutputDataType();
+			final LogicalType outputType = expression.getOutputDataType().getLogicalType();
+			// legacy
+			if (outputType instanceof LegacyTypeInformationType) {
+				final TypeInformation<?> legacyInfo = TypeConversions.fromDataTypeToLegacyInfo(expression.getOutputDataType());
+				return Stream.of(FieldInfoUtils.getFieldTypes(legacyInfo))
+					.map(TypeConversions::fromLegacyInfoToDataType);
+			}
+			return DataTypeUtils.flattenToDataTypes(outputDataType).stream();
 		} else {
 			return Stream.of(expression.getOutputDataType());
 		}
@@ -193,13 +198,18 @@ final class AggregateOperationFactory {
 	/**
 	 * Extract names for the aggregate or the table aggregate expression. For a table aggregate, it
 	 * may return multi output names when the composite return type is flattened. If the result type
-	 * is not a {@link CompositeType}, the result name should not conflict with the group names.
+	 * is not a composite type, the result name should not conflict with the group names.
 	 */
-	private Stream<String> extractAggregateNames(Expression expression, List<String> groupNames) {
+	private Stream<String> extractAggregateNames(ResolvedExpression expression, List<String> groupNames) {
 		if (isFunctionOfKind(expression, TABLE_AGGREGATE)) {
-			final TableAggregateFunctionDefinition definition =
-				(TableAggregateFunctionDefinition) ((CallExpression) expression).getFunctionDefinition();
-			return Arrays.stream(FieldInfoUtils.getFieldNames(definition.getResultTypeInfo(), groupNames));
+			final DataType outputDataType = expression.getOutputDataType();
+			final LogicalType outputType = expression.getOutputDataType().getLogicalType();
+			// legacy
+			if (outputType instanceof LegacyTypeInformationType) {
+				final TypeInformation<?> legacyInfo = TypeConversions.fromDataTypeToLegacyInfo(expression.getOutputDataType());
+				return Arrays.stream(FieldInfoUtils.getFieldNames(legacyInfo, groupNames));
+			}
+			return DataTypeUtils.flattenToNames(outputDataType, groupNames).stream();
 		} else {
 			return Stream.of(extractName(expression).orElseGet(expression::toString));
 		}
@@ -554,28 +564,24 @@ final class AggregateOperationFactory {
 				throw fail();
 			}
 
-			validateAlias(
-				aliases,
-				(TableAggregateFunctionDefinition) ((CallExpression) children.get(0)).getFunctionDefinition());
+			validateAlias(aliases, (CallExpression) children.get(0));
+
 			alias = aliases;
+
 			return children.get(0);
 		}
 
-		private void validateAlias(
-			List<String> aliases,
-			TableAggregateFunctionDefinition aggFunctionDefinition) {
-
-			TypeInformation<?> resultType = aggFunctionDefinition.getResultTypeInfo();
-
-			int callArity = resultType.getTotalFields();
-			int aliasesSize = aliases.size();
+		private void validateAlias(List<String> aliases, CallExpression call) {
+			final int aliasesSize = aliases.size();
+			final LogicalType outputType = call.getOutputDataType().getLogicalType();
+			final int callArity = getFieldCount(outputType);
 
 			if (aliasesSize > 0 && aliasesSize != callArity) {
 				throw new ValidationException(String.format(
 					"List of column aliases must have same degree as table; " +
 						"the returned table of function '%s' has " +
 						"%d columns, whereas alias list has %d columns",
-					aggFunctionDefinition,
+					call.getFunctionName(),
 					callArity,
 					aliasesSize));
 			}
diff --git a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/typeutils/FieldInfoUtils.java b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/typeutils/FieldInfoUtils.java
index dae0e16..b01ba7e 100644
--- a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/typeutils/FieldInfoUtils.java
+++ b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/typeutils/FieldInfoUtils.java
@@ -66,6 +66,7 @@ import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.hasRo
 import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.isCompositeType;
 import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.isProctimeAttribute;
 import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.isRowtimeAttribute;
+import static org.apache.flink.table.types.logical.utils.LogicalTypeUtils.getAtomicName;
 import static org.apache.flink.table.types.utils.TypeConversions.fromDataTypeToLegacyInfo;
 import static org.apache.flink.table.types.utils.TypeConversions.fromLegacyInfoToDataType;
 
@@ -357,12 +358,7 @@ public class FieldInfoUtils {
 
 		// atomic in any case
 		if (fieldNames == null) {
-			int i = 0;
-			String fieldName = ATOMIC_FIELD_NAME;
-			while ((null != existingNames) && existingNames.contains(fieldName)) {
-				fieldName = ATOMIC_FIELD_NAME + "_" + i++;
-			}
-			fieldNames = Collections.singletonList(fieldName);
+			fieldNames = Collections.singletonList(getAtomicName(existingNames));
 		}
 
 		if (fieldNames.contains("*")) {
diff --git a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/AggregateFunction.java b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/AggregateFunction.java
index 8abd44c3..9abd4b6 100644
--- a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/AggregateFunction.java
+++ b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/AggregateFunction.java
@@ -19,41 +19,63 @@
 package org.apache.flink.table.functions;
 
 import org.apache.flink.annotation.PublicEvolving;
-import org.apache.flink.table.api.TableException;
+import org.apache.flink.table.annotation.DataTypeHint;
+import org.apache.flink.table.annotation.FunctionHint;
 import org.apache.flink.table.catalog.DataTypeFactory;
+import org.apache.flink.table.types.extraction.TypeInferenceExtractor;
 import org.apache.flink.table.types.inference.TypeInference;
 
 /**
- * Base class for user-defined aggregates.
+ * Base class for a user-defined aggregate function. A user-defined aggregate function maps scalar
+ * values of multiple rows to a new scalar value.
  *
- * <p>The behavior of an {@link AggregateFunction} can be defined by implementing a series of custom
- * methods. An {@link AggregateFunction} needs at least three methods:
+ * <p>The behavior of an {@link AggregateFunction} is centered around the concept of an accumulator.
+ * The accumulator is an intermediate data structure that stores the aggregated values until a final
+ * aggregation result is computed.
+ *
+ * <p>For each set of rows that needs to be aggregated, the runtime will create an empty accumulator
+ * by calling {@link #createAccumulator()}. Subsequently, the {@code accumulate()} method of the
+ * function is called for each input row to update the accumulator. Once all rows have been processed,
+ * the {@link #getValue(Object)} method of the function is called to compute and return the final result.
+ *
+ * <p>The main behavior of an {@link AggregateFunction} can be defined by implementing a custom accumulate
+ * method. An accumulate method must be declared publicly, not static, and named <code>accumulate</code>.
+ * Accumulate methods can also be overloaded by implementing multiple methods named <code>accumulate</code>.
+ *
+ * <p>By default, input, accumulator, and output data types are automatically extracted using reflection.
+ * This includes the generic argument {@code ACC} of the class for determining an accumulator data type and
+ * the generic argument {@code T} for determining an output data type. Input arguments are derived
+ * from one or more {@code accumulate()} methods. If the reflective information is not sufficient, it
+ * can be supported and enriched with {@link DataTypeHint} and {@link FunctionHint} annotations.
+ *
+ * <p>An {@link AggregateFunction} needs at least three methods:
  * <ul>
- *     <li>createAccumulator</li>
- *     <li>accumulate</li>
- *     <li>getValue</li>
+ *     <li>{@code createAccumulator}</li>
+ *     <li>{@code accumulate}</li>
+ *     <li>{@code getValue}</li>
  * </ul>
  *
- * <p>There are a few other methods that can be optional to have:
+ * <p>There are a few other methods that are optional:
  * <ul>
- *     <li>retract</li>
- *     <li>merge</li>
- *     <li>resetAccumulator</li>
+ *     <li>{@code retract}</li>
+ *     <li>{@code merge}</li>
+ *     <li>{@code resetAccumulator}</li>
  * </ul>
  *
  * <p>All these methods must be declared publicly, not static, and named exactly as the names
- * mentioned above. The method {@link #createAccumulator()} is defined in the
- * {@link ImperativeAggregateFunction} function, and method {@link #getValue} is defined in
- * the {@link AggregateFunction} while other methods are explained below.
+ * mentioned above to be called by generated code.
+ *
+ * <p>For storing a user-defined function in a catalog, the class must have a default constructor and
+ * must be instantiable during runtime.
  *
  * <pre>
  * {@code
- * Processes the input values and update the provided accumulator instance. The method
- * accumulate can be overloaded with different custom types and arguments. An AggregateFunction
+ * Processes the input values and updates the provided accumulator instance. The method
+ * accumulate can be overloaded with different custom types and arguments. An aggregate function
  * requires at least one accumulate() method.
  *
  * param: accumulator           the accumulator which contains the current aggregated results
- * param: [user defined inputs] the input value (usually obtained from a new arrived data).
+ * param: [user defined inputs] the input value (usually obtained from newly arrived data).
  *
  * public void accumulate(ACC accumulator, [user defined inputs])
  * }
@@ -63,11 +85,11 @@ import org.apache.flink.table.types.inference.TypeInference;
  * {@code
  * Retracts the input values from the accumulator instance. The current design assumes the
  * inputs are the values that have been previously accumulated. The method retract can be
- * overloaded with different custom types and arguments. This function must be implemented for
- * data stream bounded OVER aggregates.
+ * overloaded with different custom types and arguments. This method must be implemented for
+ * unbounded OVER aggregates.
  *
  * param: accumulator           the accumulator which contains the current aggregated results
- * param: [user defined inputs] the input value (usually obtained from a new arrived data).
+ * param: [user defined inputs] the input value (usually obtained from newly arrived data).
  *
  * public void retract(ACC accumulator, [user defined inputs])
  * }
@@ -75,8 +97,8 @@ import org.apache.flink.table.types.inference.TypeInference;
  *
  * <pre>
  * {@code
- * Merges a group of accumulator instances into one accumulator instance. This function must be
- * implemented for data stream session window grouping aggregates and data set grouping aggregates.
+ * Merges a group of accumulator instances into one accumulator instance. This method must be
+ * implemented for unbounded session window grouping aggregates and bounded grouping aggregates.
  *
  * param: accumulator the accumulator which will keep the merged aggregate results. It should
  *                    be noted that the accumulator may contain the previous aggregated
@@ -91,8 +113,8 @@ import org.apache.flink.table.types.inference.TypeInference;
  *
  * <pre>
  * {@code
- * Resets the accumulator for this AggregateFunction. This function must be implemented for
- * data set grouping aggregates.
+ * Resets the accumulator for this aggregate function. This method must be implemented for
+ * bounded grouping aggregates.
  *
  * param: accumulator the accumulator which needs to be reset
  *
@@ -103,11 +125,74 @@ import org.apache.flink.table.types.inference.TypeInference;
  * <p>If this aggregate function can only be applied in an OVER window, this can be declared using the
  * requirement {@link FunctionRequirement#OVER_WINDOW_ONLY} in {@link #getRequirements()}.
  *
- * @param <T>   the type of the aggregation result
- * @param <ACC> the type of the aggregation accumulator. The accumulator is used to keep the
- *              aggregated values which are needed to compute an aggregation result.
- *              AggregateFunction represents its state using accumulator, thereby the state of the
- *              AggregateFunction must be put into the accumulator.
+ * <p>The following examples show how to specify an aggregate function:
+ *
+ * <pre>
+ * {@code
+ *   // a function that sums INT arguments that are not null and emits the result as STRING
+ *   // the accumulator is BIGINT
+ *   public static class CountFunction extends AggregateFunction<String, CountFunction.MyAccumulator> {
+ *     public static class MyAccumulator {
+ *       public long count = 0L;
+ *     }
+ *
+ *     {@literal @}Override
+ *     public MyAccumulator createAccumulator() {
+ *       return new MyAccumulator();
+ *     }
+ *
+ *     public void accumulate(MyAccumulator accumulator, Integer i) {
+ *       if (i != null) {
+ *         accumulator.count += i;
+ *       }
+ *     }
+ *
+ *     {@literal @}Override
+ *     public String getValue(MyAccumulator accumulator) {
+ *       return "Result: " + accumulator.count;
+ *     }
+ *   }
+ *
+ *   // a function that determines the maximum of either BIGINT or STRING arguments
+ *   // the accumulator and the output is either BIGINT or STRING
+ *   public static class MaxFunction extends AggregateFunction<Object, Row> {
+ *     {@literal @}Override
+ *     public Row createAccumulator() {
+ *       return new Row(1);
+ *     }
+ *
+ *     {@literal @}FunctionHint(
+ *       accumulator = {@literal @}DataTypeHint("ROW<max BIGINT>"),
+ *       output = {@literal @}DataTypeHint("BIGINT")
+ *     )
+ *     public void accumulate(Row accumulator, Long l) {
+ *       final Long max = (Long) accumulator.getField(0);
+ *       if (max == null || l > max) {
+ *         accumulator.setField(0, l);
+ *       }
+ *     }
+ *
+ *     {@literal @}FunctionHint(
+ *       accumulator = {@literal @}DataTypeHint("ROW<max STRING>"),
+ *       output = {@literal @}DataTypeHint("STRING")
+ *     )
+ *     public void accumulate(Row accumulator, String s) {
+ *       final String max = (String) accumulator.getField(0);
+ *       if (max == null || s.compareTo(max) > 0) {
+ *         accumulator.setField(0, s);
+ *       }
+ *     }
+ *
+ *     {@literal @}Override
+ *     public Object getValue(Row accumulator) {
+ *       return accumulator.getField(0);
+ *     }
+ *   }
+ * }
+ * </pre>
+ *
+ * @param <T> final result type of the aggregation
+ * @param <ACC> intermediate result type during the aggregation
  */
 @PublicEvolving
 public abstract class AggregateFunction<T, ACC> extends ImperativeAggregateFunction<T, ACC> {
@@ -115,11 +200,11 @@ public abstract class AggregateFunction<T, ACC> extends ImperativeAggregateFunct
 	/**
 	 * Called every time when an aggregation result should be materialized.
 	 * The returned value could be either an early and incomplete result
-	 * (periodically emitted as data arrive) or the final result of the
+	 * (periodically emitted as data arrives) or the final result of the
 	 * aggregation.
 	 *
 	 * @param accumulator the accumulator which contains the current
-	 *                    aggregated results
+	 *                    intermediate results
 	 * @return the aggregation result
 	 */
 	public abstract T getValue(ACC accumulator);
@@ -130,7 +215,8 @@ public abstract class AggregateFunction<T, ACC> extends ImperativeAggregateFunct
 	}
 
 	@Override
+	@SuppressWarnings({"unchecked", "rawtypes"})
 	public TypeInference getTypeInference(DataTypeFactory typeFactory) {
-		throw new TableException("Aggregate functions are not updated to the new type system yet.");
+		return TypeInferenceExtractor.forAggregateFunction(typeFactory, (Class) getClass());
 	}
 }
diff --git a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ImperativeAggregateFunction.java b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ImperativeAggregateFunction.java
index e39a9d0..e8f9adf 100644
--- a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ImperativeAggregateFunction.java
+++ b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ImperativeAggregateFunction.java
@@ -20,17 +20,27 @@ package org.apache.flink.table.functions;
 
 import org.apache.flink.annotation.PublicEvolving;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
+import org.apache.flink.table.annotation.DataTypeHint;
+import org.apache.flink.table.annotation.FunctionHint;
+import org.apache.flink.table.catalog.DataTypeFactory;
 
 /**
- * Base class for user-defined aggregates and table aggregates.
+ * Base class for user-defined {@link AggregateFunction} and {@link TableAggregateFunction}.
+ *
+ * <p>This class is used for unified handling of imperative aggregating functions. Concrete implementations
+ * should extend from {@link AggregateFunction} or {@link TableAggregateFunction}.
+ *
+ * @param <T> final result type of the aggregation
+ * @param <ACC> intermediate result type during the aggregation
  */
 @PublicEvolving
 public abstract class ImperativeAggregateFunction<T, ACC> extends UserDefinedFunction {
 
 	/**
-	 * Creates and initializes the accumulator for this {@link ImperativeAggregateFunction}. The
-	 * accumulator is used to keep the aggregated values which are needed to compute an aggregation
-	 * result.
+	 * Creates and initializes the accumulator for this {@link ImperativeAggregateFunction}.
+	 *
+	 * <p>The accumulator is an intermediate data structure that stores the aggregated values until a
+	 * final aggregation result is computed.
 	 *
 	 * @return the accumulator with the initial value
 	 */
@@ -41,7 +51,15 @@ public abstract class ImperativeAggregateFunction<T, ACC> extends UserDefinedFun
 	 *
 	 * @return The {@link TypeInformation} of the {@link ImperativeAggregateFunction}'s result or
 	 *         <code>null</code> if the result type should be automatically inferred.
+	 *
+	 * @deprecated This method uses the old type system and is based on the old reflective extraction
+	 *             logic. The method will be removed in future versions and is only called when using
+	 *             the deprecated {@code TableEnvironment.registerFunction(...)} method. The new reflective
+	 *             extraction logic (possibly enriched with {@link DataTypeHint} and {@link FunctionHint})
+	 *             should be powerful enough to cover most use cases. For advanced users, it is possible
+	 *             to override {@link UserDefinedFunction#getTypeInference(DataTypeFactory)}.
 	 */
+	@Deprecated
 	public TypeInformation<T> getResultType() {
 		return null;
 	}
@@ -51,7 +69,15 @@ public abstract class ImperativeAggregateFunction<T, ACC> extends UserDefinedFun
 	 *
 	 * @return The {@link TypeInformation} of the {@link ImperativeAggregateFunction}'s accumulator
 	 *         or <code>null</code> if the accumulator type should be automatically inferred.
+	 *
+	 * @deprecated This method uses the old type system and is based on the old reflective extraction
+	 *             logic. The method will be removed in future versions and is only called when using
+	 *             the deprecated {@code TableEnvironment.registerFunction(...)} method. The new reflective
+	 *             extraction logic (possibly enriched with {@link DataTypeHint} and {@link FunctionHint})
+	 *             should be powerful enough to cover most use cases. For advanced users, it is possible
+	 *             to override {@link UserDefinedFunction#getTypeInference(DataTypeFactory)}.
 	 */
+	@Deprecated
 	public TypeInformation<ACC> getAccumulatorType() {
 		return null;
 	}
diff --git a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ScalarFunction.java b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ScalarFunction.java
index 6d7aa83..cf4c8d1 100644
--- a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ScalarFunction.java
+++ b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/ScalarFunction.java
@@ -30,7 +30,7 @@ import org.apache.flink.table.types.extraction.TypeInferenceExtractor;
 import org.apache.flink.table.types.inference.TypeInference;
 
 /**
- * Base class for a user-defined scalar function. A user-defined scalar functions maps zero, one,
+ * Base class for a user-defined scalar function. A user-defined scalar function maps zero, one,
  * or multiple scalar values to a new scalar value.
  *
  * <p>The behavior of a {@link ScalarFunction} can be defined by implementing a custom evaluation
diff --git a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/TableAggregateFunction.java b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/TableAggregateFunction.java
index 3941eaa..d0aa0cb 100644
--- a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/TableAggregateFunction.java
+++ b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/TableAggregateFunction.java
@@ -19,39 +19,65 @@
 package org.apache.flink.table.functions;
 
 import org.apache.flink.annotation.PublicEvolving;
-import org.apache.flink.table.api.TableException;
+import org.apache.flink.table.annotation.DataTypeHint;
+import org.apache.flink.table.annotation.FunctionHint;
 import org.apache.flink.table.catalog.DataTypeFactory;
+import org.apache.flink.table.types.extraction.TypeInferenceExtractor;
 import org.apache.flink.table.types.inference.TypeInference;
 import org.apache.flink.util.Collector;
 
 /**
- * Base class for user-defined table aggregates.
- *
- * <p>The behavior of a {@link TableAggregateFunction} can be defined by implementing a series of
- * custom methods. A {@link TableAggregateFunction} needs at least three methods:
+ * Base class for a user-defined table aggregate function. A user-defined table aggregate function maps scalar
+ * values of multiple rows to zero, one, or multiple rows. If an output row consists of only one field,
+ * the row can be omitted and a scalar value can be emitted. It will be wrapped into an implicit row
+ * by the runtime.
+ *
+ * <p>Similar to an {@link AggregateFunction}, the behavior of a {@link TableAggregateFunction} is centered
+ * around the concept of an accumulator. The accumulator is an intermediate data structure that stores
+ * the aggregated values until a final aggregation result is computed.
+ *
+ * <p>For each set of rows that needs to be aggregated, the runtime will create an empty accumulator
+ * by calling {@link #createAccumulator()}. Subsequently, the {@code accumulate()} method of the
+ * function is called for each input row to update the accumulator. Once all rows have been processed,
+ * the {@code emitValue()} or {@code emitUpdateWithRetract()} method of the function is called to compute
+ * and return the final result.
+ *
+ * <p>The main behavior of a {@link TableAggregateFunction} can be defined by implementing a custom accumulate
+ * method. An accumulate method must be declared publicly, not static, and named <code>accumulate</code>.
+ * Accumulate methods can also be overloaded by implementing multiple methods named <code>accumulate</code>.
+ *
+ * <p>By default, input, accumulator, and output data types are automatically extracted using reflection.
+ * This includes the generic argument {@code ACC} of the class for determining an accumulator data type and
+ * the generic argument {@code T} for determining an output data type. Input arguments are derived
+ * from one or more {@code accumulate()} methods. If the reflective information is not sufficient, it
+ * can be supported and enriched with {@link DataTypeHint} and {@link FunctionHint} annotations.
+ *
+ * <p>A {@link TableAggregateFunction} needs at least three methods:
  * <ul>
- *     <li>createAccumulator</li>
- *     <li>accumulate</li>
- *     <li>emitValue or emitUpdateWithRetract</li>
+ *     <li>{@code createAccumulator}</li>
+ *     <li>{@code accumulate}</li>
+ *     <li>{@code emitValue} or {@code emitUpdateWithRetract}</li>
  * </ul>
  *
- * <p>There is another method that can be optional to have:
+ * <p>There is another method that is optional:
  * <ul>
  *     <li>retract</li>
  * </ul>
  *
  * <p>All these methods must be declared publicly, not static, and named exactly as the names
- * mentioned above. The method {@link #createAccumulator()} is defined in
- * the {@link ImperativeAggregateFunction} functions, while other methods are explained below.
+ * mentioned above to be called by generated code.
+ *
+ * <p>For storing a user-defined function in a catalog, the class must have a default constructor and
+ * must be instantiable during runtime.
  *
  * <pre>
  * {@code
- * Processes the input values and update the provided accumulator instance. The method
- * accumulate can be overloaded with different custom types and arguments. A TableAggregateFunction
+ * Processes the input values and updates the provided accumulator instance. The method
+ * accumulate can be overloaded with different custom types and arguments. A table aggregate function
  * requires at least one accumulate() method.
  *
  * param: accumulator           the accumulator which contains the current aggregated results
- * param: [user defined inputs] the input value (usually obtained from a new arrived data).
+ * param: [user defined inputs] the input value (usually obtained from newly arrived data).
  *
  * public void accumulate(ACC accumulator, [user defined inputs])
  * }
@@ -73,31 +99,31 @@ import org.apache.flink.util.Collector;
  * <pre>
  * {@code
  * Called every time when an aggregation result should be materialized. The returned value could
- * be either an early and incomplete result (periodically emitted as data arrive) or the final
+ * be either an early and incomplete result (periodically emitted as data arrives) or the final
  * result of the aggregation.
  *
  * param: accumulator           the accumulator which contains the current aggregated results
  * param: out                   the collector used to output data.
  *
- * public void emitValue(ACC accumulator, Collector<T> out)
+ * public void emitValue(ACC accumulator, org.apache.flink.util.Collector<T> out)
  * }
  * </pre>
  *
  * <pre>
  * {@code
  * Called every time when an aggregation result should be materialized. The returned value could
- * be either an early and incomplete result (periodically emitted as data arrive) or the final
+ * be either an early and incomplete result (periodically emitted as data arrives) or the final
  * result of the aggregation.
  *
- * Different from emitValue, emitUpdateWithRetract is used to emit values that have been updated.
- * This method outputs data incrementally in retract mode, i.e., once there is an update, we have
- * to retract old records before sending new updated ones. The emitUpdateWithRetract method will be
- * used in preference to the emitValue method if both methods are defined in the table aggregate
+ * Compared to emitValue(), emitUpdateWithRetract() is used to emit values that have been updated. This method
+ * outputs data incrementally in retraction mode (also known as "update before" and "update after"). Once
+ * there is an update, we have to retract old records before sending new updated ones. The emitUpdateWithRetract()
+ * method will be used in preference to the emitValue() method if both methods are defined in the table aggregate
  * function, because the method is treated to be more efficient than emitValue as it can output
  * values incrementally.
  *
  * param: accumulator           the accumulator which contains the current aggregated results
- * param: out                   the retractable collector used to output data. Use collect method
+ * param: out                   the retractable collector used to output data. Use the collect() method
  *                              to output(add) records and use retract method to retract(delete)
  *                              records.
  *
@@ -105,18 +131,15 @@ import org.apache.flink.util.Collector;
  * }
  * </pre>
  *
- * @param <T>   the type of the table aggregation result
- * @param <ACC> the type of the table aggregation accumulator. The accumulator is used to keep the
- *              aggregated values which are needed to compute an aggregation result.
- *              TableAggregateFunction represents its state using accumulator, thereby the state of
- *              the TableAggregateFunction must be put into the accumulator.
+ * @param <T> final result type of the aggregation
+ * @param <ACC> intermediate result type during the aggregation
  */
 @PublicEvolving
 public abstract class TableAggregateFunction<T, ACC> extends ImperativeAggregateFunction<T, ACC> {
 
 	/**
 	 * Collects a record and forwards it. The collector can output retract messages with the retract
-	 * method. Note: only use it in {@code emitUpdateWithRetract}.
+	 * method. Note: This collector can only be used in the {@code emitUpdateWithRetract()} method.
 	 */
 	public interface RetractableCollector<T> extends Collector<T> {
 
@@ -134,7 +157,8 @@ public abstract class TableAggregateFunction<T, ACC> extends ImperativeAggregate
 	}
 
 	@Override
+	@SuppressWarnings({"unchecked", "rawtypes"})
 	public TypeInference getTypeInference(DataTypeFactory typeFactory) {
-		throw new TableException("Table aggregate functions are not updated to the new type system yet.");
+		return TypeInferenceExtractor.forTableAggregateFunction(typeFactory, (Class) getClass());
 	}
 }
diff --git a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/TableFunction.java b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/TableFunction.java
index 4f89124..74c630b 100644
--- a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/TableFunction.java
+++ b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/functions/TableFunction.java
@@ -31,7 +31,7 @@ import org.apache.flink.table.types.inference.TypeInference;
 import org.apache.flink.util.Collector;
 
 /**
- * Base class for a user-defined table function. A user-defined table functions maps zero, one, or
+ * Base class for a user-defined table function. A user-defined table function maps zero, one, or
  * multiple scalar values to zero, one, or multiple rows. If an output row consists of only one field,
  * the row can be omitted and a scalar value can be emitted. It will be wrapped into an implicit row
  * by the runtime.
@@ -41,9 +41,9 @@ import org.apache.flink.util.Collector;
  * Evaluation methods can also be overloaded by implementing multiple methods named <code>eval</code>.
  *
  * <p>By default, input and output data types are automatically extracted using reflection. This includes
- * the generic argument {@code T} of the class for determining an output data type. If the reflective
- * information is not sufficient, it can be supported and enriched with {@link DataTypeHint} and
- * {@link FunctionHint} annotations.
+ * the generic argument {@code T} of the class for determining an output data type. Input arguments are
+ * derived from one or more {@code eval()} methods. If the reflective information is not sufficient, it
+ * can be supported and enriched with {@link DataTypeHint} and {@link FunctionHint} annotations.
  *
  * <p>The following examples show how to specify a table function:
  *
@@ -119,12 +119,10 @@ import org.apache.flink.util.Collector;
  *   Table table = ...    // schema: ROW< a VARCHAR >
  *
  *   // for Scala users
- *   val split = new Split()
- *   table.joinLateral(split('a) as ('s)).select('a, 's)
+ *   table.joinLateral(call(classOf[Split], $"a") as ("s")).select($"a", $"s")
  *
  *   // for Java users
- *   tEnv.createTemporarySystemFunction("split", Split.class); // register table function first
- *   table.joinLateral("split(a) as (s)").select("a, s");
+ *   table.joinLateral(call(Split.class, $("a")).as("s")).select($("a"), $("s"));
  *
  *   // for SQL users
  *   tEnv.createTemporarySystemFunction("split", Split.class); // register table function first
diff --git a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
index be68934..d5a0f8e 100644
--- a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
+++ b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeChecks.java
@@ -202,7 +202,7 @@ public final class LogicalTypeChecks {
 	}
 
 	/**
-	 * Returns the field count of row and structured types.
+	 * Returns the field count of row and structured types. For all other types, 1 is returned.
 	 */
 	public static int getFieldCount(LogicalType logicalType) {
 		return logicalType.accept(FIELD_COUNT_EXTRACTOR);
@@ -445,6 +445,15 @@ public final class LogicalTypeChecks {
 		public Integer visit(DistinctType distinctType) {
 			return distinctType.getSourceType().accept(this);
 		}
+
+		@Override
+		protected Integer defaultMethod(LogicalType logicalType) {
+			// legacy
+			if (hasRoot(logicalType, LogicalTypeRoot.STRUCTURED_TYPE)) {
+				return ((LegacyTypeInformationType<?>) logicalType).getTypeInformation().getArity();
+			}
+			return 1;
+		}
 	}
 
 	private static class FieldNamesExtractor extends Extractor<List<String>> {
diff --git a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeUtils.java b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeUtils.java
index 033d711..72c9c0e 100644
--- a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeUtils.java
+++ b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/logical/utils/LogicalTypeUtils.java
@@ -29,15 +29,23 @@ import org.apache.flink.table.data.TimestampData;
 import org.apache.flink.table.types.logical.DistinctType;
 import org.apache.flink.table.types.logical.LocalZonedTimestampType;
 import org.apache.flink.table.types.logical.LogicalType;
+import org.apache.flink.table.types.logical.RowType;
+import org.apache.flink.table.types.logical.RowType.RowField;
+import org.apache.flink.table.types.logical.StructuredType;
 import org.apache.flink.table.types.logical.TimestampType;
 import org.apache.flink.table.types.logical.ZonedTimestampType;
 
+import java.util.List;
+import java.util.stream.Collectors;
+
 /**
  * Utilities for handling {@link LogicalType}s.
  */
 @Internal
 public final class LogicalTypeUtils {
 
+	private static final String ATOMIC_FIELD_NAME = "f0";
+
 	private static final TimeAttributeRemover TIME_ATTRIBUTE_REMOVER = new TimeAttributeRemover();
 
 	public static LogicalType removeTimeAttributes(LogicalType logicalType) {
@@ -104,6 +112,45 @@ public final class LogicalTypeUtils {
 		}
 	}
 
+	/**
+	 * Converts any logical type to a row type. Composite types are mapped to their row type equivalent. Atomic
+	 * types are wrapped into a row type with a single field.
+	 */
+	public static RowType toRowType(LogicalType t) {
+		switch (t.getTypeRoot()) {
+			case ROW:
+				return (RowType) t;
+			case STRUCTURED_TYPE:
+				final StructuredType structuredType = (StructuredType) t;
+				final List<RowField> fields = structuredType.getAttributes()
+						.stream()
+						.map(attribute ->
+							new RowField(
+								attribute.getName(),
+								attribute.getType(),
+								attribute.getDescription().orElse(null))
+						)
+						.collect(Collectors.toList());
+				return new RowType(structuredType.isNullable(), fields);
+			case DISTINCT_TYPE:
+				return toRowType(((DistinctType) t).getSourceType());
+			default:
+				return RowType.of(t);
+		}
+	}
+
+	/**
+	 * Returns a unique name for an atomic type.
+	 */
+	public static String getAtomicName(List<String> existingNames) {
+		int i = 0;
+		String fieldName = ATOMIC_FIELD_NAME;
+		while ((null != existingNames) && existingNames.contains(fieldName)) {
+			fieldName = ATOMIC_FIELD_NAME + "_" + i++;
+		}
+		return fieldName;
+	}
+
 	// --------------------------------------------------------------------------------------------
 
 	private static class TimeAttributeRemover extends LogicalTypeDuplicator {
diff --git a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
index be23c89..942aff1 100644
--- a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
+++ b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/types/utils/DataTypeUtils.java
@@ -48,6 +48,7 @@ import org.apache.flink.table.types.logical.utils.LogicalTypeDefaultVisitor;
 import org.apache.flink.util.Preconditions;
 
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import java.util.Optional;
 import java.util.stream.Collectors;
@@ -55,6 +56,9 @@ import java.util.stream.IntStream;
 
 import static org.apache.flink.table.types.extraction.ExtractionUtils.primitiveToWrapper;
 import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.getFieldNames;
+import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.hasRoot;
+import static org.apache.flink.table.types.logical.utils.LogicalTypeChecks.isCompositeType;
+import static org.apache.flink.table.types.logical.utils.LogicalTypeUtils.getAtomicName;
 import static org.apache.flink.table.types.logical.utils.LogicalTypeUtils.toInternalConversionClass;
 
 /**
@@ -165,6 +169,33 @@ public final class DataTypeUtils {
 	}
 
 	/**
+	 * Returns the data types of the flat representation in the first level of the given data type.
+	 */
+	public static List<DataType> flattenToDataTypes(DataType dataType) {
+		final LogicalType type = dataType.getLogicalType();
+		if (hasRoot(type, LogicalTypeRoot.DISTINCT_TYPE)) {
+			return flattenToDataTypes(dataType.getChildren().get(0));
+		} else if (isCompositeType(type)) {
+			return dataType.getChildren();
+		}
+		return Collections.singletonList(dataType);
+	}
+
+	/**
+	 * Returns the names of the flat representation in the first level of the given data type.
+	 */
+	public static List<String> flattenToNames(DataType dataType, List<String> existingNames) {
+		final LogicalType type = dataType.getLogicalType();
+		if (hasRoot(type, LogicalTypeRoot.DISTINCT_TYPE)) {
+			return flattenToNames(dataType.getChildren().get(0), existingNames);
+		} else if (isCompositeType(type)) {
+			return getFieldNames(type);
+		} else {
+			return Collections.singletonList(getAtomicName(existingNames));
+		}
+	}
+
+	/**
 	 * The {@link DataType} class can only partially verify the conversion class. This method can perform
 	 * the final check when we know if the data type should be used for input.
 	 */
diff --git a/flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/utils/DataTypeUtilsTest.java b/flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/utils/DataTypeUtilsTest.java
index 9e193b1..42533c6 100644
--- a/flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/utils/DataTypeUtilsTest.java
+++ b/flink-table/flink-table-common/src/test/java/org/apache/flink/table/types/utils/DataTypeUtilsTest.java
@@ -36,9 +36,11 @@ import org.junit.Test;
 import java.sql.Timestamp;
 import java.time.LocalDateTime;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 
+import static org.apache.flink.table.api.DataTypes.BOOLEAN;
 import static org.apache.flink.table.api.DataTypes.FIELD;
 import static org.apache.flink.table.api.DataTypes.INT;
 import static org.apache.flink.table.api.DataTypes.ROW;
@@ -65,6 +67,32 @@ public class DataTypeUtilsTest {
 	}
 
 	@Test
+	public void testFlattenToDataTypes() {
+		assertThat(
+			DataTypeUtils.flattenToDataTypes(INT()),
+			equalTo(Collections.singletonList(INT())));
+
+		assertThat(
+			DataTypeUtils.flattenToDataTypes(ROW(FIELD("a", INT()), FIELD("b", BOOLEAN()))),
+			equalTo(Arrays.asList(INT(), BOOLEAN())));
+	}
+
+	@Test
+	public void testFlattenToNames() {
+		assertThat(
+			DataTypeUtils.flattenToNames(INT(), Collections.emptyList()),
+			equalTo(Collections.singletonList("f0")));
+
+		assertThat(
+			DataTypeUtils.flattenToNames(INT(), Collections.singletonList("f0")),
+			equalTo(Collections.singletonList("f0_0")));
+
+		assertThat(
+			DataTypeUtils.flattenToNames(ROW(FIELD("a", INT()), FIELD("b", BOOLEAN())), Collections.emptyList()),
+			equalTo(Arrays.asList("a", "b")));
+	}
+
+	@Test
 	public void testExpandRowType() {
 		DataType dataType = ROW(
 			FIELD("f0", INT()),
diff --git a/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/catalog/FunctionCatalogOperatorTable.java b/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/catalog/FunctionCatalogOperatorTable.java
index 8797bd8..78feb26 100644
--- a/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/catalog/FunctionCatalogOperatorTable.java
+++ b/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/catalog/FunctionCatalogOperatorTable.java
@@ -194,7 +194,10 @@ public class FunctionCatalogOperatorTable implements SqlOperatorTable {
 		return Optional.of(function);
 	}
 
-	@SuppressWarnings("RedundantIfStatement")
+	/**
+	 * Verifies which kinds of functions are allowed to be returned from the catalog given the
+	 * context information.
+	 */
 	private boolean verifyFunctionKind(
 			@Nullable SqlFunctionCategory category,
 			FunctionIdentifier identifier,
@@ -206,10 +209,13 @@ public class FunctionCatalogOperatorTable implements SqlOperatorTable {
 			return false;
 		}
 
-		// it would be nice to give a more meaningful exception when a scalar function is used instead
-		// of a table function and vice versa, but we can do that only once FLIP-51 is implemented
+		final FunctionKind kind = definition.getKind();
 
-		if (definition.getKind() == FunctionKind.SCALAR) {
+		if (kind == FunctionKind.TABLE) {
+			return true;
+		} else if (kind == FunctionKind.SCALAR ||
+				kind == FunctionKind.AGGREGATE ||
+				kind == FunctionKind.TABLE_AGGREGATE) {
 			if (category != null && category.isTableFunction()) {
 				throw new ValidationException(
 					String.format(
@@ -219,12 +225,7 @@ public class FunctionCatalogOperatorTable implements SqlOperatorTable {
 				);
 			}
 			return true;
-		} else if (definition.getKind() == FunctionKind.TABLE) {
-			return true;
 		}
-
-		// aggregate function are not supported, because the code generator is not ready yet
-
 		return false;
 	}
 
diff --git a/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/expressions/SqlAggFunctionVisitor.java b/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/expressions/SqlAggFunctionVisitor.java
index 83fe108..928f2e8 100644
--- a/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/expressions/SqlAggFunctionVisitor.java
+++ b/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/expressions/SqlAggFunctionVisitor.java
@@ -19,6 +19,7 @@
 package org.apache.flink.table.planner.expressions;
 
 import org.apache.flink.table.api.TableException;
+import org.apache.flink.table.catalog.DataTypeFactory;
 import org.apache.flink.table.expressions.CallExpression;
 import org.apache.flink.table.expressions.Expression;
 import org.apache.flink.table.expressions.ExpressionDefaultVisitor;
@@ -30,11 +31,17 @@ import org.apache.flink.table.functions.FunctionIdentifier;
 import org.apache.flink.table.functions.FunctionRequirement;
 import org.apache.flink.table.functions.TableAggregateFunction;
 import org.apache.flink.table.functions.TableAggregateFunctionDefinition;
-import org.apache.flink.table.planner.calcite.FlinkTypeFactory;
+import org.apache.flink.table.planner.functions.bridging.BridgingSqlAggFunction;
 import org.apache.flink.table.planner.functions.sql.FlinkSqlOperatorTable;
 import org.apache.flink.table.planner.functions.utils.AggSqlFunction;
+import org.apache.flink.table.planner.utils.ShortcutUtils;
+import org.apache.flink.table.types.inference.TypeInference;
 
 import org.apache.calcite.sql.SqlAggFunction;
+import org.apache.calcite.sql.SqlKind;
+import org.apache.calcite.tools.RelBuilder;
+
+import javax.annotation.Nullable;
 
 import java.util.IdentityHashMap;
 import java.util.Map;
@@ -65,10 +72,10 @@ public class SqlAggFunctionVisitor extends ExpressionDefaultVisitor<SqlAggFuncti
 		AGG_DEF_SQL_OPERATOR_MAPPING.put(BuiltInFunctionDefinitions.COLLECT, FlinkSqlOperatorTable.COLLECT);
 	}
 
-	private final FlinkTypeFactory typeFactory;
+	private final RelBuilder relBuilder;
 
-	public SqlAggFunctionVisitor(FlinkTypeFactory typeFactory) {
-		this.typeFactory = typeFactory;
+	public SqlAggFunctionVisitor(RelBuilder relBuilder) {
+		this.relBuilder = relBuilder;
 	}
 
 	@Override
@@ -77,44 +84,82 @@ public class SqlAggFunctionVisitor extends ExpressionDefaultVisitor<SqlAggFuncti
 			defaultMethod(call);
 		}
 
-		FunctionDefinition def = call.getFunctionDefinition();
-		if (AGG_DEF_SQL_OPERATOR_MAPPING.containsKey(def)) {
-			return AGG_DEF_SQL_OPERATOR_MAPPING.get(def);
+		final FunctionDefinition definition = call.getFunctionDefinition();
+		if (AGG_DEF_SQL_OPERATOR_MAPPING.containsKey(definition)) {
+			return AGG_DEF_SQL_OPERATOR_MAPPING.get(definition);
 		}
-		if (BuiltInFunctionDefinitions.DISTINCT == def) {
+		if (BuiltInFunctionDefinitions.DISTINCT == definition) {
 			Expression innerAgg = call.getChildren().get(0);
 			return innerAgg.accept(this);
 		}
 
-		if (isFunctionOfKind(call, AGGREGATE)) {
-			AggregateFunctionDefinition aggDef = (AggregateFunctionDefinition) def;
-			AggregateFunction aggFunc = aggDef.getAggregateFunction();
-			FunctionIdentifier identifier = call.getFunctionIdentifier()
-				.orElse(FunctionIdentifier.of(aggFunc.functionIdentifier()));
+		return createSqlAggFunction(
+			call.getFunctionIdentifier().orElse(null),
+			call.getFunctionDefinition());
+	}
+
+	private SqlAggFunction createSqlAggFunction(@Nullable FunctionIdentifier identifier, FunctionDefinition definition) {
+		// legacy
+		if (definition instanceof AggregateFunctionDefinition) {
+			return createLegacySqlAggregateFunction(identifier, (AggregateFunctionDefinition) definition);
+		} else if (definition instanceof TableAggregateFunctionDefinition) {
+			return createLegacySqlTableAggregateFunction(identifier, (TableAggregateFunctionDefinition) definition);
+		}
+
+		// new stack
+		final DataTypeFactory dataTypeFactory = ShortcutUtils.unwrapContext(relBuilder)
+			.getCatalogManager()
+			.getDataTypeFactory();
+		final TypeInference typeInference = definition.getTypeInference(dataTypeFactory);
+		return BridgingSqlAggFunction.of(
+			dataTypeFactory,
+			ShortcutUtils.unwrapTypeFactory(relBuilder),
+			SqlKind.OTHER_FUNCTION,
+			identifier,
+			definition,
+			typeInference);
+	}
+
+	private SqlAggFunction createLegacySqlAggregateFunction(
+			@Nullable FunctionIdentifier identifier,
+			AggregateFunctionDefinition definition) {
+		final AggregateFunction<?, ?> aggFunc = definition.getAggregateFunction();
+			final FunctionIdentifier adjustedIdentifier;
+			if (identifier != null) {
+				adjustedIdentifier = identifier;
+			} else {
+				adjustedIdentifier = FunctionIdentifier.of(aggFunc.functionIdentifier());
+			}
 			return new AggSqlFunction(
-				identifier,
+				adjustedIdentifier,
 				aggFunc.toString(),
 				aggFunc,
-				fromLegacyInfoToDataType(aggDef.getResultTypeInfo()),
-				fromLegacyInfoToDataType(aggDef.getAccumulatorTypeInfo()),
-				typeFactory,
+				fromLegacyInfoToDataType(definition.getResultTypeInfo()),
+				fromLegacyInfoToDataType(definition.getAccumulatorTypeInfo()),
+				ShortcutUtils.unwrapTypeFactory(relBuilder),
 				aggFunc.getRequirements().contains(FunctionRequirement.OVER_WINDOW_ONLY),
 				scala.Option.empty());
-		} else {
-			TableAggregateFunctionDefinition aggDef = (TableAggregateFunctionDefinition) def;
-			TableAggregateFunction aggFunc = aggDef.getTableAggregateFunction();
-			FunctionIdentifier identifier = call.getFunctionIdentifier()
-				.orElse(FunctionIdentifier.of(aggFunc.functionIdentifier()));
+	}
+
+	private SqlAggFunction createLegacySqlTableAggregateFunction(
+			@Nullable FunctionIdentifier identifier,
+			TableAggregateFunctionDefinition definition) {
+		final TableAggregateFunction<?, ?> aggFunc = definition.getTableAggregateFunction();
+			final FunctionIdentifier adjustedIdentifier;
+			if (identifier != null) {
+				adjustedIdentifier = identifier;
+			} else {
+				adjustedIdentifier = FunctionIdentifier.of(aggFunc.functionIdentifier());
+			}
 			return new AggSqlFunction(
-				identifier,
+				adjustedIdentifier,
 				aggFunc.toString(),
 				aggFunc,
-				fromLegacyInfoToDataType(aggDef.getResultTypeInfo()),
-				fromLegacyInfoToDataType(aggDef.getAccumulatorTypeInfo()),
-				typeFactory,
+				fromLegacyInfoToDataType(definition.getResultTypeInfo()),
+				fromLegacyInfoToDataType(definition.getAccumulatorTypeInfo()),
+				ShortcutUtils.unwrapTypeFactory(relBuilder),
 				false,
 				scala.Option.empty());
-		}
 	}
 
 	@Override
diff --git a/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/expressions/converter/OverConvertRule.java b/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/expressions/converter/OverConvertRule.java
index 4b2ef6c..86abac4 100644
--- a/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/expressions/converter/OverConvertRule.java
+++ b/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/expressions/converter/OverConvertRule.java
@@ -72,7 +72,7 @@ public class OverConvertRule implements CallExpressionConvertRule {
 		if (call.getFunctionDefinition() == BuiltInFunctionDefinitions.OVER) {
 			FlinkTypeFactory typeFactory = context.getTypeFactory();
 			Expression agg = children.get(0);
-			SqlAggFunction aggFunc = agg.accept(new SqlAggFunctionVisitor(typeFactory));
+			SqlAggFunction aggFunc = agg.accept(new SqlAggFunctionVisitor(context.getRelBuilder()));
 			RelDataType aggResultType = typeFactory.createFieldTypeFromLogicalType(
 				fromDataTypeToLogicalType(((ResolvedExpression) agg).getOutputDataType()));
 
diff --git a/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/plan/QueryOperationConverter.java b/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/plan/QueryOperationConverter.java
index 78326e5..36a31c3 100644
--- a/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/plan/QueryOperationConverter.java
+++ b/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/plan/QueryOperationConverter.java
@@ -650,7 +650,7 @@ public class QueryOperationConverter extends QueryOperationDefaultVisitor<RelNod
 			public AggCallVisitor(RelBuilder relBuilder, ExpressionConverter expressionConverter, String name,
 					boolean isDistinct) {
 				this.relBuilder = relBuilder;
-				this.sqlAggFunctionVisitor = new SqlAggFunctionVisitor((FlinkTypeFactory) relBuilder.getTypeFactory());
+				this.sqlAggFunctionVisitor = new SqlAggFunctionVisitor(relBuilder);
 				this.expressionConverter = expressionConverter;
 				this.name = name;
 				this.isDistinct = isDistinct;
@@ -704,7 +704,7 @@ public class QueryOperationConverter extends QueryOperationDefaultVisitor<RelNod
 
 			public TableAggCallVisitor(RelBuilder relBuilder, ExpressionConverter expressionConverter) {
 				this.relBuilder = relBuilder;
-				this.sqlAggFunctionVisitor = new SqlAggFunctionVisitor((FlinkTypeFactory) relBuilder.getTypeFactory());
+				this.sqlAggFunctionVisitor = new SqlAggFunctionVisitor(relBuilder);
 				this.expressionConverter = expressionConverter;
 			}
 
diff --git a/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/utils/ShortcutUtils.java b/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/utils/ShortcutUtils.java
index de719a9..129795b 100644
--- a/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/utils/ShortcutUtils.java
+++ b/flink-table/flink-table-planner-blink/src/main/java/org/apache/flink/table/planner/utils/ShortcutUtils.java
@@ -53,6 +53,10 @@ public final class ShortcutUtils {
 		return (FlinkTypeFactory) typeFactory;
 	}
 
+	public static FlinkTypeFactory unwrapTypeFactory(RelBuilder relBuilder) {
+		return unwrapTypeFactory(relBuilder.getTypeFactory());
+	}
+
 	public static FlinkContext unwrapContext(RelBuilder relBuilder) {
 		return unwrapContext(relBuilder.getCluster());
 	}
diff --git a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/ExprCodeGenerator.scala b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/ExprCodeGenerator.scala
index 3b41fcc..2d311ce 100644
--- a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/ExprCodeGenerator.scala
+++ b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/ExprCodeGenerator.scala
@@ -20,13 +20,16 @@ package org.apache.flink.table.planner.codegen
 
 import org.apache.flink.streaming.api.functions.ProcessFunction
 import org.apache.flink.table.api.TableException
+import org.apache.flink.table.data.RowData
+import org.apache.flink.table.data.binary.BinaryRowData
 import org.apache.flink.table.data.util.DataFormatConverters.{DataFormatConverter, getConverterForDataType}
 import org.apache.flink.table.planner.calcite.{FlinkTypeFactory, RexDistinctKeyVariable, RexFieldVariable}
 import org.apache.flink.table.planner.codegen.CodeGenUtils.{requireTemporal, requireTimeInterval, _}
 import org.apache.flink.table.planner.codegen.GenerateUtils._
 import org.apache.flink.table.planner.codegen.GeneratedExpression.{NEVER_NULL, NO_CODE}
 import org.apache.flink.table.planner.codegen.calls.ScalarOperatorGens._
-import org.apache.flink.table.planner.codegen.calls.{BridgingSqlFunctionCallGen, FunctionGenerator, ScalarFunctionCallGen, StringCallGen, TableFunctionCallGen}
+import org.apache.flink.table.planner.codegen.calls._
+import org.apache.flink.table.planner.functions.bridging.BridgingSqlFunction
 import org.apache.flink.table.planner.functions.sql.FlinkSqlOperatorTable._
 import org.apache.flink.table.planner.functions.sql.SqlThrowExceptionFunction
 import org.apache.flink.table.planner.functions.utils.{ScalarSqlFunction, TableSqlFunction}
@@ -35,16 +38,13 @@ import org.apache.flink.table.runtime.types.PlannerTypeUtils.isInteroperable
 import org.apache.flink.table.runtime.typeutils.TypeCheckUtils
 import org.apache.flink.table.runtime.typeutils.TypeCheckUtils.{isNumeric, isTemporal, isTimeInterval}
 import org.apache.flink.table.types.logical._
+import org.apache.flink.table.types.logical.utils.LogicalTypeChecks.{getFieldCount, isCompositeType}
 import org.apache.flink.table.typeutils.TimeIndicatorTypeInfo
 
 import org.apache.calcite.rex._
 import org.apache.calcite.sql.SqlOperator
 import org.apache.calcite.sql.`type`.{ReturnTypes, SqlTypeName}
 import org.apache.calcite.util.TimestampString
-import org.apache.flink.table.data.RowData
-import org.apache.flink.table.data.binary.BinaryRowData
-import org.apache.flink.table.planner.functions.bridging.BridgingSqlFunction
-import org.apache.flink.table.types.logical.utils.LogicalTypeChecks
 
 import scala.collection.JavaConversions._
 
@@ -118,10 +118,13 @@ class ExprCodeGenerator(ctx: CodeGeneratorContext, nullableInput: Boolean)
       case _ => Array[Int]()
     }
   }
-  
-  private def fieldIndices(t: LogicalType): Array[Int] = t match {
-    case rt: RowType => (0 until rt.getFieldCount).toArray
-    case _ => Array(0)
+
+  private def fieldIndices(t: LogicalType): Array[Int] = {
+    if (isCompositeType(t)) {
+      (0 until getFieldCount(t)).toArray
+    } else {
+      Array(0)
+    }
   }
  
   /**
diff --git a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/agg/AggsHandlerCodeGenerator.scala b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/agg/AggsHandlerCodeGenerator.scala
index 39f4420..4430cf4 100644
--- a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/agg/AggsHandlerCodeGenerator.scala
+++ b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/agg/AggsHandlerCodeGenerator.scala
@@ -17,7 +17,6 @@
  */
 package org.apache.flink.table.planner.codegen.agg
 
-import org.apache.flink.api.common.functions.RuntimeContext
 import org.apache.flink.table.api.TableException
 import org.apache.flink.table.data.GenericRowData
 import org.apache.flink.table.expressions._
@@ -34,8 +33,8 @@ import org.apache.flink.table.planner.plan.utils.AggregateInfoList
 import org.apache.flink.table.runtime.dataview.{StateListView, StateMapView}
 import org.apache.flink.table.runtime.generated._
 import org.apache.flink.table.runtime.types.LogicalTypeDataTypeConverter.fromDataTypeToLogicalType
-import org.apache.flink.table.runtime.types.PlannerTypeUtils
 import org.apache.flink.table.types.DataType
+import org.apache.flink.table.types.logical.utils.LogicalTypeUtils
 import org.apache.flink.table.types.logical.{BooleanType, IntType, LogicalType, RowType}
 import org.apache.flink.table.types.utils.TypeConversions.fromLegacyInfoToDataType
 import org.apache.flink.util.Collector
@@ -1109,7 +1108,7 @@ class AggsHandlerCodeGenerator(
 
   private def genRecordToRowData(aggExternalType: DataType, recordInputName: String): String = {
     val resultType = fromDataTypeToLogicalType(aggExternalType)
-    val resultRowType = PlannerTypeUtils.toRowType(resultType)
+    val resultRowType = LogicalTypeUtils.toRowType(resultType)
 
     val newCtx = CodeGeneratorContext(ctx.tableConfig)
     val exprGenerator = new ExprCodeGenerator(newCtx, false).bindInput(resultType)
diff --git a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/calls/TableFunctionCallGen.scala b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/calls/TableFunctionCallGen.scala
index 23c86cf..645f0bd 100644
--- a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/calls/TableFunctionCallGen.scala
+++ b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/codegen/calls/TableFunctionCallGen.scala
@@ -34,6 +34,7 @@ import org.apache.flink.table.runtime.types.PlannerTypeUtils
 import org.apache.flink.table.types.DataType
 import org.apache.flink.table.types.logical.LogicalType
 import org.apache.flink.table.types.logical.utils.LogicalTypeChecks.isCompositeType
+import org.apache.flink.table.types.logical.utils.LogicalTypeUtils
 
 import org.apache.calcite.rex.RexCall
 
@@ -117,7 +118,7 @@ class TableFunctionCallGen(
     val externalDataType = getExternalDataType
     val pojoFieldMapping = Some(UserDefinedFunctionUtils.getFieldInfo(externalDataType)._2)
     val externalType = fromDataTypeToLogicalType(externalDataType)
-    val wrappedInternalType = PlannerTypeUtils.toRowType(externalType)
+    val wrappedInternalType = LogicalTypeUtils.toRowType(externalType)
 
     val collectorCtx = CodeGeneratorContext(ctx.tableConfig)
     val externalTerm = newName("externalRecord")
diff --git a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/functions/utils/AggSqlFunction.scala b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/functions/utils/AggSqlFunction.scala
index 301f4d4..c4b627a 100644
--- a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/functions/utils/AggSqlFunction.scala
+++ b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/functions/utils/AggSqlFunction.scala
@@ -21,11 +21,13 @@ package org.apache.flink.table.planner.functions.utils
 import org.apache.flink.table.api.ValidationException
 import org.apache.flink.table.functions.{AggregateFunction, FunctionIdentifier, TableAggregateFunction, ImperativeAggregateFunction}
 import org.apache.flink.table.planner.calcite.FlinkTypeFactory
+import org.apache.flink.table.planner.functions.bridging.BridgingSqlAggFunction
 import org.apache.flink.table.planner.functions.utils.AggSqlFunction.{createOperandTypeChecker, createOperandTypeInference, createReturnTypeInference}
 import org.apache.flink.table.planner.functions.utils.UserDefinedFunctionUtils._
 import org.apache.flink.table.runtime.types.LogicalTypeDataTypeConverter.fromDataTypeToLogicalType
 import org.apache.flink.table.types.DataType
 import org.apache.flink.table.types.logical.LogicalType
+
 import org.apache.calcite.rel.`type`.RelDataType
 import org.apache.calcite.sql._
 import org.apache.calcite.sql.`type`.SqlOperandTypeChecker.Consistency
@@ -46,7 +48,9 @@ import java.util
   * @param externalResultType the type information of returned value
   * @param externalAccType the type information of the accumulator
   * @param typeFactory type factory for converting Flink's between Calcite's types
+  * @deprecated This uses the old type inference stack. Use [[BridgingSqlAggFunction]] instead.
   */
+@deprecated
 class AggSqlFunction(
     identifier: FunctionIdentifier,
     displayName: String,
diff --git a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/calcite/TableAggregate.scala b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/calcite/TableAggregate.scala
index cb1e254..cf2a8fe 100644
--- a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/calcite/TableAggregate.scala
+++ b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/calcite/TableAggregate.scala
@@ -18,16 +18,16 @@
 
 package org.apache.flink.table.planner.plan.nodes.calcite
 
-import java.util
+import org.apache.flink.table.planner.calcite.FlinkTypeFactory
+import org.apache.flink.table.types.logical.utils.LogicalTypeUtils.getAtomicName
 
 import org.apache.calcite.plan.{RelOptCluster, RelTraitSet}
 import org.apache.calcite.rel.`type`.RelDataType
 import org.apache.calcite.rel.core.AggregateCall
 import org.apache.calcite.rel.{RelNode, RelWriter, SingleRel}
 import org.apache.calcite.util.{ImmutableBitSet, Pair, Util}
-import org.apache.flink.table.planner.calcite.FlinkTypeFactory
-import org.apache.flink.table.types.utils.{LegacyTypeInfoDataTypeConverter, TypeConversions}
-import org.apache.flink.table.typeutils.FieldInfoUtils
+
+import java.util
 
 import scala.collection.JavaConversions._
 import scala.collection.mutable.ListBuffer
@@ -82,13 +82,8 @@ abstract class TableAggregate(
       // only a structured type contains a field list.
       aggCall.`type`.getFieldList.foreach(builder.add)
     } else {
-      // A non-structured type does not have a field list, so get field name through
-      // FieldInfoUtils.getFieldNames.
-      val logicalType = FlinkTypeFactory.toLogicalType(aggCall.`type`)
-      val dataType = TypeConversions.fromLogicalToDataType(logicalType)
-      val name = FieldInfoUtils
-        .getFieldNames(LegacyTypeInfoDataTypeConverter.toLegacyTypeInfo(dataType), groupNames).head
-      builder.add(name, aggCall.`type`)
+      // wrap non-structured types into a row
+      builder.add(getAtomicName(groupNames), aggCall.`type`)
     }
     builder.build()
   }
diff --git a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/utils/AggFunctionFactory.scala b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/utils/AggFunctionFactory.scala
index 563c976..f6ac522 100644
--- a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/utils/AggFunctionFactory.scala
+++ b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/utils/AggFunctionFactory.scala
@@ -31,16 +31,19 @@ import org.apache.flink.table.planner.functions.aggfunctions.MinWithRetractAggFu
 import org.apache.flink.table.planner.functions.aggfunctions.SingleValueAggFunction._
 import org.apache.flink.table.planner.functions.aggfunctions.SumWithRetractAggFunction._
 import org.apache.flink.table.planner.functions.aggfunctions._
+import org.apache.flink.table.planner.functions.bridging.BridgingSqlAggFunction
 import org.apache.flink.table.planner.functions.sql.{SqlFirstLastValueAggFunction, SqlListAggFunction}
 import org.apache.flink.table.planner.functions.utils.AggSqlFunction
 import org.apache.flink.table.runtime.types.TypeInfoLogicalTypeConverter
 import org.apache.flink.table.runtime.typeutils.DecimalDataTypeInfo
 import org.apache.flink.table.types.logical.LogicalTypeRoot._
 import org.apache.flink.table.types.logical._
+
 import org.apache.calcite.rel.`type`.RelDataType
 import org.apache.calcite.rel.core.AggregateCall
 import org.apache.calcite.sql.fun._
 import org.apache.calcite.sql.{SqlAggFunction, SqlKind, SqlRankFunction}
+
 import java.util
 
 import scala.collection.JavaConversions._
@@ -131,6 +134,9 @@ class AggFunctionFactory(
           constants.toArray,
           argTypes)
 
+      case bsf: BridgingSqlAggFunction =>
+        bsf.getDefinition.asInstanceOf[UserDefinedFunction]
+
       case unSupported: SqlAggFunction =>
         throw new TableException(s"Unsupported Function: '${unSupported.getName}'")
     }
diff --git a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/utils/AggregateUtil.scala b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/utils/AggregateUtil.scala
index f24706d..f8c4c2a 100644
--- a/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/utils/AggregateUtil.scala
+++ b/flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/utils/AggregateUtil.scala
@@ -24,7 +24,7 @@ import org.apache.flink.table.data.{DecimalData, RowData, StringData, TimestampD
 import org.apache.flink.table.dataview.MapViewTypeInfo
 import org.apache.flink.table.expressions.ExpressionUtils.extractValue
 import org.apache.flink.table.expressions._
-import org.apache.flink.table.functions.{AggregateFunction, TableAggregateFunction, ImperativeAggregateFunction, UserDefinedFunction}
+import org.apache.flink.table.functions.{AggregateFunction, FunctionKind, ImperativeAggregateFunction, UserDefinedFunction}
 import org.apache.flink.table.planner.JLong
 import org.apache.flink.table.planner.calcite.FlinkRelBuilder.PlannerNamedWindowProperty
 import org.apache.flink.table.planner.calcite.{FlinkTypeFactory, FlinkTypeSystem}
@@ -32,16 +32,20 @@ import org.apache.flink.table.planner.dataview.DataViewUtils.useNullSerializerFo
 import org.apache.flink.table.planner.dataview.{DataViewSpec, MapViewSpec}
 import org.apache.flink.table.planner.expressions.{PlannerProctimeAttribute, PlannerRowtimeAttribute, PlannerWindowEnd, PlannerWindowStart}
 import org.apache.flink.table.planner.functions.aggfunctions.DeclarativeAggregateFunction
+import org.apache.flink.table.planner.functions.bridging.BridgingSqlAggFunction
+import org.apache.flink.table.planner.functions.inference.OperatorBindingCallContext
 import org.apache.flink.table.planner.functions.sql.{FlinkSqlOperatorTable, SqlFirstLastValueAggFunction, SqlListAggFunction}
 import org.apache.flink.table.planner.functions.utils.AggSqlFunction
 import org.apache.flink.table.planner.functions.utils.UserDefinedFunctionUtils._
 import org.apache.flink.table.planner.plan.`trait`.{ModifyKindSetTraitDef, RelModifiedMonotonicity}
 import org.apache.flink.table.planner.plan.metadata.FlinkRelMetadataQuery
 import org.apache.flink.table.planner.plan.nodes.physical.stream.StreamPhysicalRel
+import org.apache.flink.table.planner.utils.JavaScalaConversionUtil.toScala
 import org.apache.flink.table.runtime.operators.bundle.trigger.CountBundleTrigger
 import org.apache.flink.table.runtime.types.LogicalTypeDataTypeConverter.{fromDataTypeToLogicalType, fromLogicalTypeToDataType}
 import org.apache.flink.table.runtime.types.TypeInfoDataTypeConverter.fromDataTypeToTypeInfo
 import org.apache.flink.table.types.DataType
+import org.apache.flink.table.types.inference.TypeInferenceUtil
 import org.apache.flink.table.types.logical.LogicalTypeRoot._
 import org.apache.flink.table.types.logical.utils.LogicalTypeChecks
 import org.apache.flink.table.types.logical.utils.LogicalTypeChecks.hasRoot
@@ -49,7 +53,9 @@ import org.apache.flink.table.types.logical.{LogicalTypeRoot, _}
 import org.apache.flink.table.types.utils.TypeConversions.fromLegacyInfoToDataType
 
 import org.apache.calcite.rel.`type`._
+import org.apache.calcite.rel.core.Aggregate.AggCallBinding
 import org.apache.calcite.rel.core.{Aggregate, AggregateCall}
+import org.apache.calcite.sql.`type`.SqlTypeUtil
 import org.apache.calcite.sql.fun._
 import org.apache.calcite.sql.validate.SqlMonotonicity
 import org.apache.calcite.sql.{SqlKind, SqlRankFunction}
@@ -297,14 +303,151 @@ object AggregateUtil extends Enumeration {
     // Step-3:
     // create aggregate information
     val factory = new AggFunctionFactory(inputRowType, orderKeyIdx, needRetraction)
-    val aggInfos = newAggCalls.zipWithIndex.map { case (call, index) =>
-      val argIndexes = call.getAggregation match {
-        case _: SqlRankFunction => orderKeyIdx
-        case _ => call.getArgList.map(_.intValue()).toArray
+    val aggInfos = newAggCalls
+      .zipWithIndex
+      .map { case (call, index) =>
+        val argIndexes = call.getAggregation match {
+          case _: SqlRankFunction => orderKeyIdx
+          case _ => call.getArgList.map(_.intValue()).toArray
+        }
+        transformToAggregateInfo(
+          inputRowType,
+          call,
+          index,
+          argIndexes,
+          factory.createAggFunction(call, index),
+          isStateBackedDataViews,
+          needRetraction(index))
       }
 
-      val function = factory.createAggFunction(call, index)
-      val (externalArgTypes, externalAccTypes, viewSpecs, externalResultType) = function match {
+    AggregateInfoList(aggInfos.toArray, indexOfCountStar, countStarInserted, distinctInfos)
+  }
+
+  private def transformToAggregateInfo(
+      inputRowRelDataType: RelDataType,
+      call: AggregateCall,
+      index: Int,
+      argIndexes: Array[Int],
+      udf: UserDefinedFunction,
+      hasStateBackedDataViews: Boolean,
+      needsRetraction: Boolean)
+    : AggregateInfo = call.getAggregation match {
+
+    case _: BridgingSqlAggFunction =>
+      createAggregateInfoFromBridgingFunction(
+        inputRowRelDataType,
+        call,
+        index,
+        argIndexes,
+        udf,
+        hasStateBackedDataViews,
+        needsRetraction)
+
+    case _ =>
+      createAggregateInfoFromLegacyFunction(
+        inputRowRelDataType,
+        call,
+        index,
+        argIndexes,
+        udf,
+        hasStateBackedDataViews,
+        needsRetraction)
+  }
+
+  private def createAggregateInfoFromBridgingFunction(
+      inputRowRelDataType: RelDataType,
+      call: AggregateCall,
+      index: Int,
+      argIndexes: Array[Int],
+      udf: UserDefinedFunction,
+      hasStateBackedDataViews: Boolean,
+      needsRetraction: Boolean)
+    : AggregateInfo = {
+
+    val function = call.getAggregation.asInstanceOf[BridgingSqlAggFunction]
+
+    val inference = function.getTypeInference
+
+    // not all information is available in the call context of aggregate functions at this location
+    // e.g. literal information is lost because the aggregation is split into multiple operators
+    val callContext = new OperatorBindingCallContext(
+      function.getDataTypeFactory,
+      udf,
+      new AggCallBinding(
+        function.getTypeFactory,
+        function,
+        SqlTypeUtil.projectTypes(
+          inputRowRelDataType,
+          argIndexes.map(Int.box).toList),
+        0,
+        false))
+
+    // enrich argument types with conversion class
+    val adaptedCallContext = TypeInferenceUtil.adaptArguments(
+      inference,
+      callContext,
+      null)
+    val enrichedArgumentDataTypes = toScala(adaptedCallContext.getArgumentDataTypes)
+
+    // derive accumulator type with conversion class
+    val enrichedAccumulatorDataType = TypeInferenceUtil.inferOutputType(
+      adaptedCallContext,
+      inference.getAccumulatorTypeStrategy.orElse(inference.getOutputTypeStrategy))
+
+    // enrich output types with conversion class
+    val enrichedOutputDataType = TypeInferenceUtil.inferOutputType(
+      adaptedCallContext,
+      inference.getOutputTypeStrategy)
+
+    createAggregateInfo(
+      call,
+      udf,
+      index,
+      argIndexes,
+      enrichedArgumentDataTypes.toArray,
+      enrichedAccumulatorDataType,
+      enrichedOutputDataType,
+      needsRetraction,
+      hasStateBackedDataViews)
+  }
+
+  private def createAggregateInfo(
+      call: AggregateCall,
+      udf: UserDefinedFunction,
+      index: Int,
+      argIndexes: Array[Int],
+      inputDataTypes: Array[DataType],
+      accumulatorDataType: DataType,
+      outputDataType: DataType,
+      needsRetraction: Boolean,
+      hasStateBackedDataViews: Boolean)
+    : AggregateInfo = {
+
+    // TODO handle data views here
+
+    AggregateInfo(
+        call,
+        udf,
+        index,
+        argIndexes,
+        inputDataTypes,
+        Array(accumulatorDataType),
+        Array(),
+        outputDataType,
+        needsRetraction)
+  }
+
+  private def createAggregateInfoFromLegacyFunction(
+      inputRowRelDataType: RelDataType,
+      call: AggregateCall,
+      index: Int,
+      argIndexes: Array[Int],
+      udf: UserDefinedFunction,
+      hasStateBackedDataViews: Boolean,
+      needsRetraction: Boolean)
+    : AggregateInfo = {
+      val (externalArgTypes, externalAccTypes, viewSpecs, externalResultType) = udf match {
+
         case a: DeclarativeAggregateFunction =>
           val bufferTypes: Array[LogicalType] = a.getAggBufferTypes.map(_.getLogicalType)
           val bufferTypeInfos = bufferTypes.map(fromLogicalTypeToDataType)
@@ -314,6 +457,7 @@ object AggregateUtil extends Enumeration {
             Array.empty[DataViewSpec],
             fromLogicalTypeToDataType(a.getResultType.getLogicalType)
           )
+
         case a: ImperativeAggregateFunction[_, _] =>
           val (implicitAccType, implicitResultType) = call.getAggregation match {
             case aggSqlFun: AggSqlFunction =>
@@ -322,7 +466,7 @@ object AggregateUtil extends Enumeration {
           }
           val externalAccType = getAccumulatorTypeOfAggregateFunction(a, implicitAccType)
           val argTypes = call.getArgList
-            .map(idx => inputRowType.getFieldList.get(idx).getType)
+            .map(idx => inputRowRelDataType.getFieldList.get(idx).getType)
             .map(FlinkTypeFactory.toLogicalType)
           val externalArgTypes: Array[DataType] = getAggUserDefinedInputTypes(
             a,
@@ -332,33 +476,29 @@ object AggregateUtil extends Enumeration {
             index,
             a,
             externalAccType,
-            isStateBackedDataViews)
+            hasStateBackedDataViews)
           (
             externalArgTypes,
             Array(newExternalAccType),
             specs,
             getResultTypeOfAggregateFunction(a, implicitResultType)
           )
-        case _ => throw new TableException(s"Unsupported function: $function")
+
+        case _ => throw new TableException(s"Unsupported function: $udf")
       }
 
       AggregateInfo(
         call,
-        function,
+        udf,
         index,
         argIndexes,
         externalArgTypes,
         externalAccTypes,
         viewSpecs,
         externalResultType,
-        needRetraction(index))
-
-    }.toArray
-
-    AggregateInfoList(aggInfos, indexOfCountStar, countStarInserted, distinctInfos)
+        needsRetraction)
   }
 
-
   /**
     * Inserts an COUNT(*) aggregate call if needed. The COUNT(*) aggregate call is used
     * to count the number of added and retracted input records.
@@ -794,8 +934,11 @@ object AggregateUtil extends Enumeration {
 
   private[flink] def isTableAggregate(aggCalls: util.List[AggregateCall]): Boolean = {
     aggCalls
-      .filter(e => e.getAggregation.isInstanceOf[AggSqlFunction])
-      .map(e => e.getAggregation.asInstanceOf[AggSqlFunction].aggregateFunction)
-      .exists(_.isInstanceOf[TableAggregateFunction[_, _]])
+      .flatMap(call => call.getAggregation match {
+        case asf: AggSqlFunction => Some(asf.aggregateFunction)
+        case bsaf: BridgingSqlAggFunction => Some(bsaf.getDefinition)
+        case _ => None
+      })
+      .exists(_.getKind == FunctionKind.TABLE_AGGREGATE)
   }
 }
diff --git a/flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/plan/utils/JavaUserDefinedAggFunctions.java b/flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/plan/utils/JavaUserDefinedAggFunctions.java
index 9887e21..ce5716c 100644
--- a/flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/plan/utils/JavaUserDefinedAggFunctions.java
+++ b/flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/plan/utils/JavaUserDefinedAggFunctions.java
@@ -150,19 +150,19 @@ public class JavaUserDefinedAggFunctions {
 
 		// overloaded accumulate method
 		// dummy to test constants
-		public void accumulate(WeightedAvgAccum accumulator, long iValue, int iWeight, int x, String string) {
+		public void accumulate(WeightedAvgAccum accumulator, Long iValue, Integer iWeight, Integer x, String string) {
 			accumulator.sum += (iValue + Integer.parseInt(string)) * iWeight;
 			accumulator.count += iWeight;
 		}
 
 		// overloaded accumulate method
-		public void accumulate(WeightedAvgAccum accumulator, long iValue, int iWeight) {
+		public void accumulate(WeightedAvgAccum accumulator, Long iValue, Long iWeight) {
 			accumulator.sum += iValue * iWeight;
 			accumulator.count += iWeight;
 		}
 
 		//Overloaded accumulate method
-		public void accumulate(WeightedAvgAccum accumulator, int iValue, int iWeight) {
+		public void accumulate(WeightedAvgAccum accumulator, Integer iValue, Integer iWeight) {
 			accumulator.sum += iValue * iWeight;
 			accumulator.count += iWeight;
 		}
diff --git a/flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/runtime/utils/JavaUserDefinedAggFunctions.java b/flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/runtime/utils/JavaUserDefinedAggFunctions.java
index 3d74f98..52324ee 100644
--- a/flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/runtime/utils/JavaUserDefinedAggFunctions.java
+++ b/flink-table/flink-table-planner-blink/src/test/java/org/apache/flink/table/planner/runtime/utils/JavaUserDefinedAggFunctions.java
@@ -20,10 +20,15 @@ package org.apache.flink.table.planner.runtime.utils;
 
 import org.apache.flink.api.common.typeinfo.Types;
 import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.table.annotation.DataTypeHint;
+import org.apache.flink.table.annotation.FunctionHint;
 import org.apache.flink.table.api.dataview.ListView;
 import org.apache.flink.table.api.dataview.MapView;
 import org.apache.flink.table.functions.AggregateFunction;
 import org.apache.flink.table.functions.FunctionRequirement;
+import org.apache.flink.table.functions.TableAggregateFunction;
+import org.apache.flink.types.Row;
+import org.apache.flink.util.Collector;
 
 import java.util.Collections;
 import java.util.Iterator;
@@ -428,4 +433,81 @@ public class JavaUserDefinedAggFunctions {
 			return acc.count;
 		}
 	}
+
+	/**
+	 * Max function with overloaded arguments and accumulators.
+	 */
+	public static class OverloadedMaxFunction extends AggregateFunction<Object, Row> {
+
+		@Override
+		public Row createAccumulator() {
+			return new Row(1);
+		}
+
+		@FunctionHint(
+			accumulator = @DataTypeHint("ROW<max BIGINT>"),
+			output = @DataTypeHint("BIGINT")
+		)
+		public void accumulate(Row accumulator, Long l) {
+			final Long max = (Long) accumulator.getField(0);
+			if (max == null || l > max) {
+				accumulator.setField(0, l);
+			}
+		}
+
+		@FunctionHint(
+			accumulator = @DataTypeHint("ROW<max STRING>"),
+			output = @DataTypeHint("STRING")
+		)
+		public void accumulate(Row accumulator, String s) {
+			final String max = (String) accumulator.getField(0);
+			if (max == null || s.compareTo(max) > 0) {
+				accumulator.setField(0, s);
+			}
+		}
+
+		@Override
+		public Object getValue(Row accumulator) {
+			return accumulator.getField(0);
+		}
+	}
+
+	/**
+	 * Max function with overloaded arguments and accumulators that returns the result twice using
+	 * {@link TableAggregateFunction}.
+	 */
+	public static class OverloadedDoubleMaxFunction extends TableAggregateFunction<Object, Row> {
+
+		@Override
+		public Row createAccumulator() {
+			return new Row(1);
+		}
+
+		@FunctionHint(
+			accumulator = @DataTypeHint("ROW<max BIGINT>"),
+			output = @DataTypeHint("BIGINT")
+		)
+		public void accumulate(Row accumulator, Long l) {
+			final Long max = (Long) accumulator.getField(0);
+			if (max == null || l > max) {
+				accumulator.setField(0, l);
+			}
+		}
+
+		@FunctionHint(
+			accumulator = @DataTypeHint("ROW<max STRING>"),
+			output = @DataTypeHint("STRING")
+		)
+		public void accumulate(Row accumulator, String s) {
+			final String max = (String) accumulator.getField(0);
+			if (max == null || s.compareTo(max) > 0) {
+				accumulator.setField(0, s);
+			}
+		}
+
+		public void emitValue(Row accumulator, Collector<Object> out) {
+			out.collect(accumulator.getField(0));
+			out.collect(accumulator.getField(0));
+		}
+	}
 }
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/batch/table/GroupWindowTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/batch/table/GroupWindowTest.xml
index 934d2a0..2f69e72 100644
--- a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/batch/table/GroupWindowTest.xml
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/batch/table/GroupWindowTest.xml
@@ -88,18 +88,20 @@ HashWindowAggregate(groupBy=[string], window=[TumblingGroupWindow('w, ts, 720000
     <Resource name="planBefore">
       <![CDATA[
 LogicalProject(string=[$0], EXPR$0=[$1])
-+- LogicalWindowAggregate(group=[{2}], EXPR$0=[myWeightedAvg($0, $1)], window=[TumblingGroupWindow('w, long, 5)], properties=[])
-   +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(long, int, string)]]])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithMerge$89c523cc7d9b293d037af4b0651ea4d3($0, $3)], window=[TumblingGroupWindow('w, long, 5)], properties=[])
+   +- LogicalProject(long=[$0], int=[$1], string=[$2], int0=[CAST($1):BIGINT])
+      +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(long, int, string)]]])
 ]]>
     </Resource>
     <Resource name="planAfter">
       <![CDATA[
-SortWindowAggregate(groupBy=[string], window=[TumblingGroupWindow('w, long, 5)], select=[string, Final_myWeightedAvg(EXPR$0) AS EXPR$0])
+SortWindowAggregate(groupBy=[string], window=[TumblingGroupWindow('w, long, 5)], select=[string, Final_org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithMerge$89c523cc7d9b293d037af4b0651ea4d3(EXPR$0) AS EXPR$0])
 +- Sort(orderBy=[string ASC, assignedWindow$ ASC])
    +- Exchange(distribution=[hash[string]])
-      +- LocalSortWindowAggregate(groupBy=[string], window=[TumblingGroupWindow('w, long, 5)], select=[string, Partial_myWeightedAvg(long, int) AS EXPR$0])
-         +- Sort(orderBy=[string ASC, long ASC])
-            +- LegacyTableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(long, int, string)]]], fields=[long, int, string])
+      +- LocalSortWindowAggregate(groupBy=[string], window=[TumblingGroupWindow('w, long, 5)], select=[string, Partial_org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithMerge$89c523cc7d9b293d037af4b0651ea4d3(long, int0) AS EXPR$0])
+         +- Calc(select=[long, int, string, CAST(int) AS int0])
+            +- Sort(orderBy=[string ASC, long ASC])
+               +- LegacyTableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(long, int, string)]]], fields=[long, int, string])
 ]]>
     </Resource>
   </TestCase>
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/sql/agg/WindowAggregateTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/sql/agg/WindowAggregateTest.xml
index 3a3b5a0..a355e43 100644
--- a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/sql/agg/WindowAggregateTest.xml
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/sql/agg/WindowAggregateTest.xml
@@ -120,7 +120,7 @@ FROM MyTable
       <![CDATA[
 LogicalProject(EXPR$0=[$1], wAvg=[$2], EXPR$2=[HOP_START($0)], EXPR$3=[HOP_END($0)])
 +- LogicalAggregate(group=[{0}], EXPR$0=[COUNT()], wAvg=[weightedAvg($1, $2)])
-   +- LogicalProject($f0=[HOP($3, 900000:INTERVAL MINUTE, 3600000:INTERVAL HOUR)], c=[$2], a=[$0])
+   +- LogicalProject($f0=[HOP($3, 900000:INTERVAL MINUTE, 3600000:INTERVAL HOUR)], c=[$2], $f2=[CAST($0):BIGINT])
       +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
@@ -129,7 +129,7 @@ LogicalProject(EXPR$0=[$1], wAvg=[$2], EXPR$2=[HOP_START($0)], EXPR$3=[HOP_END($
 Calc(select=[EXPR$0, wAvg, w$start AS EXPR$2, w$end AS EXPR$3])
 +- GroupWindowAggregate(window=[SlidingGroupWindow('w$, proctime, 3600000, 900000)], properties=[w$start, w$end, w$proctime], select=[COUNT(*) AS EXPR$0, weightedAvg(c, a) AS wAvg, start('w$) AS w$start, end('w$) AS w$end, proctime('w$) AS w$proctime])
    +- Exchange(distribution=[single])
-      +- Calc(select=[proctime, c, a])
+      +- Calc(select=[proctime, c, CAST(a) AS a])
          +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
 ]]>
     </Resource>
@@ -313,7 +313,7 @@ GROUP BY b, d
       <![CDATA[
 LogicalProject(EXPR$0=[$2])
 +- LogicalAggregate(group=[{0, 1}], EXPR$0=[weightedAvg($2, $3)])
-   +- LogicalProject(b=[$1], d=[$4], c=[$2], a=[$0])
+   +- LogicalProject(b=[$1], d=[$4], c=[$2], $f3=[CAST($0):BIGINT])
       +- LogicalAggregate(group=[{0, 1, 2, 3}], d=[COUNT()])
          +- LogicalProject(a=[$0], b=[$1], c=[$2], $f3=[$TUMBLE($4, 900000:INTERVAL MINUTE)])
             +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
@@ -322,12 +322,13 @@ LogicalProject(EXPR$0=[$2])
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[EXPR$0])
-+- GroupAggregate(groupBy=[b, d], select=[b, d, weightedAvg(c, a) AS EXPR$0])
++- GroupAggregate(groupBy=[b, d], select=[b, d, weightedAvg(c, $f3) AS EXPR$0])
    +- Exchange(distribution=[hash[b, d]])
-      +- GroupWindowAggregate(groupBy=[a, b, c], window=[TumblingGroupWindow('w$, rowtime, 900000)], select=[a, b, c, COUNT(*) AS d])
-         +- Exchange(distribution=[hash[a, b, c]])
-            +- Calc(select=[a, b, c, rowtime])
-               +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+      +- Calc(select=[b, d, c, CAST(a) AS $f3])
+         +- GroupWindowAggregate(groupBy=[a, b, c], window=[TumblingGroupWindow('w$, rowtime, 900000)], select=[a, b, c, COUNT(*) AS d])
+            +- Exchange(distribution=[hash[a, b, c]])
+               +- Calc(select=[a, b, c, rowtime])
+                  +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
 ]]>
     </Resource>
   </TestCase>
@@ -381,7 +382,7 @@ FROM MyTable
       <![CDATA[
 LogicalProject(EXPR$0=[$1], wAvg=[$2], EXPR$2=[SESSION_START($0)], EXPR$3=[SESSION_END($0)])
 +- LogicalAggregate(group=[{0}], EXPR$0=[COUNT()], wAvg=[weightedAvg($1, $2)])
-   +- LogicalProject($f0=[SESSION($3, 900000:INTERVAL MINUTE)], c=[$2], a=[$0])
+   +- LogicalProject($f0=[SESSION($3, 900000:INTERVAL MINUTE)], c=[$2], $f2=[CAST($0):BIGINT])
       +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
@@ -390,7 +391,7 @@ LogicalProject(EXPR$0=[$1], wAvg=[$2], EXPR$2=[SESSION_START($0)], EXPR$3=[SESSI
 Calc(select=[EXPR$0, wAvg, w$start AS EXPR$2, w$end AS EXPR$3])
 +- GroupWindowAggregate(window=[SessionGroupWindow('w$, proctime, 900000)], properties=[w$start, w$end, w$proctime], select=[COUNT(*) AS EXPR$0, weightedAvg(c, a) AS wAvg, start('w$) AS w$start, end('w$) AS w$end, proctime('w$) AS w$proctime])
    +- Exchange(distribution=[single])
-      +- Calc(select=[proctime, c, a])
+      +- Calc(select=[proctime, c, CAST(a) AS a])
          +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
 ]]>
     </Resource>
@@ -434,7 +435,7 @@ GROUP BY b, d, ping_start
       <![CDATA[
 LogicalProject(EXPR$0=[$3])
 +- LogicalAggregate(group=[{0, 1, 2}], EXPR$0=[weightedAvg($3, $4)])
-   +- LogicalProject(b=[$1], d=[$4], ping_start=[TUMBLE_START($3)], c=[$2], a=[$0])
+   +- LogicalProject(b=[$1], d=[$4], ping_start=[TUMBLE_START($3)], c=[$2], $f4=[CAST($0):BIGINT])
       +- LogicalAggregate(group=[{0, 1, 2, 3}], d=[COUNT()])
          +- LogicalProject(a=[$0], b=[$1], c=[$2], $f3=[$TUMBLE($4, 900000:INTERVAL MINUTE)])
             +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
@@ -443,9 +444,9 @@ LogicalProject(EXPR$0=[$3])
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[EXPR$0])
-+- GroupAggregate(groupBy=[b, d, ping_start], select=[b, d, ping_start, weightedAvg(c, a) AS EXPR$0])
++- GroupAggregate(groupBy=[b, d, ping_start], select=[b, d, ping_start, weightedAvg(c, $f4) AS EXPR$0])
    +- Exchange(distribution=[hash[b, d, ping_start]])
-      +- Calc(select=[b, d, w$start AS ping_start, c, a])
+      +- Calc(select=[b, d, w$start AS ping_start, c, CAST(a) AS $f4])
          +- GroupWindowAggregate(groupBy=[a, b, c], window=[TumblingGroupWindow('w$, rowtime, 900000)], properties=[w$start, w$end, w$rowtime, w$proctime], select=[a, b, c, COUNT(*) AS d, start('w$) AS w$start, end('w$) AS w$end, rowtime('w$) AS w$rowtime, proctime('w$) AS w$proctime])
             +- Exchange(distribution=[hash[a, b, c]])
                +- Calc(select=[a, b, c, rowtime])
@@ -468,7 +469,7 @@ FROM MyTable
       <![CDATA[
 LogicalProject(EXPR$0=[$1], wAvg=[$2], EXPR$2=[TUMBLE_START($0)], EXPR$3=[TUMBLE_END($0)])
 +- LogicalAggregate(group=[{0}], EXPR$0=[COUNT()], wAvg=[weightedAvg($1, $2)])
-   +- LogicalProject($f0=[$TUMBLE($4, 900000:INTERVAL MINUTE)], c=[$2], a=[$0])
+   +- LogicalProject($f0=[$TUMBLE($4, 900000:INTERVAL MINUTE)], c=[$2], $f2=[CAST($0):BIGINT])
       +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
@@ -477,40 +478,11 @@ LogicalProject(EXPR$0=[$1], wAvg=[$2], EXPR$2=[TUMBLE_START($0)], EXPR$3=[TUMBLE
 Calc(select=[EXPR$0, wAvg, w$start AS EXPR$2, w$end AS EXPR$3])
 +- GroupWindowAggregate(window=[TumblingGroupWindow('w$, rowtime, 900000)], properties=[w$start, w$end, w$rowtime, w$proctime], select=[COUNT(*) AS EXPR$0, weightedAvg(c, a) AS wAvg, start('w$) AS w$start, end('w$) AS w$end, rowtime('w$) AS w$rowtime, proctime('w$) AS w$proctime])
    +- Exchange(distribution=[single])
-      +- Calc(select=[rowtime, c, a])
+      +- Calc(select=[rowtime, c, CAST(a) AS a])
          +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
 ]]>
     </Resource>
   </TestCase>
-  <TestCase name="testWindowGroupByOnConstant">
-    <Resource name="sql">
-            <![CDATA[
-SELECT COUNT(*),
-    weightedAvg(c, a) AS wAvg,
-    TUMBLE_START(rowtime, INTERVAL '15' MINUTE),
-    TUMBLE_END(rowtime, INTERVAL '15' MINUTE)
-FROM MyTable
-    GROUP BY 'a', TUMBLE(rowtime, INTERVAL '15' MINUTE)
-      ]]>
-     </Resource>
-     <Resource name="planBefore">
-            <![CDATA[
-LogicalProject(EXPR$0=[$2], wAvg=[$3], EXPR$2=[TUMBLE_START($1)], EXPR$3=[TUMBLE_END($1)])
-+- LogicalAggregate(group=[{0, 1}], EXPR$0=[COUNT()], wAvg=[weightedAvg($2, $3)])
-   +- LogicalProject($f0=[_UTF-16LE'a'], $f1=[$TUMBLE($4, 900000:INTERVAL MINUTE)], c=[$2], a=[$0])
-      +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
-]]>
-     </Resource>
-     <Resource name="planAfter">
-            <![CDATA[
-Calc(select=[EXPR$0, wAvg, w$start AS EXPR$2, w$end AS EXPR$3])
-+- GroupWindowAggregate(window=[TumblingGroupWindow('w$, rowtime, 900000)], properties=[w$start, w$end, w$rowtime, w$proctime], select=[COUNT(*) AS EXPR$0, weightedAvg(c, a) AS wAvg, start('w$) AS w$start, end('w$) AS w$end, rowtime('w$) AS w$rowtime, proctime('w$) AS w$proctime])
-   +- Exchange(distribution=[single])
-      +- Calc(select=[rowtime, c, a])
-         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
-]]>
-     </Resource>
-  </TestCase>
   <TestCase name="testTumbleFunInGroupBy">
     <Resource name="sql">
       <![CDATA[
@@ -527,7 +499,7 @@ SELECT weightedAvg(c, a) FROM
       <![CDATA[
 LogicalProject(EXPR$0=[$2])
 +- LogicalAggregate(group=[{0, 1}], EXPR$0=[weightedAvg($2, $3)])
-   +- LogicalProject(b=[$1], ping_start=[TUMBLE_START($3)], c=[$2], a=[$0])
+   +- LogicalProject(b=[$1], ping_start=[TUMBLE_START($3)], c=[$2], $f3=[CAST($0):BIGINT])
       +- LogicalAggregate(group=[{0, 1, 2, 3}])
          +- LogicalProject(a=[$0], b=[$1], c=[$2], $f3=[$TUMBLE($4, 900000:INTERVAL MINUTE)])
             +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
@@ -536,9 +508,9 @@ LogicalProject(EXPR$0=[$2])
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[EXPR$0])
-+- GroupAggregate(groupBy=[b, ping_start], select=[b, ping_start, weightedAvg(c, a) AS EXPR$0])
++- GroupAggregate(groupBy=[b, ping_start], select=[b, ping_start, weightedAvg(c, $f3) AS EXPR$0])
    +- Exchange(distribution=[hash[b, ping_start]])
-      +- Calc(select=[b, w$start AS ping_start, c, a])
+      +- Calc(select=[b, w$start AS ping_start, c, CAST(a) AS $f3])
          +- GroupWindowAggregate(groupBy=[a, b, c], window=[TumblingGroupWindow('w$, rowtime, 900000)], properties=[w$start, w$end, w$rowtime, w$proctime], select=[a, b, c, start('w$) AS w$start, end('w$) AS w$end, rowtime('w$) AS w$rowtime, proctime('w$) AS w$proctime])
             +- Exchange(distribution=[hash[a, b, c]])
                +- Calc(select=[a, b, c, rowtime])
@@ -561,7 +533,7 @@ GROUP BY b
       <![CDATA[
 LogicalProject(EXPR$0=[$1])
 +- LogicalAggregate(group=[{0}], EXPR$0=[weightedAvg($1, $2)])
-   +- LogicalProject(b=[$1], c=[$2], a=[$0])
+   +- LogicalProject(b=[$1], c=[$2], $f2=[CAST($0):BIGINT])
       +- LogicalAggregate(group=[{0, 1, 2, 3}])
          +- LogicalProject(a=[$0], b=[$1], c=[$2], $f3=[$TUMBLE($4, 900000:INTERVAL MINUTE)])
             +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
@@ -570,12 +542,13 @@ LogicalProject(EXPR$0=[$1])
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[EXPR$0])
-+- GroupAggregate(groupBy=[b], select=[b, weightedAvg(c, a) AS EXPR$0])
++- GroupAggregate(groupBy=[b], select=[b, weightedAvg(c, $f2) AS EXPR$0])
    +- Exchange(distribution=[hash[b]])
-      +- GroupWindowAggregate(groupBy=[a, b, c], window=[TumblingGroupWindow('w$, rowtime, 900000)], select=[a, b, c])
-         +- Exchange(distribution=[hash[a, b, c]])
-            +- Calc(select=[a, b, c, rowtime])
-               +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+      +- Calc(select=[b, c, CAST(a) AS $f2])
+         +- GroupWindowAggregate(groupBy=[a, b, c], window=[TumblingGroupWindow('w$, rowtime, 900000)], select=[a, b, c])
+            +- Exchange(distribution=[hash[a, b, c]])
+               +- Calc(select=[a, b, c, rowtime])
+                  +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
 ]]>
     </Resource>
   </TestCase>
@@ -646,6 +619,61 @@ GroupWindowAggregate(window=[TumblingGroupWindow('w$, $f2, 1000)], select=[SUM(a
 ]]>
     </Resource>
   </TestCase>
+  <TestCase name="testWindowAggregateWithAllowLatenessOnly">
+    <Resource name="sql">
+      <![CDATA[
+SELECT TUMBLE_START(`rowtime`, INTERVAL '1' SECOND), COUNT(*) cnt
+FROM MyTable
+GROUP BY TUMBLE(`rowtime`, INTERVAL '1' SECOND)
+]]>
+    </Resource>
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[TUMBLE_START($0)], cnt=[$1])
++- LogicalAggregate(group=[{0}], cnt=[COUNT()])
+   +- LogicalProject($f0=[$TUMBLE($4, 1000:INTERVAL SECOND)])
+      +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[w$start AS EXPR$0, cnt], changelogMode=[I])
++- GroupWindowAggregate(window=[TumblingGroupWindow('w$, rowtime, 1000)], properties=[w$start, w$end, w$rowtime, w$proctime], select=[COUNT(*) AS cnt, start('w$) AS w$start, end('w$) AS w$end, rowtime('w$) AS w$rowtime, proctime('w$) AS w$proctime], changelogMode=[I])
+   +- Exchange(distribution=[single], changelogMode=[I])
+      +- Calc(select=[rowtime], changelogMode=[I])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime], changelogMode=[I])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testWindowGroupByOnConstant">
+    <Resource name="sql">
+      <![CDATA[
+SELECT COUNT(*),
+    weightedAvg(c, a) AS wAvg,
+    TUMBLE_START(rowtime, INTERVAL '15' MINUTE),
+    TUMBLE_END(rowtime, INTERVAL '15' MINUTE)
+FROM MyTable
+    GROUP BY 'a', TUMBLE(rowtime, INTERVAL '15' MINUTE)
+      ]]>
+    </Resource>
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$2], wAvg=[$3], EXPR$2=[TUMBLE_START($1)], EXPR$3=[TUMBLE_END($1)])
++- LogicalAggregate(group=[{0, 1}], EXPR$0=[COUNT()], wAvg=[weightedAvg($2, $3)])
+   +- LogicalProject($f0=[_UTF-16LE'a'], $f1=[$TUMBLE($4, 900000:INTERVAL MINUTE)], c=[$2], $f3=[CAST($0):BIGINT])
+      +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[EXPR$0, wAvg, w$start AS EXPR$2, w$end AS EXPR$3])
++- GroupWindowAggregate(window=[TumblingGroupWindow('w$, rowtime, 900000)], properties=[w$start, w$end, w$rowtime, w$proctime], select=[COUNT(*) AS EXPR$0, weightedAvg(c, a) AS wAvg, start('w$) AS w$start, end('w$) AS w$end, rowtime('w$) AS w$rowtime, proctime('w$) AS w$proctime])
+   +- Exchange(distribution=[single])
+      +- Calc(select=[rowtime, c, CAST(a) AS a])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
   <TestCase name="testWindowAggregateWithDifferentWindows">
     <Resource name="sql">
       <![CDATA[
@@ -693,30 +721,4 @@ Union(all=[true], union=[EXPR$0])
 ]]>
     </Resource>
   </TestCase>
-  <TestCase name="testWindowAggregateWithAllowLatenessOnly">
-    <Resource name="sql">
-      <![CDATA[
-SELECT TUMBLE_START(`rowtime`, INTERVAL '1' SECOND), COUNT(*) cnt
-FROM MyTable
-GROUP BY TUMBLE(`rowtime`, INTERVAL '1' SECOND)
-]]>
-    </Resource>
-    <Resource name="planBefore">
-      <![CDATA[
-LogicalProject(EXPR$0=[TUMBLE_START($0)], cnt=[$1])
-+- LogicalAggregate(group=[{0}], cnt=[COUNT()])
-   +- LogicalProject($f0=[$TUMBLE($4, 1000:INTERVAL SECOND)])
-      +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
-]]>
-    </Resource>
-    <Resource name="planAfter">
-      <![CDATA[
-Calc(select=[w$start AS EXPR$0, cnt], changelogMode=[I])
-+- GroupWindowAggregate(window=[TumblingGroupWindow('w$, rowtime, 1000)], properties=[w$start, w$end, w$rowtime, w$proctime], select=[COUNT(*) AS cnt, start('w$) AS w$start, end('w$) AS w$end, rowtime('w$) AS w$rowtime, proctime('w$) AS w$proctime], changelogMode=[I])
-   +- Exchange(distribution=[single], changelogMode=[I])
-      +- Calc(select=[rowtime], changelogMode=[I])
-         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime], changelogMode=[I])
-]]>
-    </Resource>
-  </TestCase>
 </Root>
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/AggregateTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/AggregateTest.xml
index c2f04bc..8935bc4 100644
--- a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/AggregateTest.xml
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/AggregateTest.xml
@@ -213,16 +213,18 @@ Calc(select=[CAST(2) AS b, EXPR$0])
     <Resource name="planBefore">
       <![CDATA[
 LogicalProject(EXPR$0=[$1], EXPR$1=[$2])
-+- LogicalAggregate(group=[{2}], EXPR$0=[WeightedAvg(DISTINCT $0, $1)], EXPR$1=[WeightedAvg($0, $1)])
-   +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
++- LogicalAggregate(group=[{1}], EXPR$0=[org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvg$de09eb7d8970fd21dea873ed9e3277e6(DISTINCT $0, $2)], EXPR$1=[org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvg$de09eb7d8970fd21dea873ed9e3277e6($0, $2)])
+   +- LogicalProject(a=[$0], c=[$2], b0=[CAST($1):BIGINT])
+      +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
 ]]>
     </Resource>
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[EXPR$0, EXPR$1])
-+- GroupAggregate(groupBy=[c], select=[c, WeightedAvg(DISTINCT a, b) AS EXPR$0, WeightedAvg(a, b) AS EXPR$1])
++- GroupAggregate(groupBy=[c], select=[c, org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvg$de09eb7d8970fd21dea873ed9e3277e6(DISTINCT a, b0) AS EXPR$0, org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvg$de09eb7d8970fd21dea873ed9e3277e6(a, b0) AS EXPR$1])
    +- Exchange(distribution=[hash[c]])
-      +- LegacyTableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+      +- Calc(select=[a, c, CAST(b) AS b0])
+         +- LegacyTableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
 ]]>
     </Resource>
   </TestCase>
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/ColumnFunctionsTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/ColumnFunctionsTest.xml
index 54b1099..cfaf806 100644
--- a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/ColumnFunctionsTest.xml
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/ColumnFunctionsTest.xml
@@ -166,16 +166,17 @@ Correlate(invocation=[org$apache$flink$table$planner$utils$TableFunc0$9f62966fe1
   <TestCase name="testOver">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(c=[$2], mycount=[AS(countFun($1) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'mycount')], wAvg=[AS(weightAvgFun($0, $1) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')], countDist=[AS(countDist($0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'countDist')])
+LogicalProject(c=[$2], mycount=[AS(countFun($1) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'mycount')], wAvg=[AS(weightAvgFun($0, CAST($1):BIGINT) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')], countDist=[AS(countDist($0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'countDist')])
 +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
 ]]>
     </Resource>
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[c, w0$o0 AS mycount, w0$o1 AS wAvg, w0$o2 AS countDist])
-+- OverAggregate(partitionBy=[c], orderBy=[proctime ASC], window=[ ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, b, c, proctime, CountAggFunction(b) AS w0$o0, WeightedAvg(a, b) AS w0$o1, CountDistinct(a) AS w0$o2])
++- OverAggregate(partitionBy=[c], orderBy=[proctime ASC], window=[ ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, b, c, proctime, $4, CountAggFunction(b) AS w0$o0, weightAvgFun(a, $4) AS w0$o1, CountDistinct(a) AS w0$o2])
    +- Exchange(distribution=[hash[c]])
-      +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[a, b, c, proctime])
+      +- Calc(select=[a, b, c, proctime, CAST(b) AS $4])
+         +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[a, b, c, proctime])
 ]]>
     </Resource>
   </TestCase>
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/GroupWindowTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/GroupWindowTest.xml
index 0baa8fc..933946b 100644
--- a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/GroupWindowTest.xml
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/GroupWindowTest.xml
@@ -194,44 +194,21 @@ GroupWindowAggregate(groupBy=[string], window=[SessionGroupWindow('w, rowtime, 7
 ]]>
     </Resource>
   </TestCase>
-  <TestCase name="testWindowAggregateWithDifferentWindows">
-    <Resource name="planBefore">
-      <![CDATA[
-LogicalUnion(all=[true])
-:- LogicalProject(_c0=[AS(1, _UTF-16LE'_c0')])
-:  +- LogicalWindowTableAggregate(group=[{}], tableAggregate=[[EmptyTableAggFunc($1, $2)]], window=[SlidingGroupWindow('w1, ts, 3600000, 3600000)], properties=[])
-:     +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(ts, a, b)]]])
-+- LogicalProject(_c0=[AS(1, _UTF-16LE'_c0')])
-   +- LogicalWindowTableAggregate(group=[{}], tableAggregate=[[EmptyTableAggFunc($1, $2)]], window=[SlidingGroupWindow('w1, ts, 7200000, 3600000)], properties=[])
-      +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(ts, a, b)]]])
-]]>
-    </Resource>
-    <Resource name="planAfter">
-      <![CDATA[
-Union(all=[true], union=[_c0])
-:- Calc(select=[1 AS _c0])
-:  +- GroupWindowTableAggregate(window=[SlidingGroupWindow('w1, ts, 3600000, 3600000)], select=[EmptyTableAggFunc(a, b) AS (f0, f1)])
-:     +- Exchange(distribution=[single], reuse_id=[1])
-:        +- LegacyTableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(ts, a, b)]]], fields=[ts, a, b])
-+- Calc(select=[1 AS _c0])
-   +- GroupWindowTableAggregate(window=[SlidingGroupWindow('w1, ts, 7200000, 3600000)], select=[EmptyTableAggFunc(a, b) AS (f0, f1)])
-      +- Reused(reference_id=[1])
-]]>
-    </Resource>
-  </TestCase>
   <TestCase name="testEventTimeSessionGroupWindowWithUdAgg">
     <Resource name="planBefore">
       <![CDATA[
 LogicalProject(string=[$0], EXPR$0=[$1])
-+- LogicalWindowAggregate(group=[{2}], EXPR$0=[myWeightedAvg($0, $1)], window=[SessionGroupWindow('w, rowtime, 7)], properties=[])
-   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithMerge$89c523cc7d9b293d037af4b0651ea4d3($0, $4)], window=[SessionGroupWindow('w, rowtime, 7)], properties=[])
+   +- LogicalProject(long=[$0], int=[$1], string=[$2], rowtime=[$3], int0=[CAST($1):BIGINT])
+      +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
 ]]>
     </Resource>
     <Resource name="planAfter">
       <![CDATA[
-GroupWindowAggregate(groupBy=[string], window=[SessionGroupWindow('w, rowtime, 7)], select=[string, myWeightedAvg(long, int) AS EXPR$0])
+GroupWindowAggregate(groupBy=[string], window=[SessionGroupWindow('w, rowtime, 7)], select=[string, org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithMerge$89c523cc7d9b293d037af4b0651ea4d3(long, int0) AS EXPR$0])
 +- Exchange(distribution=[hash[string]])
-   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, rowtime])
+   +- Calc(select=[long, int, string, rowtime, CAST(int) AS int0])
+      +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, rowtime])
 ]]>
     </Resource>
   </TestCase>
@@ -287,15 +264,17 @@ GroupWindowAggregate(groupBy=[string], window=[SlidingGroupWindow('w, rowtime, 8
     <Resource name="planBefore">
       <![CDATA[
 LogicalProject(string=[$0], EXPR$0=[$1])
-+- LogicalWindowAggregate(group=[{2}], EXPR$0=[myWeightedAvg($0, $1)], window=[SlidingGroupWindow('w, rowtime, 8, 10)], properties=[])
-   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithMerge$89c523cc7d9b293d037af4b0651ea4d3($0, $4)], window=[SlidingGroupWindow('w, rowtime, 8, 10)], properties=[])
+   +- LogicalProject(long=[$0], int=[$1], string=[$2], rowtime=[$3], int0=[CAST($1):BIGINT])
+      +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
 ]]>
     </Resource>
     <Resource name="planAfter">
       <![CDATA[
-GroupWindowAggregate(groupBy=[string], window=[SlidingGroupWindow('w, rowtime, 8, 10)], select=[string, myWeightedAvg(long, int) AS EXPR$0])
+GroupWindowAggregate(groupBy=[string], window=[SlidingGroupWindow('w, rowtime, 8, 10)], select=[string, org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithMerge$89c523cc7d9b293d037af4b0651ea4d3(long, int0) AS EXPR$0])
 +- Exchange(distribution=[hash[string]])
-   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, rowtime])
+   +- Calc(select=[long, int, string, rowtime, CAST(int) AS int0])
+      +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, rowtime])
 ]]>
     </Resource>
   </TestCase>
@@ -319,15 +298,17 @@ GroupWindowAggregate(groupBy=[string], window=[TumblingGroupWindow('w, rowtime,
     <Resource name="planBefore">
       <![CDATA[
 LogicalProject(string=[$0], EXPR$0=[$1])
-+- LogicalWindowAggregate(group=[{2}], EXPR$0=[myWeightedAvg($0, $1)], window=[TumblingGroupWindow('w, rowtime, 5)], properties=[])
-   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithMerge$89c523cc7d9b293d037af4b0651ea4d3($0, $4)], window=[TumblingGroupWindow('w, rowtime, 5)], properties=[])
+   +- LogicalProject(long=[$0], int=[$1], string=[$2], rowtime=[$3], int0=[CAST($1):BIGINT])
+      +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
 ]]>
     </Resource>
     <Resource name="planAfter">
       <![CDATA[
-GroupWindowAggregate(groupBy=[string], window=[TumblingGroupWindow('w, rowtime, 5)], select=[string, myWeightedAvg(long, int) AS EXPR$0])
+GroupWindowAggregate(groupBy=[string], window=[TumblingGroupWindow('w, rowtime, 5)], select=[string, org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithMerge$89c523cc7d9b293d037af4b0651ea4d3(long, int0) AS EXPR$0])
 +- Exchange(distribution=[hash[string]])
-   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, rowtime])
+   +- Calc(select=[long, int, string, rowtime, CAST(int) AS int0])
+      +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, rowtime])
 ]]>
     </Resource>
   </TestCase>
@@ -436,16 +417,18 @@ Calc(select=[EXPR$0 AS we1, string, EXPR$1 AS cnt, EXPR$2 AS ws, EXPR$0 AS we2])
     <Resource name="planBefore">
       <![CDATA[
 LogicalProject(EXPR$0=[$3])
-+- LogicalWindowAggregate(group=[{2, 3, 4}], EXPR$0=[WeightedAvg($0, $1)], window=[SlidingGroupWindow('w, proctime, 2, 1)], properties=[])
-   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
++- LogicalWindowAggregate(group=[{2, 3, 4}], EXPR$0=[org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvg$de09eb7d8970fd21dea873ed9e3277e6($0, $6)], window=[SlidingGroupWindow('w, proctime, 2, 1)], properties=[])
+   +- LogicalProject(long=[$0], int=[$1], string=[$2], int2=[$3], int3=[$4], proctime=[$5], int0=[CAST($1):BIGINT])
+      +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
 ]]>
     </Resource>
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[EXPR$0])
-+- GroupWindowAggregate(groupBy=[string, int2, int3], window=[SlidingGroupWindow('w, proctime, 2, 1)], select=[string, int2, int3, WeightedAvg(long, int) AS EXPR$0])
++- GroupWindowAggregate(groupBy=[string, int2, int3], window=[SlidingGroupWindow('w, proctime, 2, 1)], select=[string, int2, int3, org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvg$de09eb7d8970fd21dea873ed9e3277e6(long, int0) AS EXPR$0])
    +- Exchange(distribution=[hash[string, int2, int3]])
-      +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, int2, int3, proctime])
+      +- Calc(select=[long, int, string, int2, int3, proctime, CAST(int) AS int0])
+         +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, int2, int3, proctime])
 ]]>
     </Resource>
   </TestCase>
@@ -482,4 +465,29 @@ Calc(select=[string, +(EXPR$0, 1) AS s1, +(EXPR$0, 3) AS s2, EXPR$1 AS x, EXPR$1
 ]]>
     </Resource>
   </TestCase>
+  <TestCase name="testWindowAggregateWithDifferentWindows">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalUnion(all=[true])
+:- LogicalProject(_c0=[AS(1, _UTF-16LE'_c0')])
+:  +- LogicalWindowTableAggregate(group=[{}], tableAggregate=[[EmptyTableAggFunc($1, $2)]], window=[SlidingGroupWindow('w1, ts, 3600000, 3600000)], properties=[])
+:     +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(ts, a, b)]]])
++- LogicalProject(_c0=[AS(1, _UTF-16LE'_c0')])
+   +- LogicalWindowTableAggregate(group=[{}], tableAggregate=[[EmptyTableAggFunc($1, $2)]], window=[SlidingGroupWindow('w1, ts, 7200000, 3600000)], properties=[])
+      +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(ts, a, b)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Union(all=[true], union=[_c0])
+:- Calc(select=[1 AS _c0])
+:  +- GroupWindowTableAggregate(window=[SlidingGroupWindow('w1, ts, 3600000, 3600000)], select=[EmptyTableAggFunc(a, b) AS (f0, f1)])
+:     +- Exchange(distribution=[single], reuse_id=[1])
+:        +- LegacyTableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(ts, a, b)]]], fields=[ts, a, b])
++- Calc(select=[1 AS _c0])
+   +- GroupWindowTableAggregate(window=[SlidingGroupWindow('w1, ts, 7200000, 3600000)], select=[EmptyTableAggFunc(a, b) AS (f0, f1)])
+      +- Reused(reference_id=[1])
+]]>
+    </Resource>
+  </TestCase>
 </Root>
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/OverWindowTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/OverWindowTest.xml
index 844e238..055f7fa 100644
--- a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/OverWindowTest.xml
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/OverWindowTest.xml
@@ -53,16 +53,16 @@ Calc(select=[c, w0$o0 AS _c1])
   <TestCase name="testProcTimeBoundedPartitionedRangeOver">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(a=[$0], myAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$4060051ba256b2d83fccd580b20a09be($2, $0) OVER (PARTITION BY $0 ORDER BY $3 NULLS FIRST RANGE BETWEEN 7200000 PRECEDING AND CURRENT ROW), _UTF-16LE'myAvg')])
+LogicalProject(a=[$0], myAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$081f8b23180a56edc2515c53c561ebf0($2, CAST($0):BIGINT) OVER (PARTITION BY $0 ORDER BY $3 NULLS FIRST RANGE BETWEEN 7200000 PRECEDING AND CURRENT ROW), _UTF-16LE'myAvg')])
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[a, w0$o0 AS myAvg])
-+- OverAggregate(partitionBy=[a], orderBy=[proctime ASC], window=[ RANG BETWEEN 7200000 PRECEDING AND CURRENT ROW], select=[a, c, proctime, WeightedAvgWithRetract(c, a) AS w0$o0])
++- OverAggregate(partitionBy=[a], orderBy=[proctime ASC], window=[ RANG BETWEEN 7200000 PRECEDING AND CURRENT ROW], select=[a, c, proctime, $3, org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$081f8b23180a56edc2515c53c561ebf0(c, $3) AS w0$o0])
    +- Exchange(distribution=[hash[a]])
-      +- Calc(select=[a, c, proctime])
+      +- Calc(select=[a, c, proctime, CAST(a) AS $3])
          +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
 ]]>
     </Resource>
@@ -70,16 +70,16 @@ Calc(select=[a, w0$o0 AS myAvg])
   <TestCase name="testProcTimeBoundedPartitionedRowsOver">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(c=[$2], _c1=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$4060051ba256b2d83fccd580b20a09be($2, $0) OVER (PARTITION BY $1 ORDER BY $3 NULLS FIRST ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')])
+LogicalProject(c=[$2], _c1=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$081f8b23180a56edc2515c53c561ebf0($2, CAST($0):BIGINT) OVER (PARTITION BY $1 ORDER BY $3 NULLS FIRST ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')])
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[c, w0$o0 AS _c1])
-+- OverAggregate(partitionBy=[b], orderBy=[proctime ASC], window=[ ROWS BETWEEN 2 PRECEDING AND CURRENT ROW], select=[a, b, c, proctime, WeightedAvgWithRetract(c, a) AS w0$o0])
++- OverAggregate(partitionBy=[b], orderBy=[proctime ASC], window=[ ROWS BETWEEN 2 PRECEDING AND CURRENT ROW], select=[b, c, proctime, $3, org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$081f8b23180a56edc2515c53c561ebf0(c, $3) AS w0$o0])
    +- Exchange(distribution=[hash[b]])
-      +- Calc(select=[a, b, c, proctime])
+      +- Calc(select=[b, c, proctime, CAST(a) AS $3])
          +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
 ]]>
     </Resource>
@@ -121,16 +121,16 @@ Calc(select=[c, w0$o0 AS _c1])
   <TestCase name="testProcTimeUnboundedPartitionedRangeOver">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(a=[$0], c=[$2], _c2=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c2')], _c3=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$4060051ba256b2d83fccd580b20a09be($2, $0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c3')])
+LogicalProject(a=[$0], c=[$2], _c2=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c2')], _c3=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$081f8b23180a56edc2515c53c561ebf0($2, CAST($0):BIGINT) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c3')])
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[a, c, w0$o0 AS _c2, w0$o1 AS _c3])
-+- OverAggregate(partitionBy=[c], orderBy=[proctime ASC], window=[ RANG BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, c, proctime, COUNT(a) AS w0$o0, WeightedAvgWithRetract(c, a) AS w0$o1])
++- OverAggregate(partitionBy=[c], orderBy=[proctime ASC], window=[ RANG BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, c, proctime, $3, COUNT(a) AS w0$o0, org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$081f8b23180a56edc2515c53c561ebf0(c, $3) AS w0$o1])
    +- Exchange(distribution=[hash[c]])
-      +- Calc(select=[a, c, proctime])
+      +- Calc(select=[a, c, proctime, CAST(a) AS $3])
          +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
 ]]>
     </Resource>
@@ -138,16 +138,16 @@ Calc(select=[a, c, w0$o0 AS _c2, w0$o1 AS _c3])
   <TestCase name="testProcTimeUnboundedPartitionedRowsOver">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(c=[$2], _c1=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], _c2=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$4060051ba256b2d83fccd580b20a09be($2, $0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c2')])
+LogicalProject(c=[$2], _c1=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], _c2=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$081f8b23180a56edc2515c53c561ebf0($2, CAST($0):BIGINT) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c2')])
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[c, w0$o0 AS _c1, w0$o1 AS _c2])
-+- OverAggregate(partitionBy=[c], orderBy=[proctime ASC], window=[ ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, c, proctime, COUNT(a) AS w0$o0, WeightedAvgWithRetract(c, a) AS w0$o1])
++- OverAggregate(partitionBy=[c], orderBy=[proctime ASC], window=[ ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, c, proctime, $3, COUNT(a) AS w0$o0, org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$081f8b23180a56edc2515c53c561ebf0(c, $3) AS w0$o1])
    +- Exchange(distribution=[hash[c]])
-      +- Calc(select=[a, c, proctime])
+      +- Calc(select=[a, c, proctime, CAST(a) AS $3])
          +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
 ]]>
     </Resource>
@@ -172,16 +172,16 @@ Calc(select=[a, w0$o0 AS _c1])
   <TestCase name="testRowTimeBoundedPartitionedRangeOver">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(a=[$0], _c1=[AS(AVG($2) OVER (PARTITION BY $0 ORDER BY $4 NULLS FIRST RANGE BETWEEN 7200000 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], wAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$4060051ba256b2d83fccd580b20a09be($2, $0) OVER (PARTITION BY $0 ORDER BY $4 NULLS FIRST RANGE BETWEEN 7200000 PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
+LogicalProject(a=[$0], _c1=[AS(AVG($2) OVER (PARTITION BY $0 ORDER BY $4 NULLS FIRST RANGE BETWEEN 7200000 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], wAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$081f8b23180a56edc2515c53c561ebf0($2, CAST($0):BIGINT) OVER (PARTITION BY $0 ORDER BY $4 NULLS FIRST RANGE BETWEEN 7200000 PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[a, w0$o0 AS _c1, w0$o1 AS wAvg])
-+- OverAggregate(partitionBy=[a], orderBy=[rowtime ASC], window=[ RANG BETWEEN 7200000 PRECEDING AND CURRENT ROW], select=[a, c, rowtime, AVG(c) AS w0$o0, WeightedAvgWithRetract(c, a) AS w0$o1])
++- OverAggregate(partitionBy=[a], orderBy=[rowtime ASC], window=[ RANG BETWEEN 7200000 PRECEDING AND CURRENT ROW], select=[a, c, rowtime, $3, AVG(c) AS w0$o0, org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$081f8b23180a56edc2515c53c561ebf0(c, $3) AS w0$o1])
    +- Exchange(distribution=[hash[a]])
-      +- Calc(select=[a, c, rowtime])
+      +- Calc(select=[a, c, rowtime, CAST(a) AS $3])
          +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
 ]]>
     </Resource>
@@ -206,16 +206,16 @@ Calc(select=[c, w0$o0 AS _c1])
   <TestCase name="testRowTimeBoundedPartitionedRowsOver">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(c=[$2], _c1=[AS(COUNT($1) OVER (PARTITION BY $1 ORDER BY $4 NULLS FIRST ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], wAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$4060051ba256b2d83fccd580b20a09be($2, $0) OVER (PARTITION BY $1 ORDER BY $4 NULLS FIRST ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
+LogicalProject(c=[$2], _c1=[AS(COUNT($1) OVER (PARTITION BY $1 ORDER BY $4 NULLS FIRST ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], wAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$081f8b23180a56edc2515c53c561ebf0($2, CAST($0):BIGINT) OVER (PARTITION BY $1 ORDER BY $4 NULLS FIRST ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[c, w0$o0 AS _c1, w0$o1 AS wAvg])
-+- OverAggregate(partitionBy=[b], orderBy=[rowtime ASC], window=[ ROWS BETWEEN 2 PRECEDING AND CURRENT ROW], select=[a, b, c, rowtime, COUNT(b) AS w0$o0, WeightedAvgWithRetract(c, a) AS w0$o1])
++- OverAggregate(partitionBy=[b], orderBy=[rowtime ASC], window=[ ROWS BETWEEN 2 PRECEDING AND CURRENT ROW], select=[b, c, rowtime, $3, COUNT(b) AS w0$o0, org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$081f8b23180a56edc2515c53c561ebf0(c, $3) AS w0$o1])
    +- Exchange(distribution=[hash[b]])
-      +- Calc(select=[a, b, c, rowtime])
+      +- Calc(select=[b, c, rowtime, CAST(a) AS $3])
          +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
 ]]>
     </Resource>
@@ -257,16 +257,16 @@ Calc(select=[c, w0$o0 AS _c1])
   <TestCase name="testRowTimeUnboundedPartitionedRowsOver">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(c=[$2], _c1=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], wAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$4060051ba256b2d83fccd580b20a09be($2, $0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
+LogicalProject(c=[$2], _c1=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], wAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$081f8b23180a56edc2515c53c561ebf0($2, CAST($0):BIGINT) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[c, w0$o0 AS _c1, w0$o1 AS wAvg])
-+- OverAggregate(partitionBy=[c], orderBy=[rowtime ASC], window=[ ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, c, rowtime, COUNT(a) AS w0$o0, WeightedAvgWithRetract(c, a) AS w0$o1])
++- OverAggregate(partitionBy=[c], orderBy=[rowtime ASC], window=[ ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, c, rowtime, $3, COUNT(a) AS w0$o0, org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$081f8b23180a56edc2515c53c561ebf0(c, $3) AS w0$o1])
    +- Exchange(distribution=[hash[c]])
-      +- Calc(select=[a, c, rowtime])
+      +- Calc(select=[a, c, rowtime, CAST(a) AS $3])
          +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
 ]]>
     </Resource>
@@ -274,16 +274,16 @@ Calc(select=[c, w0$o0 AS _c1, w0$o1 AS wAvg])
   <TestCase name="testRowTimeUnboundedPartitionedRangeOver">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(a=[$0], c=[$2], _c2=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c2')], wAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$4060051ba256b2d83fccd580b20a09be($2, $0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
+LogicalProject(a=[$0], c=[$2], _c2=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c2')], wAvg=[AS(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$081f8b23180a56edc2515c53c561ebf0($2, CAST($0):BIGINT) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[a, c, w0$o0 AS _c2, w0$o1 AS wAvg])
-+- OverAggregate(partitionBy=[c], orderBy=[rowtime ASC], window=[ RANG BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, c, rowtime, COUNT(a) AS w0$o0, WeightedAvgWithRetract(c, a) AS w0$o1])
++- OverAggregate(partitionBy=[c], orderBy=[rowtime ASC], window=[ RANG BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, c, rowtime, $3, COUNT(a) AS w0$o0, org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$081f8b23180a56edc2515c53c561ebf0(c, $3) AS w0$o1])
    +- Exchange(distribution=[hash[c]])
-      +- Calc(select=[a, c, rowtime])
+      +- Calc(select=[a, c, rowtime, CAST(a) AS $3])
          +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
 ]]>
     </Resource>
@@ -291,16 +291,16 @@ Calc(select=[a, c, w0$o0 AS _c2, w0$o1 AS wAvg])
   <TestCase name="testScalarFunctionsOnOverWindow">
     <Resource name="planBefore">
       <![CDATA[
-LogicalProject(d=[AS(org$apache$flink$table$planner$expressions$utils$Func1$$879c8537562dbe74f3349fa0e6502755(AS(SUM($0) OVER (PARTITION BY $1 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wsum')), _UTF-16LE'd')], _c1=[AS(EXP(COUNT($0) OVER (PARTITION BY $1 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)), _UTF-16LE'_c1')], _c2=[AS(+(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetrac [...]
+LogicalProject(d=[AS(org$apache$flink$table$planner$expressions$utils$Func1$$879c8537562dbe74f3349fa0e6502755(AS(SUM($0) OVER (PARTITION BY $1 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wsum')), _UTF-16LE'd')], _c1=[AS(EXP(COUNT($0) OVER (PARTITION BY $1 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)), _UTF-16LE'_c1')], _c2=[AS(+(org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetrac [...]
 +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
 ]]>
     </Resource>
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[Func1$(w0$o0) AS d, EXP(w0$o1) AS _c1, +(w0$o2, 1) AS _c2, ||(_UTF-16LE'AVG:', CAST(w0$o2)) AS _c3, ARRAY(w0$o2, w0$o1) AS _c4])
-+- OverAggregate(partitionBy=[b], orderBy=[proctime ASC], window=[ ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, b, c, proctime, SUM(a) AS w0$o0, COUNT(a) AS w0$o1, WeightedAvgWithRetract(c, a) AS w0$o2])
++- OverAggregate(partitionBy=[b], orderBy=[proctime ASC], window=[ ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, b, c, proctime, $4, SUM(a) AS w0$o0, COUNT(a) AS w0$o1, org$apache$flink$table$planner$plan$utils$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$081f8b23180a56edc2515c53c561ebf0(c, $4) AS w0$o2])
    +- Exchange(distribution=[hash[b]])
-      +- Calc(select=[a, b, c, proctime])
+      +- Calc(select=[a, b, c, proctime, CAST(a) AS $4])
          +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
 ]]>
     </Resource>
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/TableAggregateTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/TableAggregateTest.xml
index 6dd5480..a2d10b2 100644
--- a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/TableAggregateTest.xml
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/planner/plan/stream/table/TableAggregateTest.xml
@@ -36,14 +36,14 @@ GroupTableAggregate(groupBy=[c], select=[c, EmptyTableAggFunc(a) AS (f0, f1)])
     <Resource name="planBefore">
       <![CDATA[
 LogicalProject(a=[AS($0, _UTF-16LE'a')], b=[AS($1, _UTF-16LE'b')])
-+- LogicalTableAggregate(group=[{}], tableAggregate=[[EmptyTableAggFunc($1)]])
++- LogicalTableAggregate(group=[{}], tableAggregate=[[org$apache$flink$table$planner$utils$EmptyTableAggFunc$3abce17277a07430b548ea4eacbf9f67($1)]])
    +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c, d, e)]]])
 ]]>
     </Resource>
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[f0 AS a, f1 AS b])
-+- GroupTableAggregate(select=[EmptyTableAggFunc(b) AS (f0, f1)])
++- GroupTableAggregate(select=[org$apache$flink$table$planner$utils$EmptyTableAggFunc$3abce17277a07430b548ea4eacbf9f67(b) AS (f0, f1)])
    +- Exchange(distribution=[single])
       +- LegacyTableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c, d, e)]]], fields=[a, b, c, d, e])
 ]]>
@@ -53,7 +53,7 @@ Calc(select=[f0 AS a, f1 AS b])
     <Resource name="planBefore">
       <![CDATA[
 LogicalProject(bb=[AS($0, _UTF-16LE'bb')], _c1=[+(AS($1, _UTF-16LE'x'), 1)], y=[AS($2, _UTF-16LE'y')])
-+- LogicalTableAggregate(group=[{2}], tableAggregate=[[EmptyTableAggFunc($0, $1)]])
++- LogicalTableAggregate(group=[{2}], tableAggregate=[[org$apache$flink$table$planner$utils$EmptyTableAggFunc$3abce17277a07430b548ea4eacbf9f67($0, $1)]])
    +- LogicalProject(a=[$0], b=[$1], bb=[MOD($1, 5)])
       +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c, d, e)]]])
 ]]>
@@ -61,7 +61,7 @@ LogicalProject(bb=[AS($0, _UTF-16LE'bb')], _c1=[+(AS($1, _UTF-16LE'x'), 1)], y=[
     <Resource name="planAfter">
       <![CDATA[
 Calc(select=[bb, +(f0, 1) AS _c1, f1 AS y])
-+- GroupTableAggregate(groupBy=[bb], select=[bb, EmptyTableAggFunc(a, b) AS (f0, f1)])
++- GroupTableAggregate(groupBy=[bb], select=[bb, org$apache$flink$table$planner$utils$EmptyTableAggFunc$3abce17277a07430b548ea4eacbf9f67(a, b) AS (f0, f1)])
    +- Exchange(distribution=[hash[bb]])
       +- Calc(select=[a, b, MOD(b, 5) AS bb])
          +- LegacyTableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c, d, e)]]], fields=[a, b, c, d, e])
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/table/GroupWindowTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/table/GroupWindowTest.scala
index 9a95d3e..5d2a928 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/table/GroupWindowTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/table/GroupWindowTest.scala
@@ -50,12 +50,10 @@ class GroupWindowTest extends TableTestBase {
     val util = batchTestUtil()
     val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
 
-    val myWeightedAvg = new WeightedAvgWithMerge
-
     val windowedTable = table
       .window(Tumble over 5.millis on 'long as 'w)
       .groupBy('w, 'string)
-      .select('string, myWeightedAvg('long, 'int))
+      .select('string, call(classOf[WeightedAvgWithMerge], 'long, 'int))
 
     util.verifyPlan(windowedTable)
   }
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/table/stringexpr/AggregateStringExpressionTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/table/stringexpr/AggregateStringExpressionTest.scala
index e31c5fb..48aaf88 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/table/stringexpr/AggregateStringExpressionTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/table/stringexpr/AggregateStringExpressionTest.scala
@@ -272,10 +272,12 @@ class AggregateStringExpressionTest extends TableTestBase {
 
     val myCnt = new CountAggFunction
     util.addFunction("myCnt", myCnt)
-    val myWeightedAvg = new WeightedAvgWithMergeAndReset
-    util.addFunction("myWeightedAvg", myWeightedAvg)
+    util.addTemporarySystemFunction("myWeightedAvg", classOf[WeightedAvgWithMergeAndReset])
 
-    val t1 = t.select(myCnt.distinct('a) as 'aCnt, myWeightedAvg.distinct('b, 'a) as 'wAvg)
+    val t1 = t.select(
+      myCnt.distinct('a) as 'aCnt,
+      call("myWeightedAvg", 'b, 'a).distinct() as 'wAvg
+    )
     val t2 = t.select("myCnt.distinct(a) as aCnt, myWeightedAvg.distinct(b, a) as wAvg")
 
     verifyTableEquals(t1, t2)
@@ -287,11 +289,10 @@ class AggregateStringExpressionTest extends TableTestBase {
     val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
 
     val myCnt = new CountAggFunction
-   util.addFunction("myCnt", myCnt)
-    val myWeightedAvg = new WeightedAvgWithMergeAndReset
-   util.addFunction("myWeightedAvg", myWeightedAvg)
+    util.addFunction("myCnt", myCnt)
+    util.addTemporarySystemFunction("myWeightedAvg", classOf[WeightedAvgWithMergeAndReset])
 
-    val t1 = t.select(myCnt('a) as 'aCnt, myWeightedAvg('b, 'a) as 'wAvg)
+    val t1 = t.select(myCnt('a) as 'aCnt, call("myWeightedAvg", 'b, 'a) as 'wAvg)
     val t2 = t.select("myCnt(a) as aCnt, myWeightedAvg(b, a) as wAvg")
 
     verifyTableEquals(t1, t2)
@@ -304,16 +305,15 @@ class AggregateStringExpressionTest extends TableTestBase {
 
 
     val myCnt = new CountAggFunction
-   util.addFunction("myCnt", myCnt)
-    val myWeightedAvg = new WeightedAvgWithMergeAndReset
-   util.addFunction("myWeightedAvg", myWeightedAvg)
+    util.addFunction("myCnt", myCnt)
+    util.addTemporarySystemFunction("myWeightedAvg", classOf[WeightedAvgWithMergeAndReset])
 
     val t1 = t.groupBy('b)
       .select('b,
         myCnt.distinct('a) + 9 as 'aCnt,
-        myWeightedAvg.distinct('b, 'a) * 2 as 'wAvg,
-        myWeightedAvg.distinct('a, 'a) as 'distAgg,
-        myWeightedAvg('a, 'a) as 'agg)
+        call("myWeightedAvg", 'b, 'a).distinct() * 2 as 'wAvg,
+        call("myWeightedAvg", 'a, 'a).distinct() as 'distAgg,
+        call("myWeightedAvg", 'a, 'a) as 'agg)
     val t2 = t.groupBy("b")
       .select("b, myCnt.distinct(a) + 9 as aCnt, myWeightedAvg.distinct(b, a) * 2 as wAvg, " +
         "myWeightedAvg.distinct(a, a) as distAgg, myWeightedAvg(a, a) as agg")
@@ -329,11 +329,14 @@ class AggregateStringExpressionTest extends TableTestBase {
 
     val myCnt = new CountAggFunction
    util.addFunction("myCnt", myCnt)
-    val myWeightedAvg = new WeightedAvgWithMergeAndReset
-   util.addFunction("myWeightedAvg", myWeightedAvg)
+   util.addTemporarySystemFunction("myWeightedAvg", classOf[WeightedAvgWithMergeAndReset])
 
     val t1 = t.groupBy('b)
-      .select('b, myCnt('a) + 9 as 'aCnt, myWeightedAvg('b, 'a) * 2 as 'wAvg, myWeightedAvg('a, 'a))
+      .select(
+        'b,
+        myCnt('a) + 9 as 'aCnt,
+        call("myWeightedAvg", 'b, 'a) * 2 as 'wAvg,
+        call("myWeightedAvg", 'a, 'a))
     val t2 = t.groupBy("b")
       .select("b, myCnt(a) + 9 as aCnt, myWeightedAvg(b, a) * 2 as wAvg, myWeightedAvg(a, a)")
 
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/ModifiedMonotonicityTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/ModifiedMonotonicityTest.scala
index 1c78035..36149a6 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/ModifiedMonotonicityTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/ModifiedMonotonicityTest.scala
@@ -38,7 +38,7 @@ class ModifiedMonotonicityTest extends TableTestBase {
   util.addDataStream[(Int, Long, Long)]("B", 'b1, 'b2, 'b3)
   util.addDataStream[(Int, String, Long)](
     "MyTable", 'a, 'b, 'c, 'proctime.proctime, 'rowtime.rowtime)
-  util.addFunction("weightedAvg", new WeightedAvgWithMerge)
+  util.addTemporarySystemFunction("weightedAvg", classOf[WeightedAvgWithMerge])
   util.addTableSource[(Int, Long, String)]("AA", 'a1, 'a2, 'a3)
   util.addTableSource[(Int, Long, Int, String, Long)]("BB", 'b1, 'b2, 'b3, 'b4, 'b5)
 
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/agg/WindowAggregateTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/agg/WindowAggregateTest.scala
index 1776cd0..d3433e4 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/agg/WindowAggregateTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/sql/agg/WindowAggregateTest.scala
@@ -32,7 +32,7 @@ class WindowAggregateTest extends TableTestBase {
   private val util = streamTestUtil()
   util.addDataStream[(Int, String, Long)](
     "MyTable", 'a, 'b, 'c, 'proctime.proctime, 'rowtime.rowtime)
-  util.addFunction("weightedAvg", new WeightedAvgWithMerge)
+  util.addTemporarySystemFunction("weightedAvg", classOf[WeightedAvgWithMerge])
   util.tableEnv.executeSql(
     s"""
        |create table MyTable1 (
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/AggregateTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/AggregateTest.scala
index 3498f7d..f117197 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/AggregateTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/AggregateTest.scala
@@ -48,7 +48,7 @@ class AggregateTest extends TableTestBase {
 
     val resultTable = table
       .groupBy('c)
-      .select(weightedAvg.distinct('a, 'b), weightedAvg('a, 'b))
+      .select(call(weightedAvg, 'a, 'b).distinct(), call(weightedAvg, 'a, 'b))
 
     util.verifyPlan(resultTable)
   }
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/ColumnFunctionsTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/ColumnFunctionsTest.scala
index 24bfe90..4f8b331 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/ColumnFunctionsTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/ColumnFunctionsTest.scala
@@ -163,7 +163,7 @@ class ColumnFunctionsTest extends TableTestBase {
     val countDist = new CountDistinct
 
    util.addFunction("countFun", countFun)
-   util.addFunction("weightAvgFun", weightAvgFun)
+   util.addTemporarySystemFunction("weightAvgFun", weightAvgFun)
    util.addFunction("countDist", countDist)
 
     val tab1 = table
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/GroupWindowTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/GroupWindowTest.scala
index 446d18d..3fc1219 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/GroupWindowTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/GroupWindowTest.scala
@@ -95,7 +95,7 @@ class GroupWindowTest extends TableTestBase {
     val windowedTable = table
       .window(Tumble over 5.millis on 'rowtime as 'w)
       .groupBy('w, 'string)
-      .select('string, weightedAvg('long, 'int))
+      .select('string, call(weightedAvg, 'long, 'int))
     util.verifyPlan(windowedTable)
   }
 
@@ -162,7 +162,7 @@ class GroupWindowTest extends TableTestBase {
     val windowedTable = table
       .window(Slide over 8.millis every 10.millis on 'rowtime as 'w)
       .groupBy('w, 'string)
-      .select('string, weightedAvg('long, 'int))
+      .select('string, call(weightedAvg, 'long, 'int))
     util.verifyPlan(windowedTable)
   }
 
@@ -190,7 +190,7 @@ class GroupWindowTest extends TableTestBase {
     val windowedTable = table
       .window(Session withGap 7.millis on 'rowtime as 'w)
       .groupBy('w, 'string)
-      .select('string, weightedAvg('long, 'int))
+      .select('string, call(weightedAvg, 'long, 'int))
     util.verifyPlan(windowedTable)
   }
 
@@ -346,7 +346,7 @@ class GroupWindowTest extends TableTestBase {
     val windowedTable = table
       .window(Slide over 2.rows every 1.rows on 'proctime as 'w)
       .groupBy('w, 'int2, 'int3, 'string)
-      .select(weightAvgFun('long, 'int))
+      .select(call(weightAvgFun, 'long, 'int))
 
     util.verifyPlan(windowedTable)
   }
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/OverWindowTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/OverWindowTest.scala
index ae96a9a..cec69be 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/OverWindowTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/OverWindowTest.scala
@@ -40,9 +40,9 @@ class OverWindowTest extends TableTestBase {
       .select(
         plusOne('a.sum over 'w as 'wsum) as 'd,
         ('a.count over 'w).exp(),
-        (weightedAvg('c, 'a) over 'w) + 1,
-        "AVG:".toExpr + (weightedAvg('c, 'a) over 'w),
-        array(weightedAvg('c, 'a) over 'w, 'a.count over 'w))
+        (call(weightedAvg, 'c, 'a) over 'w) + 1,
+        "AVG:".toExpr + (call(weightedAvg, 'c, 'a) over 'w),
+        array(call(weightedAvg, 'c, 'a) over 'w, 'a.count over 'w))
     streamUtil.verifyPlan(result)
   }
 
@@ -52,7 +52,7 @@ class OverWindowTest extends TableTestBase {
 
     val result = table
       .window(Over partitionBy 'b orderBy 'proctime preceding 2.rows following CURRENT_ROW as 'w)
-      .select('c, weightedAvg('c, 'a) over 'w)
+      .select('c, call(weightedAvg, 'c, 'a) over 'w)
     streamUtil.verifyPlan(result)
   }
 
@@ -63,7 +63,7 @@ class OverWindowTest extends TableTestBase {
     val result = table
       .window(
         Over partitionBy 'a orderBy 'proctime preceding 2.hours following CURRENT_RANGE as 'w)
-      .select('a, weightedAvg('c, 'a) over 'w as 'myAvg)
+      .select('a, call(weightedAvg, 'c, 'a) over 'w as 'myAvg)
     streamUtil.verifyPlan(result)
   }
 
@@ -89,11 +89,11 @@ class OverWindowTest extends TableTestBase {
 
     val result = table
       .window(Over partitionBy 'c orderBy 'proctime preceding UNBOUNDED_RANGE as 'w)
-      .select('a, 'c, 'a.count over 'w, weightedAvg('c, 'a) over 'w)
+      .select('a, 'c, 'a.count over 'w, call(weightedAvg, 'c, 'a) over 'w)
 
     val result2 = table
       .window(Over partitionBy 'c orderBy 'proctime as 'w)
-      .select('a, 'c, 'a.count over 'w, weightedAvg('c, 'a) over 'w)
+      .select('a, 'c, 'a.count over 'w, call(weightedAvg, 'c, 'a) over 'w)
 
     verifyTableEquals(result, result2)
     streamUtil.verifyPlan(result)
@@ -106,7 +106,7 @@ class OverWindowTest extends TableTestBase {
     val result = table
       .window(
         Over partitionBy 'c orderBy 'proctime preceding UNBOUNDED_ROW following CURRENT_ROW as 'w)
-      .select('c, 'a.count over 'w, weightedAvg('c, 'a) over 'w)
+      .select('c, 'a.count over 'w, call(weightedAvg, 'c, 'a) over 'w)
     streamUtil.verifyPlan(result)
   }
 
@@ -135,7 +135,7 @@ class OverWindowTest extends TableTestBase {
     val result = table
       .window(
         Over partitionBy 'b orderBy 'rowtime preceding 2.rows following CURRENT_ROW as 'w)
-      .select('c, 'b.count over 'w, weightedAvg('c, 'a) over 'w as 'wAvg)
+      .select('c, 'b.count over 'w, call(weightedAvg, 'c, 'a) over 'w as 'wAvg)
 
     streamUtil.verifyPlan(result)
   }
@@ -147,7 +147,7 @@ class OverWindowTest extends TableTestBase {
     val result = table
       .window(
         Over partitionBy 'a orderBy 'rowtime preceding 2.hours following CURRENT_RANGE as 'w)
-      .select('a, 'c.avg over 'w, weightedAvg('c, 'a) over 'w as 'wAvg)
+      .select('a, 'c.avg over 'w, call(weightedAvg, 'c, 'a) over 'w as 'wAvg)
 
     streamUtil.verifyPlan(result)
   }
@@ -177,11 +177,11 @@ class OverWindowTest extends TableTestBase {
     val result = table
       .window(Over partitionBy 'c orderBy 'rowtime preceding UNBOUNDED_RANGE following
          CURRENT_RANGE as 'w)
-      .select('a, 'c, 'a.count over 'w, weightedAvg('c, 'a) over 'w as 'wAvg)
+      .select('a, 'c, 'a.count over 'w, call(weightedAvg, 'c, 'a) over 'w as 'wAvg)
 
     val result2 = table
       .window(Over partitionBy 'c orderBy 'rowtime as 'w)
-      .select('a, 'c, 'a.count over 'w, weightedAvg('c, 'a) over 'w as 'wAvg)
+      .select('a, 'c, 'a.count over 'w, call(weightedAvg, 'c, 'a) over 'w as 'wAvg)
 
     verifyTableEquals(result, result2)
 
@@ -195,7 +195,7 @@ class OverWindowTest extends TableTestBase {
     val result = table
       .window(Over partitionBy 'c orderBy 'rowtime preceding UNBOUNDED_ROW following
          CURRENT_ROW as 'w)
-      .select('c, 'a.count over 'w, weightedAvg('c, 'a) over 'w as 'wAvg)
+      .select('c, 'a.count over 'w, call(weightedAvg, 'c, 'a) over 'w as 'wAvg)
     streamUtil.verifyPlan(result)
   }
 
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/TableAggregateTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/TableAggregateTest.scala
index caf53a8..6889df2 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/TableAggregateTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/TableAggregateTest.scala
@@ -35,7 +35,7 @@ class TableAggregateTest extends TableTestBase {
   def testTableAggregateWithGroupBy(): Unit = {
     val resultTable = table
       .groupBy('b % 5 as 'bb)
-      .flatAggregate(emptyFunc('a, 'b) as ('x, 'y))
+      .flatAggregate(call(emptyFunc, 'a, 'b) as ('x, 'y))
       .select('bb, 'x + 1, 'y)
 
     util.verifyPlan(resultTable)
@@ -74,7 +74,7 @@ class TableAggregateTest extends TableTestBase {
   def testTableAggregateWithAlias(): Unit = {
 
     val resultTable = table
-      .flatAggregate(emptyFunc('b) as ('a, 'b))
+      .flatAggregate(call(emptyFunc, 'b) as ('a, 'b))
       .select('a, 'b)
 
     util.verifyPlan(resultTable)
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/AggregateStringExpressionTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/AggregateStringExpressionTest.scala
index 933cc66..dbf66c6 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/AggregateStringExpressionTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/AggregateStringExpressionTest.scala
@@ -27,7 +27,6 @@ import org.junit.Test
 
 class AggregateStringExpressionTest extends TableTestBase {
 
-
   @Test
   def testDistinctNonGroupedAggregate(): Unit = {
     val util = streamTestUtil()
@@ -60,11 +59,13 @@ class AggregateStringExpressionTest extends TableTestBase {
     val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
 
     val myCnt = new CountAggFunction
-   util.addFunction("myCnt", myCnt)
-    val myWeightedAvg = new WeightedAvgWithMergeAndReset
-   util.addFunction("myWeightedAvg", myWeightedAvg)
+    util.addFunction("myCnt", myCnt)
+    util.addTemporarySystemFunction("myWeightedAvg", classOf[WeightedAvgWithMergeAndReset])
 
-    val t1 = t.select(myCnt.distinct('a) as 'aCnt, myWeightedAvg.distinct('b, 'a) as 'wAvg)
+    val t1 = t.select(
+      myCnt.distinct('a) as 'aCnt,
+      call("myWeightedAvg", 'b, 'a).distinct() as 'wAvg
+    )
     val t2 = t.select("myCnt.distinct(a) as aCnt, myWeightedAvg.distinct(b, a) as wAvg")
 
     verifyTableEquals(t1, t2)
@@ -75,18 +76,16 @@ class AggregateStringExpressionTest extends TableTestBase {
     val util = streamTestUtil()
     val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
 
-
     val myCnt = new CountAggFunction
-   util.addFunction("myCnt", myCnt)
-    val myWeightedAvg = new WeightedAvgWithMergeAndReset
-   util.addFunction("myWeightedAvg", myWeightedAvg)
+    util.addFunction("myCnt", myCnt)
+    util.addTemporarySystemFunction("myWeightedAvg", classOf[WeightedAvgWithMergeAndReset])
 
     val t1 = t.groupBy('b)
       .select('b,
         myCnt.distinct('a) + 9 as 'aCnt,
-        myWeightedAvg.distinct('b, 'a) * 2 as 'wAvg,
-        myWeightedAvg.distinct('a, 'a) as 'distAgg,
-        myWeightedAvg('a, 'a) as 'agg)
+        call("myWeightedAvg", 'b, 'a).distinct() * 2 as 'wAvg,
+        call("myWeightedAvg", 'a, 'a).distinct() as 'distAgg,
+        call("myWeightedAvg", 'a, 'a) as 'agg)
     val t2 = t.groupBy("b")
       .select("b, myCnt.distinct(a) + 9 as aCnt, myWeightedAvg.distinct(b, a) * 2 as wAvg, " +
         "myWeightedAvg.distinct(a, a) as distAgg, myWeightedAvg(a, a) as agg")
@@ -99,13 +98,12 @@ class AggregateStringExpressionTest extends TableTestBase {
     val util = streamTestUtil()
     val t = util.addTableSource[(Int, Long, String)]('int, 'long, 'string)
 
-    val weightAvgFun = new WeightedAvg
-   util.addFunction("weightAvgFun", weightAvgFun)
+    util.addTemporarySystemFunction("weightAvgFun", classOf[WeightedAvg])
 
     // Expression / Scala API
     val resScala = t
       .groupBy('string)
-      .select('int.count as 'cnt, weightAvgFun('long, 'int))
+      .select('int.count as 'cnt, call("weightAvgFun", 'long, 'int))
 
     // String / Java API
     val resJava = t
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/GroupWindowStringExpressionTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/GroupWindowStringExpressionTest.scala
index 09cc818..a19f86f 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/GroupWindowStringExpressionTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/GroupWindowStringExpressionTest.scala
@@ -37,8 +37,7 @@ class GroupWindowStringExpressionTest extends TableTestBase {
 
     val myCountFun = new CountAggFunction
     util.addFunction("myCountFun", myCountFun)
-    val weightAvgFun = new WeightedAvg
-    util.addFunction("weightAvgFun", weightAvgFun)
+    util.addTemporarySystemFunction("weightAvgFun", classOf[WeightedAvg])
 
     // Expression / Scala API
     val resScala = t
@@ -48,8 +47,8 @@ class GroupWindowStringExpressionTest extends TableTestBase {
         'string,
         myCountFun('string),
         'int.sum,
-        weightAvgFun('long, 'int),
-        weightAvgFun('int, 'int) * 2,
+        call("weightAvgFun", 'long, 'int),
+        call("weightAvgFun", 'int, 'int) * 2,
         'w.start,
         'w.end)
 
@@ -77,8 +76,7 @@ class GroupWindowStringExpressionTest extends TableTestBase {
 
     val myCountFun = new CountAggFunction
     util.addFunction("myCountFun", myCountFun)
-    val weightAvgFun = new WeightedAvg
-    util.addFunction("weightAvgFun", weightAvgFun)
+    util.addTemporarySystemFunction("weightAvgFun", classOf[WeightedAvg])
 
     // Expression / Scala API
     val resScala = t
@@ -88,8 +86,8 @@ class GroupWindowStringExpressionTest extends TableTestBase {
         'string,
         myCountFun('string),
         'int.sum,
-        weightAvgFun('long, 'int),
-        weightAvgFun('int, 'int) * 2,
+        call("weightAvgFun", 'long, 'int),
+        call("weightAvgFun", 'int, 'int) * 2,
         'w.start,
         'w.end)
 
@@ -117,8 +115,7 @@ class GroupWindowStringExpressionTest extends TableTestBase {
 
     val myCountFun = new CountAggFunction
     util.addFunction("myCountFun", myCountFun)
-    val weightAvgFun = new WeightedAvg
-    util.addFunction("weightAvgFun", weightAvgFun)
+    util.addTemporarySystemFunction("weightAvgFun", classOf[WeightedAvg])
 
     // Expression / Scala API
     val resScala = t
@@ -128,8 +125,8 @@ class GroupWindowStringExpressionTest extends TableTestBase {
         'string,
         myCountFun('string),
         'int.sum,
-        weightAvgFun('long, 'int),
-        weightAvgFun('int, 'int) * 2,
+        call("weightAvgFun", 'long, 'int),
+        call("weightAvgFun", 'int, 'int) * 2,
         'w.start)
 
     // String / Java API
@@ -155,8 +152,7 @@ class GroupWindowStringExpressionTest extends TableTestBase {
 
     val myCountFun = new CountAggFunction
     util.addFunction("myCountFun", myCountFun)
-    val weightAvgFun = new WeightedAvg
-    util.addFunction("weightAvgFun", weightAvgFun)
+    util.addTemporarySystemFunction("weightAvgFun", classOf[WeightedAvg])
 
     // Expression / Scala API
     val resScala = t
@@ -165,8 +161,8 @@ class GroupWindowStringExpressionTest extends TableTestBase {
       .select(
         myCountFun('string),
         'int.sum,
-        weightAvgFun('long, 'int),
-        weightAvgFun('int, 'int) * 2,
+        call("weightAvgFun", 'long, 'int),
+        call("weightAvgFun", 'int, 'int) * 2,
         'w.start,
         'w.end)
 
@@ -193,8 +189,7 @@ class GroupWindowStringExpressionTest extends TableTestBase {
 
     val myCountFun = new CountAggFunction
     util.addFunction("myCountFun", myCountFun)
-    val weightAvgFun = new WeightedAvg
-    util.addFunction("weightAvgFun", weightAvgFun)
+    util.addTemporarySystemFunction("weightAvgFun", classOf[WeightedAvg])
 
     // Expression / Scala API
     val resScala = t
@@ -203,8 +198,8 @@ class GroupWindowStringExpressionTest extends TableTestBase {
       .select(
         myCountFun('string),
         'int.sum,
-        weightAvgFun('long, 'int),
-        weightAvgFun('int, 'int) * 2,
+        call("weightAvgFun", 'long, 'int),
+        call("weightAvgFun", 'int, 'int) * 2,
         'w.start,
         'w.end)
 
@@ -231,8 +226,7 @@ class GroupWindowStringExpressionTest extends TableTestBase {
 
     val myCountFun = new CountAggFunction
     util.addFunction("myCountFun", myCountFun)
-    val weightAvgFun = new WeightedAvg
-    util.addFunction("weightAvgFun", weightAvgFun)
+    util.addTemporarySystemFunction("weightAvgFun", classOf[WeightedAvg])
 
     // Expression / Scala API
     val resScala = t
@@ -241,8 +235,8 @@ class GroupWindowStringExpressionTest extends TableTestBase {
       .select(
         myCountFun('string),
         'int.sum,
-        weightAvgFun('long, 'int),
-        weightAvgFun('int, 'int) * 2,
+        call("weightAvgFun", 'long, 'int),
+        call("weightAvgFun", 'int, 'int) * 2,
         'w.start,
         'w.end)
 
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/GroupWindowTableAggregateStringExpressionTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/GroupWindowTableAggregateStringExpressionTest.scala
index c263d11..6d0fce1 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/GroupWindowTableAggregateStringExpressionTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/GroupWindowTableAggregateStringExpressionTest.scala
@@ -31,14 +31,13 @@ class GroupWindowTableAggregateStringExpressionTest extends TableTestBase {
     val util = streamTestUtil()
     val t = util.addTableSource[(Int, Long, String)]('int, 'long, 'string, 'rowtime.rowtime)
 
-    val top3 = new Top3
-    util.addFunction("top3", top3)
+    util.addTemporarySystemFunction("top3", classOf[Top3])
 
     // Expression / Scala API
     val resScala = t
       .window(Slide over 4.hours every 2.hours on 'rowtime as 'w)
       .groupBy('w, 'string)
-      .flatAggregate(top3('int) as ('x, 'y))
+      .flatAggregate(call("top3", 'int) as ('x, 'y))
       .select(
         'string,
         'x,
@@ -66,14 +65,13 @@ class GroupWindowTableAggregateStringExpressionTest extends TableTestBase {
     val util = streamTestUtil()
     val t = util.addTableSource[(Int, Long, Long, String)]('int, 'long, 'rowtime.rowtime, 'string)
 
-    val top3 = new Top3
-    util.addFunction("top3", top3)
+    util.addTemporarySystemFunction("top3", classOf[Top3])
 
     // Expression / Scala API
     val resScala = t
       .window(Tumble over 4.hours on 'rowtime as 'w)
       .groupBy('w, 'string)
-      .flatAggregate(top3('int) as ('x, 'y))
+      .flatAggregate(call("top3", 'int) as ('x, 'y))
       .select(
         'string,
         'x,
@@ -101,14 +99,13 @@ class GroupWindowTableAggregateStringExpressionTest extends TableTestBase {
     val util = streamTestUtil()
     val t = util.addTableSource[(Int, Long, String)]('int, 'long, 'string, 'rowtime.rowtime)
 
-    val top3 = new Top3
-    util.addFunction("top3", top3)
+    util.addTemporarySystemFunction("top3", classOf[Top3])
 
     // Expression / Scala API
     val resScala = t
       .window(Session withGap 4.hours on 'rowtime as 'w)
       .groupBy('w, 'string)
-      .flatAggregate(top3('int) as ('x, 'y))
+      .flatAggregate(call("top3", 'int) as ('x, 'y))
       .select(
         'string,
         'x,
@@ -135,14 +132,13 @@ class GroupWindowTableAggregateStringExpressionTest extends TableTestBase {
     val util = streamTestUtil()
     val t = util.addTableSource[(Int, Long, String)]('int, 'long, 'string, 'proctime.proctime)
 
-    val top3 = new Top3
-    util.addFunction("top3", top3)
+    util.addTemporarySystemFunction("top3", classOf[Top3])
 
     // Expression / Scala API
     val resScala = t
       .window(Slide over 4.hours every 2.hours on 'proctime as 'w)
       .groupBy('w)
-      .flatAggregate(top3('int) as ('x, 'y))
+      .flatAggregate(call("top3", 'int) as ('x, 'y))
       .select(
         'x,
         'y + 1,
@@ -168,14 +164,13 @@ class GroupWindowTableAggregateStringExpressionTest extends TableTestBase {
     val util = streamTestUtil()
     val t = util.addTableSource[(Int, Long, String)]('int, 'long,'string, 'proctime.proctime)
 
-    val top3 = new Top3
-    util.addFunction("top3", top3)
+    util.addTemporarySystemFunction("top3", classOf[Top3])
 
     // Expression / Scala API
     val resScala = t
       .window(Tumble over 4.hours on 'proctime as 'w)
       .groupBy('w)
-      .flatAggregate(top3('int) as ('x, 'y))
+      .flatAggregate(call("top3", 'int) as ('x, 'y))
       .select(
         'x,
         'y + 1,
@@ -201,14 +196,13 @@ class GroupWindowTableAggregateStringExpressionTest extends TableTestBase {
     val util = streamTestUtil()
     val t = util.addTableSource[(Int, Long, String)]('int, 'long, 'string, 'proctime.proctime)
 
-    val top3 = new Top3
-    util.addFunction("top3", top3)
+    util.addTemporarySystemFunction("top3", classOf[Top3])
 
     // Expression / Scala API
     val resScala = t
       .window(Session withGap 4.hours on 'proctime as 'w)
       .groupBy('w)
-      .flatAggregate(top3('int) as ('x, 'y))
+      .flatAggregate(call("top3", 'int) as ('x, 'y))
       .select(
         'x,
         'y + 1,
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/OverWindowStringExpressionTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/OverWindowStringExpressionTest.scala
index 2db10d5..f4af6ed 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/OverWindowStringExpressionTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/OverWindowStringExpressionTest.scala
@@ -34,8 +34,7 @@ class OverWindowStringExpressionTest extends TableTestBase {
     val t = util.addDataStream[(Long, Int, String, Int, Long)](
       "T1", 'a, 'b, 'c, 'd, 'e, 'rowtime.rowtime)
 
-    val weightAvgFun = new WeightedAvg
-    util.addFunction("weightAvgFun", weightAvgFun)
+    util.addTemporarySystemFunction("weightAvgFun", classOf[WeightedAvg])
 
     val resScala = t
       .window(Over partitionBy 'a orderBy 'rowtime preceding UNBOUNDED_ROW as 'w)
@@ -53,8 +52,7 @@ class OverWindowStringExpressionTest extends TableTestBase {
     val t = util.addDataStream[(Long, Int, String, Int, Long)](
       "T1", 'a, 'b, 'c, 'd, 'e, 'rowtime.rowtime)
 
-    val weightAvgFun = new WeightedAvg
-    util.addFunction("weightAvgFun", weightAvgFun)
+    util.addTemporarySystemFunction("weightAvgFun", classOf[WeightedAvg])
 
     val resScala = t
       .window(Over orderBy 'rowtime preceding UNBOUNDED_ROW following CURRENT_ROW as 'w)
@@ -72,8 +70,7 @@ class OverWindowStringExpressionTest extends TableTestBase {
     val t = util.addDataStream[(Long, Int, String, Int, Long)](
       "T1", 'a, 'b, 'c, 'd, 'e, 'rowtime.rowtime)
 
-    val weightAvgFun = new WeightedAvg
-    util.addFunction("weightAvgFun", weightAvgFun)
+    util.addTemporarySystemFunction("weightAvgFun", classOf[WeightedAvg])
 
     val resScala = t
       .window(Over partitionBy('a, 'd) orderBy 'rowtime preceding 10.rows as 'w)
@@ -91,8 +88,7 @@ class OverWindowStringExpressionTest extends TableTestBase {
     val t = util.addDataStream[(Long, Int, String, Int, Long)](
       "T1", 'a, 'b, 'c, 'd, 'e, 'rowtime.rowtime)
 
-    val weightAvgFun = new WeightedAvg
-    util.addFunction("weightAvgFun", weightAvgFun)
+    util.addTemporarySystemFunction("weightAvgFun", classOf[WeightedAvg])
 
     val resScala = t
       .window(Over orderBy 'rowtime preceding 10.rows following CURRENT_ROW as 'w)
@@ -110,8 +106,7 @@ class OverWindowStringExpressionTest extends TableTestBase {
     val t = util.addDataStream[(Long, Int, String, Int, Long)](
       "T1",'a, 'b, 'c, 'd, 'e, 'rowtime.rowtime)
 
-    val weightAvgFun = new WeightedAvg
-    util.addFunction("weightAvgFun", weightAvgFun)
+    util.addTemporarySystemFunction("weightAvgFun", classOf[WeightedAvg])
 
     val resScala = t
       .window(Over partitionBy 'a orderBy 'rowtime preceding UNBOUNDED_RANGE as 'w)
@@ -129,8 +124,7 @@ class OverWindowStringExpressionTest extends TableTestBase {
     val t = util.addDataStream[(Long, Int, String, Int, Long)](
       "T1", 'a, 'b, 'c, 'd, 'e, 'rowtime.rowtime)
 
-    val weightAvgFun = new WeightedAvg
-    util.addFunction("weightAvgFun", weightAvgFun)
+    util.addTemporarySystemFunction("weightAvgFun", classOf[WeightedAvg])
 
     val resScala = t
       .window(Over orderBy 'rowtime preceding UNBOUNDED_RANGE following CURRENT_RANGE as 'w)
@@ -154,8 +148,7 @@ class OverWindowStringExpressionTest extends TableTestBase {
     val t = util.addDataStream[(Long, Int, String, Int, Long)](
       "T1", 'a, 'b, 'c, 'd, 'e, 'proctime.proctime)
 
-    val weightAvgFun = new WeightedAvg
-    util.addFunction("weightAvgFun", weightAvgFun)
+    util.addTemporarySystemFunction("weightAvgFun", classOf[WeightedAvg])
 
     val resScala = t
       .window(Over orderBy 'proctime preceding UNBOUNDED_RANGE following CURRENT_RANGE as 'w)
@@ -179,8 +172,7 @@ class OverWindowStringExpressionTest extends TableTestBase {
     val t = util.addDataStream[(Long, Int, String, Int, Long)](
       "T1", 'a, 'b, 'c, 'd, 'e, 'rowtime.rowtime)
 
-    val weightAvgFun = new WeightedAvg
-    util.addFunction("weightAvgFun", weightAvgFun)
+    util.addTemporarySystemFunction("weightAvgFun", classOf[WeightedAvg])
 
     val resScala = t
       .window(Over partitionBy('a, 'c) orderBy 'rowtime preceding 10.minutes as 'w)
@@ -198,8 +190,7 @@ class OverWindowStringExpressionTest extends TableTestBase {
     val t = util.addDataStream[(Long, Int, String, Int, Long)](
       "T1", 'a, 'b, 'c, 'd, 'e, 'rowtime.rowtime)
 
-    val weightAvgFun = new WeightedAvg
-    util.addFunction("weightAvgFun", weightAvgFun)
+    util.addTemporarySystemFunction("weightAvgFun", classOf[WeightedAvg])
 
     val resScala = t
       .window(Over orderBy 'rowtime preceding 4.hours following CURRENT_RANGE as 'w)
@@ -217,10 +208,9 @@ class OverWindowStringExpressionTest extends TableTestBase {
     val t = util.addDataStream[(Long, Int, String, Int, Long)](
       "T1", 'a, 'b, 'c, 'd, 'e, 'rowtime.rowtime)
 
-    val weightedAvg = new WeightedAvgWithRetract
     val plusOne = Func1
     util.addFunction("plusOne", plusOne)
-    util.addFunction("weightedAvg", weightedAvg)
+    util.addTemporarySystemFunction("weightedAvg", classOf[WeightedAvg])
 
     val resScala = t
       .window(Over partitionBy 'a orderBy 'rowtime preceding UNBOUNDED_ROW as 'w)
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/TableAggregateStringExpressionTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/TableAggregateStringExpressionTest.scala
index d32f582..ddee770 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/TableAggregateStringExpressionTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/stringexpr/TableAggregateStringExpressionTest.scala
@@ -78,13 +78,12 @@ class TableAggregateStringExpressionTest extends TableTestBase {
     val util = streamTestUtil()
     val t = util.addTableSource[(Int, Long, String)]('a, 'b, 'c)
 
-    val top3 = new EmptyTableAggFunc
-    util.addFunction("top3", top3)
+    util.addTemporarySystemFunction("top3", classOf[EmptyTableAggFunc])
     util.addFunction("Func0", Func0)
 
     // Expression / Scala API
     val resScala = t
-      .flatAggregate(top3('a) as ('d, 'e))
+      .flatAggregate(call("top3", 'a) as ('d, 'e))
       .select('*)
 
     // String / Java API
@@ -100,14 +99,13 @@ class TableAggregateStringExpressionTest extends TableTestBase {
     val util = streamTestUtil()
     val t = util.addTableSource[(Int, Long, String)]('a, 'b, 'c)
 
-    val top3 = new EmptyTableAggFunc
-    util.addFunction("top3", top3)
+    util.addTemporarySystemFunction("top3", classOf[EmptyTableAggFunc])
     util.addFunction("Func0", Func0)
 
     // Expression / Scala API
     val resScala = t
       .groupBy('b)
-      .flatAggregate(top3('a) as ('d, 'e))
+      .flatAggregate(call("top3", 'a) as ('d, 'e))
       .select('*)
 
     // String / Java API
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/GroupWindowTableAggregateValidationTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/GroupWindowTableAggregateValidationTest.scala
index c7c5a3b..3bd6a24 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/GroupWindowTableAggregateValidationTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/GroupWindowTableAggregateValidationTest.scala
@@ -37,13 +37,12 @@ class GroupWindowTableAggregateValidationTest extends TableTestBase {
   @Test
   def testTumbleUdAggWithInvalidArgs(): Unit = {
     expectedException.expect(classOf[ValidationException])
-    expectedException.expectMessage("Given parameters do not match any signature. \n" +
-      "Actual: (java.lang.Long) \nExpected: (int)")
+    expectedException.expectMessage("Invalid function call:\nTop3(BIGINT)")
 
     table
       .window(Slide over 2.hours every 30.minutes on 'rowtime as 'w)
       .groupBy('string, 'w)
-      .flatAggregate(top3('long)) // invalid args
+      .flatAggregate(call(top3, 'long)) // invalid args
       .select('string, 'f0)
   }
 
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/GroupWindowValidationTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/GroupWindowValidationTest.scala
index 2a84a1a..31280eb 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/GroupWindowValidationTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/GroupWindowValidationTest.scala
@@ -109,9 +109,7 @@ class GroupWindowValidationTest extends TableTestBase {
   @Test
   def testTumbleUdAggWithInvalidArgs(): Unit = {
     expectedException.expect(classOf[ValidationException])
-    expectedException.expectMessage("Given parameters do not match any signature. \n" +
-      "Actual: (java.lang.String, java.lang.Integer) \nExpected: (int, int), (long, int), " +
-      "(long, int, int, java.lang.String)")
+    expectedException.expectMessage("Invalid function call:\nmyWeightedAvg(STRING, INT)")
 
     val util = streamTestUtil()
     val weightedAvg = new WeightedAvgWithMerge
@@ -121,7 +119,7 @@ class GroupWindowValidationTest extends TableTestBase {
     table
       .window(Tumble over 2.hours on 'rowtime as 'w)
       .groupBy('w, 'string)
-      .select('string, weightedAvg('string, 'int)) // invalid UDAGG args
+      .select('string, call(weightedAvg, 'string, 'int)) // invalid UDAGG args
   }
 
   @Test
@@ -176,9 +174,7 @@ class GroupWindowValidationTest extends TableTestBase {
   @Test
   def testSlideUdAggWithInvalidArgs(): Unit = {
     expectedException.expect(classOf[ValidationException])
-    expectedException.expectMessage("Given parameters do not match any signature. \n" +
-      "Actual: (java.lang.String, java.lang.Integer) \nExpected: (int, int), (long, int), " +
-      "(long, int, int, java.lang.String)")
+    expectedException.expectMessage("Invalid function call:\nmyWeightedAvg(STRING, INT)")
 
     val util = streamTestUtil()
     val weightedAvg = new WeightedAvgWithMerge
@@ -188,7 +184,7 @@ class GroupWindowValidationTest extends TableTestBase {
     table
       .window(Slide over 2.hours every 30.minutes on 'rowtime as 'w)
       .groupBy('w, 'string)
-      .select('string, weightedAvg('string, 'int)) // invalid UDAGG args
+      .select('string, call(weightedAvg, 'string, 'int)) // invalid UDAGG args
   }
 
   @Test
@@ -260,9 +256,7 @@ class GroupWindowValidationTest extends TableTestBase {
   @Test
   def testSessionUdAggWithInvalidArgs(): Unit = {
     expectedException.expect(classOf[ValidationException])
-    expectedException.expectMessage("Given parameters do not match any signature. \n" +
-      "Actual: (java.lang.String, java.lang.Integer) \nExpected: (int, int), (long, int), " +
-      "(long, int, int, java.lang.String)")
+    expectedException.expectMessage("Invalid function call:\nmyWeightedAvg(STRING, INT)")
 
     val util = streamTestUtil()
     val weightedAvg = new WeightedAvgWithMerge
@@ -272,7 +266,7 @@ class GroupWindowValidationTest extends TableTestBase {
     table
       .window(Session withGap 2.hours on 'rowtime as 'w)
       .groupBy('w, 'string)
-      .select('string, weightedAvg('string, 'int)) // invalid UDAGG args
+      .select('string, call(weightedAvg, 'string, 'int)) // invalid UDAGG args
   }
 
   @Test
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/TableAggregateValidationTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/TableAggregateValidationTest.scala
index ee29eed..3fad0e37 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/TableAggregateValidationTest.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/TableAggregateValidationTest.scala
@@ -30,10 +30,8 @@ class TableAggregateValidationTest extends TableTestBase {
   @Test
   def testInvalidParameterNumber(): Unit = {
     expectedException.expect(classOf[ValidationException])
-    expectedException.expectMessage("Given parameters do not match any signature. \n" +
-      "Actual: (java.lang.Long, java.lang.Integer, java.lang.String) \n" +
-      "Expected: (int), (java.sql.Timestamp, java.sql.Timestamp), " +
-      "(long, int), (long, java.sql.Timestamp)")
+    expectedException.expectMessage(
+      "Invalid function call:\nEmptyTableAggFunc(BIGINT, INT, STRING)")
 
     val util = streamTestUtil()
     val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
@@ -42,17 +40,15 @@ class TableAggregateValidationTest extends TableTestBase {
     table
       .groupBy('c)
       // must fail. func does not take 3 parameters
-      .flatAggregate(func('a, 'b, 'c))
+      .flatAggregate(call(func, 'a, 'b, 'c))
       .select('_1, '_2, '_3)
   }
 
   @Test
   def testInvalidParameterType(): Unit = {
     expectedException.expect(classOf[ValidationException])
-    expectedException.expectMessage("Given parameters do not match any signature. \n" +
-      "Actual: (java.lang.Long, java.lang.String) \n" +
-      "Expected: (int), (java.sql.Timestamp, java.sql.Timestamp), " +
-      "(long, int), (long, java.sql.Timestamp)")
+    expectedException.expectMessage(
+      "Invalid function call:\nEmptyTableAggFunc(BIGINT, STRING)")
 
     val util = streamTestUtil()
     val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
@@ -61,7 +57,7 @@ class TableAggregateValidationTest extends TableTestBase {
     table
       .groupBy('c)
       // must fail. func take 2 parameters of type Long and Timestamp or Long Int
-      .flatAggregate(func('a, 'c))
+      .flatAggregate(call(func, 'a, 'c))
       .select('_1, '_2, '_3)
   }
 
@@ -76,7 +72,7 @@ class TableAggregateValidationTest extends TableTestBase {
     val func = new EmptyTableAggFunc
     table
       .groupBy('b)
-      .flatAggregate(func('a, 'b) as ('x, 'y))
+      .flatAggregate(call(func, 'a, 'b) as ('x, 'y))
       .select('x.start, 'y)
   }
 
@@ -92,7 +88,7 @@ class TableAggregateValidationTest extends TableTestBase {
     val func = new EmptyTableAggFunc
     table
       .groupBy('b)
-      .flatAggregate(func('a, 'b) as ('x, 'y))
+      .flatAggregate(call(func, 'a, 'b) as ('x, 'y))
       .select('x.count)
   }
 
@@ -117,8 +113,7 @@ class TableAggregateValidationTest extends TableTestBase {
   def testInvalidAliasWithWrongNumber(): Unit = {
     expectedException.expect(classOf[ValidationException])
     expectedException.expectMessage("List of column aliases must have same degree as " +
-      "table; the returned table of function " +
-      "'org.apache.flink.table.planner.utils.EmptyTableAggFunc' has 2 columns, " +
+      "table; the returned table of function 'EmptyTableAggFunc' has 2 columns, " +
       "whereas alias list has 3 columns")
 
     val util = streamTestUtil()
@@ -128,7 +123,7 @@ class TableAggregateValidationTest extends TableTestBase {
     table
       .groupBy('b)
       // must fail. alias with wrong number of fields
-      .flatAggregate(func('a, 'b) as ('a, 'b, 'c))
+      .flatAggregate(call(func, 'a, 'b) as ('a, 'b, 'c))
       .select('*)
   }
 
@@ -144,7 +139,7 @@ class TableAggregateValidationTest extends TableTestBase {
     table
       .groupBy('b)
       // must fail. alias with name conflict
-      .flatAggregate(func('a, 'b) as ('a, 'b))
+      .flatAggregate(call(func, 'a, 'b) as ('a, 'b))
       .select('*)
   }
 }
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/agg/SortAggITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/agg/SortAggITCase.scala
index 9ac94e0..7fd419d 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/agg/SortAggITCase.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/sql/agg/SortAggITCase.scala
@@ -24,6 +24,7 @@ import org.apache.flink.api.scala._
 import org.apache.flink.table.api.Types
 import org.apache.flink.table.api.config.ExecutionConfigOptions.{TABLE_EXEC_DISABLED_OPERATORS, TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM}
 import org.apache.flink.table.functions.AggregateFunction
+import org.apache.flink.table.planner.{JInt, JLong}
 import org.apache.flink.table.planner.plan.utils.JavaUserDefinedAggFunctions.WeightedAvgWithMergeAndReset
 import org.apache.flink.table.planner.runtime.utils.BatchTestBase.row
 import org.apache.flink.table.planner.runtime.utils.UserDefinedFunctionTestUtils.{MyPojo, MyToPojoFunc}
@@ -49,16 +50,16 @@ class SortAggITCase
 
     registerFunction("countFun", new CountAggFunction())
     registerFunction("intSumFun", new IntSumAggFunction())
-    registerFunction("weightedAvg", new WeightedAvgWithMergeAndReset())
+    registerTemporarySystemFunction("weightedAvg", classOf[WeightedAvgWithMergeAndReset])
 
     registerFunction("myPrimitiveArrayUdaf", new MyPrimitiveArrayUdaf())
     registerFunction("myObjectArrayUdaf", new MyObjectArrayUdaf())
     registerFunction("myNestedLongArrayUdaf", new MyNestedLongArrayUdaf())
-    registerFunction("myNestedStringArrayUdaf", new MyNestedStringArrayUdaf())
+    registerTemporarySystemFunction("myNestedStringArrayUdaf", classOf[MyNestedStringArrayUdaf])
 
     registerFunction("myPrimitiveMapUdaf", new MyPrimitiveMapUdaf())
     registerFunction("myObjectMapUdaf", new MyObjectMapUdaf())
-    registerFunction("myNestedMapUdaf", new MyNestedMapUdf())
+    registerTemporarySystemFunction("myNestedMapUdaf", classOf[MyNestedMapUdf])
   }
 
   @Test
@@ -332,7 +333,7 @@ class MyPojoAggFunction extends AggregateFunction[MyPojo, CountAccumulator] {
   }
 
   override def getValue(acc: CountAccumulator): MyPojo = {
-    new MyPojo(acc.f0.asInstanceOf[Int], acc.f0.asInstanceOf[Int])
+    new MyPojo(acc.f0.toInt, acc.f0.toInt)
   }
 
   def merge(acc: CountAccumulator, its: JIterable[CountAccumulator]): Unit = {
@@ -357,21 +358,21 @@ class MyPojoAggFunction extends AggregateFunction[MyPojo, CountAccumulator] {
   override def getResultType: TypeInformation[MyPojo] = MyToPojoFunc.getResultType(null)
 }
 
-class VarArgsAggFunction extends AggregateFunction[Long, CountAccumulator] {
+class VarArgsAggFunction extends AggregateFunction[JLong, CountAccumulator] {
 
   @varargs
-  def accumulate(acc: CountAccumulator, value: Long, args: String*): Unit = {
+  def accumulate(acc: CountAccumulator, value: JLong, args: String*): Unit = {
     acc.f0 += value
     args.foreach(s => acc.f0 += s.toLong)
   }
 
   @varargs
-  def retract(acc: CountAccumulator, value: Long, args: String*): Unit = {
+  def retract(acc: CountAccumulator, value: JLong, args: String*): Unit = {
     acc.f0 -= value
     args.foreach(s => acc.f0 -= s.toLong)
   }
 
-  override def getValue(acc: CountAccumulator): Long = {
+  override def getValue(acc: CountAccumulator): JLong = {
     acc.f0
   }
 
@@ -389,10 +390,6 @@ class VarArgsAggFunction extends AggregateFunction[Long, CountAccumulator] {
   def resetAccumulator(acc: CountAccumulator): Unit = {
     acc.f0 = 0L
   }
-
-  override def getAccumulatorType: TypeInformation[CountAccumulator] = {
-    new TupleTypeInfo[CountAccumulator](classOf[CountAccumulator], Types.LONG)
-  }
 }
 
 class SimplePrimitiveArrayUdaf extends AggregateFunction[lang.Long, Array[Long]] {
@@ -485,12 +482,6 @@ class MyNestedStringArrayUdaf extends AggregateFunction[
     accumulator(1)(0) = accumulator(1)(0) + c.charAt(c.length - 1)
     accumulator(1)(1) = accumulator(1)(1) + c.charAt(0)
   }
-
-  override def getAccumulatorType =
-    ObjectArrayTypeInfo.getInfoFor(BasicArrayTypeInfo.STRING_ARRAY_TYPE_INFO)
-
-  override def getResultType =
-    getAccumulatorType
 }
 
 class MyPrimitiveMapUdaf extends AggregateFunction[
@@ -538,28 +529,22 @@ class MyObjectMapUdaf extends AggregateFunction[
 }
 
 class MyNestedMapUdf extends AggregateFunction[
-    java.util.Map[Long, java.util.Map[String, Int]],
-    java.util.Map[Long, java.util.Map[String, Int]]] {
+    java.util.Map[JLong, java.util.Map[String, JInt]],
+    java.util.Map[JLong, java.util.Map[String, JInt]]] {
 
-  override def createAccumulator(): java.util.Map[Long, java.util.Map[String, Int]] =
-    new java.util.HashMap[Long, java.util.Map[String, Int]]()
+  override def createAccumulator(): java.util.Map[JLong, java.util.Map[String, JInt]] =
+    new java.util.HashMap[JLong, java.util.Map[String, JInt]]()
 
-  override def getValue(accumulator: java.util.Map[Long, java.util.Map[String, Int]])
-  : java.util.Map[Long, java.util.Map[String, Int]] =
+  override def getValue(accumulator: java.util.Map[JLong, java.util.Map[String, JInt]])
+  : java.util.Map[JLong, java.util.Map[String, JInt]] =
     accumulator
 
   def accumulate(
-      accumulator: java.util.Map[Long, java.util.Map[String, Int]],
-      a: Int, b: Long, c: String): Unit = {
+      accumulator: java.util.Map[JLong, java.util.Map[String, JInt]],
+      a: JInt, b: JLong, c: String): Unit = {
     val key = c.substring(0, 2)
-    accumulator.putIfAbsent(b, new java.util.HashMap[String, Int]())
+    accumulator.putIfAbsent(b, new java.util.HashMap[String, JInt]())
     accumulator.get(b).putIfAbsent(key, 0)
     accumulator.get(b).put(key, accumulator.get(b).get(key) + a)
   }
-
-  override def getAccumulatorType =
-    new MapTypeInfo(Types.LONG, new MapTypeInfo(Types.STRING, Types.INT))
-        .asInstanceOf[TypeInformation[java.util.Map[Long, java.util.Map[String, Int]]]]
-
-  override def getResultType = getAccumulatorType
 }
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/table/AggregationITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/table/AggregationITCase.scala
index ca16518..a89cd73 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/table/AggregationITCase.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/batch/table/AggregationITCase.scala
@@ -184,7 +184,13 @@ class AggregationITCase extends BatchTestBase {
 
     val t = CollectionBatchExecTable.get3TupleDataSet(tEnv, "a, b, c")
       .groupBy('b)
-      .select('b, 'a.sum, countFun('c), wAvgFun('b, 'a), wAvgFun('a, 'a), countDistinct('c))
+      .select(
+        'b,
+        'a.sum,
+        countFun('c),
+        call(wAvgFun, 'b, 'a),
+        call(wAvgFun, 'a, 'a),
+        countDistinct('c))
 
     val expected = "1,1,1,1,1,1\n" + "2,5,2,2,2,2\n" + "3,15,3,3,5,3\n" + "4,34,4,4,8,4\n" +
       "5,65,5,5,13,5\n" + "6,111,6,6,18,6\n"
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/AggregateITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/AggregateITCase.scala
index c920bdc..b93e611 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/AggregateITCase.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/AggregateITCase.scala
@@ -23,13 +23,13 @@ import org.apache.flink.api.java.typeutils.RowTypeInfo
 import org.apache.flink.api.scala._
 import org.apache.flink.streaming.api.TimeCharacteristic
 import org.apache.flink.streaming.api.scala.DataStream
-import org.apache.flink.table.api.{Types, _}
 import org.apache.flink.table.api.bridge.scala._
 import org.apache.flink.table.api.internal.TableEnvironmentInternal
-import org.apache.flink.table.planner.factories.TestValuesTableFactory
+import org.apache.flink.table.api.{Types, _}
 import org.apache.flink.table.planner.functions.aggfunctions.{ListAggWithRetractAggFunction, ListAggWsWithRetractAggFunction}
 import org.apache.flink.table.planner.plan.utils.JavaUserDefinedAggFunctions.VarSumAggFunction
 import org.apache.flink.table.planner.runtime.batch.sql.agg.{MyPojoAggFunction, VarArgsAggFunction}
+import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedAggFunctions.OverloadedMaxFunction
 import org.apache.flink.table.planner.runtime.utils.StreamingWithAggTestBase.AggMode
 import org.apache.flink.table.planner.runtime.utils.StreamingWithMiniBatchTestBase.MiniBatchMode
 import org.apache.flink.table.planner.runtime.utils.StreamingWithStateTestBase.StateBackendMode
@@ -384,7 +384,7 @@ class AggregateITCase(
 
     val t = failingDataSource(data).toTable(tEnv, 'a, 'b, 'c)
     tEnv.registerTable("T", t)
-    tEnv.registerFunction("CntNullNonNull", new CountNullNonNull)
+    tEnv.createTemporarySystemFunction("CntNullNonNull", new CountNullNonNull)
     val t1 = tEnv.sqlQuery(
       "SELECT b, count(*), CntNullNonNull(DISTINCT c)  FROM T GROUP BY b")
 
@@ -829,7 +829,7 @@ class AggregateITCase(
 
     val t = failingDataSource(data).toTable(tEnv, 'id, 's, 's1, 's2)
     tEnv.registerTable("MyTable", t)
-    tEnv.registerFunction("func", new VarArgsAggFunction)
+    tEnv.createTemporarySystemFunction("func", classOf[VarArgsAggFunction])
 
     val sql = "SELECT func(s, s1, s2) FROM MyTable"
     val sink = new TestingRetractSink
@@ -849,7 +849,7 @@ class AggregateITCase(
 
     val t = failingDataSource(data).toTable(tEnv, 'id, 's, 's1, 's2)
     tEnv.registerTable("MyTable", t)
-    tEnv.registerFunction("func", new VarArgsAggFunction)
+    tEnv.createTemporarySystemFunction("func", classOf[VarArgsAggFunction])
 
     val sink = new TestingRetractSink
     tEnv
@@ -1110,7 +1110,7 @@ class AggregateITCase(
 
   @Test
   def testLongVarargsAgg(): Unit = {
-    tEnv.registerFunction("var_sum", new VarSumAggFunction)
+    tEnv.createTemporarySystemFunction("var_sum", classOf[VarSumAggFunction])
     val sqlQuery = s"SELECT a, " +
       s"var_sum(${0.until(260).map(_ => "b").mkString(",")}) from MyTable group by a"
     val data = Seq[(Int, Int)]((1, 1), (2,2))
@@ -1283,4 +1283,44 @@ class AggregateITCase(
       assertEquals(expected, result)
     )
   }
+
+  @Test
+  def testOverloadedAccumulator(): Unit = {
+    val data = new mutable.MutableList[(String, Long)]
+    data .+= (("x", 1L))
+    data .+= (("x", 2L))
+    data .+= (("x", 3L))
+    data .+= (("y", 1L))
+    data .+= (("y", 2L))
+    data .+= (("z", 3L))
+
+    val t = failingDataSource(data).toTable(tEnv, 'a, 'b)
+    tEnv.createTemporaryView("T", t)
+    tEnv.createTemporarySystemFunction("OverloadedMaxFunction", classOf[OverloadedMaxFunction])
+
+    val sink1 = new TestingRetractSink
+    val sink2 = new TestingRetractSink
+
+    tEnv.sqlQuery("SELECT a, OverloadedMaxFunction(b) FROM T GROUP BY a")
+      .toRetractStream[Row]
+      .addSink(sink1)
+
+    tEnv.sqlQuery("SELECT b, OverloadedMaxFunction(a) FROM T GROUP BY b")
+      .toRetractStream[Row]
+      .addSink(sink2)
+
+    env.execute()
+
+    val expected1 = List(
+      "x,3",
+      "y,2",
+      "z,3")
+    assertEquals(expected1.sorted, sink1.getRetractResults.sorted)
+
+    val expected2 = List(
+      "1,y",
+      "2,y",
+      "3,z")
+    assertEquals(expected2.sorted, sink2.getRetractResults.sorted)
+  }
 }
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/MatchRecognizeITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/MatchRecognizeITCase.scala
index 787f062..d2e2e00 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/MatchRecognizeITCase.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/MatchRecognizeITCase.scala
@@ -597,7 +597,7 @@ class MatchRecognizeITCase(backend: StateBackendMode) extends StreamingWithState
     val t = env.fromCollection(data)
       .toTable(tEnv, 'id, 'name, 'price, 'rate, 'weight, 'proctime.proctime)
     tEnv.registerTable("MyTable", t)
-    tEnv.registerFunction("weightedAvg", new WeightedAvg)
+    tEnv.createTemporarySystemFunction("weightedAvg", classOf[WeightedAvg])
 
     val sqlQuery =
       s"""
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/WindowAggregateITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/WindowAggregateITCase.scala
index 053bad0..8ab9ff5 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/WindowAggregateITCase.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/sql/WindowAggregateITCase.scala
@@ -195,7 +195,7 @@ class WindowAggregateITCase(mode: StateBackendMode)
       .assignTimestampsAndWatermarks(new TimestampAndWatermarkWithOffset[(Long, Int, String)](0L))
     val table = stream.toTable(tEnv, 'long, 'int, 'string, 'rowtime.rowtime)
     tEnv.registerTable("T1", table)
-    tEnv.registerFunction("weightAvgFun", new WeightedAvg)
+    tEnv.createTemporarySystemFunction("weightAvgFun", classOf[WeightedAvg])
 
     val sql =
       """
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/GroupWindowTableAggregateITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/GroupWindowTableAggregateITCase.scala
index 85f1818..1df81f1 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/GroupWindowTableAggregateITCase.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/GroupWindowTableAggregateITCase.scala
@@ -68,7 +68,7 @@ class GroupWindowTableAggregateITCase(mode: StateBackendMode)
     val windowedTable = table
       .window(Slide over 4.rows every 2.rows on 'proctime as 'w)
       .groupBy('w, 'long)
-      .flatAggregate(top3('int) as ('x, 'y))
+      .flatAggregate(call(top3, 'int) as ('x, 'y))
       .select('long, 'x, 'y)
 
     val sink = new TestingAppendSink
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/OverWindowITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/OverWindowITCase.scala
index 3528b30..8ed3692 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/OverWindowITCase.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/OverWindowITCase.scala
@@ -74,7 +74,7 @@ class OverWindowITCase(mode: StateBackendMode) extends StreamingWithStateTestBas
       Over partitionBy 'c orderBy 'proctime preceding UNBOUNDED_ROW as 'w)
       .select('c,
         countFun('b) over 'w as 'mycount,
-        weightAvgFun('a, 'b) over 'w as 'wAvg,
+        call(weightAvgFun, 'a, 'b) over 'w as 'wAvg,
         countDist('a) over 'w as 'countDist)
       .select('c, 'mycount, 'wAvg, 'countDist)
 
@@ -167,7 +167,7 @@ class OverWindowITCase(mode: StateBackendMode) extends StreamingWithStateTestBas
         'b.max over 'w,
         'b.min over 'w,
         ('b.min over 'w).abs(),
-        weightAvgFun('b, 'a) over 'w,
+        call(weightAvgFun, 'b, 'a) over 'w,
         countDist('c) over 'w as 'countDist)
 
     val sink = new TestingAppendSink
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/TableAggregateITCase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/TableAggregateITCase.scala
index f9805f3..664690d 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/TableAggregateITCase.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/stream/table/TableAggregateITCase.scala
@@ -22,6 +22,7 @@ import org.apache.flink.api.common.time.Time
 import org.apache.flink.api.scala._
 import org.apache.flink.table.api._
 import org.apache.flink.table.api.bridge.scala._
+import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedAggFunctions.OverloadedDoubleMaxFunction
 import org.apache.flink.table.planner.runtime.utils.StreamingWithStateTestBase.StateBackendMode
 import org.apache.flink.table.planner.runtime.utils.TestData.tupleData3
 import org.apache.flink.table.planner.runtime.utils.{StreamingWithStateTestBase, TestingRetractSink}
@@ -170,7 +171,7 @@ class TableAggregateITCase(mode: StateBackendMode) extends StreamingWithStateTes
     val resultTable = source
       .groupBy('b)
       .select('b, 'a.sum as 'a)
-      .flatAggregate(top3('a) as ('v1, 'v2))
+      .flatAggregate(call(top3, 'a) as ('v1, 'v2))
       .select('v1, 'v2)
 
     val sink = new TestingRetractSink()
@@ -187,11 +188,10 @@ class TableAggregateITCase(mode: StateBackendMode) extends StreamingWithStateTes
 
   @Test
   def testInternalAccumulatorType(): Unit = {
-    val tableAggSum = new TableAggSum
     val source = failingDataSource(tupleData3).toTable(tEnv, 'a, 'b, 'c)
     val resultTable = source
       .groupBy('b)
-      .flatAggregate(tableAggSum('a) as 'sum)
+      .flatAggregate(call(classOf[TableAggSum], 'a) as 'sum)
       .select('b, 'sum)
 
     val sink = new TestingRetractSink()
@@ -209,7 +209,7 @@ class TableAggregateITCase(mode: StateBackendMode) extends StreamingWithStateTes
     expectedException.expectMessage(
       s"Could not find an implementation method 'retract' in class '${classOf[Top3]}' for " +
       s"function 'Top3' that matches the following signature:\n" +
-      s"void retract(${classOf[Top3Accum].getName}, int)")
+      s"void retract(${classOf[Top3Accum].getName}, java.lang.Integer)")
 
     val top3 = new Top3
     val source = env.fromCollection(tupleData3).toTable(tEnv, 'a, 'b, 'c)
@@ -222,4 +222,48 @@ class TableAggregateITCase(mode: StateBackendMode) extends StreamingWithStateTes
 
     env.execute()
   }
+
+  @Test
+  def testOverloadedAccumulator(): Unit = {
+    val source = failingDataSource(tupleData3).toTable(tEnv, 'a, 'b, 'c)
+
+    val sink1 = new TestingRetractSink()
+    source
+      .groupBy('b)
+      .flatAggregate(call(classOf[OverloadedDoubleMaxFunction], 'a) as 'max)
+      .select('b, 'max)
+      .toRetractStream[Row]
+      .addSink(sink1).setParallelism(1)
+
+    val sink2 = new TestingRetractSink()
+    source
+      .select('a.cast(DataTypes.STRING()) + "str" as 'a, 'b)
+      .groupBy('b)
+      .flatAggregate(call(classOf[OverloadedDoubleMaxFunction], 'a) as 'max)
+      .select('b, 'max)
+      .toRetractStream[Row]
+      .addSink(sink2).setParallelism(1)
+
+    env.execute()
+
+    val expected1 = List(
+      "1,1", "1,1",
+      "2,3", "2,3",
+      "3,6", "3,6",
+      "4,10", "4,10",
+      "5,15", "5,15",
+      "6,21", "6,21"
+    )
+    assertEquals(expected1.sorted, sink1.getRetractResults.sorted)
+
+    val expected2 = List(
+      "1,1str", "1,1str",
+      "2,3str", "2,3str",
+      "3,6str", "3,6str",
+      "4,9str", "4,9str",
+      "5,15str", "5,15str",
+      "6,21str", "6,21str"
+    )
+    assertEquals(expected2.sorted, sink2.getRetractResults.sorted)
+  }
 }
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTestBase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTestBase.scala
index 8b3be2c..52f951e 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTestBase.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/utils/BatchTestBase.scala
@@ -30,7 +30,7 @@ import org.apache.flink.table.api.internal.TableEnvironmentImpl
 import org.apache.flink.table.data.RowData
 import org.apache.flink.table.data.binary.BinaryRowData
 import org.apache.flink.table.data.writer.BinaryRowWriter
-import org.apache.flink.table.functions.{AggregateFunction, ScalarFunction, TableFunction}
+import org.apache.flink.table.functions.{AggregateFunction, ScalarFunction, TableFunction, UserDefinedFunction}
 import org.apache.flink.table.planner.delegation.PlannerBase
 import org.apache.flink.table.planner.factories.TestValuesTableFactory
 import org.apache.flink.table.planner.plan.stats.FlinkStatistic
@@ -391,16 +391,35 @@ class BatchTestBase extends BatchAbstractTestBase {
       tEnv, tableName, data, typeInfo, fields, fieldNullables, Some(statistic))
   }
 
+  def registerTemporarySystemFunction(
+      name: String,
+      functionClass: Class[_ <: UserDefinedFunction])
+    : Unit = {
+    testingTableEnv.createTemporarySystemFunction(name, functionClass)
+  }
+
+  /**
+   * @deprecated Use [[registerTemporarySystemFunction()]] for the new type inference.
+   */
+  @deprecated
   def registerFunction(name: String, function: ScalarFunction): Unit = {
     testingTableEnv.registerFunction(name, function)
   }
 
+  /**
+   * @deprecated Use [[registerTemporarySystemFunction()]] for the new type inference.
+   */
+  @deprecated
   def registerFunction[T: TypeInformation, ACC: TypeInformation](
       name: String,
       f: AggregateFunction[T, ACC]): Unit = {
     testingTableEnv.registerFunction(name, f)
   }
 
+  /**
+   * @deprecated Use [[registerTemporarySystemFunction()]] for the new type inference.
+   */
+  @deprecated
   def registerFunction[T: TypeInformation](name: String, tf: TableFunction[T]): Unit = {
     testingTableEnv.registerFunction(name, tf)
   }
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/utils/UserDefinedFunctionTestUtils.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/utils/UserDefinedFunctionTestUtils.scala
index c06b69b..fb7fe3e 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/utils/UserDefinedFunctionTestUtils.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/runtime/utils/UserDefinedFunctionTestUtils.scala
@@ -27,6 +27,7 @@ import org.apache.flink.configuration.Configuration
 import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
 import org.apache.flink.table.data.{RowData, StringData}
 import org.apache.flink.table.functions.{AggregateFunction, FunctionContext, ScalarFunction}
+import org.apache.flink.table.planner.JLong
 import org.apache.flink.types.Row
 
 import com.google.common.base.Charsets
@@ -139,13 +140,13 @@ object UserDefinedFunctionTestUtils {
     override def getValue(acc: Tuple1[Long]): Long = acc.f0
   }
 
-  class CountNullNonNull extends AggregateFunction[String, Tuple2[Long, Long]] {
+  class CountNullNonNull extends AggregateFunction[String, Tuple2[JLong, JLong]] {
 
-    override def createAccumulator(): Tuple2[Long, Long] = Tuple2.of(0L, 0L)
+    override def createAccumulator(): Tuple2[JLong, JLong] = Tuple2.of(0L, 0L)
 
-    override def getValue(acc: Tuple2[Long, Long]): String = s"${acc.f0}|${acc.f1}"
+    override def getValue(acc: Tuple2[JLong, JLong]): String = s"${acc.f0}|${acc.f1}"
 
-    def accumulate(acc: Tuple2[Long, Long], v: String): Unit = {
+    def accumulate(acc: Tuple2[JLong, JLong], v: String): Unit = {
       if (v == null) {
         acc.f1 += 1
       } else {
@@ -153,7 +154,7 @@ object UserDefinedFunctionTestUtils {
       }
     }
 
-    def retract(acc: Tuple2[Long, Long], v: String): Unit = {
+    def retract(acc: Tuple2[JLong, JLong], v: String): Unit = {
       if (v == null) {
         acc.f1 -= 1
       } else {
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/CountAggFunction.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/CountAggFunction.scala
index 6b08ebb..be95993 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/CountAggFunction.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/CountAggFunction.scala
@@ -26,7 +26,7 @@ import org.apache.flink.table.functions.AggregateFunction
 import java.lang.{Iterable => JIterable, Long => JLong}
 
 /** The initial accumulator for count aggregate function */
-class CountAccumulator extends JTuple1[Long] {
+class CountAccumulator extends JTuple1[JLong] {
   f0 = 0L //count
 }
 
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/TableTestBase.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/TableTestBase.scala
index 9a8dd36..95033a8 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/TableTestBase.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/TableTestBase.scala
@@ -558,22 +558,25 @@ abstract class TableTestUtil(
   }
 
   /**
-    * Registers a [[TableFunction]] under given name into the TableEnvironment's catalog.
-    */
+   * @deprecated Use [[addTemporarySystemFunction()]] for the new type inference.
+   */
+  @deprecated
   def addFunction[T: TypeInformation](
       name: String,
       function: TableFunction[T]): Unit = testingTableEnv.registerFunction(name, function)
 
   /**
-    * Registers a [[AggregateFunction]] under given name into the TableEnvironment's catalog.
-    */
+   * @deprecated Use [[addTemporarySystemFunction()]] for the new type inference.
+   */
+  @deprecated
   def addFunction[T: TypeInformation, ACC: TypeInformation](
       name: String,
       function: AggregateFunction[T, ACC]): Unit = testingTableEnv.registerFunction(name, function)
 
   /**
-    * Registers a [[TableAggregateFunction]] under given name into the TableEnvironment's catalog.
-    */
+   * @deprecated Use [[addTemporarySystemFunction()]] for the new type inference.
+   */
+  @deprecated
   def addFunction[T: TypeInformation, ACC: TypeInformation](
       name: String,
       function: TableAggregateFunction[T, ACC]): Unit = {
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/UserDefinedTableAggFunctions.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/UserDefinedTableAggFunctions.scala
index 50e3164..5515bfb 100644
--- a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/UserDefinedTableAggFunctions.scala
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/UserDefinedTableAggFunctions.scala
@@ -20,12 +20,15 @@ package org.apache.flink.table.planner.utils
 
 import org.apache.flink.api.common.typeinfo.TypeInformation
 import org.apache.flink.api.java.tuple.{Tuple2 => JTuple2}
+import org.apache.flink.table.annotation.{DataTypeHint, FunctionHint}
 import org.apache.flink.table.api.Types
 import org.apache.flink.table.api.dataview.MapView
 import org.apache.flink.table.data.{GenericRowData, RowData}
 import org.apache.flink.table.functions.TableAggregateFunction
+import org.apache.flink.table.planner.JLong
 import org.apache.flink.table.runtime.typeutils.InternalTypeInfo
 import org.apache.flink.table.types.logical.IntType
+import org.apache.flink.types.Row
 import org.apache.flink.util.Collector
 
 import java.lang.{Integer => JInt, Iterable => JIterable}
@@ -40,7 +43,7 @@ import scala.collection.mutable.ListBuffer
 /****** Function for testing basic functionality of TableAggregateFunction ******/
 
 class Top3Accum {
-  var data: util.HashMap[JInt, JInt] = _
+  var data: util.Map[JInt, JInt] = _
   var size: JInt = _
   var smallest: JInt = _
 }
@@ -54,7 +57,7 @@ class Top3 extends TableAggregateFunction[JTuple2[JInt, JInt], Top3Accum] {
     acc
   }
 
-  def add(acc: Top3Accum, v: Int): Unit = {
+  def add(acc: Top3Accum, v: JInt): Unit = {
     var cnt = acc.data.get(v)
     acc.size += 1
     if (cnt == null) {
@@ -63,7 +66,7 @@ class Top3 extends TableAggregateFunction[JTuple2[JInt, JInt], Top3Accum] {
     acc.data.put(v, cnt + 1)
   }
 
-  def delete(acc: Top3Accum, v: Int): Unit = {
+  def delete(acc: Top3Accum, v: JInt): Unit = {
     if (acc.data.containsKey(v)) {
       acc.size -= 1
       val cnt = acc.data.get(v) - 1
@@ -86,7 +89,7 @@ class Top3 extends TableAggregateFunction[JTuple2[JInt, JInt], Top3Accum] {
     }
   }
 
-  def accumulate(acc: Top3Accum, v: Int) {
+  def accumulate(acc: Top3Accum, v: JInt) {
     if (acc.size == 0) {
       acc.size = 1
       acc.smallest = v
@@ -210,6 +213,7 @@ class Top3WithMapView extends TableAggregateFunction[JTuple2[JInt, JInt], Top3Wi
 /****** Function for testing retract input ******/
 
 class Top3WithRetractInputAcc {
+  @DataTypeHint("RAW")
   var data: ListBuffer[Int] = _
 }
 
@@ -223,11 +227,11 @@ class Top3WithRetractInput
     acc
   }
 
-  def accumulate(acc: Top3WithRetractInputAcc, v: Int) {
+  def accumulate(acc: Top3WithRetractInputAcc, v: JInt) {
     acc.data.append(v)
   }
 
-  def retract(acc: Top3WithRetractInputAcc, v: Int) {
+  def retract(acc: Top3WithRetractInputAcc, v: JInt) {
     acc.data.remove(acc.data.indexOf(v))
   }
 
@@ -245,6 +249,7 @@ class Top3WithRetractInput
 
 /****** Function for testing internal accumulator type ******/
 
+@FunctionHint(accumulator = new DataTypeHint(value = "ROW<i INT>", bridgedTo = classOf[RowData]))
 class TableAggSum extends TableAggregateFunction[JInt, RowData] {
 
   override def createAccumulator(): RowData = {
@@ -253,7 +258,7 @@ class TableAggSum extends TableAggregateFunction[JInt, RowData] {
     acc
   }
 
-  def accumulate(rowData: RowData, v: Int): Unit = {
+  def accumulate(rowData: RowData, v: JInt): Unit = {
     val acc = rowData.asInstanceOf[GenericRowData]
     acc.setField(0, acc.getInt(0) + v)
   }
@@ -265,10 +270,6 @@ class TableAggSum extends TableAggregateFunction[JInt, RowData] {
     out.collect(result)
     out.collect(result)
   }
-
-  override def getAccumulatorType: TypeInformation[RowData] = {
-    InternalTypeInfo.ofFields(new IntType())
-  }
 }
 
 /**
@@ -278,13 +279,13 @@ class EmptyTableAggFunc extends TableAggregateFunction[JTuple2[JInt, JInt], Top3
 
   override def createAccumulator(): Top3Accum = new Top3Accum
 
-  def accumulate(acc: Top3Accum, catagory: Timestamp, value: Timestamp): Unit = {}
+  def accumulate(acc: Top3Accum, category: Timestamp, value: Timestamp): Unit = {}
 
-  def accumulate(acc: Top3Accum, category: Long, value: Timestamp): Unit = {}
+  def accumulate(acc: Top3Accum, category: JLong, value: Timestamp): Unit = {}
 
-  def accumulate(acc: Top3Accum, category: Long, value: Int): Unit = {}
+  def accumulate(acc: Top3Accum, category: JLong, value: JInt): Unit = {}
 
-  def accumulate(acc: Top3Accum, value: Int): Unit = {}
+  def accumulate(acc: Top3Accum, value: JInt): Unit = {}
 
   def emitValue(acc: Top3Accum, out: Collector[JTuple2[JInt, JInt]]): Unit = {}
 }
@@ -293,7 +294,7 @@ class EmptyTableAggFuncWithIntResultType extends TableAggregateFunction[JInt, To
 
   override def createAccumulator(): Top3Accum = new Top3Accum
 
-  def accumulate(acc: Top3Accum, value: Int): Unit = {}
+  def accumulate(acc: Top3Accum, value: JInt): Unit = {}
 
   def emitValue(acc: Top3Accum, out: Collector[JInt]): Unit = {}
 }
diff --git a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/runtime/utils/JavaUserDefinedAggFunctions.java b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/runtime/utils/JavaUserDefinedAggFunctions.java
index 1f059a9..8249d2e 100644
--- a/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/runtime/utils/JavaUserDefinedAggFunctions.java
+++ b/flink-table/flink-table-planner/src/test/java/org/apache/flink/table/runtime/utils/JavaUserDefinedAggFunctions.java
@@ -25,8 +25,10 @@ import org.apache.flink.table.api.dataview.MapView;
 import org.apache.flink.table.functions.AggregateFunction;
 import org.apache.flink.table.functions.FunctionRequirement;
 
+import java.util.Collections;
 import java.util.Iterator;
 import java.util.Map;
+import java.util.Set;
 
 /**
  * Test aggregator functions.
@@ -54,6 +56,11 @@ public class JavaUserDefinedAggFunctions {
 		//Overloaded accumulate method
 		public void accumulate(Accumulator0 accumulator, long iValue, int iWeight) {
 		}
+
+		@Override
+		public Set<FunctionRequirement> getRequirements() {
+			return Collections.singleton(FunctionRequirement.OVER_WINDOW_ONLY);
+		}
 	}
 
 	/**
diff --git a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/AggregateTest.scala b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/AggregateTest.scala
index 6ddf133..21540a2 100644
--- a/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/AggregateTest.scala
+++ b/flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/api/stream/table/AggregateTest.scala
@@ -18,7 +18,6 @@
 
 package org.apache.flink.table.api.stream.table
 
-import org.apache.flink.api.common.typeinfo.BasicTypeInfo
 import org.apache.flink.api.scala._
 import org.apache.flink.table.api._
 import org.apache.flink.table.runtime.utils.JavaUserDefinedAggFunctions.WeightedAvg
@@ -81,7 +80,7 @@ class AggregateTest extends TableTestBase {
   }
 
   @Test
-  def testGroupAggregate() = {
+  def testGroupAggregate(): Unit = {
     val util = streamTestUtil()
     val table = util.addTable[(Long, Int, String)]('a, 'b, 'c)
 
@@ -223,7 +222,7 @@ class AggregateTest extends TableTestBase {
 
     val resultTable = table
       .groupBy('b)
-      .select('b, 'a.cast(BasicTypeInfo.DOUBLE_TYPE_INFO).avg)
+      .select('b, 'a.cast(DataTypes.DOUBLE()).avg)
 
     val expected =
       unaryNode(
diff --git a/flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/types/PlannerTypeUtils.java b/flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/types/PlannerTypeUtils.java
index 75db55f..2f550c5 100644
--- a/flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/types/PlannerTypeUtils.java
+++ b/flink-table/flink-table-runtime-blink/src/main/java/org/apache/flink/table/runtime/types/PlannerTypeUtils.java
@@ -71,14 +71,6 @@ public class PlannerTypeUtils {
 		}
 	}
 
-	public static RowType toRowType(LogicalType t) {
-		if (t instanceof RowType) {
-			return (RowType) t;
-		} else {
-			return RowType.of(t);
-		}
-	}
-
 	/**
 	 * A conversion that removes all {@link LegacyTypeInformationType}s by mapping to corresponding new types.
 	 */


[flink] 05/06: [hotfix][table-common] Add CallExpression.getFunctionName for easier printing

Posted by tw...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 5bc3c7678c3ea532ec3deecfdcd7a8f98786ebe1
Author: Timo Walther <tw...@apache.org>
AuthorDate: Fri Jul 24 15:45:58 2020 +0200

    [hotfix][table-common] Add CallExpression.getFunctionName for easier printing
---
 .../operations/utils/CalculatedTableFactory.java     |  5 +----
 .../flink/table/expressions/CallExpression.java      | 20 ++++++++++++--------
 2 files changed, 13 insertions(+), 12 deletions(-)

diff --git a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/CalculatedTableFactory.java b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/CalculatedTableFactory.java
index b194a5d..de9989c 100644
--- a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/CalculatedTableFactory.java
+++ b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/operations/utils/CalculatedTableFactory.java
@@ -27,7 +27,6 @@ import org.apache.flink.table.expressions.ExpressionUtils;
 import org.apache.flink.table.expressions.ResolvedExpression;
 import org.apache.flink.table.expressions.utils.ResolvedExpressionDefaultVisitor;
 import org.apache.flink.table.functions.FunctionDefinition;
-import org.apache.flink.table.functions.FunctionIdentifier;
 import org.apache.flink.table.operations.CalculatedQueryOperation;
 import org.apache.flink.table.operations.QueryOperation;
 import org.apache.flink.table.types.DataType;
@@ -101,9 +100,7 @@ final class CalculatedTableFactory {
 			final TableSchema tableSchema = adjustNames(
 				extractSchema(callExpression.getOutputDataType()),
 				aliases,
-				callExpression.getFunctionIdentifier()
-					.map(FunctionIdentifier::asSummaryString)
-					.orElse(functionDefinition.toString()));
+				callExpression.getFunctionName());
 
 			return new CalculatedQueryOperation(
 				functionDefinition,
diff --git a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/CallExpression.java b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/CallExpression.java
index e3c5cb9..b3d4103 100644
--- a/flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/CallExpression.java
+++ b/flink-table/flink-table-common/src/main/java/org/apache/flink/table/expressions/CallExpression.java
@@ -86,6 +86,17 @@ public final class CallExpression implements ResolvedExpression {
 		return functionDefinition;
 	}
 
+	/**
+	 * Returns a string representation of the call's function for logging or printing to a console.
+	 */
+	public String getFunctionName() {
+		if (functionIdentifier == null) {
+			return functionDefinition.toString();
+		} else {
+			return functionIdentifier.asSummaryString();
+		}
+	}
+
 	@Override
 	public DataType getOutputDataType() {
 		return dataType;
@@ -98,18 +109,11 @@ public final class CallExpression implements ResolvedExpression {
 
 	@Override
 	public String asSummaryString() {
-		final String functionName;
-		if (functionIdentifier == null) {
-			functionName = functionDefinition.toString();
-		} else {
-			functionName = functionIdentifier.asSummaryString();
-		}
-
 		final String argList = args.stream()
 			.map(Expression::asSummaryString)
 			.collect(Collectors.joining(", ", "(", ")"));
 
-		return functionName + argList;
+		return getFunctionName() + argList;
 	}
 
 	@Override


[flink] 03/06: [hotfix][table-api-java] Fix JavaDocs for TableEnvironment.fromValues

Posted by tw...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

twalthr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit ff70cc865e7b583126cd6b739b998be706c5c7ca
Author: Timo Walther <tw...@apache.org>
AuthorDate: Mon Jul 27 15:49:44 2020 +0200

    [hotfix][table-api-java] Fix JavaDocs for TableEnvironment.fromValues
---
 .../src/main/java/org/apache/flink/table/api/TableEnvironment.java     | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableEnvironment.java b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableEnvironment.java
index d5fa61f..b56cd5b 100644
--- a/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableEnvironment.java
+++ b/flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TableEnvironment.java
@@ -31,7 +31,6 @@ import org.apache.flink.table.module.Module;
 import org.apache.flink.table.sinks.TableSink;
 import org.apache.flink.table.sources.TableSource;
 import org.apache.flink.table.types.AbstractDataType;
-import org.apache.flink.table.types.DataType;
 
 import java.io.Serializable;
 import java.util.Arrays;
@@ -223,7 +222,7 @@ public interface TableEnvironment {
 	 * <p>The method will derive the types automatically from the input expressions. If types
 	 * at a certain position differ, the method will try to find a common super type for all types. If a common
 	 * super type does not exist, an exception will be thrown. If you want to specify the requested type explicitly
-	 * see {@link #fromValues(DataType, Expression...)}.
+	 * see {@link #fromValues(AbstractDataType, Expression...)}.
 	 *
 	 * <p>It is also possible to use {@link org.apache.flink.types.Row} object instead of
 	 * {@code row} expressions.