Posted to commits@flink.apache.org by ku...@apache.org on 2019/07/11 11:19:35 UTC

[flink] 05/05: [FLINK-13107][table-planner-blink] Copy TableApi IT and UT to Blink planner.

This is an automated email from the ASF dual-hosted git repository.

kurt pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/flink.git

commit 116c10b2c67ca1187ccf7847cd795261802f74df
Author: beyond1920 <be...@126.com>
AuthorDate: Mon Jul 8 12:30:28 2019 +0800

    [FLINK-13107][table-planner-blink] Copy TableApi IT and UT to Blink planner.
    
    This closes #9006
---
 .../flink/table/plan/batch/table/AggregateTest.xml |  97 +++++
 .../flink/table/plan/batch/table/CalcTest.xml      | 235 +++++++++++
 .../table/plan/batch/table/ColumnFunctionsTest.xml |  35 ++
 .../flink/table/plan/batch/table/CorrelateTest.xml | 130 ++++++
 .../table/plan/batch/table/GroupWindowTest.xml     | 123 ++++++
 .../flink/table/plan/batch/table/JoinTest.xml      | 280 +++++++++++++
 .../table/plan/batch/table/SetOperatorsTest.xml    | 201 +++++++++
 .../stringexpr/CorrelateStringExpressionTest.xml   | 156 +++++++
 .../batch/table/stringexpr/SetOperatorsTest.xml    |  33 ++
 .../table/plan/stream/table/AggregateTest.xml      | 279 +++++++++++++
 .../flink/table/plan/stream/table/CalcTest.xml     | 187 +++++++++
 .../plan/stream/table/ColumnFunctionsTest.xml      | 229 ++++++++++
 .../table/plan/stream/table/CorrelateTest.xml      | 209 ++++++++++
 .../table/plan/stream/table/GroupWindowTest.xml    | 460 +++++++++++++++++++++
 .../flink/table/plan/stream/table/JoinTest.xml     | 392 ++++++++++++++++++
 .../table/plan/stream/table/OverWindowTest.xml     | 308 ++++++++++++++
 .../table/plan/stream/table/SetOperatorsTest.xml   | 140 +++++++
 .../table/plan/stream/table/TableSourceTest.xml    | 153 +++++++
 .../plan/stream/table/TwoStageAggregateTest.xml    | 142 +++++++
 .../table/plan/batch/table/AggregateTest.scala     |  75 ++++
 .../flink/table/plan/batch/table/CalcTest.scala    | 201 +++++++++
 .../plan/batch/table/ColumnFunctionsTest.scala     |  51 +++
 .../table/plan/batch/table/CorrelateTest.scala     | 120 ++++++
 .../table/plan/batch/table/GroupWindowTest.scala   | 158 +++++++
 .../flink/table/plan/batch/table/JoinTest.scala    | 211 ++++++++++
 .../table/plan/batch/table/SetOperatorsTest.scala  | 133 ++++++
 .../plan/batch/table/TemporalTableJoinTest.scala   |  72 ++++
 .../stringexpr/AggregateStringExpressionTest.scala | 341 +++++++++++++++
 .../stringexpr/CalcStringExpressionTest.scala      | 366 ++++++++++++++++
 .../stringexpr/CorrelateStringExpressionTest.scala |  94 +++++
 .../stringexpr/JoinStringExpressionTest.scala      | 187 +++++++++
 .../batch/table/stringexpr/SetOperatorsTest.scala  |  52 +++
 .../stringexpr/SortStringExpressionTest.scala      |  61 +++
 .../table/validation/AggregateValidationTest.scala | 221 ++++++++++
 .../table/validation/CalcValidationTest.scala      | 117 ++++++
 .../table/validation/CorrelateValidationTest.scala |  46 +++
 .../validation/GroupWindowValidationTest.scala     | 172 ++++++++
 .../table/validation/JoinValidationTest.scala      | 118 ++++++
 .../validation/OverWindowValidationTest.scala      |  56 +++
 .../validation/SetOperatorsValidationTest.scala    | 113 +++++
 .../table/validation/SortValidationTest.scala      |  69 ++++
 .../table/plan/stream/table/AggregateTest.scala    | 230 +++++++++++
 .../flink/table/plan/stream/table/CalcTest.scala   | 161 ++++++++
 .../plan/stream/table/ColumnFunctionsTest.scala    | 228 ++++++++++
 .../table/plan/stream/table/CorrelateTest.scala    | 181 ++++++++
 .../table/plan/stream/table/GroupWindowTest.scala  | 409 ++++++++++++++++++
 .../flink/table/plan/stream/table/JoinTest.scala   | 263 ++++++++++++
 .../table/plan/stream/table/OverWindowTest.scala   | 222 ++++++++++
 .../table/plan/stream/table/SetOperatorsTest.scala |  87 ++++
 .../table/plan/stream/table/TableSourceTest.scala  | 302 ++++++++++++++
 .../plan/stream/table/TemporalTableJoinTest.scala  | 191 +++++++++
 .../plan/stream/table/TwoStageAggregateTest.scala  | 111 +++++
 .../stringexpr/AggregateStringExpressionTest.scala | 246 +++++++++++
 .../stringexpr/CalcStringExpressionTest.scala      | 183 ++++++++
 .../stringexpr/CorrelateStringExpressionTest.scala | 160 +++++++
 .../GroupWindowStringExpressionTest.scala          | 263 ++++++++++++
 ...pWindowTableAggregateStringExpressionTest.scala | 227 ++++++++++
 .../OverWindowStringExpressionTest.scala           | 248 +++++++++++
 .../SetOperatorsStringExpressionTest.scala         |  50 +++
 .../table/validation/AggregateValidationTest.scala | 127 ++++++
 .../table/validation/CalcValidationTest.scala      | 165 ++++++++
 .../table/validation/CorrelateValidationTest.scala | 178 ++++++++
 .../validation/GroupWindowValidationTest.scala     | 308 ++++++++++++++
 .../validation/OverWindowValidationTest.scala      | 164 ++++++++
 .../validation/SetOperatorsValidationTest.scala    |  80 ++++
 .../table/validation/TableSinkValidationTest.scala |  87 ++++
 .../TemporalTableJoinValidationTest.scala          | 114 +++++
 .../validation/UnsupportedOpsValidationTest.scala  | 109 +++++
 .../runtime/batch/table/AggregationITCase.scala    |  16 -
 .../table/runtime/batch/table/CalcITCase.scala     |   8 +-
 .../runtime/batch/table/GroupWindowITCase.scala    |   6 +-
 .../runtime/batch/table/SetOperatorsITCase.scala   |   2 -
 .../table/runtime/stream/sql/CalcITCase.scala      |   2 -
 .../table/runtime/stream/sql/CorrelateITCase.scala |   9 +-
 .../runtime/stream/table/GroupWindowITCase.scala   | 295 +++++++++++++
 .../stream/table/MiniBatchGroupWindowITCase.scala  | 156 +++++++
 .../runtime/stream/table/OverWindowITCase.scala    |  16 +-
 .../runtime/stream/table/SetOperatorsITCase.scala  | 205 +++++++++
 .../runtime/stream/table/SubQueryITCase.scala      | 148 +++++++
 .../table/util/MemoryTableSourceSinkUtil.scala     | 165 ++++++++
 .../apache/flink/table/util/TableTestBase.scala    |  32 +-
 81 files changed, 12930 insertions(+), 47 deletions(-)
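
Note on how these resources are consumed: each *.xml file below records a pair of plans per test case, "planBefore" (the logical plan produced by the Table API call) and "planAfter" (the optimized Blink physical plan), and the matching *.scala test re-runs the query and asserts that the planner still emits the recorded plan. A minimal sketch of that pattern in Scala, assuming the Blink planner's TableTestBase utilities (batchTestUtil, addTableSource, verifyPlan) behave as in the copied tests:

    import org.apache.flink.api.scala._
    import org.apache.flink.table.api.scala._
    import org.apache.flink.table.util.TableTestBase
    import org.junit.Test

    class AggregateTest extends TableTestBase {

      // Builds a batch TableEnvironment backed by the Blink planner.
      private val util = batchTestUtil()

      @Test
      def testAggregate(): Unit = {
        // Field names match TestTableSource(a, b, c) in AggregateTest.xml below.
        val table = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
        val result = table.select('a.avg, 'b.sum, 'c.count)
        // Compares the optimized plan with the planAfter recorded in the XML.
        util.verifyPlan(result)
      }
    }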

diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/AggregateTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/AggregateTest.xml
new file mode 100644
index 0000000..1141264
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/AggregateTest.xml
@@ -0,0 +1,97 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testAggregate">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0], EXPR$1=[$1], EXPR$2=[$2])
++- LogicalAggregate(group=[{}], EXPR$0=[AVG($0)], EXPR$1=[SUM($1)], EXPR$2=[COUNT($2)])
+   +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+SortAggregate(isMerge=[true], select=[Final_AVG(sum$0, count$1) AS EXPR$0, Final_SUM(sum$2) AS EXPR$1, Final_COUNT(count$3) AS EXPR$2])
++- Exchange(distribution=[single])
+   +- LocalSortAggregate(select=[Partial_AVG(a) AS (sum$0, count$1), Partial_SUM(b) AS sum$2, Partial_COUNT(c) AS count$3])
+      +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testAggregateWithFilter">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0], EXPR$1=[$1], EXPR$2=[$2])
++- LogicalAggregate(group=[{}], EXPR$0=[AVG($0)], EXPR$1=[SUM($1)], EXPR$2=[COUNT($2)])
+   +- LogicalFilter(condition=[=($0, 1)])
+      +- LogicalProject(a=[$0], b=[$1], c=[$2])
+         +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+SortAggregate(isMerge=[true], select=[Final_AVG(sum$0, count$1) AS EXPR$0, Final_SUM(sum$2) AS EXPR$1, Final_COUNT(count$3) AS EXPR$2])
++- Exchange(distribution=[single])
+   +- LocalSortAggregate(select=[Partial_AVG(a) AS (sum$0, count$1), Partial_SUM(b) AS sum$2, Partial_COUNT(c) AS count$3])
+      +- Calc(select=[CAST(1) AS a, b, c], where=[=(a, 1)])
+         +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testAggregateWithFilterOnNestedFields">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0], EXPR$1=[$1], EXPR$2=[$2], EXPR$3=[$3])
++- LogicalAggregate(group=[{}], EXPR$0=[AVG($0)], EXPR$1=[SUM($1)], EXPR$2=[COUNT($2)], EXPR$3=[SUM($3)])
+   +- LogicalProject(a=[$0], b=[$1], c=[$2], $f3=[$2._1])
+      +- LogicalFilter(condition=[=($0, 1)])
+         +- LogicalProject(a=[$0], b=[$1], c=[$2])
+            +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+SortAggregate(isMerge=[true], select=[Final_AVG(sum$0, count$1) AS EXPR$0, Final_SUM(sum$2) AS EXPR$1, Final_COUNT(count$3) AS EXPR$2, Final_SUM(sum$4) AS EXPR$3])
++- Exchange(distribution=[single])
+   +- LocalSortAggregate(select=[Partial_AVG(a) AS (sum$0, count$1), Partial_SUM(b) AS sum$2, Partial_COUNT(c) AS count$3, Partial_SUM($f3) AS sum$4])
+      +- Calc(select=[CAST(1) AS a, b, c, c._1 AS $f3], where=[=(a, 1)])
+         +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testGroupAggregateWithFilter">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[=($0, 1)])
++- LogicalProject(a=[$0], EXPR$0=[$1], EXPR$1=[$2], EXPR$2=[$3])
+   +- LogicalAggregate(group=[{0}], EXPR$0=[AVG($0)], EXPR$1=[SUM($1)], EXPR$2=[COUNT($2)])
+      +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[CAST(1) AS a, EXPR$0, EXPR$1, EXPR$2])
++- HashAggregate(isMerge=[true], groupBy=[a], select=[a, Final_AVG(sum$0, count$1) AS EXPR$0, Final_SUM(sum$2) AS EXPR$1, Final_COUNT(count$3) AS EXPR$2])
+   +- Exchange(distribution=[hash[a]])
+      +- LocalHashAggregate(groupBy=[a], select=[a, Partial_AVG(a) AS (sum$0, count$1), Partial_SUM(b) AS sum$2, Partial_COUNT(c) AS count$3])
+         +- Calc(select=[a, b, c], where=[=(a, 1)])
+            +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
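
Note: the testGroupAggregateWithFilter case above records a filter-transpose rewrite: in planBefore the filter on the group key sits above the aggregate, while in planAfter it has been pushed below the local aggregate (Calc(where=[=(a, 1)])) and the key is constant-folded to CAST(1) AS a in the top Calc. A hypothetical reconstruction of the query under test, inferred from planBefore and continuing the sketch above:

    // The filter on the group key is written after the aggregation; the
    // optimizer transposes it below the aggregate, as planAfter shows.
    val result = table
      .groupBy('a)
      .select('a, 'a.avg, 'b.sum, 'c.count)
      .where('a === 1)
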
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/CalcTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/CalcTest.xml
new file mode 100644
index 0000000..9500570
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/CalcTest.xml
@@ -0,0 +1,235 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testScalarFunctionAccess">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(_c0=[org$apache$flink$table$plan$batch$table$CalcTest$giveMeCaseClass$$fe1bff2b06d8e0e495536102224cfe83().my], _c1=[org$apache$flink$table$plan$batch$table$CalcTest$giveMeCaseClass$$fe1bff2b06d8e0e495536102224cfe83().clazz], _c2=[org$apache$flink$table$plan$batch$table$CalcTest$giveMeCaseClass$$fe1bff2b06d8e0e495536102224cfe83().my], _c3=[org$apache$flink$table$plan$batch$table$CalcTest$giveMeCaseClass$$fe1bff2b06d8e0e495536102224cfe83().clazz])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[giveMeCaseClass$().my AS _c0, giveMeCaseClass$().clazz AS _c1, giveMeCaseClass$().my AS _c2, giveMeCaseClass$().clazz AS _c3])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b)]]], fields=[a, b])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testMultiFilter">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[=(MOD($0, 2), 1)])
++- LogicalFilter(condition=[<($1, 2)])
+   +- LogicalFilter(condition=[>($0, 0)])
+      +- LogicalProject(a=[$0], b=[$1])
+         +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, b], where=[AND(>(a, 0), <(b, 2), =(MOD(a, 2), 1))])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]], fields=[a, b, c, d])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testMultipleFlatteningsTable">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a$_1=[$0._1], a$_2=[$0._2], c=[$2], b$_1=[$1._1], b$_2=[$1._2])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a._1 AS a$_1, a._2 AS a$_2, c, b._1 AS b$_1, b._2 AS b$_2])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testNestedFlattening">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a$_1=[$0._1], a$_2=[$0._2], b=[$1])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a._1 AS a$_1, a._2 AS a$_2, b])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b)]]], fields=[a, b])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testSelectAggregation">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0], EXPR$1=[$1])
++- LogicalAggregate(group=[{}], EXPR$0=[SUM($0)], EXPR$1=[MAX($1)])
+   +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+SortAggregate(isMerge=[true], select=[Final_SUM(sum$0) AS EXPR$0, Final_MAX(max$1) AS EXPR$1])
++- Exchange(distribution=[single])
+   +- LocalSortAggregate(select=[Partial_SUM(a) AS sum$0, Partial_MAX(b) AS max$1])
+      +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]], fields=[a, b, c, d])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testSelectAggregationFromGroupedTable">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$1])
++- LogicalAggregate(group=[{2}], EXPR$0=[SUM($0)])
+   +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[EXPR$0])
++- HashAggregate(isMerge=[true], groupBy=[c], select=[c, Final_SUM(sum$0) AS EXPR$0])
+   +- Exchange(distribution=[hash[c]])
+      +- LocalHashAggregate(groupBy=[c], select=[c, Partial_SUM(a) AS sum$0])
+         +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]], fields=[a, b, c, d])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testSelectAllFieldsFromGroupedTable">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], c=[$1])
++- LogicalAggregate(group=[{0, 2}])
+   +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+HashAggregate(isMerge=[true], groupBy=[a, c], select=[a, c])
++- Exchange(distribution=[hash[a, c]])
+   +- LocalHashAggregate(groupBy=[a, c], select=[a, c])
+      +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]], fields=[a, b, c, d])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testSelectFromAggregatedPojoTable">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[=($1, 2)])
++- LogicalProject(word=[$0], frequency=[$1])
+   +- LogicalAggregate(group=[{0}], EXPR$0=[SUM($1)])
+      +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(word, frequency)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[word, CAST(2:BIGINT) AS frequency], where=[=(EXPR$0, 2)])
++- HashAggregate(isMerge=[true], groupBy=[word], select=[word, Final_SUM(sum$0) AS EXPR$0])
+   +- Exchange(distribution=[hash[word]])
+      +- LocalHashAggregate(groupBy=[word], select=[word, Partial_SUM(frequency) AS sum$0])
+         +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(word, frequency)]]], fields=[word, frequency])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testSelectFromGroupedTable">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0])
++- LogicalAggregate(group=[{0, 2}])
+   +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a])
++- HashAggregate(isMerge=[true], groupBy=[a, c], select=[a, c])
+   +- Exchange(distribution=[hash[a, c]])
+      +- LocalHashAggregate(groupBy=[a, c], select=[a, c])
+         +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]], fields=[a, b, c, d])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testSelectFromGroupedTableWithFunctionKey">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$1])
++- LogicalAggregate(group=[{4}], EXPR$0=[SUM($0)])
+   +- LogicalProject(a=[$0], b=[$1], c=[$2], d=[$3], k=[org$apache$flink$table$plan$batch$table$CalcTest$MyHashCode$$d14b486109d9dd062ae7c60e04977975($2)])
+      +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[EXPR$0])
++- HashAggregate(isMerge=[true], groupBy=[k], select=[k, Final_SUM(sum$0) AS EXPR$0])
+   +- Exchange(distribution=[hash[k]])
+      +- LocalHashAggregate(groupBy=[k], select=[k, Partial_SUM(a) AS sum$0])
+         +- Calc(select=[a, b, c, d, MyHashCode$(c) AS k])
+            +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]], fields=[a, b, c, d])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testSelectFromGroupedTableWithNonTrivialKey">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$1])
++- LogicalAggregate(group=[{4}], EXPR$0=[SUM($0)])
+   +- LogicalProject(a=[$0], b=[$1], c=[$2], d=[$3], k=[UPPER($2)])
+      +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[EXPR$0])
++- HashAggregate(isMerge=[true], groupBy=[k], select=[k, Final_SUM(sum$0) AS EXPR$0])
+   +- Exchange(distribution=[hash[k]])
+      +- LocalHashAggregate(groupBy=[k], select=[k, Partial_SUM(a) AS sum$0])
+         +- Calc(select=[a, b, c, d, UPPER(c) AS k])
+            +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]], fields=[a, b, c, d])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testSimpleSelect">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], b=[$1])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, b])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]], fields=[a, b, c, d])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testSelectFunction">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(_c0=[org$apache$flink$table$plan$batch$table$CalcTest$MyHashCode$$d14b486109d9dd062ae7c60e04977975($2)], b=[$1])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[MyHashCode$(c) AS _c0, b])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]], fields=[a, b, c, d])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/ColumnFunctionsTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/ColumnFunctionsTest.xml
new file mode 100644
index 0000000..8d14a3d
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/ColumnFunctionsTest.xml
@@ -0,0 +1,35 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testOrderBy">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalSort(sort0=[$0], sort1=[$1], sort2=[$2], dir0=[ASC], dir1=[ASC], dir2=[ASC])
++- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c, d, e, f)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+SortLimit(orderBy=[a ASC, b ASC, c ASC], offset=[0], fetch=[200], global=[true])
++- Exchange(distribution=[single])
+   +- SortLimit(orderBy=[a ASC, b ASC, c ASC], offset=[0], fetch=[200], global=[false])
+      +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c, d, e, f)]]], fields=[a, b, c, d, e, f])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/CorrelateTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/CorrelateTest.xml
new file mode 100644
index 0000000..a6ca745
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/CorrelateTest.xml
@@ -0,0 +1,130 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testCorrelateWithMultiFilterAndWithoutCalcMergeRules">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$0], d=[$1])
++- LogicalFilter(condition=[>($2, 20)])
+   +- LogicalFilter(condition=[>($2, 10)])
+      +- LogicalProject(c=[$2], d=[$3], e=[$4])
+         +- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+            :- LogicalProject(a=[$0], b=[$1], c=[$2])
+            :  +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+            +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc0$2d39fea38a8a8fb8536772fd858e67ed($2)], rowType=[RecordType(VARCHAR(2147483647) d, INTEGER e)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, d])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc0$2d39fea38a8a8fb8536772fd858e67ed($2)], correlate=[table(TableFunc0(c))], select=[a,b,c,d,e], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) d, INTEGER e)], joinType=[INNER], condition=[>($1, 20)])
+   +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testCorrelateWithMultiFilter">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$0], d=[$1])
++- LogicalFilter(condition=[>($2, 20)])
+   +- LogicalFilter(condition=[>($2, 10)])
+      +- LogicalProject(c=[$2], d=[$3], e=[$4])
+         +- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+            :- LogicalProject(a=[$0], b=[$1], c=[$2])
+            :  +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+            +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc0$2d39fea38a8a8fb8536772fd858e67ed($2)], rowType=[RecordType(VARCHAR(2147483647) d, INTEGER e)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, d])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc0$2d39fea38a8a8fb8536772fd858e67ed($2)], correlate=[table(TableFunc0(c))], select=[a,b,c,d,e], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) d, INTEGER e)], joinType=[INNER], condition=[>($1, 20)])
+   +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testCrossJoin">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], s=[$3])
++- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+   :- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2)], rowType=[RecordType(VARCHAR(2147483647) s)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, s])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2)], correlate=[table(TableFunc1(c))], select=[a,b,c,s], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) s)], joinType=[INNER])
+   +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testCrossJoin2">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], s=[$3])
++- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+   :- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2, _UTF-16LE'$')], rowType=[RecordType(VARCHAR(2147483647) s)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, s])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2, _UTF-16LE'$')], correlate=[table(TableFunc1(c,_UTF-16LE'$'))], select=[a,b,c,s], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) s)], joinType=[INNER])
+   +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testLeftOuterJoinWithoutJoinPredicates">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[>($1, _UTF-16LE'')])
++- LogicalProject(c=[$2], s=[$3])
+   +- LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{}])
+      :- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+      +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2)], rowType=[RecordType(VARCHAR(2147483647) s)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, s], where=[>(s, _UTF-16LE'')])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2)], correlate=[table(TableFunc1(c))], select=[a,b,c,s], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) s)], joinType=[LEFT])
+   +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testLeftOuterJoinWithLiteralTrue">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], s=[$3])
++- LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{}])
+   :- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2)], rowType=[RecordType(VARCHAR(2147483647) s)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, s])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2)], correlate=[table(TableFunc1(c))], select=[a,b,c,s], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) s)], joinType=[LEFT])
+   +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
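
Note: the correlate cases above all reduce to the same physical shape: a Correlate node that applies the table function to each input row, with the downstream projection and any residual predicate folded into a single Calc. A hypothetical reconstruction of testCrossJoin, inferred from planBefore (TableFunc1 is the test function from org.apache.flink.table.util; joinLateral is assumed to be the Table API entry point in this version):

    // Inner correlate: each input row is joined with the rows produced by
    // TableFunc1(c); planAfter then keeps only columns c and s.
    val func1 = new TableFunc1  // org.apache.flink.table.util.TableFunc1
    val result = table.joinLateral(func1('c) as 's).select('c, 's)
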
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/GroupWindowTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/GroupWindowTest.xml
new file mode 100644
index 0000000..e14de92
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/GroupWindowTest.xml
@@ -0,0 +1,123 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testAllEventTimeTumblingGroupWindowOverTime">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0])
++- LogicalWindowAggregate(group=[{}], EXPR$0=[COUNT($1)], window=[TumblingGroupWindow], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(long, int, string)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+HashWindowAggregate(window=[TumblingGroupWindow], select=[Final_COUNT(count$0) AS EXPR$0])
++- Exchange(distribution=[single])
+   +- LocalHashWindowAggregate(window=[TumblingGroupWindow], select=[Partial_COUNT(int) AS count$0])
+      +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(long, int, string)]]], fields=[long, int, string])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testEventTimeSlidingGroupWindowOverTime">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[COUNT($1)], window=[SlidingGroupWindow('w, long, 8, 10)], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(long, int, string)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+HashWindowAggregate(groupBy=[string], window=[SlidingGroupWindow('w, long, 8, 10)], select=[string, Final_COUNT(count$0) AS EXPR$0])
++- Exchange(distribution=[hash[string]])
+   +- LocalHashWindowAggregate(groupBy=[string], window=[SlidingGroupWindow('w, long, 8, 10)], select=[string, Partial_COUNT(int) AS count$0])
+      +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(long, int, string)]]], fields=[long, int, string])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testEventTimeTumblingGroupWindowOverTime">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[COUNT($1)], window=[TumblingGroupWindow], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(long, int, string)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+HashWindowAggregate(groupBy=[string], window=[TumblingGroupWindow], select=[string, Final_COUNT(count$0) AS EXPR$0])
++- Exchange(distribution=[hash[string]])
+   +- LocalHashWindowAggregate(groupBy=[string], window=[TumblingGroupWindow], select=[string, Partial_COUNT(int) AS count$0])
+      +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(long, int, string)]]], fields=[long, int, string])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testLongEventTimeTumblingGroupWindowWithProperties">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1], EXPR$1=[$2], EXPR$2=[$3], EXPR$3=[$4])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[COUNT($1)], window=[TumblingGroupWindow], properties=[EXPR$1, EXPR$2, EXPR$3])
+   +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(ts, int, string)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+HashWindowAggregate(groupBy=[string], window=[TumblingGroupWindow], properties=[EXPR$1, EXPR$2, EXPR$3], select=[string, Final_COUNT(count$0) AS EXPR$0])
++- Exchange(distribution=[hash[string]])
+   +- LocalHashWindowAggregate(groupBy=[string], window=[TumblingGroupWindow], properties=[EXPR$1, EXPR$2, EXPR$3], select=[string, Partial_COUNT(int) AS count$0])
+      +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(ts, int, string)]]], fields=[ts, int, string])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testEventTimeTumblingGroupWindowOverTimeWithUdAgg">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[myWeightedAvg($0, $1)], window=[TumblingGroupWindow], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(long, int, string)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+SortWindowAggregate(groupBy=[string], window=[TumblingGroupWindow], select=[string, Final_myWeightedAvg(EXPR$0) AS EXPR$0])
++- Sort(orderBy=[string ASC, assignedWindow$ ASC])
+   +- Exchange(distribution=[hash[string]])
+      +- LocalSortWindowAggregate(groupBy=[string], window=[TumblingGroupWindow], select=[string, Partial_myWeightedAvg(long, int) AS EXPR$0])
+         +- Sort(orderBy=[string ASC, long ASC])
+            +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(long, int, string)]]], fields=[long, int, string])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testTimestampEventTimeTumblingGroupWindowWithProperties">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1], EXPR$1=[$2], EXPR$2=[$3], EXPR$3=[$4])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[COUNT($1)], window=[TumblingGroupWindow], properties=[EXPR$1, EXPR$2, EXPR$3])
+   +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(ts, int, string)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+HashWindowAggregate(groupBy=[string], window=[TumblingGroupWindow], properties=[EXPR$1, EXPR$2, EXPR$3], select=[string, Final_COUNT(count$0) AS EXPR$0])
++- Exchange(distribution=[hash[string]])
+   +- LocalHashWindowAggregate(groupBy=[string], window=[TumblingGroupWindow], properties=[EXPR$1, EXPR$2, EXPR$3], select=[string, Partial_COUNT(int) AS count$0])
+      +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(ts, int, string)]]], fields=[ts, int, string])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/JoinTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/JoinTest.xml
new file mode 100644
index 0000000..e2d258a
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/JoinTest.xml
@@ -0,0 +1,280 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testFilterJoinRule">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[>=($0, 0)])
++- LogicalProject(c1=[org$apache$flink$table$plan$batch$table$JoinTest$Merger$$223b7380fec29c4077a893c60165d845($2, org$apache$flink$table$plan$batch$table$JoinTest$Merger$$223b7380fec29c4077a893c60165d845($2, $5))])
+   +- LogicalJoin(condition=[=($1, $4)], joinType=[left])
+      :- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+      +- LogicalTableScan(table=[[default_catalog, default_database, Table2, source: [TestTableSource(d, e, f)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[Merger$(c, Merger$(c, f)) AS c1], where=[>=(Merger$(c, Merger$(c, f)), 0)])
++- HashJoin(joinType=[LeftOuterJoin], where=[=(b, e)], select=[b, c, e, f], build=[right])
+   :- Exchange(distribution=[hash[b]])
+   :  +- Calc(select=[b, c])
+   :     +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+   +- Exchange(distribution=[hash[e]])
+      +- Calc(select=[e, f])
+         +- TableSourceScan(table=[[default_catalog, default_database, Table2, source: [TestTableSource(d, e, f)]]], fields=[d, e, f])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testFullOuterJoinEquiAndLocalPred">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], y=[$4])
++- LogicalJoin(condition=[AND(=($0, $5), <($1, 2))], joinType=[full])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, y])
++- HashJoin(joinType=[FullOuterJoin], where=[AND(=(a, z), $f3)], select=[a, b, $f3, y, z], build=[left])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, b, <(b, 2) AS $f3])
+   :     +- TableSourceScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+   +- Exchange(distribution=[hash[z]])
+      +- Calc(select=[y, z])
+         +- TableSourceScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]], fields=[x, y, z])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testFullOuterJoinEquiAndNonEquiPred">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], y=[$4])
++- LogicalJoin(condition=[AND(=($0, $5), <($1, $3))], joinType=[full])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, y])
++- HashJoin(joinType=[FullOuterJoin], where=[AND(=(a, z), <(b, x))], select=[a, b, x, y, z], build=[left])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, b])
+   :     +- TableSourceScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+   +- Exchange(distribution=[hash[z]])
+      +- TableSourceScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]], fields=[x, y, z])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testFullOuterJoinEquiPred">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], y=[$4])
++- LogicalJoin(condition=[=($0, $5)], joinType=[full])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, y])
++- HashJoin(joinType=[FullOuterJoin], where=[=(a, z)], select=[a, b, y, z], build=[left])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, b])
+   :     +- TableSourceScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+   +- Exchange(distribution=[hash[z]])
+      +- Calc(select=[y, z])
+         +- TableSourceScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]], fields=[x, y, z])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testLeftOuterJoinEquiAndLocalPred">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], y=[$4])
++- LogicalJoin(condition=[AND(=($0, $5), <($1, 2))], joinType=[left])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, y])
++- HashJoin(joinType=[LeftOuterJoin], where=[AND(=(a, z), $f3)], select=[a, b, $f3, y, z], build=[left])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, b, <(b, 2) AS $f3])
+   :     +- TableSourceScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+   +- Exchange(distribution=[hash[z]])
+      +- Calc(select=[y, z])
+         +- TableSourceScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]], fields=[x, y, z])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testLeftOuterJoinEquiAndNonEquiPred">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], y=[$4])
++- LogicalJoin(condition=[AND(=($0, $5), <($1, $3))], joinType=[left])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, y])
++- HashJoin(joinType=[LeftOuterJoin], where=[AND(=(a, z), <(b, x))], select=[a, b, x, y, z], build=[left])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, b])
+   :     +- TableSourceScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+   +- Exchange(distribution=[hash[z]])
+      +- TableSourceScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]], fields=[x, y, z])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testLeftOuterJoinEquiPred">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], y=[$4])
++- LogicalJoin(condition=[=($0, $5)], joinType=[left])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, y])
++- HashJoin(joinType=[LeftOuterJoin], where=[=(a, z)], select=[a, b, y, z], build=[left])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, b])
+   :     +- TableSourceScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+   +- Exchange(distribution=[hash[z]])
+      +- Calc(select=[y, z])
+         +- TableSourceScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]], fields=[x, y, z])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testNoEqualityJoinPredicate1">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], g=[$6])
++- LogicalFilter(condition=[=($3, $5)])
+   +- LogicalJoin(condition=[true], joinType=[inner])
+      :- LogicalTableScan(table=[[default_catalog, default_database, Table3, source: [TestTableSource(a, b, c)]]])
+      +- LogicalTableScan(table=[[default_catalog, default_database, Table5, source: [TestTableSource(d, e, f, g, h)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+NestedLoopJoin(joinType=[InnerJoin], where=[true], select=[c, g], build=[right])
+:- Calc(select=[c])
+:  +- TableSourceScan(table=[[default_catalog, default_database, Table3, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
++- Exchange(distribution=[broadcast])
+   +- Calc(select=[g], where=[=(d, f)])
+      +- TableSourceScan(table=[[default_catalog, default_database, Table5, source: [TestTableSource(d, e, f, g, h)]]], fields=[d, e, f, g, h])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testNoEqualityJoinPredicate2">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], g=[$6])
++- LogicalFilter(condition=[<($0, $3)])
+   +- LogicalJoin(condition=[true], joinType=[inner])
+      :- LogicalTableScan(table=[[default_catalog, default_database, Table3, source: [TestTableSource(a, b, c)]]])
+      +- LogicalTableScan(table=[[default_catalog, default_database, Table5, source: [TestTableSource(d, e, f, g, h)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, g])
++- NestedLoopJoin(joinType=[InnerJoin], where=[<(a, d)], select=[a, c, d, g], build=[left])
+   :- Exchange(distribution=[broadcast])
+   :  +- Calc(select=[a, c])
+   :     +- TableSourceScan(table=[[default_catalog, default_database, Table3, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+   +- Calc(select=[d, g])
+      +- TableSourceScan(table=[[default_catalog, default_database, Table5, source: [TestTableSource(d, e, f, g, h)]]], fields=[d, e, f, g, h])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRightOuterJoinEquiPred">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], y=[$4])
++- LogicalJoin(condition=[=($0, $5)], joinType=[right])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, y])
++- HashJoin(joinType=[RightOuterJoin], where=[=(a, z)], select=[a, b, y, z], build=[left])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, b])
+   :     +- TableSourceScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+   +- Exchange(distribution=[hash[z]])
+      +- Calc(select=[y, z])
+         +- TableSourceScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]], fields=[x, y, z])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRightOuterJoinEquiAndLocalPred">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], x=[$3])
++- LogicalJoin(condition=[AND(=($0, $5), <($3, 2))], joinType=[right])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, x])
++- HashJoin(joinType=[RightOuterJoin], where=[AND(=(a, z), $f3)], select=[a, b, x, z, $f3], build=[left])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, b])
+   :     +- TableSourceScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+   +- Exchange(distribution=[hash[z]])
+      +- Calc(select=[x, z, <(x, 2) AS $f3])
+         +- TableSourceScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]], fields=[x, y, z])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRightOuterJoinEquiAndNonEquiPred">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], y=[$4])
++- LogicalJoin(condition=[AND(=($0, $5), <($1, $3))], joinType=[right])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, y])
++- HashJoin(joinType=[RightOuterJoin], where=[AND(=(a, z), <(b, x))], select=[a, b, x, y, z], build=[left])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, b])
+   :     +- TableSourceScan(table=[[default_catalog, default_database, T, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+   +- Exchange(distribution=[hash[z]])
+      +- TableSourceScan(table=[[default_catalog, default_database, S, source: [TestTableSource(x, y, z)]]], fields=[x, y, z])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
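
Note: the join cases above exercise predicate handling per outer-join type. Equi predicates become the HashJoin condition on hash-partitioned inputs; non-equi predicates referencing both sides stay in the join's where clause; predicates local to one side are pre-evaluated into a boolean column (e.g. <(b, 2) AS $f3) rather than pushed as a filter, apparently to preserve the outer join's null-padding semantics. A hypothetical reconstruction of testLeftOuterJoinEquiPred, inferred from planBefore:

    // T(a, b, c) left outer joined to S(x, y, z) on a === z; planAfter
    // prunes the unused columns below each Exchange.
    val result = t.leftOuterJoin(s, 'a === 'z).select('b, 'y)
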
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/SetOperatorsTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/SetOperatorsTest.xml
new file mode 100644
index 0000000..ee628f4
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/SetOperatorsTest.xml
@@ -0,0 +1,201 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testFilterMinusTranspose">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$1], b=[$0], c=[$2])
++- LogicalAggregate(group=[{1}], EXPR$0=[SUM($0)], EXPR$1=[COUNT($2)])
+   +- LogicalFilter(condition=[>($0, 0)])
+      +- LogicalMinus(all=[true])
+         :- LogicalTableScan(table=[[default_catalog, default_database, left, source: [TestTableSource(a, b, c)]]])
+         +- LogicalTableScan(table=[[default_catalog, default_database, right, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[EXPR$0 AS a, b, EXPR$1 AS c])
++- HashAggregate(isMerge=[true], groupBy=[b], select=[b, Final_SUM(sum$0) AS EXPR$0, Final_COUNT(count$1) AS EXPR$1])
+   +- Exchange(distribution=[hash[b]])
+      +- LocalHashAggregate(groupBy=[b], select=[b, Partial_SUM(a) AS sum$0, Partial_COUNT(c) AS count$1])
+         +- Calc(select=[a0 AS a, b0 AS b, c0 AS c])
+            +- Correlate(invocation=[org$apache$flink$table$functions$tablefunctions$ReplicateRows$a265580be75179078c2732913dc90daa($0, $1, $2, $3)], correlate=[table(ReplicateRows(sum_vcol_marker,a,b,c))], select=[sum_vcol_marker,a,b,c,a0,b0,c0], rowType=[RecordType(BIGINT sum_vcol_marker, INTEGER a, BIGINT b, VARCHAR(2147483647) c, INTEGER a0, BIGINT b0, VARCHAR(2147483647) c0)], joinType=[INNER])
+               +- Calc(select=[sum_vcol_marker, a, b, c], where=[>(sum_vcol_marker, 0)])
+                  +- HashAggregate(isMerge=[true], groupBy=[a, b, c], select=[a, b, c, Final_SUM(sum$0) AS sum_vcol_marker])
+                     +- Exchange(distribution=[hash[a, b, c]])
+                        +- LocalHashAggregate(groupBy=[a, b, c], select=[a, b, c, Partial_SUM(vcol_marker) AS sum$0])
+                           +- Union(all=[true], union=[a, b, c, vcol_marker])
+                              :- Calc(select=[a, b, c, 1:BIGINT AS vcol_marker], where=[>(a, 0)])
+                              :  +- TableSourceScan(table=[[default_catalog, default_database, left, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+                              +- Calc(select=[a, b, c, -1:BIGINT AS vcol_marker], where=[>(a, 0)])
+                                 +- TableSourceScan(table=[[default_catalog, default_database, right, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testFilterUnionTranspose">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$1], b=[$0], c=[$2])
++- LogicalAggregate(group=[{1}], EXPR$0=[SUM($0)], EXPR$1=[COUNT($2)])
+   +- LogicalFilter(condition=[>($0, 0)])
+      +- LogicalUnion(all=[true])
+         :- LogicalTableScan(table=[[default_catalog, default_database, left, source: [TestTableSource(a, b, c)]]])
+         +- LogicalTableScan(table=[[default_catalog, default_database, right, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[EXPR$0 AS a, b, EXPR$1 AS c])
++- HashAggregate(isMerge=[true], groupBy=[b], select=[b, Final_SUM(sum$0) AS EXPR$0, Final_COUNT(count$1) AS EXPR$1])
+   +- Exchange(distribution=[hash[b]])
+      +- LocalHashAggregate(groupBy=[b], select=[b, Partial_SUM(a) AS sum$0, Partial_COUNT(c) AS count$1])
+         +- Union(all=[true], union=[a, b, c])
+            :- Calc(select=[a, b, c], where=[>(a, 0)])
+            :  +- TableSourceScan(table=[[default_catalog, default_database, left, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+            +- Calc(select=[a, b, c], where=[>(a, 0)])
+               +- TableSourceScan(table=[[default_catalog, default_database, right, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testInWithFilter">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[IN($2, {
+LogicalProject(a1=[AS($0, _UTF-16LE'a1')])
+  LogicalFilter(condition=[=($1, _UTF-16LE'two')])
+    LogicalTableScan(table=[[default_catalog, default_database, A, source: [TestTableSource(a, b, c)]]])
+})])
++- LogicalProject(a=[$0], b=[$1], c=[$2])
+   +- LogicalTableScan(table=[[default_catalog, default_database, A, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+HashJoin(joinType=[LeftSemiJoin], where=[=(c, a1)], select=[a, b, c], build=[right], tryDistinctBuildRow=[true])
+:- Exchange(distribution=[hash[c]], exchange_mode=[BATCH])
+:  +- TableSourceScan(table=[[default_catalog, default_database, A, source: [TestTableSource(a, b, c)]]], fields=[a, b, c], reuse_id=[1])
++- Exchange(distribution=[hash[a1]])
+   +- LocalHashAggregate(groupBy=[a1], select=[a1])
+      +- Calc(select=[a AS a1], where=[=(b, _UTF-16LE'two')])
+         +- Reused(reference_id=[1])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testInWithProject">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b2=[AS(IN($1, 1972-02-22 07:12:00.333:TIMESTAMP(3)), _UTF-16LE'b2')])
++- LogicalTableScan(table=[[default_catalog, default_database, A, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[IN(b, 1972-02-22 07:12:00.333:TIMESTAMP(3)) AS b2])
++- TableSourceScan(table=[[default_catalog, default_database, A, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProjectMinusTranspose">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], c=[$2])
++- LogicalMinus(all=[true])
+   :- LogicalProject(a=[$0], b=[$1], c=[$2])
+   :  +- LogicalTableScan(table=[[default_catalog, default_database, left, source: [TestTableSource(a, b, c)]]])
+   +- LogicalProject(a=[$0], b=[$1], c=[$2])
+      +- LogicalTableScan(table=[[default_catalog, default_database, right, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b0 AS b, c0 AS c])
++- Correlate(invocation=[org$apache$flink$table$functions$tablefunctions$ReplicateRows$a496217af28b47b93e0451f0aad9786d($0, $1, $2)], correlate=[table(ReplicateRows(sum_vcol_marker,b,c))], select=[sum_vcol_marker,b,c,b0,c0], rowType=[RecordType(BIGINT sum_vcol_marker, BIGINT b, VARCHAR(2147483647) c, BIGINT b0, VARCHAR(2147483647) c0)], joinType=[INNER])
+   +- Calc(select=[sum_vcol_marker, b, c], where=[>(sum_vcol_marker, 0)])
+      +- HashAggregate(isMerge=[true], groupBy=[b, c], select=[b, c, Final_SUM(sum$0) AS sum_vcol_marker])
+         +- Exchange(distribution=[hash[b, c]])
+            +- LocalHashAggregate(groupBy=[b, c], select=[b, c, Partial_SUM(vcol_marker) AS sum$0])
+               +- Union(all=[true], union=[b, c, vcol_marker])
+                  :- Calc(select=[b, c, 1:BIGINT AS vcol_marker])
+                  :  +- TableSourceScan(table=[[default_catalog, default_database, left, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+                  +- Calc(select=[b, c, -1:BIGINT AS vcol_marker])
+                     +- TableSourceScan(table=[[default_catalog, default_database, right, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testUnionNullableTypes">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalUnion(all=[true])
+:- LogicalProject(a=[$0])
+:  +- LogicalTableScan(table=[[default_catalog, default_database, A, source: [TestTableSource(a, b, c)]]])
++- LogicalProject(_c0=[CASE(>($2, 0), $1, null:RecordType:peek_no_expand(INTEGER _1, VARCHAR(2147483647) CHARACTER SET "UTF-16LE" _2))])
+   +- LogicalTableScan(table=[[default_catalog, default_database, A, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Union(all=[true], union=[a])
+:- Calc(select=[a])
+:  +- TableSourceScan(table=[[default_catalog, default_database, A, source: [TestTableSource(a, b, c)]]], fields=[a, b, c], reuse_id=[1])
++- Calc(select=[CASE(>(c, 0), b, null:RecordType:peek_no_expand(INTEGER _1, VARCHAR(2147483647) CHARACTER SET "UTF-16LE" _2)) AS _c0])
+   +- Reused(reference_id=[1])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProjectUnionTranspose">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], c=[$2])
++- LogicalUnion(all=[true])
+   :- LogicalProject(a=[$0], b=[$1], c=[$2])
+   :  +- LogicalTableScan(table=[[default_catalog, default_database, left, source: [TestTableSource(a, b, c)]]])
+   +- LogicalProject(a=[$0], b=[$1], c=[$2])
+      +- LogicalTableScan(table=[[default_catalog, default_database, right, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Union(all=[true], union=[b, c])
+:- Calc(select=[b, c])
+:  +- TableSourceScan(table=[[default_catalog, default_database, left, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
++- Calc(select=[b, c])
+   +- TableSourceScan(table=[[default_catalog, default_database, right, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testUnionAnyType">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalUnion(all=[true])
+:- LogicalProject(a=[$0])
+:  +- LogicalTableScan(table=[[default_catalog, default_database, A, source: [TestTableSource(a, b)]]])
++- LogicalProject(b=[$1])
+   +- LogicalTableScan(table=[[default_catalog, default_database, A, source: [TestTableSource(a, b)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Union(all=[true], union=[a])
+:- Calc(select=[a])
+:  +- TableSourceScan(table=[[default_catalog, default_database, A, source: [TestTableSource(a, b)]]], fields=[a, b], reuse_id=[1])
++- Calc(select=[b])
+   +- Reused(reference_id=[1])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
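testFilterMinusTranspose above illustrates how the blink planner executes MINUS ALL: both inputs are unioned with a +1/-1 vcol_marker, the markers are summed per distinct row, and ReplicateRows re-emits each surviving row sum_vcol_marker times; note in planAfter that the filter has been transposed below the set operation into both scan branches. A minimal sketch of the query, under the same harness assumptions:

    import org.apache.flink.table.api.scala._

    val util = batchTestUtil()
    val left = util.addTableSource[(Int, Long, String)]("left", 'a, 'b, 'c)
    val right = util.addTableSource[(Int, Long, String)]("right", 'a, 'b, 'c)
    val result = left.minusAll(right)
      .where('a > 0) // pushed below the LogicalMinus in planAfter
      .groupBy('b)
      .select('a.sum as 'a, 'b, 'c.count as 'c)
    util.verifyPlan(result)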
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/stringexpr/CorrelateStringExpressionTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/stringexpr/CorrelateStringExpressionTest.xml
new file mode 100644
index 0000000..88c0563
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/stringexpr/CorrelateStringExpressionTest.xml
@@ -0,0 +1,156 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testCorrelateJoins1">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], s=[$3])
++- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+   :- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2)], rowType=[RecordType(VARCHAR(2147483647) s)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, s])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2)], correlate=[table(TableFunc1(c))], select=[a,b,c,s], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) s)], joinType=[INNER])
+   +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testCorrelateJoins2">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], s=[$3])
++- LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{}])
+   :- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2)], rowType=[RecordType(VARCHAR(2147483647) s)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, s])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2)], correlate=[table(TableFunc1(c))], select=[a,b,c,s], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) s)], joinType=[LEFT])
+   +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testCorrelateJoins3">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], s=[$3])
++- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+   :- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2, _UTF-16LE'$')], rowType=[RecordType(VARCHAR(2147483647) s)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, s])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2, _UTF-16LE'$')], correlate=[table(TableFunc1(c,_UTF-16LE'$'))], select=[a,b,c,s], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) s)], joinType=[INNER])
+   +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testCorrelateJoins4">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], name=[$3], len=[$4])
++- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+   :- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc2$b3b1f988779be024ed9386bce5019112($2)], rowType=[RecordType(VARCHAR(2147483647) name, INTEGER len)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, name, len])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc2$b3b1f988779be024ed9386bce5019112($2)], correlate=[table(TableFunc2(c))], select=[a,b,c,name,len], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) name, INTEGER len)], joinType=[INNER])
+   +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testCorrelateJoins5">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], name=[$3], len=[$5], adult=[$4])
++- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+   :- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$HierarchyTableFunction$172d96aa11f5379846a3a8c5fa560e0e($2)], rowType=[RecordType(VARCHAR(2147483647) name, BOOLEAN adult, INTEGER len)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, name, len, adult])
++- Correlate(invocation=[org$apache$flink$table$util$HierarchyTableFunction$172d96aa11f5379846a3a8c5fa560e0e($2)], correlate=[table(HierarchyTableFunction(c))], select=[a,b,c,name,adult,len], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) name, BOOLEAN adult, INTEGER len)], joinType=[INNER])
+   +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testCorrelateJoins6">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], name=[$4], age=[$3])
++- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+   :- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$PojoTableFunc$b05c920aa134b36f9cfc9d9b23368bcf($2)], rowType=[RecordType(INTEGER age, VARCHAR(2147483647) name)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, name, age])
++- Correlate(invocation=[org$apache$flink$table$util$PojoTableFunc$b05c920aa134b36f9cfc9d9b23368bcf($2)], correlate=[table(PojoTableFunc(c))], select=[a,b,c,age,name], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, INTEGER age, VARCHAR(2147483647) name)], joinType=[INNER])
+   +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testCorrelateJoins7">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[>($2, 2)])
++- LogicalProject(c=[$2], name=[$3], len=[$4])
+   +- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+      :- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+      +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc2$b3b1f988779be024ed9386bce5019112($2)], rowType=[RecordType(VARCHAR(2147483647) name, INTEGER len)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, name, len])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc2$b3b1f988779be024ed9386bce5019112($2)], correlate=[table(TableFunc2(c))], select=[a,b,c,name,len], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) name, INTEGER len)], joinType=[INNER], condition=[>($1, 2)])
+   +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testCorrelateJoins8">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], c=[$2], s=[$3])
++- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+   :- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696(SUBSTRING($2, 2))], rowType=[RecordType(VARCHAR(2147483647) s)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, c, s])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696(SUBSTRING($2, 2))], correlate=[table(TableFunc1(SUBSTRING(c, 2)))], select=[a,b,c,s], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) s)], joinType=[INNER])
+   +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
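These cases exercise the same table-function correlates through the string expression DSL. A sketch of testCorrelateJoins7, where the filter on the function output is folded into the Correlate's condition; the addFunction registration helper is an assumption about the test harness, and TableFunc2 is the test function referenced in the plans:

    import org.apache.flink.table.api.scala._
    import org.apache.flink.table.util.TableFunc2

    val util = batchTestUtil()
    val t = util.addTableSource[(Int, Long, String)]("Table1", 'a, 'b, 'c)
    util.addFunction("func2", new TableFunc2) // assumed registration helper
    val result = t
      .joinLateral("func2(c) as (name, len)")
      .select("c, name, len")
      .filter("len > 2") // appears as condition=[>($1, 2)] on the Correlate
    util.verifyPlan(result)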
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/stringexpr/SetOperatorsTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/stringexpr/SetOperatorsTest.xml
new file mode 100644
index 0000000..0ebd86b
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/batch/table/stringexpr/SetOperatorsTest.xml
@@ -0,0 +1,33 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testInWithProject">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b2=[AS(IN($1, 1972-02-22 07:12:00.333:TIMESTAMP(3)), _UTF-16LE'b2')])
++- LogicalTableScan(table=[[default_catalog, default_database, A, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[IN(b, 1972-02-22 07:12:00.333:TIMESTAMP(3)) AS b2])
++- TableSourceScan(table=[[default_catalog, default_database, A, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/AggregateTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/AggregateTest.xml
new file mode 100644
index 0000000..d5b8a85
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/AggregateTest.xml
@@ -0,0 +1,279 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testAggregateWithAlias">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[AS($0, _UTF-16LE'b')], x=[AS($1.f0, _UTF-16LE'x')], y=[AS($1.f1, _UTF-16LE'y')])
++- LogicalAggregate(group=[{1}], TMP_0=[CountMinMax($0)])
+   +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, TMP_0.f0 AS x, TMP_0.f1 AS y])
++- GroupAggregate(groupBy=[b], select=[b, CountMinMax(a) AS TMP_0])
+   +- Exchange(distribution=[hash[b]])
+      +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testAggregateWithScalarResult">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$0], TMP_0=[$1])
++- LogicalAggregate(group=[{1}], TMP_0=[COUNT($0)])
+   +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupAggregate(groupBy=[b], select=[b, COUNT(a) AS TMP_0])
++- Exchange(distribution=[hash[b]])
+   +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testDistinctAggregateOnTumbleWindow">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0], EXPR$1=[$1])
++- LogicalWindowAggregate(group=[{}], EXPR$0=[COUNT(DISTINCT $0)], EXPR$1=[SUM($0)], window=[TumblingGroupWindow], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(window=[TumblingGroupWindow], select=[COUNT(DISTINCT a) AS EXPR$0, SUM(a) AS EXPR$1])
++- Exchange(distribution=[single])
+   +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testDistinctAggregateWithGroupingOnSessionWindow">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], EXPR$0=[$1], EXPR$1=[$2])
++- LogicalWindowAggregate(group=[{0}], EXPR$0=[COUNT($0)], EXPR$1=[COUNT(DISTINCT $2)], window=[SessionGroupWindow('w, rowtime, 900000)], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(groupBy=[a], window=[SessionGroupWindow('w, rowtime, 900000)], select=[a, COUNT(a) AS EXPR$0, COUNT(DISTINCT c) AS EXPR$1])
++- Exchange(distribution=[hash[a]])
+   +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testGroupAggregate">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$1])
++- LogicalAggregate(group=[{1}], EXPR$0=[COUNT($0)])
+   +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[EXPR$0])
++- GroupAggregate(groupBy=[b], select=[b, COUNT(a) AS EXPR$0])
+   +- Exchange(distribution=[hash[b]])
+      +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testGroupAggregateWithAverage">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$0], EXPR$0=[$1])
++- LogicalAggregate(group=[{1}], EXPR$0=[AVG($3)])
+   +- LogicalProject(a=[$0], b=[$1], c=[$2], a0=[CAST($0):DOUBLE])
+      +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupAggregate(groupBy=[b], select=[b, AVG(a0) AS EXPR$0])
++- Exchange(distribution=[hash[b]])
+   +- Calc(select=[a, b, c, CAST(a) AS a0])
+      +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testGroupAggregateWithConstant1">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(four=[$1], EXPR$0=[$2])
++- LogicalAggregate(group=[{0, 1}], EXPR$0=[SUM($2)])
+   +- LogicalProject(a=[$0], four=[4], b=[$1])
+      +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[4 AS four, EXPR$0])
++- GroupAggregate(groupBy=[a, four], select=[a, four, SUM(b) AS EXPR$0])
+   +- Exchange(distribution=[hash[a, four]])
+      +- Calc(select=[a, 4 AS four, b])
+         +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testGroupAggregateWithConstant2">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(four=[$1], EXPR$0=[$2])
++- LogicalAggregate(group=[{0, 1}], EXPR$0=[SUM($2)])
+   +- LogicalProject(b=[$1], four=[4], a=[$0])
+      +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[4 AS four, EXPR$0])
++- GroupAggregate(groupBy=[b, four], select=[b, four, SUM(a) AS EXPR$0])
+   +- Exchange(distribution=[hash[b, four]])
+      +- Calc(select=[b, 4 AS four, a])
+         +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testSimpleAggregate">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[AS($0, _UTF-16LE'b')], f0=[AS($1.f0, _UTF-16LE'f0')], f1=[AS($1.f1, _UTF-16LE'f1')])
++- LogicalAggregate(group=[{1}], TMP_0=[CountMinMax($0)])
+   +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, TMP_0.f0 AS f0, TMP_0.f1 AS f1])
++- GroupAggregate(groupBy=[b], select=[b, CountMinMax(a) AS TMP_0])
+   +- Exchange(distribution=[hash[b]])
+      +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testGroupAggregateWithExpressionInSelect">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$1], EXPR$1=[$2])
++- LogicalAggregate(group=[{1}], EXPR$0=[MIN($2)], EXPR$1=[AVG($0)])
+   +- LogicalProject(a=[$0], d=[MOD($1, 3)], c=[$2])
+      +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[EXPR$0, EXPR$1])
++- GroupAggregate(groupBy=[d], select=[d, MIN(c) AS EXPR$0, AVG(a) AS EXPR$1])
+   +- Exchange(distribution=[hash[d]])
+      +- Calc(select=[a, MOD(b, 3) AS d, c])
+         +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testGroupAggregateWithFilter">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[=($0, 2)])
++- LogicalProject(b=[$0], EXPR$0=[$1])
+   +- LogicalAggregate(group=[{1}], EXPR$0=[SUM($0)])
+      +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[CAST(2) AS b, EXPR$0])
++- GroupAggregate(groupBy=[b], select=[b, SUM(a) AS EXPR$0])
+   +- Exchange(distribution=[hash[b]])
+      +- Calc(select=[a, b, c], where=[=(b, 2)])
+         +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testGroupDistinctAggregateWithUDAGG">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$1], EXPR$1=[$2])
++- LogicalAggregate(group=[{2}], EXPR$0=[WeightedAvg(DISTINCT $0, $1)], EXPR$1=[WeightedAvg($0, $1)])
+   +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[EXPR$0, EXPR$1])
++- GroupAggregate(groupBy=[c], select=[c, WeightedAvg(DISTINCT a, b) AS EXPR$0, WeightedAvg(a, b) AS EXPR$1])
+   +- Exchange(distribution=[hash[c]])
+      +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testGroupDistinctAggregate">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$1], EXPR$1=[$2])
++- LogicalAggregate(group=[{1}], EXPR$0=[SUM(DISTINCT $0)], EXPR$1=[COUNT(DISTINCT $2)])
+   +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[EXPR$0, EXPR$1])
++- GroupAggregate(groupBy=[b], select=[b, SUM(DISTINCT a) AS EXPR$0, COUNT(DISTINCT c) AS EXPR$1])
+   +- Exchange(distribution=[hash[b]])
+      +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testMultiDistinctAggregateSameFieldOnHopWindow">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0], EXPR$1=[$1], EXPR$2=[$2])
++- LogicalWindowAggregate(group=[{}], EXPR$0=[COUNT(DISTINCT $0)], EXPR$1=[SUM(DISTINCT $0)], EXPR$2=[MAX(DISTINCT $0)], window=[SlidingGroupWindow('w, rowtime, 3600000, 900000)], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(window=[SlidingGroupWindow('w, rowtime, 3600000, 900000)], select=[COUNT(DISTINCT a) AS EXPR$0, SUM(DISTINCT a) AS EXPR$1, MAX(DISTINCT a) AS EXPR$2])
++- Exchange(distribution=[single])
+   +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testSelectStar">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[AS($0, _UTF-16LE'b')], f0=[AS($1.f0, _UTF-16LE'f0')], f1=[AS($1.f1, _UTF-16LE'f1')], f2=[AS($1.f2, _UTF-16LE'f2')])
++- LogicalAggregate(group=[{1}], TMP_0=[CountMinMax($0)])
+   +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, TMP_0.f0 AS f0, TMP_0.f1 AS f1, TMP_0.f2 AS f2])
++- GroupAggregate(groupBy=[b], select=[b, CountMinMax(a) AS TMP_0])
+   +- Exchange(distribution=[hash[b]])
+      +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
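The distinct cases above keep their DISTINCT modifiers on the aggregate calls of the resulting GroupAggregate/GroupWindowAggregate nodes rather than being rewritten away. A minimal sketch of testGroupDistinctAggregate, under the same harness assumptions (streamTestUtil for the stream planner):

    import org.apache.flink.table.api.scala._

    val util = streamTestUtil()
    val t = util.addTableSource[(Int, Long, String)]("Table1", 'a, 'b, 'c)
    // Compiles to GroupAggregate(... SUM(DISTINCT a) ..., COUNT(DISTINCT c) ...).
    val result = t
      .groupBy('b)
      .select('a.sum.distinct, 'c.count.distinct)
    util.verifyPlan(result)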
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/CalcTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/CalcTest.xml
new file mode 100644
index 0000000..1191d18
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/CalcTest.xml
@@ -0,0 +1,187 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testAddColumns">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], b=[$1], c=[$2], kid=[CONCAT($2, _UTF-16LE'_kid_last')], _c4=[+($0, 2)], b2=[$1], _c6=[_UTF-16LE'literal_value'])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, b, c, CONCAT(c, _UTF-16LE'_kid_last') AS kid, +(a, 2) AS _c4, b AS b2, _UTF-16LE'literal_value' AS _c6])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testDropColumns">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testIn">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[AND(OR(=($1, 1), =($1, 2), =($1, 3), =($1, 4), =($1, 5), =($1, 6), =($1, 7), =($1, 8), =($1, 9), =($1, 10), =($1, 11), =($1, 12), =($1, 13), =($1, 14), =($1, 15), =($1, 16), =($1, 17), =($1, 18), =($1, 19), =($1, 20), =($1, 21), =($1, 22), =($1, 23), =($1, 24), =($1, 25), =($1, 26), =($1, 27), =($1, 28), =($1, 29), =($1, 30)), =($2, _UTF-16LE'xx'))])
++- LogicalProject(a=[$0], b=[$1], c=[$2])
+   +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, b, CAST(_UTF-16LE'xx':VARCHAR(2147483647) CHARACTER SET "UTF-16LE") AS c], where=[AND(IN(b, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30), =(c, _UTF-16LE'xx'))])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testSimpleMap">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(_c0=[org$apache$flink$table$expressions$utils$Func23$$de6190eff5cfcd5dd1d5877a871e2387($0, $1, $2).f0], _c1=[org$apache$flink$table$expressions$utils$Func23$$de6190eff5cfcd5dd1d5877a871e2387($0, $1, $2).f1], _c2=[org$apache$flink$table$expressions$utils$Func23$$de6190eff5cfcd5dd1d5877a871e2387($0, $1, $2).f2], _c3=[org$apache$flink$table$expressions$utils$Func23$$de6190eff5cfcd5dd1d5877a871e2387($0, $1, $2).f3])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[Func23$(a, b, c).f0 AS _c0, Func23$(a, b, c).f1 AS _c1, Func23$(a, b, c).f2 AS _c2, Func23$(a, b, c).f3 AS _c3])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testMultiFilter">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[=(MOD($0, 2), 1)])
++- LogicalFilter(condition=[<($1, 2)])
+   +- LogicalFilter(condition=[>($0, 0)])
+      +- LogicalProject(a=[$0], b=[$1])
+         +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, b], where=[AND(>(a, 0), <(b, 2), =(MOD(a, 2), 1))])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c, d)]]], fields=[a, b, c, d])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testMultiMap">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(_c0=[org$apache$flink$table$expressions$utils$Func24$$4d71da721f8fba30223be1cd2b5af2ce(org$apache$flink$table$expressions$utils$Func23$$de6190eff5cfcd5dd1d5877a871e2387($0, $1, $2).f0, org$apache$flink$table$expressions$utils$Func23$$de6190eff5cfcd5dd1d5877a871e2387($0, $1, $2).f1, org$apache$flink$table$expressions$utils$Func23$$de6190eff5cfcd5dd1d5877a871e2387($0, $1, $2).f2, org$apache$flink$table$expressions$utils$Func23$$de6190eff5cfcd5dd1d5877a871e2387($0, $1, $2).f3 [...]
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[Func24$(Func23$(a, b, c).f0, Func23$(a, b, c).f1, Func23$(a, b, c).f2, Func23$(a, b, c).f3).f0 AS _c0, Func24$(Func23$(a, b, c).f0, Func23$(a, b, c).f1, Func23$(a, b, c).f2, Func23$(a, b, c).f3).f1 AS _c1, Func24$(Func23$(a, b, c).f0, Func23$(a, b, c).f1, Func23$(a, b, c).f2, Func23$(a, b, c).f3).f2 AS _c2, Func24$(Func23$(a, b, c).f0, Func23$(a, b, c).f1, Func23$(a, b, c).f2, Func23$(a, b, c).f3).f3 AS _c3])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testNotIn">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[OR(AND(<>($1, 1), <>($1, 2), <>($1, 3), <>($1, 4), <>($1, 5), <>($1, 6), <>($1, 7), <>($1, 8), <>($1, 9), <>($1, 10), <>($1, 11), <>($1, 12), <>($1, 13), <>($1, 14), <>($1, 15), <>($1, 16), <>($1, 17), <>($1, 18), <>($1, 19), <>($1, 20), <>($1, 21), <>($1, 22), <>($1, 23), <>($1, 24), <>($1, 25), <>($1, 26), <>($1, 27), <>($1, 28), <>($1, 29), <>($1, 30)), <>($2, _UTF-16LE'xx'))])
++- LogicalProject(a=[$0], b=[$1], c=[$2])
+   +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, b, c], where=[OR(NOT IN(b, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30), <>(c, _UTF-16LE'xx'))])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testScalarResult">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(_c0=[org$apache$flink$table$expressions$utils$Func1$$a39386268ffec8461452460bcbe089ad($0)])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[Func1$(a) AS _c0])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRenameColumns">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a2=[$0], b2=[$1])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a AS a2, b AS b2])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testSelectFromGroupedWindow">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$1], EXPR$1=[$2], b=[$0])
++- LogicalWindowAggregate(group=[{1}], EXPR$0=[COUNT($5)], EXPR$1=[SUM($0)], window=[TumblingGroupWindow], properties=[])
+   +- LogicalProject(a=[$0], b=[$1], c=[$2], d=[$3], rowtime=[$4], $f5=[UPPER($2)])
+      +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[EXPR$0, EXPR$1, b])
++- GroupWindowAggregate(groupBy=[b], window=[TumblingGroupWindow], select=[b, COUNT($f5) AS EXPR$0, SUM(a) AS EXPR$1])
+   +- Exchange(distribution=[hash[b]])
+      +- Calc(select=[a, b, c, d, rowtime, UPPER(c) AS $f5])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, d, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testSelectFromWindow">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0], EXPR$1=[$1])
++- LogicalWindowAggregate(group=[{}], EXPR$0=[COUNT($5)], EXPR$1=[SUM($0)], window=[TumblingGroupWindow], properties=[])
+   +- LogicalProject(a=[$0], b=[$1], c=[$2], d=[$3], rowtime=[$4], $f5=[UPPER($2)])
+      +- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(window=[TumblingGroupWindow], select=[COUNT($f5) AS EXPR$0, SUM(a) AS EXPR$1])
++- Exchange(distribution=[single])
+   +- Calc(select=[a, b, c, d, rowtime, UPPER(c) AS $f5])
+      +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, d, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
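testAddColumns above maps one-to-one onto a single Calc: each added column becomes one more projection on top of the source fields. A sketch of the Table API call behind it, with the same hedged harness assumptions:

    import org.apache.flink.table.api.scala._

    val util = streamTestUtil()
    val t = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
    // Unnamed expressions get generated names (_c4, _c6) in the plan.
    val result = t.addColumns(
      concat('c, "_kid_last") as 'kid, 'a + 2, 'b as 'b2, "literal_value")
    util.verifyPlan(result)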
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/ColumnFunctionsTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/ColumnFunctionsTest.xml
new file mode 100644
index 0000000..420f6ee
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/ColumnFunctionsTest.xml
@@ -0,0 +1,229 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testAddColumns">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], b=[$1], c=[$2], d=[org$apache$flink$table$plan$stream$table$TestFunc$$fd4dfa9e9ae53c7b8d0f13d2db94ac9b($0, $1)])
++- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, b, c, TestFunc$(a, b) AS d])
++- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testColumnFunctionsInUDF">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(_c0=[CONCAT($2, $3)])
++- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(int, long, string1, string2)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[CONCAT(string1, string2) AS _c0])
++- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(int, long, string1, string2)]]], fields=[int, long, string1, string2])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testColumnRange">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], c=[$2], a=[$0], e=[$4], f=[$5], d=[$3])
++- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c, d, e, f)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, c, a, e, f, d])
++- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c, d, e, f)]]], fields=[a, b, c, d, e, f])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testColumnWithoutRange">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], b=[$1], c=[$2], f=[$5])
++- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c, d, e, f)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, b, c, f])
++- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c, d, e, f)]]], fields=[a, b, c, d, e, f])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testDropColumns">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2])
++- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c])
++- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testFilter">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[=(CONCAT($2, $3), _UTF-16LE'a')])
++- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(int, long, string1, string2)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[int, long, string1, string2], where=[=(CONCAT(string1, string2), _UTF-16LE'a')])
++- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(int, long, string1, string2)]]], fields=[int, long, string1, string2])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testGroupBy">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], b=[$1], EXPR$0=[$2])
++- LogicalAggregate(group=[{0, 1}], EXPR$0=[COUNT($2)])
+   +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c, d, e, f)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupAggregate(groupBy=[a, b], select=[a, b, COUNT(c) AS EXPR$0])
++- Exchange(distribution=[hash[a, b]])
+   +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c, d, e, f)]]], fields=[a, b, c, d, e, f])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testInverseSelection">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(e=[$4], f=[$5])
++- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c, d, e, f)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[e, f])
++- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c, d, e, f)]]], fields=[a, b, c, d, e, f])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testJoin">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalJoin(condition=[=($0, $3)], joinType=[inner])
+:- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(int1, long1, string1)]]])
++- LogicalTableScan(table=[[default_catalog, default_database, Table2, source: [TestTableSource(int2, long2, string2)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Join(joinType=[InnerJoin], where=[=(int1, int2)], select=[int1, long1, string1, int2, long2, string2], leftInputSpec=[NoUniqueKey], rightInputSpec=[NoUniqueKey])
+:- Exchange(distribution=[hash[int1]])
+:  +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(int1, long1, string1)]]], fields=[int1, long1, string1])
++- Exchange(distribution=[hash[int2]])
+   +- TableSourceScan(table=[[default_catalog, default_database, Table2, source: [TestTableSource(int2, long2, string2)]]], fields=[int2, long2, string2])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testJoinLateral">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+:- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(int, long, string)]]])
++- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc0$2d39fea38a8a8fb8536772fd858e67ed($2)], rowType=[RecordType(VARCHAR(2147483647) name, INTEGER age)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Correlate(invocation=[org$apache$flink$table$util$TableFunc0$2d39fea38a8a8fb8536772fd858e67ed($2)], correlate=[table(TableFunc0(string))], select=[int,long,string,name,age], rowType=[RecordType(DOUBLE int, BIGINT long, VARCHAR(2147483647) string, VARCHAR(2147483647) name, INTEGER age)], joinType=[INNER])
++- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(int, long, string)]]], fields=[int, long, string])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testOver">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], mycount=[AS(org$apache$flink$table$util$CountAggFunction$d107ba7513e384f4b916c9bf9a5c9efc($1) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'mycount')], wAvg=[AS(org$apache$flink$table$plan$util$JavaUserDefinedAggFunctions$WeightedAvg$ac5eee6b97ac7834a1700f7ee70e778c($0, $1) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')], countDist=[AS(org [...]
++- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, w0$o0 AS mycount, w0$o1 AS wAvg, w0$o2 AS countDist])
++- OverAggregate(partitionBy=[c], orderBy=[proctime ASC], window=[ ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, b, c, proctime, CountAggFunction(b) AS w0$o0, WeightedAvg(a, b) AS w0$o1, CountDistinct(a) AS w0$o2])
+   +- Exchange(distribution=[hash[c]])
+      +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[a, b, c, proctime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRenameColumns">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(d=[$0], b=[$1])
++- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a AS d, b])
++- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testStar">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(_c0=[org$apache$flink$table$plan$stream$table$TestFunc$$fd4dfa9e9ae53c7b8d0f13d2db94ac9b($0, $1)])
++- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(double, long)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[TestFunc$(double, long) AS _c0])
++- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(double, long)]]], fields=[double, long])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testWindowGroupBy">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], b=[$1], c=[AS($2, _UTF-16LE'c')])
++- LogicalWindowAggregate(group=[{0, 1}], EXPR$0=[COUNT($2)], window=[SlidingGroupWindow('w, b, 3, 10)], properties=[])
+   +- LogicalProject(a=[AS($0, _UTF-16LE'a')], b=[AS($1, _UTF-16LE'b')], c=[AS($2, _UTF-16LE'c')], d=[AS($3, _UTF-16LE'd')])
+      +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, b, EXPR$0 AS c])
++- GroupWindowAggregate(groupBy=[a, b], window=[SlidingGroupWindow('w, b, 3, 10)], select=[a, b, COUNT(c) AS EXPR$0])
+   +- Exchange(distribution=[hash[a, b]])
+      +- Calc(select=[a, CAST(rowtime) AS b, c, d])
+         +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[a, rowtime, c, d])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
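testColumnRange above shows the column functions resolving before planning: the Calc already lists concrete fields. A sketch using both name-based and 1-based index ranges, under the same harness assumptions:

    import org.apache.flink.table.api.scala._

    val util = streamTestUtil()
    val t = util.addTableSource[(Int, Long, String, Int, Long, String)](
      "Table1", 'a, 'b, 'c, 'd, 'e, 'f)
    // withColumns('b to 'c) -> b, c; withColumns(5 to 6) -> e, f (1-based).
    val result = t.select(withColumns('b to 'c), 'a, withColumns(5 to 6), 'd)
    util.verifyPlan(result)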
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/CorrelateTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/CorrelateTest.xml
new file mode 100644
index 0000000..8740858
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/CorrelateTest.xml
@@ -0,0 +1,209 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testCorrelateWithMultiFilter">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$0], d=[$1])
++- LogicalFilter(condition=[>($2, 20)])
+   +- LogicalFilter(condition=[>($2, 10)])
+      +- LogicalProject(c=[$2], d=[$3], e=[$4])
+         +- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+            :- LogicalProject(a=[$0], b=[$1], c=[$2])
+            :  +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+            +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc0$2d39fea38a8a8fb8536772fd858e67ed($2)], rowType=[RecordType(VARCHAR(2147483647) d, INTEGER e)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, d])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc0$2d39fea38a8a8fb8536772fd858e67ed($2)], correlate=[table(TableFunc0(c))], select=[a,b,c,d,e], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) d, INTEGER e)], joinType=[INNER], condition=[>($1, 20)])
+   +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testCorrelateWithMultiFilterAndWithoutCalcMergeRules">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$0], d=[$1])
++- LogicalFilter(condition=[>($2, 20)])
+   +- LogicalFilter(condition=[>($2, 10)])
+      +- LogicalProject(c=[$2], d=[$3], e=[$4])
+         +- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+            :- LogicalProject(a=[$0], b=[$1], c=[$2])
+            :  +- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+            +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc0$2d39fea38a8a8fb8536772fd858e67ed($2)], rowType=[RecordType(VARCHAR(2147483647) d, INTEGER e)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, d])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc0$2d39fea38a8a8fb8536772fd858e67ed($2)], correlate=[table(TableFunc0(c))], select=[a,b,c,d,e], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) d, INTEGER e)], joinType=[INNER], condition=[>($1, 20)])
+   +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testCrossJoin">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], s=[$3])
++- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+   :- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2)], rowType=[RecordType(VARCHAR(2147483647) s)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, s])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2)], correlate=[table(TableFunc1(c))], select=[a,b,c,s], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) s)], joinType=[INNER])
+   +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testCrossJoin2">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], s=[$3])
++- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+   :- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2, _UTF-16LE'$')], rowType=[RecordType(VARCHAR(2147483647) s)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, s])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2, _UTF-16LE'$')], correlate=[table(TableFunc1(c,_UTF-16LE'$'))], select=[a,b,c,s], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) s)], joinType=[INNER])
+   +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testCustomType">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], name=[$3], len=[$4])
++- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+   :- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc2$b3b1f988779be024ed9386bce5019112(org$apache$flink$table$expressions$utils$Func13$054570f6203667830dd24328319ff13c($2))], rowType=[RecordType(VARCHAR(2147483647) name, INTEGER len)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, name, len])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc2$b3b1f988779be024ed9386bce5019112(org$apache$flink$table$expressions$utils$Func13$054570f6203667830dd24328319ff13c($2))], correlate=[table(TableFunc2(Func13(c)))], select=[a,b,c,name,len], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) name, INTEGER len)], joinType=[INNER])
+   +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testFilter">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[>($2, 2)])
++- LogicalProject(c=[$2], name=[$3], len=[$4])
+   +- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+      :- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+      +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc2$b3b1f988779be024ed9386bce5019112($2)], rowType=[RecordType(VARCHAR(2147483647) name, INTEGER len)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, name, len])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc2$b3b1f988779be024ed9386bce5019112($2)], correlate=[table(TableFunc2(c))], select=[a,b,c,name,len], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) name, INTEGER len)], joinType=[INNER], condition=[>($1, 2)])
+   +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testHierarchyType">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+:- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
++- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$HierarchyTableFunction$172d96aa11f5379846a3a8c5fa560e0e($2)], rowType=[RecordType(VARCHAR(2147483647) name, BOOLEAN adult, INTEGER len)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Correlate(invocation=[org$apache$flink$table$util$HierarchyTableFunction$172d96aa11f5379846a3a8c5fa560e0e($2)], correlate=[table(HierarchyTableFunction(c))], select=[a,b,c,name,adult,len], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) name, BOOLEAN adult, INTEGER len)], joinType=[INNER])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testFlatMap">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(f0=[AS($3, _UTF-16LE'f0')], f1=[AS($4, _UTF-16LE'f1')])
++- LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+   :- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(f1, f2, f3)]]])
+   +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc2$b3b1f988779be024ed9386bce5019112($2)], rowType=[RecordType(VARCHAR(2147483647) f0, INTEGER f1_0)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[f0, f1_0 AS f1])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc2$b3b1f988779be024ed9386bce5019112($2)], correlate=[table(TableFunc2(f3))], select=[f1,f2,f3,f0,f1_0], rowType=[RecordType(INTEGER f1, BIGINT f2, VARCHAR(2147483647) f3, VARCHAR(2147483647) f0, INTEGER f1_0)], joinType=[INNER])
+   +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(f1, f2, f3)]]], fields=[f1, f2, f3])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testLeftOuterJoinWithLiteralTrue">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], s=[$3])
++- LogicalCorrelate(correlation=[$cor0], joinType=[left], requiredColumns=[{}])
+   :- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
+   +- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2)], rowType=[RecordType(VARCHAR(2147483647) s)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, s])
++- Correlate(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696($2)], correlate=[table(TableFunc1(c))], select=[a,b,c,s], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) s)], joinType=[LEFT])
+   +- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testScalarFunction">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+:- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
++- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696(SUBSTRING($2, 2))], rowType=[RecordType(VARCHAR(2147483647) s)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Correlate(invocation=[org$apache$flink$table$util$TableFunc1$ad38060966060e704b09fa4c94287696(SUBSTRING($2, 2))], correlate=[table(TableFunc1(SUBSTRING(c, 2)))], select=[a,b,c,s], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, VARCHAR(2147483647) s)], joinType=[INNER])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testPojoType">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalCorrelate(correlation=[$cor0], joinType=[inner], requiredColumns=[{}])
+:- LogicalTableScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]])
++- LogicalTableFunctionScan(invocation=[org$apache$flink$table$util$PojoTableFunc$b05c920aa134b36f9cfc9d9b23368bcf($2)], rowType=[RecordType(INTEGER age, VARCHAR(2147483647) name)], elementType=[class [Ljava.lang.Object;])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Correlate(invocation=[org$apache$flink$table$util$PojoTableFunc$b05c920aa134b36f9cfc9d9b23368bcf($2)], correlate=[table(PojoTableFunc(c))], select=[a,b,c,age,name], rowType=[RecordType(INTEGER a, BIGINT b, VARCHAR(2147483647) c, INTEGER age, VARCHAR(2147483647) name)], joinType=[INNER])
++- TableSourceScan(table=[[default_catalog, default_database, MyTable, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
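
The correlate plans above all share one shape: the LogicalCorrelate over a table function is rewritten into a single Correlate node, with projections pushed down and, where possible, filters folded into its condition (in testCorrelateWithMultiFilter, the two filters >10 and >20 collapse into the single condition=[>($1, 20)]). As a hedged sketch under the same assumed test utilities, with TableFunc1 being the test's single-column table function, testCrossJoin and testLeftOuterJoinWithLiteralTrue correspond to queries like:

    val util = streamTestUtil()
    val t = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
    val func1 = new TableFunc1

    // testCrossJoin: inner correlate, then project (c, s) -> joinType=[INNER]
    util.verifyPlan(t.joinLateral(func1('c) as 's).select('c, 's))

    // testLeftOuterJoinWithLiteralTrue: left correlate with a literal TRUE
    // join predicate -> joinType=[LEFT] in the Correlate node
    util.verifyPlan(t.leftOuterJoinLateral(func1('c) as 's, true).select('c, 's))
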
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/GroupWindowTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/GroupWindowTest.xml
new file mode 100644
index 0000000..e081653
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/GroupWindowTest.xml
@@ -0,0 +1,460 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testAllEventTimeSessionGroupWindowOverTime">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0])
++- LogicalWindowAggregate(group=[{}], EXPR$0=[COUNT($1)], window=[SessionGroupWindow('w, rowtime, 7)], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(window=[SessionGroupWindow('w, rowtime, 7)], select=[COUNT(int) AS EXPR$0])
++- Exchange(distribution=[single])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[rowtime, int, string])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testAllEventTimeSlidingGroupWindowOverCount">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0])
++- LogicalWindowAggregate(group=[{}], EXPR$0=[COUNT($1)], window=[SlidingGroupWindow('w, rowtime, 8, 10)], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(window=[SlidingGroupWindow('w, rowtime, 8, 10)], select=[COUNT(int) AS EXPR$0])
++- Exchange(distribution=[single])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[rowtime, int, string])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testAllEventTimeSlidingGroupWindowOverTime">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0])
++- LogicalWindowAggregate(group=[{}], EXPR$0=[COUNT($1)], window=[SlidingGroupWindow('w, rowtime, 8, 10)], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(window=[SlidingGroupWindow('w, rowtime, 8, 10)], select=[COUNT(int) AS EXPR$0])
++- Exchange(distribution=[single])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testAllEventTimeTumblingGroupWindowOverCount">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0])
++- LogicalWindowAggregate(group=[{}], EXPR$0=[COUNT($1)], window=[TumblingGroupWindow], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(window=[TumblingGroupWindow], select=[COUNT(int) AS EXPR$0])
++- Exchange(distribution=[single])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[rowtime, int, string])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testAllProcessingTimeSlidingGroupWindowOverCount">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0])
++- LogicalWindowAggregate(group=[{}], EXPR$0=[COUNT($1)], window=[SlidingGroupWindow('w, proctime, 2, 1)], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(window=[SlidingGroupWindow('w, proctime, 2, 1)], select=[COUNT(int) AS EXPR$0])
++- Exchange(distribution=[single])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, proctime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testAllEventTimeTumblingGroupWindowOverTime">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0])
++- LogicalWindowAggregate(group=[{}], EXPR$0=[COUNT($1)], window=[TumblingGroupWindow], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(window=[TumblingGroupWindow], select=[COUNT(int) AS EXPR$0])
++- Exchange(distribution=[single])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testAllProcessingTimeSlidingGroupWindowOverTime">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0])
++- LogicalWindowAggregate(group=[{}], EXPR$0=[COUNT($1)], window=[SlidingGroupWindow('w, proctime, 50, 50)], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(window=[SlidingGroupWindow('w, proctime, 50, 50)], select=[COUNT(int) AS EXPR$0])
++- Exchange(distribution=[single])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, proctime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testAllProcessingTimeTumblingGroupWindowOverCount">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0])
++- LogicalWindowAggregate(group=[{}], EXPR$0=[COUNT($1)], window=[TumblingGroupWindow], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(window=[TumblingGroupWindow], select=[COUNT(int) AS EXPR$0])
++- Exchange(distribution=[single])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, proctime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testAllProcessingTimeTumblingGroupWindowOverTime">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[COUNT($1)], window=[TumblingGroupWindow], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(groupBy=[string], window=[TumblingGroupWindow], select=[string, COUNT(int) AS EXPR$0])
++- Exchange(distribution=[hash[string]])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, proctime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testDecomposableAggFunctions">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0], EXPR$1=[$1], EXPR$2=[$2], EXPR$3=[$3], EXPR$4=[$4], EXPR$5=[$5])
++- LogicalWindowAggregate(group=[{}], EXPR$0=[VAR_POP($3)], EXPR$1=[VAR_SAMP($3)], EXPR$2=[STDDEV_POP($3)], EXPR$3=[STDDEV_SAMP($3)], window=[TumblingGroupWindow], properties=[EXPR$4, EXPR$5])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[/(-($f0, /(*(CAST($f1), CAST($f1)), $f2)), $f2) AS EXPR$0, /(-($f0, /(*(CAST($f1), CAST($f1)), $f2)), CASE(=($f2, 1), null:BIGINT, -($f2, 1))) AS EXPR$1, POWER(/(-($f0, /(*(CAST($f1), CAST($f1)), $f2)), $f2), 0.5:DECIMAL(2, 1)) AS EXPR$2, POWER(/(-($f0, /(*(CAST($f1), CAST($f1)), $f2)), CASE(=($f2, 1), null:BIGINT, -($f2, 1))), 0.5:DECIMAL(2, 1)) AS EXPR$3, EXPR$4, EXPR$5])
++- GroupWindowAggregate(window=[TumblingGroupWindow], properties=[EXPR$4, EXPR$5], select=[SUM($f4) AS $f0, SUM(c) AS $f1, COUNT(c) AS $f2, start('w) AS EXPR$4, end('w) AS EXPR$5])
+   +- Exchange(distribution=[single])
+      +- Calc(select=[rowtime, a, b, c, *(CAST(c), CAST(c)) AS $f4])
+         +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[rowtime, a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testEventTimeSessionGroupWindowOverTime">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[COUNT($1)], window=[SessionGroupWindow('w, rowtime, 7)], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(groupBy=[string], window=[SessionGroupWindow('w, rowtime, 7)], select=[string, COUNT(int) AS EXPR$0])
++- Exchange(distribution=[hash[string]])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[rowtime, int, string])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testEventTimeSessionGroupWindowWithUdAgg">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[myWeightedAvg($0, $1)], window=[SessionGroupWindow('w, rowtime, 7)], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(groupBy=[string], window=[SessionGroupWindow('w, rowtime, 7)], select=[string, myWeightedAvg(long, int) AS EXPR$0])
++- Exchange(distribution=[hash[string]])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcessingTimeSlidingGroupWindowOverCount">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[COUNT($1)], window=[SlidingGroupWindow('w, proctime, 2, 1)], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(groupBy=[string], window=[SlidingGroupWindow('w, proctime, 2, 1)], select=[string, COUNT(int) AS EXPR$0])
++- Exchange(distribution=[hash[string]])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, proctime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testEventTimeSlidingGroupWindowOverCount">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[COUNT($1)], window=[SlidingGroupWindow('w, rowtime, 8, 10)], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(groupBy=[string], window=[SlidingGroupWindow('w, rowtime, 8, 10)], select=[string, COUNT(int) AS EXPR$0])
++- Exchange(distribution=[hash[string]])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[rowtime, int, string])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testEventTimeSlidingGroupWindowOverTime">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[COUNT($1)], window=[SlidingGroupWindow('w, rowtime, 8, 10)], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(groupBy=[string], window=[SlidingGroupWindow('w, rowtime, 8, 10)], select=[string, COUNT(int) AS EXPR$0])
++- Exchange(distribution=[hash[string]])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testEventTimeSlidingGroupWindowWithUdAgg">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[myWeightedAvg($0, $1)], window=[SlidingGroupWindow('w, rowtime, 8, 10)], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(groupBy=[string], window=[SlidingGroupWindow('w, rowtime, 8, 10)], select=[string, myWeightedAvg(long, int) AS EXPR$0])
++- Exchange(distribution=[hash[string]])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testEventTimeTumblingGroupWindowOverTime">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[COUNT($1)], window=[TumblingGroupWindow], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(groupBy=[string], window=[TumblingGroupWindow], select=[string, COUNT(int) AS EXPR$0])
++- Exchange(distribution=[hash[string]])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[rowtime, int, string])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testEventTimeTumblingGroupWindowWithUdAgg">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[myWeightedAvg($0, $1)], window=[TumblingGroupWindow], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(groupBy=[string], window=[TumblingGroupWindow], select=[string, myWeightedAvg(long, int) AS EXPR$0])
++- Exchange(distribution=[hash[string]])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testSlideWindowStartEnd">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1], EXPR$1=[$2], EXPR$2=[$3])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[COUNT($1)], window=[SlidingGroupWindow('w, rowtime, 10, 5)], properties=[EXPR$1, EXPR$2])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(groupBy=[string], window=[SlidingGroupWindow('w, rowtime, 10, 5)], properties=[EXPR$1, EXPR$2], select=[string, COUNT(int) AS EXPR$0, start('w) AS EXPR$1, end('w) AS EXPR$2])
++- Exchange(distribution=[hash[string]])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testMultiWindow">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$0])
++- LogicalWindowAggregate(group=[{}], EXPR$0=[COUNT($1)], window=[SlidingGroupWindow('w2, proctime, 20, 10)], properties=[])
+   +- LogicalProject(proctime=[AS($2, _UTF-16LE'proctime')], string=[$0], EXPR$1=[$1])
+      +- LogicalWindowAggregate(group=[{2}], EXPR$1=[COUNT($1)], window=[TumblingGroupWindow], properties=[EXPR$0])
+         +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(window=[SlidingGroupWindow('w2, proctime, 20, 10)], select=[COUNT(string) AS EXPR$0])
++- Exchange(distribution=[single])
+   +- GroupWindowAggregate(groupBy=[string], window=[TumblingGroupWindow], properties=[EXPR$0], select=[string, COUNT(int) AS EXPR$1, proctime('w1) AS EXPR$0])
+      +- Exchange(distribution=[hash[string]])
+         +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, proctime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcessingTimeSlidingGroupWindowOverTime">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[COUNT($1)], window=[SlidingGroupWindow('w, proctime, 50, 50)], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(groupBy=[string], window=[SlidingGroupWindow('w, proctime, 50, 50)], select=[string, COUNT(int) AS EXPR$0])
++- Exchange(distribution=[hash[string]])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, proctime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcessingTimeTumblingGroupWindowOverCount">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[COUNT($1)], window=[TumblingGroupWindow], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(groupBy=[string], window=[TumblingGroupWindow], select=[string, COUNT(int) AS EXPR$0])
++- Exchange(distribution=[hash[string]])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, proctime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcessingTimeTumblingGroupWindowOverTime">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[COUNT($1)], window=[TumblingGroupWindow], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(groupBy=[string], window=[TumblingGroupWindow], select=[string, COUNT(int) AS EXPR$0])
++- Exchange(distribution=[hash[string]])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, proctime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testSessionWindowStartWithTwoEnd">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(we1=[AS($2, _UTF-16LE'we1')], string=[$0], cnt=[AS($1, _UTF-16LE'cnt')], ws=[AS($3, _UTF-16LE'ws')], we2=[AS($2, _UTF-16LE'we2')])
++- LogicalWindowAggregate(group=[{2}], EXPR$1=[COUNT($1)], window=[SessionGroupWindow('w, rowtime, 3)], properties=[EXPR$0, EXPR$2])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[EXPR$0 AS we1, string, EXPR$1 AS cnt, EXPR$2 AS ws, EXPR$0 AS we2])
++- GroupWindowAggregate(groupBy=[string], window=[SessionGroupWindow('w, rowtime, 3)], properties=[EXPR$0, EXPR$2], select=[string, COUNT(int) AS EXPR$1, end('w) AS EXPR$0, start('w) AS EXPR$2])
+   +- Exchange(distribution=[hash[string]])
+      +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[rowtime, int, string])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testSlidingWindowWithUDAF">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$3])
++- LogicalWindowAggregate(group=[{2, 3, 4}], EXPR$0=[WeightedAvg($0, $1)], window=[SlidingGroupWindow('w, proctime, 2, 1)], properties=[])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[EXPR$0])
++- GroupWindowAggregate(groupBy=[string, int2, int3], window=[SlidingGroupWindow('w, proctime, 2, 1)], select=[string, int2, int3, WeightedAvg(long, int) AS EXPR$0])
+   +- Exchange(distribution=[hash[string, int2, int3]])
+      +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, int2, int3, proctime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testTumbleWindowStartEnd">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], EXPR$0=[$1], EXPR$1=[$2], EXPR$2=[$3])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[COUNT($1)], window=[TumblingGroupWindow], properties=[EXPR$1, EXPR$2])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GroupWindowAggregate(groupBy=[string], window=[TumblingGroupWindow], properties=[EXPR$1, EXPR$2], select=[string, COUNT(int) AS EXPR$0, start('w) AS EXPR$1, end('w) AS EXPR$2])
++- Exchange(distribution=[hash[string]])
+   +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[long, int, string, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testTumbleWindowWithDuplicateAggsAndProps">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(string=[$0], s1=[AS(+($1, 1), _UTF-16LE's1')], s2=[AS(+($1, 3), _UTF-16LE's2')], x=[AS($2, _UTF-16LE'x')], x2=[AS($2, _UTF-16LE'x2')], x3=[AS($3, _UTF-16LE'x3')], EXPR$2=[$3])
++- LogicalWindowAggregate(group=[{2}], EXPR$0=[SUM($1)], window=[TumblingGroupWindow], properties=[EXPR$1, EXPR$2])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[string, +(EXPR$0, 1) AS s1, +(EXPR$0, 3) AS s2, EXPR$1 AS x, EXPR$1 AS x2, EXPR$2 AS x3, EXPR$2])
++- GroupWindowAggregate(groupBy=[string], window=[TumblingGroupWindow], properties=[EXPR$1, EXPR$2], select=[string, SUM(int) AS EXPR$0, start('w) AS EXPR$1, end('w) AS EXPR$2])
+   +- Exchange(distribution=[hash[string]])
+      +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[rowtime, int, string])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
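
Each GroupWindowTest case pairs a window specification with a grouped aggregate: the LogicalWindowAggregate becomes a GroupWindowAggregate behind a hash Exchange when grouping keys are present, or a singleton Exchange when there are none. A sketch of the query behind testEventTimeSlidingGroupWindowOverTime, again under assumed utility names (addDataStream registering a stream-backed table with a rowtime attribute):

    val util = streamTestUtil()
    val t = util.addDataStream[(Long, Int, String)](
      "T1", 'long, 'int, 'string, 'rowtime.rowtime)

    // SlidingGroupWindow('w, rowtime, 8, 10): 8 ms window, sliding every 10 ms
    val result = t
      .window(Slide over 8.millis every 10.millis on 'rowtime as 'w)
      .groupBy('w, 'string)
      .select('string, 'int.count) // COUNT(int) AS EXPR$0 in the plan

    util.verifyPlan(result)
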
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/JoinTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/JoinTest.xml
new file mode 100644
index 0000000..437cf16
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/JoinTest.xml
@@ -0,0 +1,392 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testLeftOuterJoinEquiAndLocalPred">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], y=[$4])
++- LogicalJoin(condition=[AND(=($0, $5), <($1, 2))], joinType=[left])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, S]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, y])
++- Join(joinType=[LeftOuterJoin], where=[AND(=(a, z), $f3)], select=[a, b, $f3, y, z], leftInputSpec=[NoUniqueKey], rightInputSpec=[NoUniqueKey])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, b, <(b, 2) AS $f3])
+   :     +- DataStreamScan(table=[[default_catalog, default_database, T]], fields=[a, b, c])
+   +- Exchange(distribution=[hash[z]])
+      +- Calc(select=[y, z])
+         +- DataStreamScan(table=[[default_catalog, default_database, S]], fields=[x, y, z])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testLeftOuterJoinEquiAndNonEquiPred">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], y=[$4])
++- LogicalJoin(condition=[AND(=($0, $5), <($1, $3))], joinType=[left])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, S]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, y])
++- Join(joinType=[LeftOuterJoin], where=[AND(=(a, z), <(b, x))], select=[a, b, x, y, z], leftInputSpec=[NoUniqueKey], rightInputSpec=[NoUniqueKey])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, b])
+   :     +- DataStreamScan(table=[[default_catalog, default_database, T]], fields=[a, b, c])
+   +- Exchange(distribution=[hash[z]])
+      +- DataStreamScan(table=[[default_catalog, default_database, S]], fields=[x, y, z])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRowTimeInnerJoinWithTimeAccessed">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[AND(=($0, $4), >=($3, -($7, 300000:INTERVAL DAY TO SECOND)), <($3, $7), >($3, $6))])
++- LogicalJoin(condition=[true], joinType=[inner])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T2]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+WindowJoin(joinType=[InnerJoin], windowBounds=[isRowTime=true, leftLowerBound=-300000, leftUpperBound=-1, leftTimeIndex=3, rightTimeIndex=3], where=[AND(=(a, d), >=(CAST(lrtime), -(CAST(rrtime), 300000:INTERVAL DAY TO SECOND)), <(CAST(lrtime), CAST(rrtime)), >(CAST(lrtime), f))], select=[a, b, c, lrtime, d, e, f, rrtime])
+:- Exchange(distribution=[hash[a]])
+:  +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[a, b, c, lrtime])
++- Exchange(distribution=[hash[d]])
+   +- DataStreamScan(table=[[default_catalog, default_database, T2]], fields=[d, e, f, rrtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testLeftOuterJoinEquiPred">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], y=[$4])
++- LogicalJoin(condition=[=($0, $5)], joinType=[left])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, S]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, y])
++- Join(joinType=[LeftOuterJoin], where=[=(a, z)], select=[a, b, y, z], leftInputSpec=[NoUniqueKey], rightInputSpec=[NoUniqueKey])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, b])
+   :     +- DataStreamScan(table=[[default_catalog, default_database, T]], fields=[a, b, c])
+   +- Exchange(distribution=[hash[z]])
+      +- Calc(select=[y, z])
+         +- DataStreamScan(table=[[default_catalog, default_database, S]], fields=[x, y, z])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcTimeWindowFullOuterJoin">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], e=[$5], lptime=[$3])
++- LogicalJoin(condition=[AND(=($0, $4), >=($3, -($7, 1000:INTERVAL DAY TO SECOND)), <($3, $7))], joinType=[full])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T2]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, e, PROCTIME_MATERIALIZE(lptime) AS lptime])
++- WindowJoin(joinType=[FullOuterJoin], windowBounds=[isRowTime=false, leftLowerBound=-1000, leftUpperBound=-1, leftTimeIndex=1, rightTimeIndex=2], where=[AND(=(a, d), >=(lptime, -(rptime, 1000:INTERVAL DAY TO SECOND)), <(lptime, rptime))], select=[a, lptime, d, e, rptime])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, lptime])
+   :     +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[a, b, c, lptime])
+   +- Exchange(distribution=[hash[d]])
+      +- Calc(select=[d, e, rptime])
+         +- DataStreamScan(table=[[default_catalog, default_database, T2]], fields=[d, e, f, rptime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcTimeWindowInnerJoin">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], e=[$5], lptime=[$3])
++- LogicalFilter(condition=[AND(=($0, $4), >=($3, -($7, 1000:INTERVAL DAY TO SECOND)), <($3, $7))])
+   +- LogicalJoin(condition=[true], joinType=[inner])
+      :- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+      +- LogicalTableScan(table=[[default_catalog, default_database, T2]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, e, PROCTIME_MATERIALIZE(lptime) AS lptime])
++- WindowJoin(joinType=[InnerJoin], windowBounds=[isRowTime=false, leftLowerBound=-1000, leftUpperBound=-1, leftTimeIndex=1, rightTimeIndex=2], where=[AND(=(a, d), >=(PROCTIME_MATERIALIZE(lptime), -(PROCTIME_MATERIALIZE(rptime), 1000:INTERVAL DAY TO SECOND)), <(PROCTIME_MATERIALIZE(lptime), PROCTIME_MATERIALIZE(rptime)))], select=[a, lptime, d, e, rptime])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, lptime])
+   :     +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[a, b, c, lptime])
+   +- Exchange(distribution=[hash[d]])
+      +- Calc(select=[d, e, rptime])
+         +- DataStreamScan(table=[[default_catalog, default_database, T2]], fields=[d, e, f, rptime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcTimeWindowInnerJoinWithEquiTimeAttrs">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], e=[$5], lptime=[$3])
++- LogicalFilter(condition=[AND(=($0, $4), =($3, $7))])
+   +- LogicalJoin(condition=[true], joinType=[inner])
+      :- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+      +- LogicalTableScan(table=[[default_catalog, default_database, T2]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, e, PROCTIME_MATERIALIZE(lptime) AS lptime])
++- Join(joinType=[InnerJoin], where=[AND(=(a, d), =($f4, $f40))], select=[a, lptime, $f4, d, e, $f40], leftInputSpec=[NoUniqueKey], rightInputSpec=[NoUniqueKey])
+   :- Exchange(distribution=[hash[a, $f4]])
+   :  +- Calc(select=[a, lptime, PROCTIME_MATERIALIZE(lptime) AS $f4])
+   :     +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[a, b, c, lptime])
+   +- Exchange(distribution=[hash[d, $f4]])
+      +- Calc(select=[d, e, PROCTIME_MATERIALIZE(rptime) AS $f4])
+         +- DataStreamScan(table=[[default_catalog, default_database, T2]], fields=[d, e, f, rptime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcTimeWindowLeftOuterJoin">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], e=[$5], lptime=[$3])
++- LogicalJoin(condition=[AND(=($0, $4), >=($3, -($7, 1000:INTERVAL DAY TO SECOND)), <($3, $7))], joinType=[left])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T2]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, e, PROCTIME_MATERIALIZE(lptime) AS lptime])
++- WindowJoin(joinType=[LeftOuterJoin], windowBounds=[isRowTime=false, leftLowerBound=-1000, leftUpperBound=-1, leftTimeIndex=1, rightTimeIndex=2], where=[AND(=(a, d), >=(lptime, -(rptime, 1000:INTERVAL DAY TO SECOND)), <(lptime, rptime))], select=[a, lptime, d, e, rptime])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, lptime])
+   :     +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[a, b, c, lptime])
+   +- Exchange(distribution=[hash[d]])
+      +- Calc(select=[d, e, rptime])
+         +- DataStreamScan(table=[[default_catalog, default_database, T2]], fields=[d, e, f, rptime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcTimeWindowRightOuterJoin">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], e=[$5], lptime=[$3])
++- LogicalJoin(condition=[AND(=($0, $4), >=($3, -($7, 1000:INTERVAL DAY TO SECOND)), <($3, $7))], joinType=[right])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T2]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, e, PROCTIME_MATERIALIZE(lptime) AS lptime])
++- WindowJoin(joinType=[RightOuterJoin], windowBounds=[isRowTime=false, leftLowerBound=-1000, leftUpperBound=-1, leftTimeIndex=1, rightTimeIndex=2], where=[AND(=(a, d), >=(lptime, -(rptime, 1000:INTERVAL DAY TO SECOND)), <(lptime, rptime))], select=[a, lptime, d, e, rptime])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, lptime])
+   :     +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[a, b, c, lptime])
+   +- Exchange(distribution=[hash[d]])
+      +- Calc(select=[d, e, rptime])
+         +- DataStreamScan(table=[[default_catalog, default_database, T2]], fields=[d, e, f, rptime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRightOuterJoinEquiAndLocalPred">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], x=[$3])
++- LogicalJoin(condition=[AND(=($0, $5), <($3, 2))], joinType=[right])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, S]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, x])
++- Join(joinType=[RightOuterJoin], where=[AND(=(a, z), $f3)], select=[a, b, x, z, $f3], leftInputSpec=[NoUniqueKey], rightInputSpec=[NoUniqueKey])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, b])
+   :     +- DataStreamScan(table=[[default_catalog, default_database, T]], fields=[a, b, c])
+   +- Exchange(distribution=[hash[z]])
+      +- Calc(select=[x, z, <(x, 2) AS $f3])
+         +- DataStreamScan(table=[[default_catalog, default_database, S]], fields=[x, y, z])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRightOuterJoinEquiAndNonEquiPred">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], y=[$4])
++- LogicalJoin(condition=[AND(=($0, $5), <($1, $3))], joinType=[right])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, S]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, y])
++- Join(joinType=[RightOuterJoin], where=[AND(=(a, z), <(b, x))], select=[a, b, x, y, z], leftInputSpec=[NoUniqueKey], rightInputSpec=[NoUniqueKey])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, b])
+   :     +- DataStreamScan(table=[[default_catalog, default_database, T]], fields=[a, b, c])
+   +- Exchange(distribution=[hash[z]])
+      +- DataStreamScan(table=[[default_catalog, default_database, S]], fields=[x, y, z])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRowTimeWindowInnerJoin">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], e=[$5], lrtime=[$3])
++- LogicalFilter(condition=[AND(=($0, $4), >=($3, -($7, 300000:INTERVAL DAY TO SECOND)), <($3, +($7, 3000:INTERVAL DAY TO SECOND)))])
+   +- LogicalJoin(condition=[true], joinType=[inner])
+      :- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+      +- LogicalTableScan(table=[[default_catalog, default_database, T2]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, e, lrtime])
++- WindowJoin(joinType=[InnerJoin], windowBounds=[isRowTime=true, leftLowerBound=-300000, leftUpperBound=2999, leftTimeIndex=1, rightTimeIndex=2], where=[AND(=(a, d), >=(CAST(lrtime), -(CAST(rrtime), 300000:INTERVAL DAY TO SECOND)), <(CAST(lrtime), +(CAST(rrtime), 3000:INTERVAL DAY TO SECOND)))], select=[a, lrtime, d, e, rrtime])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, lrtime])
+   :     +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[a, b, c, lrtime])
+   +- Exchange(distribution=[hash[d]])
+      +- Calc(select=[d, e, rrtime])
+         +- DataStreamScan(table=[[default_catalog, default_database, T2]], fields=[d, e, f, rrtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRightOuterJoinEquiPred">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], y=[$4])
++- LogicalJoin(condition=[=($0, $5)], joinType=[right])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, S]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[b, y])
++- Join(joinType=[RightOuterJoin], where=[=(a, z)], select=[a, b, y, z], leftInputSpec=[NoUniqueKey], rightInputSpec=[NoUniqueKey])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, b])
+   :     +- DataStreamScan(table=[[default_catalog, default_database, T]], fields=[a, b, c])
+   +- Exchange(distribution=[hash[z]])
+      +- Calc(select=[y, z])
+         +- DataStreamScan(table=[[default_catalog, default_database, S]], fields=[x, y, z])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRowTimeWindowFullOuterJoin">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], e=[$5], lrtime=[$3])
++- LogicalJoin(condition=[AND(=($0, $4), >=($3, -($7, 300000:INTERVAL DAY TO SECOND)), <($3, +($7, 3000:INTERVAL DAY TO SECOND)))], joinType=[full])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T2]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, e, lrtime])
++- WindowJoin(joinType=[FullOuterJoin], windowBounds=[isRowTime=true, leftLowerBound=-300000, leftUpperBound=2999, leftTimeIndex=1, rightTimeIndex=2], where=[AND(=(a, d), >=(lrtime, -(rrtime, 300000:INTERVAL DAY TO SECOND)), <(lrtime, +(rrtime, 3000:INTERVAL DAY TO SECOND)))], select=[a, lrtime, d, e, rrtime])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, lrtime])
+   :     +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[a, b, c, lrtime])
+   +- Exchange(distribution=[hash[d]])
+      +- Calc(select=[d, e, rrtime])
+         +- DataStreamScan(table=[[default_catalog, default_database, T2]], fields=[d, e, f, rrtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRowTimeWindowLeftOuterJoin">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], e=[$5], lrtime=[$3])
++- LogicalJoin(condition=[AND(=($0, $4), >=($3, -($7, 300000:INTERVAL DAY TO SECOND)), <($3, +($7, 3000:INTERVAL DAY TO SECOND)))], joinType=[left])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T2]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, e, lrtime])
++- WindowJoin(joinType=[LeftOuterJoin], windowBounds=[isRowTime=true, leftLowerBound=-300000, leftUpperBound=2999, leftTimeIndex=1, rightTimeIndex=2], where=[AND(=(a, d), >=(lrtime, -(rrtime, 300000:INTERVAL DAY TO SECOND)), <(lrtime, +(rrtime, 3000:INTERVAL DAY TO SECOND)))], select=[a, lrtime, d, e, rrtime])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, lrtime])
+   :     +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[a, b, c, lrtime])
+   +- Exchange(distribution=[hash[d]])
+      +- Calc(select=[d, e, rrtime])
+         +- DataStreamScan(table=[[default_catalog, default_database, T2]], fields=[d, e, f, rrtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRowTimeWindowOuterJoinOpt">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], e=[$5], lrtime=[$3])
++- LogicalFilter(condition=[AND(=($0, $4), >=($3, -($7, 300000:INTERVAL DAY TO SECOND)), <($3, +($7, 3000:INTERVAL DAY TO SECOND)))])
+   +- LogicalJoin(condition=[true], joinType=[left])
+      :- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+      +- LogicalTableScan(table=[[default_catalog, default_database, T2]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, e, lrtime])
++- WindowJoin(joinType=[InnerJoin], windowBounds=[isRowTime=true, leftLowerBound=-300000, leftUpperBound=2999, leftTimeIndex=1, rightTimeIndex=2], where=[AND(=(a, d), >=(CAST(lrtime), -(CAST(rrtime), 300000:INTERVAL DAY TO SECOND)), <(CAST(lrtime), +(CAST(rrtime), 3000:INTERVAL DAY TO SECOND)))], select=[a, lrtime, d, e, rrtime])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, lrtime])
+   :     +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[a, b, c, lrtime])
+   +- Exchange(distribution=[hash[d]])
+      +- Calc(select=[d, e, rrtime])
+         +- DataStreamScan(table=[[default_catalog, default_database, T2]], fields=[d, e, f, rrtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRowTimeWindowRightOuterJoin">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], e=[$5], lrtime=[$3])
++- LogicalJoin(condition=[AND(=($0, $4), >=($3, -($7, 300000:INTERVAL DAY TO SECOND)), <($3, +($7, 3000:INTERVAL DAY TO SECOND)))], joinType=[right])
+   :- LogicalTableScan(table=[[default_catalog, default_database, T1]])
+   +- LogicalTableScan(table=[[default_catalog, default_database, T2]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, e, lrtime])
++- WindowJoin(joinType=[RightOuterJoin], windowBounds=[isRowTime=true, leftLowerBound=-300000, leftUpperBound=2999, leftTimeIndex=1, rightTimeIndex=2], where=[AND(=(a, d), >=(lrtime, -(rrtime, 300000:INTERVAL DAY TO SECOND)), <(lrtime, +(rrtime, 3000:INTERVAL DAY TO SECOND)))], select=[a, lrtime, d, e, rrtime])
+   :- Exchange(distribution=[hash[a]])
+   :  +- Calc(select=[a, lrtime])
+   :     +- DataStreamScan(table=[[default_catalog, default_database, T1]], fields=[a, b, c, lrtime])
+   +- Exchange(distribution=[hash[d]])
+      +- Calc(select=[d, e, rrtime])
+         +- DataStreamScan(table=[[default_catalog, default_database, T2]], fields=[d, e, f, rrtime])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
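
The WindowJoin plans above come from interval joins: an equi-predicate plus two range predicates on the time attributes, which the planner translates into windowBounds. As an illustration only (element types and utility names are assumptions, as before), testRowTimeWindowInnerJoin's bounds of leftLowerBound=-300000 and leftUpperBound=2999 would result from:

    val util = streamTestUtil()
    val left  = util.addDataStream[(Int, Long, String)]("T1", 'a, 'b, 'c, 'lrtime.rowtime)
    val right = util.addDataStream[(Int, Long, String)]("T2", 'd, 'e, 'f, 'rrtime.rowtime)

    // leftLowerBound = -300000 ms (= -5 minutes); leftUpperBound = 2999 ms,
    // because the strict '<' makes the +3 s bound exclusive (3000 - 1).
    val result = left.join(right)
      .where('a === 'd &&
        'lrtime >= 'rrtime - 5.minutes &&
        'lrtime <  'rrtime + 3.seconds)
      .select('a, 'e, 'lrtime)

    util.verifyPlan(result)
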
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/OverWindowTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/OverWindowTest.xml
new file mode 100644
index 0000000..4f3cb39
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/OverWindowTest.xml
@@ -0,0 +1,308 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testProcTimeBoundedNonPartitionedRangeOver">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], _c1=[AS(COUNT($2) OVER (ORDER BY $3 NULLS FIRST RANGE BETWEEN 10000 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, w0$o0 AS _c1])
++- OverAggregate(orderBy=[proctime ASC], window=[ RANG BETWEEN 10000 PRECEDING AND CURRENT ROW], select=[a, c, proctime, COUNT(c) AS w0$o0])
+   +- Exchange(distribution=[single])
+      +- Calc(select=[a, c, proctime])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcTimeBoundedNonPartitionedRowsOver">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], _c1=[AS(COUNT($0) OVER (ORDER BY $3 NULLS FIRST ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, w0$o0 AS _c1])
++- OverAggregate(orderBy=[proctime ASC], window=[ ROWS BETWEEN 2 PRECEDING AND CURRENT ROW], select=[a, c, proctime, COUNT(a) AS w0$o0])
+   +- Exchange(distribution=[single])
+      +- Calc(select=[a, c, proctime])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcTimeBoundedPartitionedRangeOver">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], myAvg=[AS(org$apache$flink$table$plan$util$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$c79ed2615f99cd5c38d2dd215979bb8a($2, $0) OVER (PARTITION BY $0 ORDER BY $3 NULLS FIRST RANGE BETWEEN 7200000 PRECEDING AND CURRENT ROW), _UTF-16LE'myAvg')])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, w0$o0 AS myAvg])
++- OverAggregate(partitionBy=[a], orderBy=[proctime ASC], window=[ RANG BETWEEN 7200000 PRECEDING AND CURRENT ROW], select=[a, c, proctime, WeightedAvgWithRetract(c, a) AS w0$o0])
+   +- Exchange(distribution=[hash[a]])
+      +- Calc(select=[a, c, proctime])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcTimeBoundedPartitionedRowsOver">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], _c1=[AS(org$apache$flink$table$plan$util$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$c79ed2615f99cd5c38d2dd215979bb8a($2, $0) OVER (PARTITION BY $1 ORDER BY $3 NULLS FIRST ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, w0$o0 AS _c1])
++- OverAggregate(partitionBy=[b], orderBy=[proctime ASC], window=[ ROWS BETWEEN 2 PRECEDING AND CURRENT ROW], select=[a, b, c, proctime, WeightedAvgWithRetract(c, a) AS w0$o0])
+   +- Exchange(distribution=[hash[b]])
+      +- Calc(select=[a, b, c, proctime])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcTimeUnboundedNonPartitionedRangeOver">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], c=[$2], _c2=[AS(COUNT($0) OVER (ORDER BY $3 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c2')], _c3=[AS(SUM($0) OVER (ORDER BY $3 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c3')])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, c, w0$o0 AS _c2, w0$o1 AS _c3])
++- OverAggregate(orderBy=[proctime ASC], window=[ RANG BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, c, proctime, COUNT(a) AS w0$o0, SUM(a) AS w0$o1])
+   +- Exchange(distribution=[single])
+      +- Calc(select=[a, c, proctime])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcTimeUnboundedNonPartitionedRowsOver">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], _c1=[AS(COUNT($0) OVER (ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, w0$o0 AS _c1])
++- OverAggregate(orderBy=[proctime ASC], window=[ ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, c, proctime, COUNT(a) AS w0$o0])
+   +- Exchange(distribution=[single])
+      +- Calc(select=[a, c, proctime])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcTimeUnboundedPartitionedRangeOver">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], c=[$2], _c2=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c2')], _c3=[AS(org$apache$flink$table$plan$util$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$c79ed2615f99cd5c38d2dd215979bb8a($2, $0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c3')])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, c, w0$o0 AS _c2, w0$o1 AS _c3])
++- OverAggregate(partitionBy=[c], orderBy=[proctime ASC], window=[ RANG BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, c, proctime, COUNT(a) AS w0$o0, WeightedAvgWithRetract(c, a) AS w0$o1])
+   +- Exchange(distribution=[hash[c]])
+      +- Calc(select=[a, c, proctime])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcTimeUnboundedPartitionedRowsOver">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], _c1=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], _c2=[AS(org$apache$flink$table$plan$util$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$c79ed2615f99cd5c38d2dd215979bb8a($2, $0) OVER (PARTITION BY $2 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c2')])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, w0$o0 AS _c1, w0$o1 AS _c2])
++- OverAggregate(partitionBy=[c], orderBy=[proctime ASC], window=[ ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, c, proctime, COUNT(a) AS w0$o0, WeightedAvgWithRetract(c, a) AS w0$o1])
+   +- Exchange(distribution=[hash[c]])
+      +- Calc(select=[a, c, proctime])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRowTimeBoundedNonPartitionedRangeOver">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], _c1=[AS(COUNT($2) OVER (ORDER BY $4 NULLS FIRST RANGE BETWEEN 10000 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, w0$o0 AS _c1])
++- OverAggregate(orderBy=[rowtime ASC], window=[ RANG BETWEEN 10000 PRECEDING AND CURRENT ROW], select=[a, c, rowtime, COUNT(c) AS w0$o0])
+   +- Exchange(distribution=[single])
+      +- Calc(select=[a, c, rowtime])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRowTimeBoundedPartitionedRangeOver">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], _c1=[AS(AVG($2) OVER (PARTITION BY $0 ORDER BY $4 NULLS FIRST RANGE BETWEEN 7200000 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], wAvg=[AS(org$apache$flink$table$plan$util$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$c79ed2615f99cd5c38d2dd215979bb8a($2, $0) OVER (PARTITION BY $0 ORDER BY $4 NULLS FIRST RANGE BETWEEN 7200000 PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, w0$o0 AS _c1, w0$o1 AS wAvg])
++- OverAggregate(partitionBy=[a], orderBy=[rowtime ASC], window=[ RANG BETWEEN 7200000 PRECEDING AND CURRENT ROW], select=[a, c, rowtime, AVG(c) AS w0$o0, WeightedAvgWithRetract(c, a) AS w0$o1])
+   +- Exchange(distribution=[hash[a]])
+      +- Calc(select=[a, c, rowtime])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRowTimeBoundedNonPartitionedRowsOver">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], _c1=[AS(COUNT($0) OVER (ORDER BY $4 NULLS FIRST ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, w0$o0 AS _c1])
++- OverAggregate(orderBy=[rowtime ASC], window=[ ROWS BETWEEN 2 PRECEDING AND CURRENT ROW], select=[a, c, rowtime, COUNT(a) AS w0$o0])
+   +- Exchange(distribution=[single])
+      +- Calc(select=[a, c, rowtime])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRowTimeBoundedPartitionedRowsOver">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], _c1=[AS(COUNT($1) OVER (PARTITION BY $1 ORDER BY $4 NULLS FIRST ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], wAvg=[AS(org$apache$flink$table$plan$util$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$c79ed2615f99cd5c38d2dd215979bb8a($2, $0) OVER (PARTITION BY $1 ORDER BY $4 NULLS FIRST ROWS BETWEEN 2 PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, w0$o0 AS _c1, w0$o1 AS wAvg])
++- OverAggregate(partitionBy=[b], orderBy=[rowtime ASC], window=[ ROWS BETWEEN 2 PRECEDING AND CURRENT ROW], select=[a, b, c, rowtime, COUNT(b) AS w0$o0, WeightedAvgWithRetract(c, a) AS w0$o1])
+   +- Exchange(distribution=[hash[b]])
+      +- Calc(select=[a, b, c, rowtime])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRowTimeUnboundedNonPartitionedRangeOver">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], c=[$2], _c2=[AS(COUNT($0) OVER (ORDER BY $4 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c2')], _c3=[AS(SUM($0) OVER (ORDER BY $4 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c3')])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, c, w0$o0 AS _c2, w0$o1 AS _c3])
++- OverAggregate(orderBy=[rowtime ASC], window=[ RANG BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, c, rowtime, COUNT(a) AS w0$o0, SUM(a) AS w0$o1])
+   +- Exchange(distribution=[single])
+      +- Calc(select=[a, c, rowtime])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRowTimeUnboundedNonPartitionedRowsOver">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], _c1=[AS(COUNT($0) OVER (ORDER BY $4 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, w0$o0 AS _c1])
++- OverAggregate(orderBy=[rowtime ASC], window=[ ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, c, rowtime, COUNT(a) AS w0$o0])
+   +- Exchange(distribution=[single])
+      +- Calc(select=[a, c, rowtime])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRowTimeUnboundedPartitionedRowsOver">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(c=[$2], _c1=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c1')], wAvg=[AS(org$apache$flink$table$plan$util$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$c79ed2615f99cd5c38d2dd215979bb8a($2, $0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[c, w0$o0 AS _c1, w0$o1 AS wAvg])
++- OverAggregate(partitionBy=[c], orderBy=[rowtime ASC], window=[ ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, c, rowtime, COUNT(a) AS w0$o0, WeightedAvgWithRetract(c, a) AS w0$o1])
+   +- Exchange(distribution=[hash[c]])
+      +- Calc(select=[a, c, rowtime])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRowTimeUnboundedPartitionedRangeOver">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$0], c=[$2], _c2=[AS(COUNT($0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'_c2')], wAvg=[AS(org$apache$flink$table$plan$util$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$c79ed2615f99cd5c38d2dd215979bb8a($2, $0) OVER (PARTITION BY $2 ORDER BY $4 NULLS FIRST RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wAvg')])
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[a, c, w0$o0 AS _c2, w0$o1 AS wAvg])
++- OverAggregate(partitionBy=[c], orderBy=[rowtime ASC], window=[ RANG BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, c, rowtime, COUNT(a) AS w0$o0, WeightedAvgWithRetract(c, a) AS w0$o1])
+   +- Exchange(distribution=[hash[c]])
+      +- Calc(select=[a, c, rowtime])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testScalarFunctionsOnOverWindow">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(d=[AS(org$apache$flink$table$expressions$utils$Func1$$a39386268ffec8461452460bcbe089ad(AS(SUM($0) OVER (PARTITION BY $1 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW), _UTF-16LE'wsum')), _UTF-16LE'd')], _c1=[AS(EXP(CAST(COUNT($0) OVER (PARTITION BY $1 ORDER BY $3 NULLS FIRST ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)):DOUBLE), _UTF-16LE'_c1')], _c2=[AS(+(org$apache$flink$table$plan$util$JavaUserDefinedAggFunctions$WeightedAvgWithRetract$c7 [...]
++- LogicalTableScan(table=[[default_catalog, default_database, MyTable]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[Func1$(w0$o0) AS d, EXP(CAST(w0$o1)) AS _c1, +(w0$o2, 1) AS _c2, ||(_UTF-16LE'AVG:', CAST(w0$o2)) AS _c3, ARRAY(w0$o2, w0$o1) AS _c4])
++- OverAggregate(partitionBy=[b], orderBy=[proctime ASC], window=[ ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW], select=[a, b, c, proctime, SUM(a) AS w0$o0, COUNT(a) AS w0$o1, WeightedAvgWithRetract(c, a) AS w0$o2])
+   +- Exchange(distribution=[hash[b]])
+      +- Calc(select=[a, b, c, proctime])
+         +- DataStreamScan(table=[[default_catalog, default_database, MyTable]], fields=[a, b, c, proctime, rowtime])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
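The over-window plans above come from Table API over aggregates. A minimal sketch of the shape behind testProcTimeBoundedPartitionedRowsOver — field names and the UDAGG class are taken from the plan digests, the source registration is an assumption:

    import org.apache.flink.api.scala._
    import org.apache.flink.table.api.scala._
    import org.apache.flink.table.plan.util.JavaUserDefinedAggFunctions.WeightedAvgWithRetract

    val table = util.addDataStream[(Int, String, Long)](
      "MyTable", 'a, 'b, 'c, 'proctime.proctime, 'rowtime.rowtime)
    val weightedAvg = new WeightedAvgWithRetract

    // ROWS BETWEEN 2 PRECEDING AND CURRENT ROW, partitioned by b, ordered by proctime
    val result = table
      .window(Over partitionBy 'b orderBy 'proctime preceding 2.rows as 'w)
      .select('c, weightedAvg('c, 'a) over 'w)

    util.verifyPlan(result)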
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/SetOperatorsTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/SetOperatorsTest.xml
new file mode 100644
index 0000000..a910d93
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/SetOperatorsTest.xml
@@ -0,0 +1,140 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testInUncorrelated">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[IN($0, {
+LogicalProject(x=[$0])
+  LogicalTableScan(table=[[default_catalog, default_database, Table2, source: [TestTableSource(x, y)]]])
+})])
++- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Join(joinType=[LeftSemiJoin], where=[=(a, x)], select=[a, b, c], leftInputSpec=[NoUniqueKey], rightInputSpec=[NoUniqueKey])
+:- Exchange(distribution=[hash[a]])
+:  +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
++- Exchange(distribution=[hash[x]])
+   +- Calc(select=[x])
+      +- TableSourceScan(table=[[default_catalog, default_database, Table2, source: [TestTableSource(x, y)]]], fields=[x, y])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testFilterUnionTranspose">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(a=[$1], b=[$0], c=[$2])
++- LogicalAggregate(group=[{1}], EXPR$0=[SUM($0)], EXPR$1=[COUNT($2)])
+   +- LogicalFilter(condition=[>($0, 0)])
+      +- LogicalUnion(all=[true])
+         :- LogicalTableScan(table=[[default_catalog, default_database, left, source: [TestTableSource(a, b, c)]]])
+         +- LogicalTableScan(table=[[default_catalog, default_database, right, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[EXPR$0 AS a, b, EXPR$1 AS c])
++- GroupAggregate(groupBy=[b], select=[b, SUM(a) AS EXPR$0, COUNT(c) AS EXPR$1])
+   +- Exchange(distribution=[hash[b]])
+      +- Union(all=[true], union=[a, b, c])
+         :- Calc(select=[a, b, c], where=[>(a, 0)])
+         :  +- TableSourceScan(table=[[default_catalog, default_database, left, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+         +- Calc(select=[a, b, c], where=[>(a, 0)])
+            +- TableSourceScan(table=[[default_catalog, default_database, right, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testInUncorrelatedWithConditionAndAgg">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[IN($0, {
+LogicalProject(EXPR$0=[$1])
+  LogicalAggregate(group=[{1}], EXPR$0=[SUM($0)])
+    LogicalFilter(condition=[LIKE($1, _UTF-16LE'%Hanoi%')])
+      LogicalTableScan(table=[[default_catalog, default_database, tableB, source: [TestTableSource(x, y)]]])
+})])
++- LogicalTableScan(table=[[default_catalog, default_database, tableA, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Join(joinType=[LeftSemiJoin], where=[=(a, EXPR$0)], select=[a, b, c], leftInputSpec=[NoUniqueKey], rightInputSpec=[NoUniqueKey])
+:- Exchange(distribution=[hash[a]])
+:  +- TableSourceScan(table=[[default_catalog, default_database, tableA, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
++- Exchange(distribution=[hash[EXPR$0]])
+   +- Calc(select=[EXPR$0])
+      +- GroupAggregate(groupBy=[y], select=[y, SUM(x) AS EXPR$0])
+         +- Exchange(distribution=[hash[y]])
+            +- Calc(select=[x, y], where=[LIKE(y, _UTF-16LE'%Hanoi%')])
+               +- TableSourceScan(table=[[default_catalog, default_database, tableB, source: [TestTableSource(x, y)]]], fields=[x, y])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProjectUnionTranspose">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$1], c=[$2])
++- LogicalUnion(all=[true])
+   :- LogicalProject(a=[$0], b=[$1], c=[$2])
+   :  +- LogicalTableScan(table=[[default_catalog, default_database, left, source: [TestTableSource(a, b, c)]]])
+   +- LogicalProject(a=[$0], b=[$1], c=[$2])
+      +- LogicalTableScan(table=[[default_catalog, default_database, right, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Union(all=[true], union=[b, c])
+:- Calc(select=[b, c])
+:  +- TableSourceScan(table=[[default_catalog, default_database, left, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
++- Calc(select=[b, c])
+   +- TableSourceScan(table=[[default_catalog, default_database, right, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testInWithMultiUncorrelatedCondition">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[AND(IN($0, {
+LogicalProject(x=[$0])
+  LogicalTableScan(table=[[default_catalog, default_database, tableB, source: [TestTableSource(x, y)]]])
+}), IN($1, {
+LogicalProject(w=[$0])
+  LogicalTableScan(table=[[default_catalog, default_database, tableC, source: [TestTableSource(w, z)]]])
+}))])
++- LogicalTableScan(table=[[default_catalog, default_database, tableA, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Join(joinType=[LeftSemiJoin], where=[=(b, w)], select=[a, b, c], leftInputSpec=[NoUniqueKey], rightInputSpec=[NoUniqueKey])
+:- Exchange(distribution=[hash[b]])
+:  +- Join(joinType=[LeftSemiJoin], where=[=(a, x)], select=[a, b, c], leftInputSpec=[NoUniqueKey], rightInputSpec=[NoUniqueKey])
+:     :- Exchange(distribution=[hash[a]])
+:     :  +- TableSourceScan(table=[[default_catalog, default_database, tableA, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+:     +- Exchange(distribution=[hash[x]])
+:        +- Calc(select=[x])
+:           +- TableSourceScan(table=[[default_catalog, default_database, tableB, source: [TestTableSource(x, y)]]], fields=[x, y])
++- Exchange(distribution=[hash[w]])
+   +- Calc(select=[w])
+      +- TableSourceScan(table=[[default_catalog, default_database, tableC, source: [TestTableSource(w, z)]]], fields=[w, z])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
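The LeftSemiJoin plans above are produced from uncorrelated IN sub-queries written with the Table API in() expression. A minimal sketch of testInUncorrelated, with table names from the plans and the registration calls assumed:

    import org.apache.flink.api.scala._
    import org.apache.flink.table.api.scala._

    val tableA = util.addTableSource[(Int, Long, String)]("Table1", 'a, 'b, 'c)
    val tableB = util.addTableSource[(Int, String)]("Table2", 'x, 'y)

    // rewritten by the planner into a LeftSemiJoin on a = x
    val result = tableA.where('a.in(tableB.select('x)))

    util.verifyPlan(result)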
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/TableSourceTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/TableSourceTest.xml
new file mode 100644
index 0000000..fcdf107
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/TableSourceTest.xml
@@ -0,0 +1,153 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testNestedProject">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(id=[$0], nestedName=[$1.nested1.name], nestedValue=[$2.value], nestedFlag=[$1.nested2.flag], nestedNum=[$1.nested2.num])
++- LogicalTableScan(table=[[default_catalog, default_database, T, source: [TestSource(read nested fields: id.*, deepNested.*, nested.*, name.*)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[id, deepNested.nested1.name AS nestedName, nested.value AS nestedValue, deepNested.nested2.flag AS nestedFlag, deepNested.nested2.num AS nestedNum])
++- TableSourceScan(table=[[default_catalog, default_database, T, source: [TestSource(read nested fields: id.*, deepNested.nested2.num, deepNested.nested2.flag, deepNested.nested1.name, nested.value)]]], fields=[id, deepNested, nested])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcTimeTableSourceOverWindow">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[>($2, 100)])
++- LogicalProject(id=[$0], name=[$3], valSum=[AS(SUM($2) OVER (PARTITION BY $0 ORDER BY $1 NULLS FIRST RANGE BETWEEN 7200000 PRECEDING AND CURRENT ROW), _UTF-16LE'valSum')])
+   +- LogicalTableScan(table=[[default_catalog, default_database, procTimeT]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[id, name, w0$o0 AS valSum], where=[>(w0$o0, 100)])
++- OverAggregate(partitionBy=[id], orderBy=[proctime ASC], window=[ RANG BETWEEN 7200000 PRECEDING AND CURRENT ROW], select=[id, proctime, val, name, SUM(val) AS w0$o0])
+   +- Exchange(distribution=[hash[id]])
+      +- TableSourceScan(table=[[default_catalog, default_database, procTimeT]], fields=[id, proctime, val, name])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProcTimeTableSourceSimple">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(proctime=[$1], id=[$0], name=[$3], val=[$2])
++- LogicalTableScan(table=[[default_catalog, default_database, procTimeT]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[PROCTIME_MATERIALIZE(proctime) AS proctime, id, name, val])
++- TableSourceScan(table=[[default_catalog, default_database, procTimeT]], fields=[id, proctime, val, name])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProjectWithMapping">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(name=[$4], rtime=[$1], val=[$2])
++- LogicalTableScan(table=[[default_catalog, default_database, T, source: [TestSource(physical fields: p-rtime, p-id, p-name, p-val)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[name, rtime, val])
++- TableSourceScan(table=[[default_catalog, default_database, T, source: [TestSource(physical fields: p-rtime, p-id, p-name, p-val)]]], fields=[id, rtime, val, ptime, name])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProjectWithoutRowtime">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(ptime=[$3], name=[$4], val=[$2], id=[$0])
++- LogicalTableScan(table=[[default_catalog, default_database, T, source: [TestSource(physical fields: id, name, val, rtime)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[PROCTIME_MATERIALIZE(ptime) AS ptime, name, val, id])
++- TableSourceScan(table=[[default_catalog, default_database, T, source: [TestSource(physical fields: id, name, val, rtime)]]], fields=[id, rtime, val, ptime, name])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testProjectWithRowtimeProctime">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(name=[$4], val=[$2], id=[$0])
++- LogicalTableScan(table=[[default_catalog, default_database, T, source: [TestSource(physical fields: id, name, val, rtime)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[name, val, id])
++- TableSourceScan(table=[[default_catalog, default_database, T, source: [TestSource(physical fields: id, name, val, rtime)]]], fields=[id, rtime, val, ptime, name])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testRowTimeTableSourceGroupWindow">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(name=[$0], EXPR$0=[$2], EXPR$1=[$1])
++- LogicalWindowAggregate(group=[{3}], EXPR$1=[AVG($2)], window=[TumblingGroupWindow], properties=[EXPR$0])
+   +- LogicalFilter(condition=[>($2, 100)])
+      +- LogicalTableScan(table=[[default_catalog, default_database, rowTimeT]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[name, EXPR$0, EXPR$1])
++- GroupWindowAggregate(groupBy=[name], window=[TumblingGroupWindow], properties=[EXPR$0], select=[name, AVG(val) AS EXPR$1, end('w) AS EXPR$0])
+   +- Exchange(distribution=[hash[name]])
+      +- Calc(select=[id, rowtime, val, name], where=[>(val, 100)])
+         +- TableSourceScan(table=[[default_catalog, default_database, rowTimeT]], fields=[id, rowtime, val, name])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testTableSourceWithTimestampRowTimeField">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(rowtime=[$1], id=[$0], name=[$3], val=[$2])
++- LogicalTableScan(table=[[default_catalog, default_database, rowTimeT]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[rowtime, id, name, val])
++- TableSourceScan(table=[[default_catalog, default_database, rowTimeT]], fields=[id, rowtime, val, name])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testTableSourceWithLongRowTimeField">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(rowtime=[$1], id=[$0], name=[$3], val=[$2])
++- LogicalTableScan(table=[[default_catalog, default_database, rowTimeT]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[rowtime, id, name, val])
++- TableSourceScan(table=[[default_catalog, default_database, rowTimeT]], fields=[id, rowtime, val, name])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
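Note on testNestedProject above: it exercises nested-field projection push-down, which is why the pushed-down source digest lists only the accessed leaves (deepNested.nested2.num, deepNested.nested1.name, nested.value, ...). A sketch of the corresponding Table API projection, using nested accessors via get(); the registration of table t is assumed:

    val result = t.select(
      'id,
      'deepNested.get("nested1").get("name") as 'nestedName,
      'nested.get("value") as 'nestedValue,
      'deepNested.get("nested2").get("flag") as 'nestedFlag,
      'deepNested.get("nested2").get("num") as 'nestedNum)

    util.verifyPlan(result)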
diff --git a/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/TwoStageAggregateTest.xml b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/TwoStageAggregateTest.xml
new file mode 100644
index 0000000..bcb1716
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/resources/org/apache/flink/table/plan/stream/table/TwoStageAggregateTest.xml
@@ -0,0 +1,142 @@
+<?xml version="1.0" ?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to you under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<Root>
+  <TestCase name="testGroupAggregate">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$1])
++- LogicalAggregate(group=[{1}], EXPR$0=[COUNT($0)])
+   +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[EXPR$0])
++- GlobalGroupAggregate(groupBy=[b], select=[b, COUNT(count$0) AS EXPR$0])
+   +- Exchange(distribution=[hash[b]])
+      +- LocalGroupAggregate(groupBy=[b], select=[b, COUNT(a) AS count$0])
+         +- WatermarkAssigner(fields=[a, b, c], miniBatchInterval=[Proctime, 1000ms])
+            +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testGroupAggregateWithAverage">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(b=[$0], EXPR$0=[$1])
++- LogicalAggregate(group=[{1}], EXPR$0=[AVG($3)])
+   +- LogicalProject(a=[$0], b=[$1], c=[$2], a0=[CAST($0):DOUBLE])
+      +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+GlobalGroupAggregate(groupBy=[b], select=[b, AVG((sum$0, count$1)) AS EXPR$0])
++- Exchange(distribution=[hash[b]])
+   +- LocalGroupAggregate(groupBy=[b], select=[b, AVG(a0) AS (sum$0, count$1)])
+      +- Calc(select=[a, b, c, CAST(a) AS a0])
+         +- WatermarkAssigner(fields=[a, b, c], miniBatchInterval=[Proctime, 1000ms])
+            +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testGroupAggregateWithExpressionInSelect">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(EXPR$0=[$1], EXPR$1=[$2])
++- LogicalAggregate(group=[{1}], EXPR$0=[MIN($2)], EXPR$1=[AVG($0)])
+   +- LogicalProject(a=[$0], d=[MOD($1, 3)], c=[$2])
+      +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[EXPR$0, EXPR$1])
++- GlobalGroupAggregate(groupBy=[d], select=[d, MIN(min$0) AS EXPR$0, AVG((sum$1, count$2)) AS EXPR$1])
+   +- Exchange(distribution=[hash[d]])
+      +- LocalGroupAggregate(groupBy=[d], select=[d, MIN(c) AS min$0, AVG(a) AS (sum$1, count$2)])
+         +- Calc(select=[a, MOD(b, 3) AS d, c])
+            +- WatermarkAssigner(fields=[a, b, c], miniBatchInterval=[Proctime, 1000ms])
+               +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testGroupAggregateWithConstant1">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(four=[$1], EXPR$0=[$2])
++- LogicalAggregate(group=[{0, 1}], EXPR$0=[SUM($2)])
+   +- LogicalProject(a=[$0], four=[4], b=[$1])
+      +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[4 AS four, EXPR$0])
++- GlobalGroupAggregate(groupBy=[a, four], select=[a, four, SUM(sum$0) AS EXPR$0])
+   +- Exchange(distribution=[hash[a, four]])
+      +- LocalGroupAggregate(groupBy=[a, four], select=[a, four, SUM(b) AS sum$0])
+         +- Calc(select=[a, 4 AS four, b])
+            +- WatermarkAssigner(fields=[a, b, c], miniBatchInterval=[Proctime, 1000ms])
+               +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testGroupAggregateWithConstant2">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalProject(four=[$1], EXPR$0=[$2])
++- LogicalAggregate(group=[{0, 1}], EXPR$0=[SUM($2)])
+   +- LogicalProject(b=[$1], four=[4], a=[$0])
+      +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[4 AS four, EXPR$0])
++- GlobalGroupAggregate(groupBy=[b, four], select=[b, four, SUM(sum$0) AS EXPR$0])
+   +- Exchange(distribution=[hash[b, four]])
+      +- LocalGroupAggregate(groupBy=[b, four], select=[b, four, SUM(a) AS sum$0])
+         +- Calc(select=[b, 4 AS four, a])
+            +- WatermarkAssigner(fields=[a, b, c], miniBatchInterval=[Proctime, 1000ms])
+               +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+  <TestCase name="testGroupAggregateWithFilter">
+    <Resource name="planBefore">
+      <![CDATA[
+LogicalFilter(condition=[=($0, 2)])
++- LogicalProject(b=[$0], EXPR$0=[$1])
+   +- LogicalAggregate(group=[{1}], EXPR$0=[SUM($0)])
+      +- LogicalTableScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]])
+]]>
+    </Resource>
+    <Resource name="planAfter">
+      <![CDATA[
+Calc(select=[CAST(2) AS b, EXPR$0])
++- GlobalGroupAggregate(groupBy=[b], select=[b, SUM(sum$0) AS EXPR$0])
+   +- Exchange(distribution=[hash[b]])
+      +- LocalGroupAggregate(groupBy=[b], select=[b, SUM(a) AS sum$0])
+         +- Calc(select=[a, b, c], where=[=(b, 2)])
+            +- WatermarkAssigner(fields=[a, b, c], miniBatchInterval=[Proctime, 1000ms])
+               +- TableSourceScan(table=[[default_catalog, default_database, Table1, source: [TestTableSource(a, b, c)]]], fields=[a, b, c])
+]]>
+    </Resource>
+  </TestCase>
+</Root>
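The LocalGroupAggregate/GlobalGroupAggregate pairs above only appear when mini-batch execution and two-phase aggregation are enabled in the planner. A sketch of a setup that yields such plans — the configuration keys follow the blink planner of this era and should be treated as assumptions rather than confirmed by this commit:

    import org.apache.flink.api.scala._
    import org.apache.flink.table.api.scala._

    val util = streamTestUtil()
    val conf = util.tableEnv.getConfig.getConfiguration
    conf.setString("table.exec.mini-batch.enabled", "true")
    conf.setString("table.exec.mini-batch.allow-latency", "1 s")  // matches miniBatchInterval=[Proctime, 1000ms]
    conf.setString("table.exec.mini-batch.size", "100")
    conf.setString("table.optimizer.agg-phase-strategy", "TWO_PHASE")

    val table = util.addTableSource[(Int, Long, String)]("Table1", 'a, 'b, 'c)
    // splits into LocalGroupAggregate + Exchange + GlobalGroupAggregate
    val result = table.groupBy('b).select('a.count)

    util.verifyPlan(result)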
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/AggregateTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/AggregateTest.scala
new file mode 100644
index 0000000..a4c7e65
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/AggregateTest.scala
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit.Test
+
+/**
+  * Tests for aggregate plans.
+  */
+class AggregateTest extends TableTestBase {
+
+  @Test
+  def testGroupAggregateWithFilter(): Unit = {
+
+    val util = batchTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, Int)]("MyTable", 'a, 'b, 'c)
+
+    val resultTable = sourceTable.groupBy('a)
+      .select('a, 'a.avg, 'b.sum, 'c.count)
+      .where('a === 1)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testAggregate(): Unit = {
+    val util = batchTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, Int)]("MyTable", 'a, 'b, 'c)
+    val resultTable = sourceTable.select('a.avg, 'b.sum, 'c.count)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testAggregateWithFilter(): Unit = {
+    val util = batchTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, Int)]("MyTable", 'a, 'b, 'c)
+
+    val resultTable = sourceTable.select('a, 'b, 'c).where('a === 1)
+      .select('a.avg, 'b.sum, 'c.count)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testAggregateWithFilterOnNestedFields(): Unit = {
+    val util = batchTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, (Int, Long))]("MyTable", 'a, 'b, 'c)
+
+    val resultTable = sourceTable.select('a, 'b, 'c).where('a === 1)
+      .select('a.avg, 'b.sum, 'c.count, 'c.get("_1").sum)
+
+    util.verifyPlan(resultTable)
+  }
+}
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/CalcTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/CalcTest.scala
new file mode 100644
index 0000000..354e0d1
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/CalcTest.scala
@@ -0,0 +1,201 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table
+
+import org.apache.flink.api.common.typeinfo.TypeInformation
+import org.apache.flink.api.scala.createTypeInformation
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.functions.ScalarFunction
+import org.apache.flink.table.plan.batch.table.CalcTest.{MyHashCode, TestCaseClass, WC, giveMeCaseClass}
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit.Test
+
+class CalcTest extends TableTestBase {
+
+  @Test
+  def testMultipleFlatteningsTable(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[((Int, Long), (String, Boolean), String)]("MyTable", 'a, 'b, 'c)
+
+    val result = table.select('a.flatten(), 'c, 'b.flatten())
+
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testNestedFlattening(): Unit = {
+    val util = batchTestUtil()
+    val table = util
+      .addTableSource[((((String, TestCaseClass), Boolean), String), String)]("MyTable", 'a, 'b)
+
+    val result = table.select('a.flatten(), 'b.flatten())
+
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testScalarFunctionAccess(): Unit = {
+    val util = batchTestUtil()
+    val table = util
+      .addTableSource[(String, Int)]("MyTable", 'a, 'b)
+
+    val result = table.select(
+      giveMeCaseClass().get("my"),
+      giveMeCaseClass().get("clazz"),
+      giveMeCaseClass().flatten())
+
+    util.verifyPlan(result)
+  }
+
+  // ----------------------------------------------------------------------------------------------
+  // Tests for all situations in which field projection can be applied, such as selecting
+  // only a few fields from a source with many columns.
+  // ----------------------------------------------------------------------------------------------
+
+  @Test
+  def testSimpleSelect(): Unit = {
+    val util = batchTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
+    val resultTable = sourceTable.select('a, 'b)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testSelectAllFields(): Unit = {
+    val util = batchTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
+    val resultTable1 = sourceTable.select('*)
+    val resultTable2 = sourceTable.select('a, 'b, 'c, 'd)
+
+    verifyTableEquals(resultTable1, resultTable2)
+  }
+
+  @Test
+  def testSelectAggregation(): Unit = {
+    val util = batchTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
+    val resultTable = sourceTable.select('a.sum, 'b.max)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testSelectFunction(): Unit = {
+    val util = batchTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
+
+    util.tableEnv.registerFunction("hashCode", MyHashCode)
+
+    val resultTable = sourceTable.select("hashCode(c), b")
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testSelectFromGroupedTable(): Unit = {
+    val util = batchTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
+    val resultTable = sourceTable.groupBy('a, 'c).select('a)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testSelectAllFieldsFromGroupedTable(): Unit = {
+    val util = batchTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
+    val resultTable = sourceTable.groupBy('a, 'c).select('a, 'c)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testSelectAggregationFromGroupedTable(): Unit = {
+    val util = batchTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
+    val resultTable = sourceTable.groupBy('c).select('a.sum)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testSelectFromGroupedTableWithNonTrivialKey(): Unit = {
+    val util = batchTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
+    val resultTable = sourceTable.groupBy('c.upperCase() as 'k).select('a.sum)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testSelectFromGroupedTableWithFunctionKey(): Unit = {
+    val util = batchTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
+    val resultTable = sourceTable.groupBy(MyHashCode('c) as 'k).select('a.sum)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testSelectFromAggregatedPojoTable(): Unit = {
+    val util = batchTestUtil()
+    val sourceTable = util.addTableSource[WC]("MyTable", 'word, 'frequency)
+    val resultTable = sourceTable
+      .groupBy('word)
+      .select('word, 'frequency.sum as 'frequency)
+      .filter('frequency === 2)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testMultiFilter(): Unit = {
+    val util = batchTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
+    val resultTable = sourceTable.select('a, 'b)
+      .filter('a > 0)
+      .filter('b < 2)
+      .filter(('a % 2) === 1)
+
+    util.verifyPlan(resultTable)
+  }
+}
+
+object CalcTest {
+
+  case class TestCaseClass(my: String, clazz: Int)
+
+  object giveMeCaseClass extends ScalarFunction {
+    def eval(): TestCaseClass = {
+      TestCaseClass("hello", 42)
+    }
+
+    override def getResultType(argTypes: Array[Class[_]]): TypeInformation[TestCaseClass] = {
+      createTypeInformation[TestCaseClass]
+    }
+  }
+
+  object MyHashCode extends ScalarFunction {
+    def eval(s: String): Int = s.hashCode()
+  }
+
+  case class WC(word: String, frequency: Long)
+}
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/ColumnFunctionsTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/ColumnFunctionsTest.scala
new file mode 100644
index 0000000..90cc7a5
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/ColumnFunctionsTest.scala
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.functions.ScalarFunction
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit.Test
+
+/**
+  * Tests for column functions.
+  */
+class ColumnFunctionsTest extends TableTestBase {
+
+  val util = batchTestUtil()
+
+  @Test
+  def testOrderBy(): Unit = {
+    val t = util.addTableSource[(Int, Long, String, Int, Long, String)](
+      'a, 'b, 'c, 'd, 'e, 'f)
+
+    val tab1 = t.orderBy(withColumns(1, 2 to 3))
+    val tab2 = t.orderBy("withColumns(1, 2 to 3)")
+    verifyTableEquals(tab1, tab2)
+    util.verifyPlan(tab1)
+  }
+}
+
+object TestFunc extends ScalarFunction {
+  def eval(a: Double, b: Long): Double = {
+    a
+  }
+}
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/CorrelateTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/CorrelateTest.scala
new file mode 100644
index 0000000..ca8d035
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/CorrelateTest.scala
@@ -0,0 +1,120 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.plan.optimize.program.FlinkBatchProgram
+import org.apache.flink.table.util.{TableFunc0, TableFunc1, TableTestBase}
+
+import org.apache.calcite.rel.rules.{CalcMergeRule, FilterCalcMergeRule, ProjectCalcMergeRule}
+import org.apache.calcite.tools.RuleSets
+import org.junit.Test
+
+class CorrelateTest extends TableTestBase {
+
+  @Test
+  def testCrossJoin(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val func = new TableFunc1
+    util.addFunction("func1", func)
+
+    val result1 = table.joinLateral(func('c) as 's).select('c, 's)
+
+    util.verifyPlan(result1)
+  }
+
+  @Test
+  def testCrossJoin2(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val func = new TableFunc1
+    util.addFunction("func1", func)
+
+    val result2 = table.joinLateral(func('c, "$") as 's).select('c, 's)
+    util.verifyPlan(result2)
+  }
+
+  @Test
+  def testLeftOuterJoinWithoutJoinPredicates(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val func = new TableFunc1
+    util.addFunction("func1", func)
+
+    val result = table.leftOuterJoinLateral(func('c) as 's).select('c, 's).where('s > "")
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testLeftOuterJoinWithLiteralTrue(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val func = new TableFunc1
+    util.addFunction("func1", func)
+
+    val result = table.leftOuterJoinLateral(func('c) as 's, true).select('c, 's)
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testCorrelateWithMultiFilter(): Unit = {
+    val util = batchTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val func = new TableFunc0
+    util.addFunction("func1", func)
+
+    val result = sourceTable.select('a, 'b, 'c)
+      .joinLateral(func('c) as('d, 'e))
+      .select('c, 'd, 'e)
+      .where('e > 10)
+      .where('e > 20)
+      .select('c, 'd)
+
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testCorrelateWithMultiFilterAndWithoutCalcMergeRules(): Unit = {
+    val util = batchTestUtil()
+    val programs = util.getBatchProgram()
+    programs.getFlinkRuleSetProgram(FlinkBatchProgram.LOGICAL)
+      .get.remove(
+      RuleSets.ofList(
+        CalcMergeRule.INSTANCE,
+        FilterCalcMergeRule.INSTANCE,
+        ProjectCalcMergeRule.INSTANCE))
+    // remove the calc merge rules so that consecutive Calc nodes stay separate in the plan
+    util.replaceBatchProgram(programs)
+
+    val sourceTable = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val func = new TableFunc0
+    util.addFunction("func1", func)
+
+    val result = sourceTable.select('a, 'b, 'c)
+      .joinLateral(func('c) as('d, 'e))
+      .select('c, 'd, 'e)
+      .where('e > 10)
+      .where('e > 20)
+      .select('c, 'd)
+
+    util.verifyPlan(result)
+  }
+}
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/GroupWindowTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/GroupWindowTest.scala
new file mode 100644
index 0000000..f81e0f0
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/GroupWindowTest.scala
@@ -0,0 +1,158 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.api.{Slide, TableException, Tumble}
+import org.apache.flink.table.plan.util.JavaUserDefinedAggFunctions.WeightedAvgWithMerge
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit.Test
+
+import java.sql.Timestamp
+
+class GroupWindowTest extends TableTestBase {
+
+  //===============================================================================================
+  // Common test
+  //===============================================================================================
+
+  @Test(expected = classOf[TableException])
+  def testEventTimeTumblingGroupWindowOverCount(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
+
+    val windowedTable = table
+      .window(Tumble over 2.rows on 'long as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count)
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testEventTimeTumblingGroupWindowOverTimeWithUdAgg(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
+
+    val myWeightedAvg = new WeightedAvgWithMerge
+
+    val windowedTable = table
+      .window(Tumble over 5.millis on 'long as 'w)
+      .groupBy('w, 'string)
+      .select('string, myWeightedAvg('long, 'int))
+
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testEventTimeTumblingGroupWindowOverTime(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
+
+    val windowedTable = table
+      .window(Tumble over 5.millis on 'long as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count)
+
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testAllEventTimeTumblingGroupWindowOverTime(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
+
+    val windowedTable = table
+      .window(Tumble over 5.millis on 'long as 'w)
+      .groupBy('w)
+      .select('int.count)
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test(expected = classOf[TableException])
+  def testAllEventTimeTumblingGroupWindowOverCount(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
+
+    val windowedTable = table
+      .window(Tumble over 2.rows on 'long as 'w)
+      .groupBy('w)
+      .select('int.count)
+
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testLongEventTimeTumblingGroupWindowWithProperties(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('ts, 'int, 'string)
+
+    val windowedTable = table
+      .window(Tumble over 2.hours on 'ts as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count, 'w.start, 'w.end, 'w.rowtime)
+
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testTimestampEventTimeTumblingGroupWindowWithProperties(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Timestamp, Int, String)]('ts, 'int, 'string)
+
+    val windowedTable = table
+      .window(Tumble over 2.hours on 'ts as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count, 'w.start, 'w.end, 'w.rowtime)
+
+    util.verifyPlan(windowedTable)
+  }
+
+  //===============================================================================================
+  // Sliding Windows
+  //===============================================================================================
+
+  @Test
+  def testEventTimeSlidingGroupWindowOverTime(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
+
+    val windowedTable = table
+      .window(Slide over 8.millis every 10.millis on 'long as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count)
+
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test(expected = classOf[TableException])
+  def testEventTimeSlidingGroupWindowOverCount(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
+
+    val windowedTable = table
+      .window(Slide over 2.rows every 1.rows on 'long as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count)
+
+    util.verifyPlan(windowedTable)
+  }
+
+}
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/JoinTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/JoinTest.scala
new file mode 100644
index 0000000..ca74b0e7
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/JoinTest.scala
@@ -0,0 +1,211 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.functions.ScalarFunction
+import org.apache.flink.table.plan.batch.table.JoinTest.Merger
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit.{Ignore, Test}
+
+class JoinTest extends TableTestBase {
+
+  @Test
+  def testLeftOuterJoinEquiPred(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("T", 'a, 'b, 'c)
+    val s = util.addTableSource[(Long, String, Int)]("S", 'x, 'y, 'z)
+
+    val joined = t.leftOuterJoin(s, 'a === 'z).select('b, 'y)
+
+    util.verifyPlan(joined)
+  }
+
+  @Test
+  def testLeftOuterJoinEquiAndLocalPred(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("T", 'a, 'b, 'c)
+    val s = util.addTableSource[(Long, String, Int)]("S", 'x, 'y, 'z)
+
+    val joined = t.leftOuterJoin(s, 'a === 'z && 'b < 2).select('b, 'y)
+
+    util.verifyPlan(joined)
+  }
+
+  @Test
+  def testLeftOuterJoinEquiAndNonEquiPred(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("T", 'a, 'b, 'c)
+    val s = util.addTableSource[(Long, String, Int)]("S", 'x, 'y, 'z)
+
+    val joined = t.leftOuterJoin(s, 'a === 'z && 'b < 'x).select('b, 'y)
+
+    util.verifyPlan(joined)
+  }
+
+  @Test
+  def testRightOuterJoinEquiPred(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("T", 'a, 'b, 'c)
+    val s = util.addTableSource[(Long, String, Int)]("S", 'x, 'y, 'z)
+
+    val joined = t.rightOuterJoin(s, 'a === 'z).select('b, 'y)
+
+    util.verifyPlan(joined)
+  }
+
+  @Test
+  def testRightOuterJoinEquiAndLocalPred(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("T", 'a, 'b, 'c)
+    val s = util.addTableSource[(Long, String, Int)]("S", 'x, 'y, 'z)
+
+    val joined = t.rightOuterJoin(s, 'a === 'z && 'x < 2).select('b, 'x)
+
+    util.verifyPlan(joined)
+  }
+
+  @Test
+  def testRightOuterJoinEquiAndNonEquiPred(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("T", 'a, 'b, 'c)
+    val s = util.addTableSource[(Long, String, Int)]("S", 'x, 'y, 'z)
+
+    val joined = t.rightOuterJoin(s, 'a === 'z && 'b < 'x).select('b, 'y)
+
+    util.verifyPlan(joined)
+  }
+
+  @Test
+  def testFullOuterJoinEquiPred(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("T", 'a, 'b, 'c)
+    val s = util.addTableSource[(Long, String, Int)]("S", 'x, 'y, 'z)
+
+    val joined = t.fullOuterJoin(s, 'a === 'z).select('b, 'y)
+
+    util.verifyPlan(joined)
+  }
+
+  @Test
+  def testFullOuterJoinEquiAndLocalPred(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("T", 'a, 'b, 'c)
+    val s = util.addTableSource[(Long, String, Int)]("S", 'x, 'y, 'z)
+
+    val joined = t.fullOuterJoin(s, 'a === 'z && 'b < 2).select('b, 'y)
+
+    util.verifyPlan(joined)
+  }
+
+  @Test
+  def testFullOuterJoinEquiAndNonEquiPred(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("T", 'a, 'b, 'c)
+    val s = util.addTableSource[(Long, String, Int)]("S", 'x, 'y, 'z)
+
+    val joined = t.fullOuterJoin(s, 'a === 'z && 'b < 'x).select('b, 'y)
+
+    util.verifyPlan(joined)
+  }
+
+  // TODO [FLINK-7942] [table] Reduce aliasing in RexNodes
+  // @Ignore
+  @Test
+  def testFilterJoinRule(): Unit = {
+    val util = batchTestUtil()
+    val t1 = util.addTableSource[(String, Int, Int)]('a, 'b, 'c)
+    val t2 = util.addTableSource[(String, Int, Int)]('d, 'e, 'f)
+    val results = t1
+      .leftOuterJoin(t2, 'b === 'e)
+      .select('c, Merger('c, 'f) as 'c0)
+      .select(Merger('c, 'c0) as 'c1)
+      .where('c1 >= 0)
+
+    util.verifyPlan(results)
+  }
+
+  // TODO: remove the @Ignore below once non-equi outer joins are supported.
+  @Ignore("Non-equi-join could be supported later.")
+  @Test
+  def testFullJoinNoEquiJoinPredicate(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3",'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    util.verifyPlan(ds2.fullOuterJoin(ds1, 'b < 'd).select('c, 'g))
+  }
+
+  // TODO: remove the @Ignore below once non-equi outer joins are supported.
+  @Ignore("Non-equi-join could be supported later.")
+  @Test
+  def testLeftJoinNoEquiJoinPredicate(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    util.verifyPlan(ds2.leftOuterJoin(ds1, 'b < 'd).select('c, 'g))
+  }
+
+  // TODO: remove the @Ignore below once non-equi outer joins are supported.
+  @Ignore("Non-equi-join could be supported later.")
+  @Test
+  def testRightJoinNoEquiJoinPredicate(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    util.verifyPlan(ds2.rightOuterJoin(ds1, 'b < 'd).select('c, 'g))
+  }
+
+  @Test
+  def testNoEqualityJoinPredicate1(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    util.verifyPlan(ds1.join(ds2)
+      // must fail. No equality join predicate
+      .where('d === 'f)
+      .select('c, 'g))
+  }
+
+  @Test
+  def testNoEqualityJoinPredicate2(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    util.verifyPlan(ds1.join(ds2)
+      // must fail. No equality join predicate
+      .where('a < 'd)
+      .select('c, 'g))
+  }
+}
+
+object JoinTest {
+
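+  // Scalar function that sums its two Int arguments; used by testFilterJoinRule above.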
+  object Merger extends ScalarFunction {
+    def eval(f0: Int, f1: Int): Int = {
+      f0 + f1
+    }
+  }
+}
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/SetOperatorsTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/SetOperatorsTest.scala
new file mode 100644
index 0000000..8f3893d
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/SetOperatorsTest.scala
@@ -0,0 +1,133 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table
+
+import org.apache.flink.api.common.typeinfo.TypeInformation
+import org.apache.flink.api.java.typeutils.GenericTypeInfo
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.plan.util.NonPojo
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit.Test
+
+import java.sql.Timestamp
+
+class SetOperatorsTest extends TableTestBase {
+
+  @Test
+  def testInWithFilter(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[((Int, Int), String, (Int, Int))]("A", 'a, 'b, 'c)
+
+    val elements = t.where('b === "two").select('a).as("a1")
+    val in = t.select("*").where('c.in(elements))
+
+    util.verifyPlan(in)
+  }
+
+  @Test
+  def testInWithProject(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Timestamp, String)]("A", 'a, 'b, 'c)
+
+    val in = t.select('b.in(Timestamp.valueOf("1972-02-22 07:12:00.333"))).as("b2")
+
+    util.verifyPlan(in)
+  }
+
+  @Test
+  def testUnionNullableTypes(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[((Int, String), (Int, String), Int)]("A", 'a, 'b, 'c)
+
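+    // nullOf forces the second branch to a nullable (Int, String), so both union inputs unify to the same nullable type.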
+    val in = t.select('a)
+      .unionAll(
+        t.select(('c > 0) ? ('b, nullOf(createTypeInformation[(Int, String)]))))
+    util.verifyPlan(in)
+  }
+
+  @Test
+  def testUnionAnyType(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource("A",
+      Array[TypeInformation[_]](
+        new GenericTypeInfo(classOf[NonPojo]),
+        new GenericTypeInfo(classOf[NonPojo])),
+      Array("a", "b"))
+    val in = t.select('a).unionAll(t.select('b))
+    util.verifyPlan(in)
+  }
+
+  @Test
+  def testFilterUnionTranspose(): Unit = {
+    val util = batchTestUtil()
+    val left = util.addTableSource[(Int, Long, String)]("left", 'a, 'b, 'c)
+    val right = util.addTableSource[(Int, Long, String)]("right", 'a, 'b, 'c)
+
+    val result = left.unionAll(right)
+      .where('a > 0)
+      .groupBy('b)
+      .select('a.sum as 'a, 'b as 'b, 'c.count as 'c)
+
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testFilterMinusTranspose(): Unit = {
+    val util = batchTestUtil()
+    val left = util.addTableSource[(Int, Long, String)]("left", 'a, 'b, 'c)
+    val right = util.addTableSource[(Int, Long, String)]("right", 'a, 'b, 'c)
+
+    val result = left.minusAll(right)
+      .where('a > 0)
+      .groupBy('b)
+      .select('a.sum as 'a, 'b as 'b, 'c.count as 'c)
+
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testProjectUnionTranspose(): Unit = {
+    val util = batchTestUtil()
+    val left = util.addTableSource[(Int, Long, String)]("left", 'a, 'b, 'c)
+    val right = util.addTableSource[(Int, Long, String)]("right", 'a, 'b, 'c)
+
+    val result = left.select('a, 'b, 'c)
+      .unionAll(right.select('a, 'b, 'c))
+      .select('b, 'c)
+
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testProjectMinusTranspose(): Unit = {
+    val util = batchTestUtil()
+    val left = util.addTableSource[(Int, Long, String)]("left", 'a, 'b, 'c)
+    val right = util.addTableSource[(Int, Long, String)]("right", 'a, 'b, 'c)
+
+    val result = left.select('a, 'b, 'c)
+      .minusAll(right.select('a, 'b, 'c))
+      .select('b, 'c)
+
+    util.verifyPlan(result)
+  }
+}
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/TemporalTableJoinTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/TemporalTableJoinTest.scala
new file mode 100644
index 0000000..02b8078
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/TemporalTableJoinTest.scala
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.TableException
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.util.{TableTestBase, TableTestUtil}
+
+import org.hamcrest.Matchers.containsString
+import org.junit.Test
+
+import java.sql.Timestamp
+
+class TemporalTableJoinTest extends TableTestBase {
+
+  val util: TableTestUtil = batchTestUtil()
+
+  val orders = util.addDataStream[(Long, String, Timestamp)](
+    "Orders", 'o_amount, 'o_currency, 'rowtime)
+
+  val ratesHistory = util.addDataStream[(String, Int, Timestamp)](
+    "RatesHistory", 'currency, 'rate, 'rowtime)
+
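+  // Temporal table function over RatesHistory, versioned by 'rowtime and keyed by 'currency.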
+  val rates = ratesHistory.createTemporalTableFunction('rowtime, 'currency)
+  util.addFunction("Rates", rates)
+
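+  // The batch planner cannot translate temporal table joins yet, so both tests expect a TableException.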
+  @Test
+  def testSimpleJoin(): Unit = {
+    expectedException.expect(classOf[TableException])
+    expectedException.expectMessage("Cannot generate a valid execution plan for the given query")
+
+    val result = orders
+      .as('o_amount, 'o_currency, 'o_rowtime)
+      .joinLateral(rates('o_rowtime), 'currency === 'o_currency)
+      .select("o_amount * rate").as("rate")
+
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testUncorrelatedJoin(): Unit = {
+    expectedException.expect(classOf[TableException])
+    expectedException.expectMessage(
+      containsString("Cannot generate a valid execution plan"))
+
+    val result = orders
+      .as('o_amount, 'o_currency, 'o_rowtime)
+      .joinLateral(
+        rates(java.sql.Timestamp.valueOf("2016-06-27 10:10:42.123")),
+        'o_currency === 'currency)
+      .select("o_amount * rate")
+
+    util.verifyPlan(result)
+  }
+
+}
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/AggregateStringExpressionTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/AggregateStringExpressionTest.scala
new file mode 100644
index 0000000..4a47248
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/AggregateStringExpressionTest.scala
@@ -0,0 +1,341 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table.stringexpr
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.plan.util.JavaUserDefinedAggFunctions.WeightedAvgWithMergeAndReset
+import org.apache.flink.table.util.{CountAggFunction, TableTestBase}
+
+import org.junit._
+
+class AggregateStringExpressionTest extends TableTestBase {
+
+  @Test
+  def testDistinctAggregationTypes(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3")
+
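+    // The Scala-expression, string-expression, and prefix-notation variants should all produce the same plan.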
+    val t1 = t.select('_1.sum.distinct, '_1.count.distinct, '_1.avg.distinct)
+    val t2 = t.select("_1.sum.distinct, _1.count.distinct, _1.avg.distinct")
+    val t3 = t.select("sum.distinct(_1), count.distinct(_1), avg.distinct(_1)")
+
+    verifyTableEquals(t1, t2)
+    verifyTableEquals(t1, t3)
+  }
+
+  @Test
+  def testAggregationTypes(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3")
+
+    val t1 = t.select('_1.sum, '_1.sum0, '_1.min, '_1.max, '_1.count, '_1.avg)
+    val t2 = t.select("_1.sum, _1.sum0, _1.min, _1.max, _1.count, _1.avg")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testWorkingAggregationDataTypes(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Byte, Short, Int, Long, Float, Double, String)]("Table7")
+
+    val t1 = t.select('_1.avg, '_2.avg, '_3.avg, '_4.avg, '_5.avg, '_6.avg, '_7.count, '_7.collect)
+    val t2 = t.select("_1.avg, _2.avg, _3.avg, _4.avg, _5.avg, _6.avg, _7.count, _7.collect")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testProjection(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Byte, Short)]("Table2")
+
+    val t1 = t.select('_1.avg, '_1.sum, '_1.count, '_2.avg, '_2.sum)
+    val t2 = t.select("_1.avg, _1.sum, _1.count, _2.avg, _2.sum")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testAggregationWithArithmetic(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Long, String)]("Table2")
+
+    val t1 = t.select(('_1 + 2).avg + 2, '_2.count + 5)
+    val t2 = t.select("(_1 + 2).avg + 2, _2.count + 5")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testAggregationWithTwoCount(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Long, String)]("Table2")
+
+    val t1 = t.select('_1.count, '_2.count)
+    val t2 = t.select("_1.count, _2.count")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testAggregationAfterProjection(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Byte, Short, Int, Long, Float, Double, String)]("Table7")
+
+    val t1 = t.select('_1, '_2, '_3)
+      .select('_1.avg, '_2.sum, '_3.count)
+
+    val t2 = t.select("_1, _2, _3")
+      .select("_1.avg, _2.sum, _3.count")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testDistinct(): Unit = {
+    val util = batchTestUtil()
+    val ds = util.addTableSource[(Int, Long, String)]("Table3",'a, 'b, 'c)
+
+    val distinct = ds.select('b).distinct()
+    val distinct2 = ds.select("b").distinct()
+
+    verifyTableEquals(distinct, distinct2)
+  }
+
+  @Test
+  def testDistinctAfterAggregate(): Unit = {
+    val util = batchTestUtil()
+    val ds = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'a, 'b, 'c, 'd, 'e)
+
+    val distinct = ds.groupBy('a, 'e).select('e).distinct()
+    val distinct2 = ds.groupBy("a, e").select("e").distinct()
+
+    verifyTableEquals(distinct, distinct2)
+  }
+
+  @Test
+  def testDistinctGroupedAggregate(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val t1 = t.groupBy('b).select('b, 'a.sum.distinct, 'a.sum)
+    val t2 = t.groupBy("b").select("b, a.sum.distinct, a.sum")
+    val t3 = t.groupBy("b").select("b, sum.distinct(a), sum(a)")
+
+    verifyTableEquals(t1, t2)
+    verifyTableEquals(t1, t3)
+  }
+
+  @Test
+  def testGroupedAggregate(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val t1 = t.groupBy('b).select('b, 'a.sum)
+    val t2 = t.groupBy("b").select("b, a.sum")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testGroupingKeyForwardIfNotUsed(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val t1 = t.groupBy('b).select('a.sum)
+    val t2 = t.groupBy("b").select("a.sum")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testGroupNoAggregation(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val t1 = t
+      .groupBy('b)
+      .select('a.sum as 'd, 'b)
+      .groupBy('b, 'd)
+      .select('b)
+
+    val t2 = t
+      .groupBy("b")
+      .select("a.sum as d, b")
+      .groupBy("b, d")
+      .select("b")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testGroupedAggregateWithConstant1(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val t1 = t.select('a, 4 as 'four, 'b)
+      .groupBy('four, 'a)
+      .select('four, 'b.sum)
+
+    val t2 = t.select("a, 4 as four, b")
+      .groupBy("four, a")
+      .select("four, b.sum")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testGroupedAggregateWithConstant2(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val t1 = t.select('b, 4 as 'four, 'a)
+      .groupBy('b, 'four)
+      .select('four, 'a.sum)
+    val t2 = t.select("b, 4 as four, a")
+      .groupBy("b, four")
+      .select("four, a.sum")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testGroupedAggregateWithExpression(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'a, 'b, 'c, 'd, 'e)
+
+    val t1 = t.groupBy('e, 'b % 3)
+      .select('c.min, 'e, 'a.avg, 'd.count)
+    val t2 = t.groupBy("e, b % 3")
+      .select("c.min, e, a.avg, d.count")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testGroupedAggregateWithFilter(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val t1 = t.groupBy('b)
+      .select('b, 'a.sum)
+      .where('b === 2)
+    val t2 = t.groupBy("b")
+      .select("b, a.sum")
+      .where("b = 2")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testAnalyticAggregation(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, Float, Double)]('_1, '_2, '_3, '_4)
+
+    val resScala = t.select(
+      '_1.stddevPop, '_2.stddevPop, '_3.stddevPop, '_4.stddevPop,
+      '_1.stddevSamp, '_2.stddevSamp, '_3.stddevSamp, '_4.stddevSamp,
+      '_1.varPop, '_2.varPop, '_3.varPop, '_4.varPop,
+      '_1.varSamp, '_2.varSamp, '_3.varSamp, '_4.varSamp)
+    val resJava = t.select("""
+      _1.stddevPop, _2.stddevPop, _3.stddevPop, _4.stddevPop,
+      _1.stddevSamp, _2.stddevSamp, _3.stddevSamp, _4.stddevSamp,
+      _1.varPop, _2.varPop, _3.varPop, _4.varPop,
+      _1.varSamp, _2.varSamp, _3.varSamp, _4.varSamp""")
+
+    verifyTableEquals(resScala, resJava)
+  }
+
+  @Test
+  def testDistinctAggregateWithUDAGG(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val myCnt = new CountAggFunction
+    util.addFunction("myCnt", myCnt)
+    val myWeightedAvg = new WeightedAvgWithMergeAndReset
+    util.addFunction("myWeightedAvg", myWeightedAvg)
+
+    val t1 = t.select(myCnt.distinct('a) as 'aCnt, myWeightedAvg.distinct('b, 'a) as 'wAvg)
+    val t2 = t.select("myCnt.distinct(a) as aCnt, myWeightedAvg.distinct(b, a) as wAvg")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testAggregateWithUDAGG(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val myCnt = new CountAggFunction
+   util.addFunction("myCnt", myCnt)
+    val myWeightedAvg = new WeightedAvgWithMergeAndReset
+   util.addFunction("myWeightedAvg", myWeightedAvg)
+
+    val t1 = t.select(myCnt('a) as 'aCnt, myWeightedAvg('b, 'a) as 'wAvg)
+    val t2 = t.select("myCnt(a) as aCnt, myWeightedAvg(b, a) as wAvg")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testDistinctGroupedAggregateWithUDAGG(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val myCnt = new CountAggFunction
+    util.addFunction("myCnt", myCnt)
+    val myWeightedAvg = new WeightedAvgWithMergeAndReset
+    util.addFunction("myWeightedAvg", myWeightedAvg)
+
+    val t1 = t.groupBy('b)
+      .select('b,
+        myCnt.distinct('a) + 9 as 'aCnt,
+        myWeightedAvg.distinct('b, 'a) * 2 as 'wAvg,
+        myWeightedAvg.distinct('a, 'a) as 'distAgg,
+        myWeightedAvg('a, 'a) as 'agg)
+    val t2 = t.groupBy("b")
+      .select("b, myCnt.distinct(a) + 9 as aCnt, myWeightedAvg.distinct(b, a) * 2 as wAvg, " +
+        "myWeightedAvg.distinct(a, a) as distAgg, myWeightedAvg(a, a) as agg")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testGroupedAggregateWithUDAGG(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val myCnt = new CountAggFunction
+    util.addFunction("myCnt", myCnt)
+    val myWeightedAvg = new WeightedAvgWithMergeAndReset
+    util.addFunction("myWeightedAvg", myWeightedAvg)
+
+    val t1 = t.groupBy('b)
+      .select('b, myCnt('a) + 9 as 'aCnt, myWeightedAvg('b, 'a) * 2 as 'wAvg, myWeightedAvg('a, 'a))
+    val t2 = t.groupBy("b")
+      .select("b, myCnt(a) + 9 as aCnt, myWeightedAvg(b, a) * 2 as wAvg, myWeightedAvg(a, a)")
+
+    verifyTableEquals(t1, t2)
+  }
+}
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/CalcStringExpressionTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/CalcStringExpressionTest.scala
new file mode 100644
index 0000000..1284c4b
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/CalcStringExpressionTest.scala
@@ -0,0 +1,366 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table.stringexpr
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.Types
+import org.apache.flink.table.api.Types._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.runtime.utils.CollectionBatchExecTable.CustomType
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit._
+
+import java.sql.{Date, Time, Timestamp}
+
+class CalcStringExpressionTest extends TableTestBase {
+
+  @Test
+  def testSimpleSelectAllWithAs(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3",'a, 'b, 'c)
+
+    val t1 = t.select('a, 'b, 'c)
+    val t2 = t.select("a, b, c")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testSimpleSelectWithNaming(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3")
+
+    val t1 = t
+      .select('_1 as 'a, '_2 as 'b, '_1 as 'c)
+      .select('a, 'b)
+
+    val t2 = t
+      .select("_1 as a, _2 as b, _1 as c")
+      .select("a, b")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testSimpleSelectRenameAll(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3")
+
+    val t1 = t
+      .select('_1 as 'a, '_2 as 'b, '_3 as 'c)
+      .select('a, 'b)
+
+    val t2 = t
+      .select("_1 as a, _2 as b, _3 as c")
+      .select("a, b")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testSelectStar(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val t1 = t.select('*)
+    val t2 = t.select("*")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testAllRejectingFilter(): Unit = {
+    val util = batchTestUtil()
+    val ds = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val t1 = ds.filter(false)
+    val t2 = ds.filter("faLsE")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testAllPassingFilter(): Unit = {
+    val util = batchTestUtil()
+    val ds = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val t1 = ds.filter(true)
+    val t2 = ds.filter("trUe")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testFilterOnStringTupleField(): Unit = {
+    val util = batchTestUtil()
+    val ds = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val t1 = ds.filter('c.like("%world%"))
+    val t2 = ds.filter("c.like('%world%')")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testFilterOnIntegerTupleField(): Unit = {
+    val util = batchTestUtil()
+    val ds = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val t1 = ds.filter('a % 2 === 0)
+    val t2 = ds.filter("a % 2 = 0")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testNotEquals(): Unit = {
+    val util = batchTestUtil()
+    val ds = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val t1 = ds.filter('a % 2 !== 0)
+    val t2 = ds.filter("a % 2 <> 0")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testDisjunctivePredicate(): Unit = {
+    val util = batchTestUtil()
+    val ds = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val t1 = ds.filter('a < 2 || 'a > 20)
+    val t2 = ds.filter("a < 2 || a > 20")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testConsecutiveFilters(): Unit = {
+    val util = batchTestUtil()
+    val ds = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val t1 = ds.filter('a % 2 !== 0).filter('b % 2 === 0)
+    val t2 = ds.filter("a % 2 != 0").filter("b % 2 = 0")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testFilterBasicType(): Unit = {
+    val util = batchTestUtil()
+    val ds = util.addTableSource[String]("Table3",'a)
+
+    val t1 = ds.filter( 'a.like("H%") )
+    val t2 = ds.filter( "a.like('H%')" )
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testFilterOnCustomType(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[CustomType]("Table3",'myInt, 'myLong, 'myString)
+      .as('i, 'l, 's)
+
+    val t1 = t.filter('s.like("%a%"))
+    val t2 = t.filter("s.like('%a%')")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testSimpleCalc(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3")
+
+    val t1 = t.select('_1, '_2, '_3)
+      .where('_1 < 7)
+      .select('_1, '_3)
+
+    val t2 = t.select("_1, _2, _3")
+      .where("_1 < 7")
+      .select("_1, _3")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testCalcWithTwoFilters(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3")
+
+    val t1 = t.select('_1, '_2, '_3)
+      .where('_1 < 7 && '_2 === 3)
+      .select('_1, '_3)
+      .where('_1 === 4)
+      .select('_1)
+
+    val t2 = t.select("_1, _2, _3")
+      .where("_1 < 7 && _2 = 3")
+      .select("_1, _3")
+      .where("_1 === 4")
+      .select("_1")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testCalcWithAggregation(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3")
+
+    val t1 = t.select('_1, '_2, '_3)
+      .where('_1 < 15)
+      .groupBy('_2)
+      .select('_1.min, '_2.count as 'cnt)
+      .where('cnt > 3)
+
+    val t2 = t.select("_1, _2, _3")
+      .where("_1 < 15")
+      .groupBy("_2")
+      .select("_1.min, _2.count as cnt")
+      .where("cnt > 3")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testCalcJoin(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    val t1 = ds1.select('a, 'b).join(ds2).where('b === 'e).select('a, 'b, 'd, 'e, 'f)
+      .where('b > 1).select('a, 'd).where('d === 2)
+    val t2 = ds1.select("a, b").join(ds2).where("b = e").select("a, b, d, e, f")
+      .where("b > 1").select("a, d").where("d = 2")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testAdvancedDataTypes(): Unit = {
+    val util = batchTestUtil()
+    val t = util
+      .addTableSource[(BigDecimal, BigDecimal, Date, Time, Timestamp)]("Table5", 'a, 'b, 'c, 'd, 'e)
+
+    val t1 = t.select('a, 'b, 'c, 'd, 'e, BigDecimal("11.2"), BigDecimal("11.2").bigDecimal,
+        "1984-07-12".cast(Types.SQL_DATE), "14:34:24".cast(Types.SQL_TIME),
+        "1984-07-12 14:34:24".cast(Types.SQL_TIMESTAMP))
+    val t2 = t.select("a, b, c, d, e, 11.2p, 11.2p," +
+      "'1984-07-12'.toDate, '14:34:24'.toTime," +
+      "'1984-07-12 14:34:24'.toTimestamp")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testIntegerBiggerThan128(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val t1 = t.filter('a === 300)
+    val t2 = t.filter("a = 300")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testNumericAutoCastInArithmetic() {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Byte, Short, Int, Long, Float, Double, Long, Double)](
+      "Table",
+      '_1, '_2, '_3, '_4, '_5, '_6, '_7, '_8)
+
+    val t1 = table.select('_1 + 1, '_2 + 1, '_3 + 1L, '_4 + 1.0f,
+      '_5 + 1.0d, '_6 + 1, '_7 + 1.0d, '_8 + '_1)
+    val t2 = table.select("_1 + 1, _2 +" +
+      " 1, _3 + 1L, _4 + 1.0f, _5 + 1.0d, _6 + 1, _7 + 1.0d, _8 + _1")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  @throws[Exception]
+  def testNumericAutoCastInComparison() {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Byte, Short, Int, Long, Float, Double)](
+      "Table",
+      'a, 'b, 'c, 'd, 'e, 'f)
+
+    val t1 = table.filter('a > 1 && 'b > 1 && 'c > 1L &&
+      'd > 1.0f && 'e > 1.0d && 'f > 1)
+    val t2 = table
+      .filter("a > 1 && b > 1 && c > 1L && d > 1.0f && e > 1.0d && f > 1")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  @throws[Exception]
+  def testCasting() {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Int, Double, Long, Boolean)](
+      "Table",
+      '_1, '_2, '_3, '_4)
+
+    val t1 = table.select(
+      // * -> String
+      '_1.cast(STRING), '_2.cast(STRING), '_3.cast(STRING), '_4.cast(STRING),
+      // NUMERIC TYPE -> Boolean
+      '_1.cast(BOOLEAN), '_2.cast(BOOLEAN), '_3.cast(BOOLEAN),
+      // NUMERIC TYPE -> NUMERIC TYPE
+      '_1.cast(DOUBLE), '_2.cast(INT), '_3.cast(SHORT),
+      // Boolean -> NUMERIC TYPE
+      '_4.cast(DOUBLE),
+      // identity casting
+      '_1.cast(INT), '_2.cast(DOUBLE), '_3.cast(LONG), '_4.cast(BOOLEAN))
+    val t2 = table.select(
+      // * -> String
+      "_1.cast(STRING), _2.cast(STRING), _3.cast(STRING), _4.cast(STRING)," +
+        // NUMERIC TYPE -> Boolean
+        "_1.cast(BOOLEAN), _2.cast(BOOLEAN), _3.cast(BOOLEAN)," +
+        // NUMERIC TYPE -> NUMERIC TYPE
+        "_1.cast(DOUBLE), _2.cast(INT), _3.cast(SHORT)," +
+        // Boolean -> NUMERIC TYPE
+        "_4.cast(DOUBLE)," +
+        // identity casting
+        "_1.cast(INT), _2.cast(DOUBLE), _3.cast(LONG), _4.cast(BOOLEAN)")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  @throws[Exception]
+  def testCastFromString() {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(String, String, String)](
+      "Table",
+      '_1, '_2, '_3)
+
+    val t1 = table.select('_1.cast(BYTE), '_1.cast(SHORT), '_1.cast(INT), '_1.cast(LONG),
+      '_3.cast(DOUBLE), '_3.cast(FLOAT), '_2.cast(BOOLEAN))
+    val t2 = table.select(
+      "_1.cast(BYTE), _1.cast(SHORT), _1.cast(INT), _1.cast(LONG), " +
+        "_3.cast(DOUBLE), _3.cast(FLOAT), _2.cast(BOOLEAN)")
+
+    verifyTableEquals(t1, t2)
+  }
+}
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/CorrelateStringExpressionTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/CorrelateStringExpressionTest.scala
new file mode 100644
index 0000000..ef3bf94
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/CorrelateStringExpressionTest.scala
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table.stringexpr
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.util.{HierarchyTableFunction, PojoTableFunc, TableFunc1, TableFunc2, TableTestBase}
+
+import org.junit.Test
+
+class CorrelateStringExpressionTest extends TableTestBase {
+
+  private val util = batchTestUtil()
+  private val tab = util.addTableSource[(Int, Long, String)]("Table1", 'a, 'b, 'c)
+  private val func1 = new TableFunc1
+ util.addFunction("func1", func1)
+  private val func2 = new TableFunc2
+ util.addFunction("func2", func2)
+
+  @Test
+  def testCorrelateJoins1(): Unit = {
+    // test cross join
+    util.verifyPlan(tab.joinLateral(func1('c) as 's).select('c, 's))
+  }
+
+  @Test
+  def testCorrelateJoins2(): Unit = {
+    // test left outer join
+    util.verifyPlan(tab.leftOuterJoinLateral(func1('c) as 's).select('c, 's))
+  }
+
+  @Test
+  def testCorrelateJoins3(): Unit = {
+    // test overloading
+    util.verifyPlan(tab.joinLateral(func1('c, "$") as 's).select('c, 's))
+  }
+
+  @Test
+  def testCorrelateJoins4(): Unit = {
+    // test custom result type
+    util.verifyPlan(tab.joinLateral(func2('c) as('name, 'len)).select('c, 'name, 'len))
+  }
+
+  @Test
+  def testCorrelateJoins5(): Unit = {
+    // test hierarchy generic type
+    val hierarchy = new HierarchyTableFunction
+   util.addFunction("hierarchy", hierarchy)
+    val scalaTable = tab.joinLateral(
+      hierarchy('c) as('name, 'adult, 'len)).select('c, 'name, 'len, 'adult)
+    util.verifyPlan(scalaTable)
+  }
+
+  @Test
+  def testCorrelateJoins6(): Unit = {
+    // test pojo type
+    val pojo = new PojoTableFunc
+   util.addFunction("pojo", pojo)
+    val scalaTable = tab.joinLateral(pojo('c)).select('c, 'name, 'age)
+    util.verifyPlan(scalaTable)
+  }
+
+  @Test
+  def testCorrelateJoins7(): Unit = {
+    // test with filter
+    val scalaTable = tab.joinLateral(
+      func2('c) as('name, 'len)).select('c, 'name, 'len).filter('len > 2)
+    util.verifyPlan(scalaTable)
+  }
+
+  @Test
+  def testCorrelateJoins8(): Unit = {
+    // test with scalar function
+    val scalaTable = tab.joinLateral(func1('c.substring(2)) as 's).select(
+      'a, 'c, 's)
+    util.verifyPlan(scalaTable)
+  }
+}
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/JoinStringExpressionTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/JoinStringExpressionTest.scala
new file mode 100644
index 0000000..4138594
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/JoinStringExpressionTest.scala
@@ -0,0 +1,187 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table.stringexpr
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit._
+
+class JoinStringExpressionTest extends TableTestBase {
+
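+  // Each test builds the same join with Scala expressions and with Java string expressions, then compares the plans.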
+  @Test
+  def testJoin(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    val t1Scala = ds1.join(ds2).where('b === 'e).select('c, 'g)
+    val t1Java = ds1.join(ds2).where("b === e").select("c, g")
+
+    verifyTableEquals(t1Scala, t1Java)
+  }
+
+  @Test
+  def testJoinWithFilter(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    val t1Scala = ds1.join(ds2).where('b === 'e && 'b < 2).select('c, 'g)
+    val t1Java = ds1.join(ds2).where("b === e && b < 2").select("c, g")
+
+    verifyTableEquals(t1Scala, t1Java)
+  }
+
+  @Test
+  def testJoinWithJoinFilter(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    val t1Scala = ds1.join(ds2).where('b === 'e && 'a < 6 && 'h < 'b).select('c, 'g)
+    val t1Java = ds1.join(ds2).where("b === e && a < 6 && h < b").select("c, g")
+
+    verifyTableEquals(t1Scala, t1Java)
+  }
+
+  @Test
+  def testJoinWithMultipleKeys(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    val t1Scala = ds1.join(ds2).filter('a === 'd && 'b === 'h).select('c, 'g)
+    val t1Java = ds1.join(ds2).filter("a === d && b === h").select("c, g")
+
+    verifyTableEquals(t1Scala, t1Java)
+  }
+
+  @Test
+  def testJoinWithAggregation(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    val t1Scala = ds1.join(ds2).where('a === 'd).select('g.count)
+    val t1Java = ds1.join(ds2).where("a === d").select("g.count")
+
+    verifyTableEquals(t1Scala, t1Java)
+  }
+
+  @Test
+  def testJoinWithGroupedAggregation(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    val t1 = ds1.join(ds2)
+      .where('a === 'd)
+      .groupBy('a, 'd)
+      .select('b.sum, 'g.count)
+    val t2 = ds1.join(ds2)
+      .where("a = d")
+      .groupBy("a, d")
+      .select("b.sum, g.count")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testJoinPushThroughJoin(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds3 = util.addTableSource[(Int, Long, String)]("Table4", 'j, 'k, 'l)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    val t1 = ds1.join(ds2)
+      .where(true)
+      .join(ds3)
+      .where('a === 'd && 'e === 'k)
+      .select('a, 'f, 'l)
+    val t2 = ds1.join(ds2)
+      .where("true")
+      .join(ds3)
+      .where("a === d && e === k")
+      .select("a, f, l")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testJoinWithDisjunctivePred(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    val t1 = ds1.join(ds2).filter('a === 'd && ('b === 'e || 'b === 'e - 10)).select('c, 'g)
+    val t2 = ds1.join(ds2).filter("a = d && (b = e || b = e - 10)").select("c, g")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testJoinWithExpressionPreds(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    val t1 = ds1.join(ds2).filter('b === 'h + 1 && 'a - 1 === 'd + 2).select('c, 'g)
+    val t2 = ds1.join(ds2).filter("b = h + 1 && a - 1 = d + 2").select("c, g")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testLeftJoinWithMultipleKeys(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    val t1 = ds1.leftOuterJoin(ds2, 'a === 'd && 'b === 'h).select('c, 'g)
+    val t2 = ds1.leftOuterJoin(ds2, "a = d && b = h").select("c, g")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testRightJoinWithMultipleKeys(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    val t1 = ds1.rightOuterJoin(ds2, 'a === 'd && 'b === 'h).select('c, 'g)
+    val t2 = ds1.rightOuterJoin(ds2, "a = d && b = h").select("c, g")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testFullOuterJoinWithMultipleKeys(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    val t1 = ds1.fullOuterJoin(ds2, 'a === 'd && 'b === 'h).select('c, 'g)
+    val t2 = ds1.fullOuterJoin(ds2, "a = d && b = h").select("c, g")
+
+    verifyTableEquals(t1, t2)
+  }
+}
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/SetOperatorsTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/SetOperatorsTest.scala
new file mode 100644
index 0000000..077d973
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/SetOperatorsTest.scala
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table.stringexpr
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit.{Ignore, Test}
+
+import java.sql.Timestamp
+
+class SetOperatorsTest extends TableTestBase {
+
+  @Ignore("Support in subQuery in RexNodeConverter")
+  @Test
+  def testInWithFilter(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[((Int, Int), String, (Int, Int))]("A", 'a, 'b, 'c)
+
+    val elements = t.where("b === 'two'").select("a").as("a1")
+    val in = t.select("*").where('c.in(elements))
+
+    util.verifyPlan(in)
+  }
+
+  @Test
+  def testInWithProject(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Timestamp, String)]("A", 'a, 'b, 'c)
+
+    val in = t.select("b.in('1972-02-22 07:12:00.333'.toTimestamp)").as("b2")
+
+    util.verifyPlan(in)
+  }
+}
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/SortStringExpressionTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/SortStringExpressionTest.scala
new file mode 100644
index 0000000..e0f2621
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/stringexpr/SortStringExpressionTest.scala
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table.stringexpr
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit.Test
+
+class SortStringExpressionTest extends TableTestBase {
+
+  @Test
+  def testOrdering(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3")
+
+    val t1 = t.select('_1 as 'a, '_2 as 'b, '_3 as 'c).orderBy('a)
+    val t2 = t.select("_1 as a, _2 as b, _3 as c").orderBy("a")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testExplicitAscendOrdering(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3")
+
+    val t1 = t.select('_1, '_2).orderBy('_1.asc)
+    val t2 = t.select("_1, _2").orderBy("_1.asc")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testExplicitDescendOrdering(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3")
+
+    val t1 = t.select('_1, '_2).orderBy('_1.desc)
+    val t2 = t.select("_1, _2").orderBy("_1.desc")
+
+    verifyTableEquals(t1, t2)
+  }
+}
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/AggregateValidationTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/AggregateValidationTest.scala
new file mode 100644
index 0000000..4133788
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/AggregateValidationTest.scala
@@ -0,0 +1,221 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table.validation
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.ValidationException
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.plan.util.JavaUserDefinedAggFunctions.WeightedAvgWithMergeAndReset
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit._
+
+class AggregateValidationTest extends TableTestBase {
+
+  @Test(expected = classOf[ValidationException])
+  def testNonWorkingAggregationDataTypes(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(String, Int)]("Table2")
+
+    // Must fail. Field '_1 is not a numeric type.
+    t.select('_1.sum)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testNoNestedAggregations(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(String, Int)]("Table2")
+
+    // Must fail. Sum aggregation cannot be chained.
+    t.select('_2.sum.sum)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testGroupingOnNonExistentField(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    // must fail. '_foo is not a valid field
+    t.groupBy('_foo).select('a.avg)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testGroupingInvalidSelection(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    t.groupBy('a, 'b)
+    // must fail. 'c is not a grouping key or aggregation
+    .select('c)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testAggregationOnNonExistingField(): Unit = {
+
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    // Must fail. Field 'foo does not exist.
+    t.select('foo.avg)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  @throws[Exception]
+  def testInvalidUdAggArgs() {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val myWeightedAvg = new WeightedAvgWithMergeAndReset
+
+    // must fail. UDAGG does not accept String type
+    t.select(myWeightedAvg('c, 'a))
+  }
+
+  @Test(expected = classOf[ValidationException])
+  @throws[Exception]
+  def testGroupingInvalidUdAggArgs() {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val myWeightedAvg = new WeightedAvgWithMergeAndReset
+
+    t.groupBy('b)
+    // must fail. UDAGG does not accept String type
+    .select(myWeightedAvg('c, 'a))
+  }
+
+  @Test(expected = classOf[ValidationException])
+  @throws[Exception]
+  def testGroupingNestedUdAgg() {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val myWeightedAvg = new WeightedAvgWithMergeAndReset
+
+    t.groupBy('c)
+    // must fail. Aggregations cannot be nested
+    .select(myWeightedAvg(myWeightedAvg('b, 'a), 'a))
+  }
+
+  @Test(expected = classOf[ValidationException])
+  @throws[Exception]
+  def testAggregationOnNonExistingFieldJava() {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    t.select("foo.avg")
+  }
+
+  @Test(expected = classOf[ValidationException])
+  @throws[Exception]
+  def testNonWorkingAggregationDataTypesJava() {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Long, String)]("Table2", 'b, 'c)
+    // Must fail. Cannot compute SUM aggregate on String field.
+    t.select("c.sum")
+  }
+
+  @Test(expected = classOf[ValidationException])
+  @throws[Exception]
+  def testNoNestedAggregationsJava() {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Long, String)]("Table2", 'b, 'c)
+    // Must fail. Aggregation on aggregation not allowed.
+    t.select("b.sum.sum")
+  }
+
+  @Test(expected = classOf[ValidationException])
+  @throws[Exception]
+  def testNoDeeplyNestedAggregationsJava() {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Long, String)]("Table2", 'b, 'c)
+    // Must fail. Aggregation on aggregation not allowed.
+    t.select("(b.sum + 1).sum")
+  }
+
+  @Test(expected = classOf[ValidationException])
+  @throws[Exception]
+  def testGroupingOnNonExistentFieldJava() {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    // must fail. Field foo is not in input
+    t.groupBy("foo")
+    .select("a.avg")
+  }
+
+  @Test(expected = classOf[ValidationException])
+  @throws[Exception]
+  def testGroupingInvalidSelectionJava() {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    t.groupBy("a, b")
+    // must fail. Field c is not a grouping key or aggregation
+    .select("c")
+  }
+
+  @Test(expected = classOf[ValidationException])
+  @throws[Exception]
+  def testUnknownUdAggJava() {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    // must fail. Function "unknown" is not registered
+    t.select("unknown(c)")
+  }
+
+  @Test(expected = classOf[ValidationException])
+  @throws[Exception]
+  def testGroupingUnknownUdAggJava() {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    t.groupBy("a, b")
+    // must fail. Function "unknown" is not registered
+    .select("unknown(c)")
+  }
+
+  @Test(expected = classOf[ValidationException])
+  @throws[Exception]
+  def testInvalidUdAggArgsJava() {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val myWeightedAvg = new WeightedAvgWithMergeAndReset
+   util.addFunction("myWeightedAvg", myWeightedAvg)
+
+    // must fail. UDAGG does not accept String type
+    t.select("myWeightedAvg(c, a)")
+  }
+
+  @Test(expected = classOf[ValidationException])
+  @throws[Exception]
+  def testGroupingInvalidUdAggArgsJava() {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val myWeightedAvg = new WeightedAvgWithMergeAndReset
+   util.addFunction("myWeightedAvg", myWeightedAvg)
+
+    t.groupBy("b")
+    // must fail. UDAGG does not accept String type
+    .select("myWeightedAvg(c, a)")
+  }
+}
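
All of the failures above surface during validation of the Table API call itself, before any
job is translated or submitted. A minimal sketch of how that looks from user code, assuming
the same test utilities:

    val util = batchTestUtil()
    val t = util.addTableSource[(String, Int)]("Table2")

    try {
      t.select('_1.sum) // sum over a String column
    } catch {
      case _: ValidationException =>
        // raised eagerly while the Table is built, not at execution time
    }
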
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/CalcValidationTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/CalcValidationTest.scala
new file mode 100644
index 0000000..c9ce368
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/CalcValidationTest.scala
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table.validation
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.ValidationException
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit.Assert._
+import org.junit._
+
+class CalcValidationTest extends TableTestBase {
+
+  @Test
+  def testSelectInvalidFieldFields(): Unit = {
+    expectedException.expect(classOf[ValidationException])
+    expectedException.expectMessage("Cannot resolve field [foo], input field list:[a, b, c].")
+    val util = batchTestUtil()
+    util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+      // must fail. Field 'foo does not exist
+      .select('a, 'foo)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testFilterInvalidFieldName(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    // must fail. Field 'foo does not exist
+    t.filter('foo === 2)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testSelectInvalidField() {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    // Must fail. Field foo does not exist
+    t.select("a + 1, foo + 2")
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testSelectAmbiguousFieldNames() {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    // Must fail. Alias foo is used twice.
+    t.select("a + 1 as foo, b + 2 as foo")
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testFilterInvalidField() {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    // Must fail. Field foo does not exist.
+    t.filter("foo = 17")
+  }
+
+  @Test
+  def testAliasStarException(): Unit = {
+    val util = batchTestUtil()
+
+    try {
+      util.addTableSource[(Int, Long, String)]("Table1", '*, 'b, 'c)
+      fail("TableException expected")
+    } catch {
+      case _: ValidationException => //ignore
+    }
+
+    try {
+      util.addTableSource[(Int, Long, String)]("Table2")
+      .select('_1 as '*, '_2 as 'b, '_1 as 'c)
+      fail("ValidationException expected")
+    } catch {
+      case _: ValidationException => //ignore
+    }
+
+    try {
+      util.addTableSource[(Int, Long, String)]("Table3").as('*, 'b, 'c)
+      fail("ValidationException expected")
+    } catch {
+      case _: ValidationException => //ignore
+    }
+    try {
+      util.addTableSource[(Int, Long, String)]("Table4", 'a, 'b, 'c).select('*, 'b)
+      fail("ValidationException expected")
+    } catch {
+      case _: ValidationException => //ignore
+    }
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testDuplicateFlattening(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[((Int, Long), (String, Boolean), String)]("MyTable", 'a, 'b, 'c)
+
+    table.select('a.flatten(), 'a.flatten())
+  }
+}
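
For contrast with testDuplicateFlattening above, a single flatten of a composite field is
legal; a hedged sketch:

    val util = batchTestUtil()
    val table = util.addTableSource[((Int, Long), String)]("MyTable", 'a, 'c)

    // expands 'a into one output column per component field
    util.verifyPlan(table.select('a.flatten(), 'c))
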
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/CorrelateValidationTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/CorrelateValidationTest.scala
new file mode 100644
index 0000000..99f7e52
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/CorrelateValidationTest.scala
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table.validation
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.ValidationException
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.util.{TableFunc1, TableTestBase}
+
+import org.junit.Test
+
+class CorrelateValidationTest extends TableTestBase {
+
+  /**
+    * Due to the improper translation of TableFunction left outer joins (see CALCITE-2004),
+    * the join predicate can only be empty or a literal true (the restriction should be
+    * removed in FLINK-7865).
+    */
+  @Test (expected = classOf[ValidationException])
+  def testLeftOuterJoinWithPredicates(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val func = new TableFunc1
+   util.addFunction("func1", func)
+    val result = table
+      .leftOuterJoinLateral(func('c) as 's, 'c === 's)
+      .select('c, 's)
+    util.verifyPlan(result)
+  }
+}
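
Until FLINK-7865 lifts the restriction, the accepted form drops the predicate (or passes a
literal true); a minimal sketch mirroring the rejected query above:

    val util = batchTestUtil()
    val table = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
    val func = new TableFunc1
    util.addFunction("func1", func)

    val result = table
      .leftOuterJoinLateral(func('c) as 's) // no join predicate
      .select('c, 's)
    util.verifyPlan(result)
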
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/GroupWindowValidationTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/GroupWindowValidationTest.scala
new file mode 100644
index 0000000..49fe003
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/GroupWindowValidationTest.scala
@@ -0,0 +1,172 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table.validation
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.api.{Session, Slide, Tumble, ValidationException}
+import org.apache.flink.table.plan.util.JavaUserDefinedAggFunctions.WeightedAvgWithMerge
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit.Test
+
+class GroupWindowValidationTest extends TableTestBase {
+
+  //===============================================================================================
+  // Common test
+  //===============================================================================================
+
+  @Test(expected = classOf[ValidationException])
+  def testGroupByWithoutWindowAlias(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
+
+    table
+      .window(Tumble over 5.milli on 'long as 'w)
+      .groupBy('string)
+      .select('string, 'int.count)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testInvalidRowTimeRef(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
+
+    table
+      .window(Tumble over 5.milli on 'long as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count)
+      .window(Slide over 5.milli every 1.milli on 'int as 'w2) // must fail. 'int does not exist in the input.
+      .groupBy('w2)
+      .select('string)
+  }
+
+  //===============================================================================================
+  // Tumbling Windows
+  //===============================================================================================
+
+  @Test(expected = classOf[ValidationException])
+  def testInvalidProcessingTimeDefinition(): Unit = {
+    val util = batchTestUtil()
+    // must fail. A proctime attribute is not allowed in a batch environment
+    util.addTableSource[(Int, String)]('proctime, 'int, 'string)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testInvalidProcessingTimeDefinition2(): Unit = {
+    val util = batchTestUtil()
+    // must fail. A proctime attribute is not allowed in a batch environment
+    util.addTableSource[(Long, Int, String)]('long, 'int, 'string, 'proctime)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testInvalidEventTimeDefinition(): Unit = {
+    val util = batchTestUtil()
+    // must fail. The rowtime definition must not extend the schema of a batch table
+    util.addTableSource[(Long, Int, String)]('long, 'int, 'string, 'rowtime)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testTumblingGroupWindowWithInvalidUdAggArgs(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
+
+    val myWeightedAvg = new WeightedAvgWithMerge
+
+    table
+      .window(Tumble over 2.minutes on 'rowtime as 'w)
+      .groupBy('w, 'long)
+      // invalid function arguments
+      .select(myWeightedAvg('int, 'string))
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testAllTumblingGroupWindowWithInvalidUdAggArgs(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
+
+    val myWeightedAvg = new WeightedAvgWithMerge
+
+    table
+      .window(Tumble over 2.minutes on 'rowtime as 'w)
+      .groupBy('w)
+      // invalid function arguments
+      .select(myWeightedAvg('int, 'string))
+  }
+
+  //===============================================================================================
+  // Sliding Windows
+  //===============================================================================================
+
+  @Test(expected = classOf[ValidationException])
+  def testSlidingGroupWindowWithInvalidUdAggArgs(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
+
+    val myWeightedAvg = new WeightedAvgWithMerge
+
+    table
+      .window(Slide over 2.minutes every 1.minute on 'rowtime as 'w)
+      .groupBy('w, 'long)
+      // invalid function arguments
+      .select(myWeightedAvg('int, 'string))
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testAllSlidingGroupWindowWithInvalidUdAggArgs(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
+
+    val myWeightedAvg = new WeightedAvgWithMerge
+
+    table
+      .window(Slide over 2.minutes every 1.minute on 'long as 'w)
+      .groupBy('w)
+      // invalid function arguments
+      .select(myWeightedAvg('int, 'string))
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testSessionGroupWindowWithInvalidUdAggArgs(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
+
+    val myWeightedAvg = new WeightedAvgWithMerge
+
+    table
+      .window(Session withGap 2.minutes on 'rowtime as 'w)
+      .groupBy('w, 'long)
+      // invalid function arguments
+      .select(myWeightedAvg('int, 'string))
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testAllSessionGroupWindowWithInvalidUdAggArgs(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
+
+    val myWeightedAvg = new WeightedAvgWithMerge
+
+    table
+      .window(Session withGap 2.minutes on 'rowtime as 'w)
+      .groupBy('w)
+      // invalid function arguments
+      .select(myWeightedAvg('int, 'string))
+  }
+}
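
The matching positive case, for orientation: WeightedAvgWithMerge takes a (Long, Int)
argument pair, so a windowed call with those types passes validation. A hedged sketch
against the same schema:

    val util = batchTestUtil()
    val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
    val myWeightedAvg = new WeightedAvgWithMerge

    val result = table
      .window(Tumble over 2.minutes on 'long as 'w) // batch windows may group on a Long column
      .groupBy('w, 'string)
      .select('string, myWeightedAvg('long, 'int))
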
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/JoinValidationTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/JoinValidationTest.scala
new file mode 100644
index 0000000..7939d22
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/JoinValidationTest.scala
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table.validation
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.internal.TableEnvironmentImpl
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.api.{EnvironmentSettings, ValidationException}
+import org.apache.flink.table.runtime.utils.CollectionBatchExecTable
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit._
+
+class JoinValidationTest extends TableTestBase {
+
+  @Test(expected = classOf[ValidationException])
+  def testJoinNonExistingKey(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    ds1.join(ds2)
+      // must fail. Field 'foo does not exist
+      .where('foo === 'e)
+      .select('c, 'g)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testJoinWithNonMatchingKeyTypes(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    ds1.join(ds2)
+      // must fail. Field 'a is Int, and 'g is String
+      .where('a === 'g)
+      .select('c, 'g)
+
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testJoinWithAmbiguousFields(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'c)
+
+    ds1.join(ds2)
+      // must fail. Both inputs share the same field 'c
+      .where('a === 'd)
+      .select('c, 'g)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testLeftJoinNoEquiJoinPredicate(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    ds2.leftOuterJoin(ds1, 'b < 'd).select('c, 'g)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testRightJoinNoEquiJoinPredicate(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    ds2.rightOuterJoin(ds1, 'b < 'd).select('c, 'g)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testFullJoinNoEquiJoinPredicate(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)
+
+    ds2.fullOuterJoin(ds1, 'b < 'd).select('c, 'g)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testJoinTablesFromDifferentEnvs(): Unit = {
+    val settings = EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build()
+    val tEnv1 = TableEnvironmentImpl.create(settings)
+    val tEnv2 = TableEnvironmentImpl.create(settings)
+    val ds1 = CollectionBatchExecTable.getSmall3TupleDataSet(tEnv1, "a, b, c")
+    val ds2 = CollectionBatchExecTable.get5TupleDataSet(tEnv2, "d, e, f, g, c")
+
+    // Must fail. Tables are bound to different TableEnvironments.
+    ds1.join(ds2).where('b === 'e).select('c, 'g)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testJoinTablesFromDifferentEnvsJava() {
+    val settings = EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build()
+    val tEnv1 = TableEnvironmentImpl.create(settings)
+    val tEnv2 = TableEnvironmentImpl.create(settings)
+    val ds1 = CollectionBatchExecTable.getSmall3TupleDataSet(tEnv1, "a, b, c")
+    val ds2 = CollectionBatchExecTable.get5TupleDataSet(tEnv2, "d, e, f, g, c")
+    // Must fail. Tables are bound to different TableEnvironments.
+    ds1.join(ds2).where("a === d").select("g.count")
+  }
+}
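
Each rejected join has a well-formed counterpart within a single TableEnvironment; a
minimal sketch:

    val util = batchTestUtil()
    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'd, 'e, 'f, 'g, 'h)

    // equi-join predicate, disjoint field names, same environment
    val result = ds1.join(ds2).where('b === 'e).select('c, 'g)
    util.verifyPlan(result)
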
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/OverWindowValidationTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/OverWindowValidationTest.scala
new file mode 100644
index 0000000..c35124a
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/OverWindowValidationTest.scala
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table.validation
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.api.{Tumble, ValidationException}
+import org.apache.flink.table.runtime.utils.JavaUserDefinedScalarFunctions.OverAgg0
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit._
+
+class OverWindowValidationTest extends TableTestBase {
+
+  /**
+    * An OVER clause is required for the [[OverAgg0]] window function.
+    */
+  @Test(expected = classOf[ValidationException])
+  def testInvalidOverAggregation(): Unit = {
+    val util = batchTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val overAgg = new OverAgg0
+    t.select('c.count, overAgg('b, 'a))
+  }
+
+  /**
+    * An OVER clause is required for the [[OverAgg0]] window function.
+    */
+  @Test(expected = classOf[ValidationException])
+  def testInvalidOverAggregation2(): Unit = {
+    val util = batchTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('long, 'int, 'string)
+    val overAgg = new OverAgg0
+    table
+      .window(Tumble over 5.milli on 'long as 'w)
+      .groupBy('string, 'w)
+      .select(overAgg('long, 'int))
+  }
+}
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/SetOperatorsValidationTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/SetOperatorsValidationTest.scala
new file mode 100644
index 0000000..75d6d22
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/SetOperatorsValidationTest.scala
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table.validation
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.internal.TableEnvironmentImpl
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.api.{EnvironmentSettings, ValidationException}
+import org.apache.flink.table.runtime.utils.CollectionBatchExecTable
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit._
+
+class SetOperatorsValidationTest extends TableTestBase {
+
+  @Test(expected = classOf[ValidationException])
+  def testUnionDifferentColumnSize(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'a, 'b, 'd, 'c, 'e)
+
+    // must fail. Union inputs have different column size.
+    ds1.unionAll(ds2)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testUnionDifferentFieldTypes(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'a, 'b, 'c, 'd, 'e)
+      .select('a, 'b, 'c)
+
+    // must fail. Union inputs have different field types.
+    ds1.unionAll(ds2)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testUnionTablesFromDifferentEnvs(): Unit = {
+    val settings = EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build()
+    val tEnv1 = TableEnvironmentImpl.create(settings)
+    val tEnv2 = TableEnvironmentImpl.create(settings)
+
+    val ds1 = CollectionBatchExecTable.getSmall3TupleDataSet(tEnv1)
+    val ds2 = CollectionBatchExecTable.getSmall3TupleDataSet(tEnv2)
+
+    // Must fail. Tables are bound to different TableEnvironments.
+    ds1.unionAll(ds2).select('c)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testMinusDifferentFieldTypes(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'a, 'b, 'c, 'd, 'e)
+      .select('a, 'b, 'c)
+
+    // must fail. Minus inputs have different field types.
+    ds1.minus(ds2)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testMinusAllTablesFromDifferentEnvs(): Unit = {
+    val settings = EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build()
+    val tEnv1 = TableEnvironmentImpl.create(settings)
+    val tEnv2 = TableEnvironmentImpl.create(settings)
+
+    val ds1 = CollectionBatchExecTable.getSmall3TupleDataSet(tEnv1)
+    val ds2 = CollectionBatchExecTable.getSmall3TupleDataSet(tEnv2)
+
+    // Must fail. Tables are bound to different TableEnvironments.
+    ds1.minusAll(ds2).select('c)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testIntersectWithDifferentFieldTypes(): Unit = {
+    val util = batchTestUtil()
+    val ds1 = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+    val ds2 = util.addTableSource[(Int, Long, Int, String, Long)]("Table5", 'a, 'b, 'c, 'd, 'e)
+      .select('a, 'b, 'c)
+
+    // must fail. Intersect inputs have different field types.
+    ds1.intersect(ds2)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testIntersectTablesFromDifferentEnvs(): Unit = {
+    val settings = EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build()
+    val tEnv1 = TableEnvironmentImpl.create(settings)
+    val tEnv2 = TableEnvironmentImpl.create(settings)
+
+    val ds1 = CollectionBatchExecTable.getSmall3TupleDataSet(tEnv1)
+    val ds2 = CollectionBatchExecTable.getSmall3TupleDataSet(tEnv2)
+
+    // Must fail. Tables are bound to different TableEnvironments.
+    ds1.intersect(ds2).select('c)
+  }
+}
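
In short, set operators require identical arity, matching field types, and a shared
TableEnvironment. The accepted shape, sketched with illustrative table names:

    val util = batchTestUtil()
    val left = util.addTableSource[(Int, Long, String)]("S1", 'a, 'b, 'c)
    val right = util.addTableSource[(Int, Long, String)]("S2", 'a, 'b, 'c)

    left.unionAll(right) // same schema: accepted
    left.minus(right)    // likewise for minus and intersect
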
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/SortValidationTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/SortValidationTest.scala
new file mode 100644
index 0000000..56a6011
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/table/validation/SortValidationTest.scala
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.batch.table.validation
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.ValidationException
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit._
+
+class SortValidationTest extends TableTestBase {
+
+  @Test(expected = classOf[ValidationException])
+  def testOffsetWithoutOrder(): Unit = {
+    val util = batchTestUtil()
+    val ds = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    ds.offset(5)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testFetchWithoutOrder(): Unit = {
+    val util = batchTestUtil()
+    val ds = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    ds.fetch(5)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testFetchBeforeOffset(): Unit = {
+    val util = batchTestUtil()
+    val ds = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    ds.orderBy('a.asc).fetch(5).offset(10)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testOffsetBeforeOffset(): Unit = {
+    val util = batchTestUtil()
+    val ds = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    ds.orderBy('a.asc).offset(10).offset(5)
+  }
+
+  @Test(expected = classOf[ValidationException])
+  def testNegativeOffset(): Unit = {
+    val util = batchTestUtil()
+    val ds = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    ds.orderBy('a.asc).offset(-1)
+  }
+}
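
The legal ordering of the fluent calls, for contrast: offset before fetch, both after
orderBy, both non-negative. A sketch:

    val util = batchTestUtil()
    val ds = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)

    // skip the first 10 rows, then return at most 5
    ds.orderBy('a.asc).offset(10).fetch(5)
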
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/AggregateTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/AggregateTest.scala
new file mode 100644
index 0000000..606f73e
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/AggregateTest.scala
@@ -0,0 +1,230 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.stream.table
+
+import org.apache.flink.api.common.typeinfo.BasicTypeInfo
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.api.{Session, Slide, Tumble}
+import org.apache.flink.table.plan.util.JavaUserDefinedAggFunctions.WeightedAvg
+import org.apache.flink.table.util.{CountMinMax, TableTestBase}
+
+import org.junit.Test
+
+class AggregateTest extends TableTestBase {
+
+  @Test
+  def testGroupDistinctAggregate(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
+
+    val resultTable = table
+      .groupBy('b)
+      .select('a.sum.distinct, 'c.count.distinct)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testGroupDistinctAggregateWithUDAGG(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
+    val weightedAvg = new WeightedAvg
+
+    val resultTable = table
+      .groupBy('c)
+      .select(weightedAvg.distinct('a, 'b), weightedAvg('a, 'b))
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testGroupAggregate(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
+
+    val resultTable = table
+      .groupBy('b)
+      .select('a.count)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testGroupAggregateWithConstant1(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
+
+    val resultTable = table
+      .select('a, 4 as 'four, 'b)
+      .groupBy('four, 'a)
+      .select('four, 'b.sum)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testGroupAggregateWithConstant2(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
+
+    val resultTable = table
+      .select('b, 4 as 'four, 'a)
+      .groupBy('b, 'four)
+      .select('four, 'a.sum)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testGroupAggregateWithExpressionInSelect(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
+
+    val resultTable = table
+      .select('a as 'a, 'b % 3 as 'd, 'c as 'c)
+      .groupBy('d)
+      .select('c.min, 'a.avg)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testGroupAggregateWithFilter(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
+
+    val resultTable = table
+      .groupBy('b)
+      .select('b, 'a.sum)
+      .where('b === 2)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testGroupAggregateWithAverage(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
+
+    val resultTable = table
+      .groupBy('b)
+      .select('b, 'a.cast(BasicTypeInfo.DOUBLE_TYPE_INFO).avg)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testDistinctAggregateOnTumbleWindow(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Int, Long, String)](
+      "MyTable", 'a, 'b, 'c, 'rowtime.rowtime)
+    val result = table
+      .window(Tumble over 15.minute on 'rowtime as 'w)
+      .groupBy('w)
+      .select('a.count.distinct, 'a.sum)
+
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testMultiDistinctAggregateSameFieldOnHopWindow(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Int, Long, String)](
+      "MyTable", 'a, 'b, 'c, 'rowtime.rowtime)
+    val result = table
+      .window(Slide over 1.hour every 15.minute on 'rowtime as 'w)
+      .groupBy('w)
+      .select('a.count.distinct, 'a.sum.distinct, 'a.max.distinct)
+
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testDistinctAggregateWithGroupingOnSessionWindow(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Int, Long, String)](
+      "MyTable", 'a, 'b, 'c, 'rowtime.rowtime)
+    val result = table
+      .window(Session withGap 15.minute on 'rowtime as 'w)
+      .groupBy('a, 'w)
+      .select('a, 'a.count, 'c.count.distinct)
+
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testSimpleAggregate(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Int, Long, String)](
+      "MyTable", 'a, 'b, 'c)
+
+    val testAgg = new CountMinMax
+    val resultTable = table
+      .groupBy('b)
+      .aggregate(testAgg('a))
+      .select('b, 'f0, 'f1)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testSelectStar(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Int, Long, String)](
+      "MyTable", 'a, 'b, 'c)
+
+    val testAgg = new CountMinMax
+    val resultTable = table
+      .groupBy('b)
+      .aggregate(testAgg('a))
+      .select('*)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testAggregateWithScalarResult(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Int, Long, String)](
+      "MyTable", 'a, 'b, 'c)
+
+    val resultTable = table
+      .groupBy('b)
+      .aggregate('a.count)
+      .select('b, 'TMP_0)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testAggregateWithAlias(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Int, Long, String)](
+      "MyTable", 'a, 'b, 'c)
+
+    val testAgg = new CountMinMax
+    val resultTable = table
+      .groupBy('b)
+      .aggregate(testAgg('a) as ('x, 'y, 'z))
+      .select('b, 'x, 'y)
+
+    util.verifyPlan(resultTable)
+  }
+}
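
CountMinMax comes from the shared test utilities; what matters for the tests above is that
it is an AggregateFunction with a three-field result (count, min, max), which is why its
output can be selected as 'f0/'f1 or aliased as ('x, 'y, 'z). A rough sketch of that shape
under a hypothetical name, with accumulator and type-extraction details simplified:

    import org.apache.flink.table.functions.AggregateFunction

    class MinMaxAcc {
      var count: Long = 0L
      var min: Int = 0
      var max: Int = 0
    }

    class CountMinMaxLike extends AggregateFunction[(Long, Int, Int), MinMaxAcc] {
      override def createAccumulator(): MinMaxAcc = new MinMaxAcc

      // invoked by the runtime for every input row
      def accumulate(acc: MinMaxAcc, value: Int): Unit = {
        if (acc.count == 0 || value < acc.min) acc.min = value
        if (acc.count == 0 || value > acc.max) acc.max = value
        acc.count += 1
      }

      override def getValue(acc: MinMaxAcc): (Long, Int, Int) =
        (acc.count, acc.min, acc.max)
    }
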
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/CalcTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/CalcTest.scala
new file mode 100644
index 0000000..224591c
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/CalcTest.scala
@@ -0,0 +1,161 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.stream.table
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.Tumble
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.expressions.utils.{Func1, Func23, Func24}
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit.Test
+
+class CalcTest extends TableTestBase {
+
+  // ----------------------------------------------------------------------------------------------
+  // Tests for all situations in which field projection can be applied, e.g. selecting a few
+  // fields from a source with many fields.
+  // ----------------------------------------------------------------------------------------------
+
+  @Test
+  def testSelectFromWindow(): Unit = {
+    val util = streamTestUtil()
+    val sourceTable =
+      util.addDataStream[(Int, Long, String, Double)](
+        "MyTable", 'a, 'b, 'c, 'd, 'rowtime.rowtime)
+    val resultTable = sourceTable
+        .window(Tumble over 5.millis on 'rowtime as 'w)
+        .groupBy('w)
+        .select('c.upperCase().count, 'a.sum)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testSelectFromGroupedWindow(): Unit = {
+    val util = streamTestUtil()
+    val sourceTable =
+      util.addDataStream[(Int, Long, String, Double)](
+        "MyTable", 'a, 'b, 'c, 'd, 'rowtime.rowtime)
+    val resultTable = sourceTable
+        .window(Tumble over 5.millis on 'rowtime as 'w)
+        .groupBy('w, 'b)
+        .select('c.upperCase().count, 'a.sum, 'b)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testMultiFilter(): Unit = {
+    val util = streamTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
+    val resultTable = sourceTable.select('a, 'b)
+      .filter('a > 0)
+      .filter('b < 2)
+      .filter(('a % 2) === 1)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testIn(): Unit = {
+    val util = streamTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val resultTable = sourceTable.select('a, 'b, 'c)
+      .where(s"${(1 to 30).map("b = " + _).mkString(" || ")} && c = 'xx'")
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testNotIn(): Unit = {
+    val util = streamTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val resultTable = sourceTable.select('a, 'b, 'c)
+      .where(s"${(1 to 30).map("b != " + _).mkString(" && ")} || c != 'xx'")
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testAddColumns(): Unit = {
+    val util = streamTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val resultTable = sourceTable
+      .addColumns("concat(c, 'sunny') as kid")
+      .addColumns('a + 2, 'b as 'b2)
+      .addOrReplaceColumns(concat('c, "_kid") as 'kid, concat('c, "kid") as 'kid)
+      .addOrReplaceColumns("concat(c, '_kid_last') as kid")
+      .addColumns("'literal_value'")
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testRenameColumns(): Unit = {
+    val util = streamTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val resultTable = sourceTable.renameColumns('a as 'a2, 'b as 'b2).select('a2, 'b2)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testDropColumns(): Unit = {
+    val util = streamTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val resultTable = sourceTable.dropColumns('a, 'b)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testSimpleMap(): Unit = {
+    val util = streamTestUtil()
+
+    val sourceTable = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val resultTable = sourceTable.map(Func23('a, 'b, 'c))
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testScalarResult(): Unit = {
+    val util = streamTestUtil()
+
+    val sourceTable = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val resultTable = sourceTable.map(Func1('a))
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testMultiMap(): Unit = {
+    val util = streamTestUtil()
+
+    val sourceTable = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val resultTable = sourceTable
+      .map(Func23('a, 'b, 'c))
+      .map(Func24('_c0, '_c1, '_c2, '_c3))
+
+    util.verifyPlan(resultTable)
+  }
+}
+
+
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/ColumnFunctionsTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/ColumnFunctionsTest.scala
new file mode 100644
index 0000000..f3a11dc
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/ColumnFunctionsTest.scala
@@ -0,0 +1,228 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.stream.table
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.api.{Over, Slide}
+import org.apache.flink.table.functions.ScalarFunction
+import org.apache.flink.table.plan.util.JavaUserDefinedAggFunctions.{CountDistinct, WeightedAvg}
+import org.apache.flink.table.util.{CountAggFunction, TableFunc0, TableTestBase}
+
+import org.junit.Test
+
+/**
+  * Tests for the different column functions.
+  */
+class ColumnFunctionsTest extends TableTestBase {
+
+  val util = streamTestUtil()
+
+  @Test
+  def testStar(): Unit = {
+
+    val t = util.addTableSource[(Double, Long)]('double, 'long)
+
+   util.addFunction("TestFunc", TestFunc)
+    val tab1 = t.select(TestFunc(withColumns('*)))
+    val tab2 = t.select("TestFunc(withColumns(*))")
+    verifyTableEquals(tab1, tab2)
+    util.verifyPlan(tab1)
+  }
+
+  @Test
+  def testColumnRange(): Unit = {
+    val t = util.addTableSource[(Int, Long, String, Int, Long, String)]('a, 'b, 'c, 'd, 'e, 'f)
+
+    val tab1 = t.select(withColumns('b to 'c), 'a, withColumns(5 to 6, 'd))
+    val tab2 = t.select("withColumns(b to c), a, withColumns(5 to 6, d)")
+    verifyTableEquals(tab1, tab2)
+    util.verifyPlan(tab1)
+  }
+
+  @Test
+  def testColumnWithoutRange(): Unit = {
+    val t = util.addTableSource[(Int, Long, String, Int, Long, String)]('a, 'b, 'c, 'd, 'e, 'f)
+
+    val tab1 = t.select(withColumns(1, 'b, 'c), 'f)
+    val tab2 = t.select("withColumns(1, b, c), f")
+    verifyTableEquals(tab1, tab2)
+    util.verifyPlan(tab1)
+  }
+
+  @Test
+  def testInverseSelection(): Unit = {
+    val t = util.addTableSource[(Int, Long, String, Int, Long, String)]('a, 'b, 'c, 'd, 'e, 'f)
+
+    val tab1 = t
+      .select(withoutColumns(1, 'b))
+      .select(withoutColumns(1 to 2))
+
+    val tab2 = t
+      .select("withoutColumns(1, b)")
+      .select("withoutColumns(1 to 2)")
+    verifyTableEquals(tab1, tab2)
+    util.verifyPlan(tab1)
+  }
+
+  @Test
+  def testColumnFunctionsInUDF(): Unit = {
+    val t = util.addTableSource[(Int, Long, String, String)]('int, 'long, 'string1, 'string2)
+
+    val tab1 = t.select(concat(withColumns('string1 to 'string2)))
+    val tab2 = t.select("concat(withColumns(string1 to string2))")
+    verifyTableEquals(tab1, tab2)
+    util.verifyPlan(tab1)
+  }
+
+  @Test
+  def testJoin(): Unit = {
+    val t1 = util.addTableSource[(Int, Long, String)]('int1, 'long1, 'string1)
+    val t2 = util.addTableSource[(Int, Long, String)]('int2, 'long2, 'string2)
+
+    val tab1 = t1.join(t2, withColumns(1) === withColumns(4))
+    val tab2 = t1.join(t2, "withColumns(1) === withColumns(4)")
+    verifyTableEquals(tab1, tab2)
+    util.verifyPlan(tab1)
+  }
+
+  @Test
+  def testJoinLateral(): Unit = {
+    val t = util.addTableSource[(Double, Long, String)]('int, 'long, 'string)
+    val func0 = new TableFunc0
+   util.addFunction("func0", func0)
+
+    val tab1 = t.joinLateral(func0(withColumns('string)))
+    util.verifyPlan(tab1)
+  }
+
+  @Test
+  def testFilter(): Unit = {
+    val t = util.addTableSource[(Int, Long, String, String)]('int, 'long, 'string1, 'string2)
+
+    val tab1 = t.where(concat(withColumns('string1 to 'string2)) === "a")
+    val tab2 = t.where("concat(withColumns(string1 to string2)) = 'a'")
+    verifyTableEquals(tab1, tab2)
+    util.verifyPlan(tab1)
+  }
+
+  @Test
+  def testGroupBy(): Unit = {
+    val t = util.addTableSource[(Int, Long, String, Int, Long, String)]('a, 'b, 'c, 'd, 'e, 'f)
+
+    val tab1 = t
+      .groupBy(withColumns(1), 'b)
+      .select('a, 'b, withColumns('c).count)
+
+    val tab2 = t
+      .groupBy("withColumns(1), b")
+      .select("a, b, withColumns(c).count")
+    verifyTableEquals(tab1, tab2)
+    util.verifyPlan(tab1)
+  }
+
+  @Test
+  def testWindowGroupBy(): Unit = {
+    val t = util.addDataStream[(Int, Long, String, Int)]("T1", 'a, 'rowtime.rowtime, 'c, 'd)
+      .as('a, 'b, 'c, 'd)
+
+    val tab1 = t
+      .window(Slide over 3.milli every 10.milli on withColumns('b) as 'w)
+      .groupBy(withColumns('a, 'b), 'w)
+      .select(withColumns(1 to 2), withColumns('c).count as 'c)
+
+    val tab2 = t
+      .window(Slide.over("3.milli").every("10.milli").on("withColumns(b)").as("w"))
+      .groupBy("withColumns(a, b), w")
+      .select("withColumns(1 to 2), withColumns(c).count as c")
+    verifyTableEquals(tab1, tab2)
+    util.verifyPlan(tab1)
+  }
+
+  @Test
+  def testOver(): Unit = {
+    val table = util.addDataStream[(Long, Int, String)]("T1", 'a, 'b, 'c, 'proctime.proctime)
+
+    val countFun = new CountAggFunction
+    val weightAvgFun = new WeightedAvg
+    val countDist = new CountDistinct
+
+   util.addFunction("countFun", countFun)
+   util.addFunction("weightAvgFun", weightAvgFun)
+   util.addFunction("countDist", countDist)
+
+    val tab1 = table
+      .window(
+        Over partitionBy withColumns('c) orderBy 'proctime preceding UNBOUNDED_ROW as 'w)
+      .select('c,
+        countFun(withColumns('b)) over 'w as 'mycount,
+        weightAvgFun(withColumns('a to 'b)) over 'w as 'wAvg,
+        countDist('a) over 'w as 'countDist)
+      .select('c, 'mycount, 'wAvg, 'countDist)
+
+    val tab2 = table
+      .window(
+        Over.partitionBy("withColumns(c)")
+          .orderBy("proctime")
+          .preceding("UNBOUNDED_ROW")
+          .as("w"))
+      .select("c, countFun(withColumns(b)) over w as mycount, " +
+        "weightAvgFun(withColumns(a to b)) over w as wAvg, countDist(a) over w as countDist")
+      .select('c, 'mycount, 'wAvg, 'countDist)
+    verifyTableEquals(tab1, tab2)
+    util.verifyPlan(tab1)
+  }
+
+  @Test
+  def testAddColumns(): Unit = {
+    val t = util.addTableSource[(Double, Long, String)]('a, 'b, 'c)
+
+   util.addFunction("TestFunc", TestFunc)
+    val tab1 = t.addColumns(TestFunc(withColumns('a, 'b)) as 'd)
+    val tab2 = t.addColumns("TestFunc(withColumns(a, b)) as d")
+    verifyTableEquals(tab1, tab2)
+    util.verifyPlan(tab1)
+  }
+
+  @Test
+  def testRenameColumns(): Unit = {
+    val t = util.addTableSource[(Double, Long, String)]('a, 'b, 'c)
+
+    val tab1 = t.renameColumns(withColumns('a) as 'd).select("d, b")
+    val tab2 = t.renameColumns("withColumns(a) as d").select('d, 'b)
+    verifyTableEquals(tab1, tab2)
+    util.verifyPlan(tab1)
+  }
+
+  @Test
+  def testDropColumns(): Unit = {
+    val t = util.addTableSource[(Double, Long, String)]('a, 'b, 'c)
+
+    val tab1 = t.dropColumns(withColumns('a to 'b))
+    val tab2 = t.dropColumns("withColumns(a to b)")
+    verifyTableEquals(tab1, tab2)
+    util.verifyPlan(tab1)
+  }
+}
+
+object TestFunc extends ScalarFunction {
+  def eval(a: Double, b: Long): Double = {
+    a
+  }
+}
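
Note on the column-function tests above: each case builds the same query twice,
once with the Scala expression API and once with the Java string API, and
asserts plan equality. A minimal sketch of the core equivalence, assuming the
same TableTestBase harness (the table and column names here are illustrative,
not part of the patch):

    // withColumns addresses columns by 1-based index, by name, or by range;
    // both selections below are expected to compile to the same plan, which
    // is what verifyTableEquals asserts across the two expression APIs.
    val t = util.addTableSource[(Int, Long, String)]('a, 'b, 'c)
    val byRange = t.select(withColumns(1 to 2))   // columns 1..2 by index
    val byName  = t.select('a, 'b)                // the same columns by name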
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/CorrelateTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/CorrelateTest.scala
new file mode 100644
index 0000000..2085829
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/CorrelateTest.scala
@@ -0,0 +1,181 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.flink.table.plan.stream.table
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.expressions.utils.Func13
+import org.apache.flink.table.plan.optimize.program.FlinkStreamProgram
+import org.apache.flink.table.util.{HierarchyTableFunction, PojoTableFunc, TableFunc0, TableFunc1, TableFunc2, TableTestBase}
+
+import org.apache.calcite.rel.rules.{CalcMergeRule, FilterCalcMergeRule, ProjectCalcMergeRule}
+import org.apache.calcite.tools.RuleSets
+import org.junit.Test
+
+class CorrelateTest extends TableTestBase {
+
+  @Test
+  def testCrossJoin(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+
+    val function = new TableFunc1
+    util.addFunction("func1", function)
+
+    val result1 = table.joinLateral(function('c) as 's).select('c, 's)
+    util.verifyPlan(result1)
+  }
+
+  @Test
+  def testCrossJoin2(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+
+    val function = new TableFunc1
+    util.addFunction("func1", function)
+    // test overloading
+    val result2 = table.joinLateral(function('c, "$") as 's).select('c, 's)
+    util.verifyPlan(result2)
+  }
+
+  @Test
+  def testLeftOuterJoinWithLiteralTrue(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val function = new TableFunc1
+    util.addFunction("func1", function)
+
+    val result = table.leftOuterJoinLateral(function('c) as 's, true).select('c, 's)
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testCustomType(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val function = new TableFunc2
+    util.addFunction("func2", function)
+    val scalarFunc = new Func13("pre")
+
+    val result = table.joinLateral(
+      function(scalarFunc('c)) as ('name, 'len)).select('c, 'name, 'len)
+
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testHierarchyType(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val function = new HierarchyTableFunction
+    util.addFunction("hierarchy", function)
+
+    val result = table.joinLateral(function('c) as ('name, 'adult, 'len))
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testPojoType(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val function = new PojoTableFunc
+    util.addFunction("pojo", function)
+
+    val result = table.joinLateral(function('c))
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testFilter(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val function = new TableFunc2
+    util.addFunction("func2", function)
+
+    val result = table
+      .joinLateral(function('c) as ('name, 'len))
+      .select('c, 'name, 'len)
+      .filter('len > 2)
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testScalarFunction(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val function = new TableFunc1
+    util.addFunction("func1", function)
+
+    val result = table.joinLateral(function('c.substring(2)) as 's)
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testCorrelateWithMultiFilter(): Unit = {
+    val util = streamTestUtil()
+    val sourceTable = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val function = new TableFunc0
+    util.addFunction("func1", function)
+
+    val result = sourceTable.select('a, 'b, 'c)
+      .joinLateral(function('c) as ('d, 'e))
+      .select('c, 'd, 'e)
+      .where('e > 10)
+      .where('e > 20)
+      .select('c, 'd)
+
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testCorrelateWithMultiFilterAndWithoutCalcMergeRules(): Unit = {
+    val util = streamTestUtil()
+    val programs = util.getStreamProgram()
+    programs.getFlinkRuleSetProgram(FlinkStreamProgram.LOGICAL)
+      .get.remove(
+      RuleSets.ofList(
+        CalcMergeRule.INSTANCE,
+        FilterCalcMergeRule.INSTANCE,
+        ProjectCalcMergeRule.INSTANCE))
+    // re-register the program with the calc merge rules removed
+    util.replaceStreamProgram(programs)
+
+    val sourceTable = util.addTableSource[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
+    val function = new TableFunc0
+    util.addFunction("func1", function)
+    val result = sourceTable.select('a, 'b, 'c)
+      .joinLateral(function('c) as ('d, 'e))
+      .select('c, 'd, 'e)
+      .where('e > 10)
+      .where('e > 20)
+      .select('c, 'd)
+
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testFlatMap(): Unit = {
+    val util = streamTestUtil()
+
+    val func2 = new TableFunc2
+    val sourceTable = util.addTableSource[(Int, Long, String)]("MyTable", 'f1, 'f2, 'f3)
+    val resultTable = sourceTable
+      .flatMap(func2('f3))
+    util.verifyPlan(resultTable)
+  }
+}
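
The correlate tests above rely on utility table functions from
org.apache.flink.table.util. For orientation, a hypothetical function of the
same shape as TableFunc2 (two output fields, matching the as ('name, 'len)
aliases used above; the class name and separator are assumptions, not the
actual utility class):

    import org.apache.flink.table.functions.TableFunction

    class SplitFunc extends TableFunction[(String, Int)] {
      // Emits one (token, length) row per '#'-separated token of the input.
      def eval(str: String): Unit = {
        str.split("#").foreach(s => collect((s, s.length)))
      }
    }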
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/GroupWindowTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/GroupWindowTest.scala
new file mode 100644
index 0000000..927e01d
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/GroupWindowTest.scala
@@ -0,0 +1,409 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.stream.table
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.api.{Session, Slide, Tumble}
+import org.apache.flink.table.plan.util.JavaUserDefinedAggFunctions.{WeightedAvg, WeightedAvgWithMerge}
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit.Test
+
+class GroupWindowTest extends TableTestBase {
+
+  @Test
+  def testMultiWindow(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'long, 'int, 'string, 'proctime.proctime)
+
+    val windowedTable = table
+      .window(Tumble over 50.millis on 'proctime as 'w1)
+      .groupBy('w1, 'string)
+      .select('w1.proctime as 'proctime, 'string, 'int.count)
+      .window(Slide over 20.millis every 10.millis on 'proctime as 'w2)
+      .groupBy('w2)
+      .select('string.count)
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testProcessingTimeTumblingGroupWindowOverTime(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'long, 'int, 'string, 'proctime.proctime)
+
+    val windowedTable = table
+      .window(Tumble over 50.millis on 'proctime as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count)
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testProcessingTimeTumblingGroupWindowOverCount(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'long, 'int, 'string, 'proctime.proctime)
+
+    val windowedTable = table
+      .window(Tumble over 2.rows on 'proctime as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count)
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testEventTimeTumblingGroupWindowOverTime(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'rowtime.rowtime, 'int, 'string)
+
+    val windowedTable = table
+      .window(Tumble over 5.millis on 'rowtime as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count)
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testEventTimeTumblingGroupWindowWithUdAgg(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1",'long, 'int, 'string, 'rowtime.rowtime)
+
+    val weightedAvg = new WeightedAvgWithMerge
+
+    val windowedTable = table
+      .window(Tumble over 5.millis on 'rowtime as 'w)
+      .groupBy('w, 'string)
+      .select('string, weightedAvg('long, 'int))
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testProcessingTimeSlidingGroupWindowOverTime(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'long, 'int, 'string, 'proctime.proctime)
+
+    val windowedTable = table
+      .window(Slide over 50.millis every 50.millis on 'proctime as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count)
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testProcessingTimeSlidingGroupWindowOverCount(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'long, 'int, 'string, 'proctime.proctime)
+
+    val windowedTable = table
+      .window(Slide over 2.rows every 1.rows on 'proctime as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count)
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testEventTimeSlidingGroupWindowOverTime(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'long, 'int, 'string, 'rowtime.rowtime)
+
+    val windowedTable = table
+      .window(Slide over 8.millis every 10.millis on 'rowtime as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count)
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testEventTimeSlidingGroupWindowOverCount(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'rowtime.rowtime, 'int, 'string)
+
+    val windowedTable = table
+      .window(Slide over 8.millis every 10.millis on 'rowtime as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count)
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testEventTimeSlidingGroupWindowWithUdAgg(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'long, 'int, 'string, 'rowtime.rowtime)
+
+    val weightedAvg = new WeightedAvgWithMerge
+
+    val windowedTable = table
+      .window(Slide over 8.millis every 10.millis on 'rowtime as 'w)
+      .groupBy('w, 'string)
+      .select('string, weightedAvg('long, 'int))
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testEventTimeSessionGroupWindowOverTime(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'rowtime.rowtime, 'int, 'string)
+
+    val windowedTable = table
+      .window(Session withGap 7.millis on 'rowtime as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count)
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testEventTimeSessionGroupWindowWithUdAgg(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'long, 'int, 'string, 'rowtime.rowtime)
+
+    val weightedAvg = new WeightedAvgWithMerge
+
+    val windowedTable = table
+      .window(Session withGap 7.millis on 'rowtime as 'w)
+      .groupBy('w, 'string)
+      .select('string, weightedAvg('long, 'int))
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testAllProcessingTimeTumblingGroupWindowOverTime(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'long, 'int, 'string, 'proctime.proctime)
+
+    val windowedTable = table
+      .window(Tumble over 50.millis on 'proctime as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count)
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testAllProcessingTimeTumblingGroupWindowOverCount(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'long, 'int, 'string, 'proctime.proctime)
+
+    val windowedTable = table
+      .window(Tumble over 2.rows on 'proctime as 'w)
+      .groupBy('w)
+      .select('int.count)
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testAllEventTimeTumblingGroupWindowOverTime(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'long, 'int, 'string, 'rowtime.rowtime)
+
+    val windowedTable = table
+      .window(Tumble over 5.millis on 'rowtime as 'w)
+      .groupBy('w)
+      .select('int.count)
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testAllEventTimeTumblingGroupWindowOverCount(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'rowtime.rowtime, 'int, 'string)
+
+    val windowedTable = table
+      .window(Tumble over 5.millis on 'rowtime as 'w)
+      .groupBy('w)
+      .select('int.count)
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testAllProcessingTimeSlidingGroupWindowOverTime(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'long, 'int, 'string, 'proctime.proctime)
+
+    val windowedTable = table
+      .window(Slide over 50.millis every 50.millis on 'proctime as 'w)
+      .groupBy('w)
+      .select('int.count)
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testAllProcessingTimeSlidingGroupWindowOverCount(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'long, 'int, 'string, 'proctime.proctime)
+
+    val windowedTable = table
+      .window(Slide over 2.rows every 1.rows on 'proctime as 'w)
+      .groupBy('w)
+      .select('int.count)
+
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testAllEventTimeSlidingGroupWindowOverTime(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'long, 'int, 'string, 'rowtime.rowtime)
+
+    val windowedTable = table
+      .window(Slide over 8.millis every 10.millis on 'rowtime as 'w)
+      .groupBy('w)
+      .select('int.count)
+
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testAllEventTimeSlidingGroupWindowOverCount(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'rowtime.rowtime, 'int, 'string)
+
+    val windowedTable = table
+      .window(Slide over 8.millis every 10.millis on 'rowtime as 'w)
+      .groupBy('w)
+      .select('int.count)
+
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testAllEventTimeSessionGroupWindowOverTime(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'rowtime.rowtime, 'int, 'string)
+
+    val windowedTable = table
+      .window(Session withGap 7.millis on 'rowtime as 'w)
+      .groupBy('w)
+      .select('int.count)
+
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testTumbleWindowStartEnd(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'long, 'int, 'string, 'rowtime.rowtime)
+
+    val windowedTable = table
+      .window(Tumble over 5.millis on 'rowtime as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count, 'w.start, 'w.end)
+
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testSlidingWindowWithUDAF(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String, Int, Int)](
+      "T1",
+      'long,
+      'int,
+      'string,
+      'int2,
+      'int3,
+      'proctime.proctime)
+
+    val weightAvgFun = new WeightedAvg
+
+    val windowedTable = table
+      .window(Slide over 2.rows every 1.rows on 'proctime as 'w)
+      .groupBy('w, 'int2, 'int3, 'string)
+      .select(weightAvgFun('long, 'int))
+
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testSlideWindowStartEnd(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'long, 'int, 'string, 'rowtime.rowtime)
+
+    val windowedTable = table
+      .window(Slide over 10.millis every 5.millis on 'rowtime as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.count, 'w.start, 'w.end)
+
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testSessionWindowStartWithTwoEnd(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'rowtime.rowtime, 'int, 'string)
+
+    val windowedTable = table
+      .window(Session withGap 3.millis on 'rowtime as 'w)
+      .groupBy('w, 'string)
+      .select('w.end as 'we1, 'string, 'int.count as 'cnt, 'w.start as 'ws, 'w.end as 'we2)
+
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testTumbleWindowWithDuplicateAggsAndProps(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String)](
+      "T1", 'rowtime.rowtime, 'int, 'string)
+
+    val windowedTable = table
+      .window(Tumble over 5.millis on 'rowtime as 'w)
+      .groupBy('w, 'string)
+      .select('string, 'int.sum + 1 as 's1, 'int.sum + 3 as 's2, 'w.start as 'x, 'w.start as 'x2,
+        'w.end as 'x3, 'w.end)
+
+    util.verifyPlan(windowedTable)
+  }
+
+  @Test
+  def testDecomposableAggFunctions(): Unit = {
+    val util = streamTestUtil()
+    val table = util.addDataStream[(Long, Int, String, Long)](
+      "T1", 'rowtime.rowtime, 'a, 'b, 'c)
+
+    val windowedTable = table
+      .window(Tumble over 15.minutes on 'rowtime as 'w)
+      .groupBy('w)
+      .select('c.varPop, 'c.varSamp, 'c.stddevPop, 'c.stddevSamp, 'w.start, 'w.end)
+
+    util.verifyPlan(windowedTable)
+  }
+}
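
All of the group-window tests above follow the same three-step pattern; a
condensed sketch with illustrative fields (Tumble shown; Slide and Session
substitute in the window step):

    val result = table
      .window(Tumble over 5.millis on 'rowtime as 'w)  // 1. define the window
      .groupBy('w, 'string)                            // 2. group by it, plus optional keys
      .select('string, 'int.count, 'w.start, 'w.end)   // 3. aggregates and window properties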
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/JoinTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/JoinTest.scala
new file mode 100644
index 0000000..ebbc3a6
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/JoinTest.scala
@@ -0,0 +1,263 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.stream.table
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit.Test
+
+import java.sql.Timestamp
+
+/**
+  * Tests for both windowed and non-windowed joins.
+  */
+class JoinTest extends TableTestBase {
+
+  // Tests for inner join
+  @Test
+  def testRowTimeWindowInnerJoin(): Unit = {
+    val util = streamTestUtil()
+    val left = util.addDataStream[(Long, Int, String)]("T1", 'a, 'b, 'c, 'lrtime.rowtime)
+
+    val right = util.addDataStream[(Long, Int, String)]("T2",'d, 'e, 'f, 'rrtime.rowtime)
+
+    val resultTable = left.join(right)
+      .where('a === 'd && 'lrtime >= 'rrtime - 5.minutes && 'lrtime < 'rrtime + 3.seconds)
+      .select('a, 'e, 'lrtime)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testProcTimeWindowInnerJoin(): Unit = {
+    val util = streamTestUtil()
+    val left = util.addDataStream[(Long, Int, String)]("T1", 'a, 'b, 'c, 'lptime.proctime)
+
+    val right = util.addDataStream[(Long, Int, String)]("T2",'d, 'e, 'f, 'rptime.proctime)
+
+    val resultTable = left.join(right)
+      .where('a === 'd && 'lptime >= 'rptime - 1.second && 'lptime < 'rptime)
+      .select('a, 'e, 'lptime)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testProcTimeWindowInnerJoinWithEquiTimeAttrs(): Unit = {
+    val util = streamTestUtil()
+    val left = util.addDataStream[(Long, Int, String)]("T1",'a, 'b, 'c, 'lptime.proctime)
+
+    val right = util.addDataStream[(Long, Int, String)]("T2",'d, 'e, 'f, 'rptime.proctime)
+
+    val resultTable = left.join(right)
+      .where('a === 'd && 'lptime === 'rptime)
+      .select('a, 'e, 'lptime)
+
+    util.verifyPlan(resultTable)
+  }
+
+  /**
+    * Time indicators can now also be accessed from non-time predicates.
+    */
+  @Test
+  def testRowTimeInnerJoinWithTimeAccessed(): Unit = {
+    val util = streamTestUtil()
+    val left = util.addDataStream[(Long, Int, Timestamp)]("T1",'a, 'b, 'c, 'lrtime.rowtime)
+    val right = util.addDataStream[(Long, Int, Timestamp)]("T2",'d, 'e, 'f, 'rrtime.rowtime)
+    val resultTable = left.join(right)
+      .where('a === 'd && 'lrtime >= 'rrtime - 5.minutes && 'lrtime < 'rrtime && 'lrtime > 'f)
+
+    util.verifyPlan(resultTable)
+  }
+
+  // Tests for left outer join
+  @Test
+  def testRowTimeWindowLeftOuterJoin(): Unit = {
+    val util = streamTestUtil()
+    val left = util.addDataStream[(Long, Int, String)]("T1",'a, 'b, 'c, 'lrtime.rowtime)
+    val right = util.addDataStream[(Long, Int, String)]("T2",'d, 'e, 'f, 'rrtime.rowtime)
+
+    val resultTable = left
+      .leftOuterJoin(
+        right,
+        'a === 'd && 'lrtime >= 'rrtime - 5.minutes && 'lrtime < 'rrtime + 3.seconds)
+      .select('a, 'e, 'lrtime)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testProcTimeWindowLeftOuterJoin(): Unit = {
+    val util = streamTestUtil()
+    val left = util.addDataStream[(Long, Int, String)]("T1",'a, 'b, 'c, 'lptime.proctime)
+
+    val right = util.addDataStream[(Long, Int, String)]("T2",'d, 'e, 'f, 'rptime.proctime)
+
+    val resultTable = left
+      .leftOuterJoin(right, 'a === 'd && 'lptime >= 'rptime - 1.second && 'lptime < 'rptime)
+      .select('a, 'e, 'lptime)
+
+    util.verifyPlan(resultTable)
+  }
+
+  // Tests for right outer join
+  @Test
+  def testRowTimeWindowRightOuterJoin(): Unit = {
+    val util = streamTestUtil()
+    val left = util.addDataStream[(Long, Int, String)]("T1",'a, 'b, 'c, 'lrtime.rowtime)
+
+    val right = util.addDataStream[(Long, Int, String)]("T2",'d, 'e, 'f, 'rrtime.rowtime)
+
+    val resultTable = left
+      .rightOuterJoin(
+        right,
+        'a === 'd && 'lrtime >= 'rrtime - 5.minutes && 'lrtime < 'rrtime + 3.seconds)
+      .select('a, 'e, 'lrtime)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testProcTimeWindowRightOuterJoin(): Unit = {
+    val util = streamTestUtil()
+    val left = util.addDataStream[(Long, Int, String)]("T1",'a, 'b, 'c, 'lptime.proctime)
+
+    val right = util.addDataStream[(Long, Int, String)]("T2",'d, 'e, 'f, 'rptime.proctime)
+
+    val resultTable = left
+      .rightOuterJoin(right, 'a === 'd && 'lptime >= 'rptime - 1.second && 'lptime < 'rptime)
+      .select('a, 'e, 'lptime)
+
+    util.verifyPlan(resultTable)
+  }
+
+  // Tests for full outer join
+  @Test
+  def testRowTimeWindowFullOuterJoin(): Unit = {
+    val util = streamTestUtil()
+    val left = util.addDataStream[(Long, Int, String)]("T1",'a, 'b, 'c, 'lrtime.rowtime)
+
+    val right = util.addDataStream[(Long, Int, String)]("T2",'d, 'e, 'f, 'rrtime.rowtime)
+
+    val resultTable = left
+      .fullOuterJoin(
+        right,
+        'a === 'd && 'lrtime >= 'rrtime - 5.minutes && 'lrtime < 'rrtime + 3.seconds)
+      .select('a, 'e, 'lrtime)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testProcTimeWindowFullOuterJoin(): Unit = {
+    val util = streamTestUtil()
+    val left = util.addDataStream[(Long, Int, String)]("T1",'a, 'b, 'c, 'lptime.proctime)
+
+    val right = util.addDataStream[(Long, Int, String)]("T2", 'd, 'e, 'f, 'rptime.proctime)
+
+    val resultTable = left
+      .fullOuterJoin(right, 'a === 'd && 'lptime >= 'rptime - 1.second && 'lptime < 'rptime)
+      .select('a, 'e, 'lptime)
+
+    util.verifyPlan(resultTable)
+  }
+
+  // Test for outer join optimization
+  @Test
+  def testRowTimeWindowOuterJoinOpt(): Unit = {
+    val util = streamTestUtil()
+    val left = util.addDataStream[(Long, Int, String)]("T1",'a, 'b, 'c, 'lrtime.rowtime)
+
+    val right = util.addDataStream[(Long, Int, String)]("T2", 'd, 'e, 'f, 'rrtime.rowtime)
+
+    val resultTable = left.leftOuterJoin(right)
+      .where('a === 'd && 'lrtime >= 'rrtime - 5.minutes && 'lrtime < 'rrtime + 3.seconds)
+      .select('a, 'e, 'lrtime)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testLeftOuterJoinEquiPred(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addDataStream[(Int, Long, String)]("T", 'a, 'b, 'c)
+    val s = util.addDataStream[(Long, String, Int)]("S", 'x, 'y, 'z)
+
+    val joined = t.leftOuterJoin(s, 'a === 'z).select('b, 'y)
+
+    util.verifyPlan(joined)
+  }
+
+  @Test
+  def testLeftOuterJoinEquiAndLocalPred(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addDataStream[(Int, Long, String)]("T", 'a, 'b, 'c)
+    val s = util.addDataStream[(Long, String, Int)]("S", 'x, 'y, 'z)
+
+    val joined = t.leftOuterJoin(s, 'a === 'z && 'b < 2).select('b, 'y)
+
+    util.verifyPlan(joined)
+  }
+
+  @Test
+  def testLeftOuterJoinEquiAndNonEquiPred(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addDataStream[(Int, Long, String)]("T", 'a, 'b, 'c)
+    val s = util.addDataStream[(Long, String, Int)]("S", 'x, 'y, 'z)
+
+    val joined = t.leftOuterJoin(s, 'a === 'z && 'b < 'x).select('b, 'y)
+
+    util.verifyPlan(joined)
+  }
+
+  @Test
+  def testRightOuterJoinEquiPred(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addDataStream[(Int, Long, String)]("T", 'a, 'b, 'c)
+    val s = util.addDataStream[(Long, String, Int)]("S", 'x, 'y, 'z)
+
+    val joined = t.rightOuterJoin(s, 'a === 'z).select('b, 'y)
+
+    util.verifyPlan(joined)
+  }
+
+  @Test
+  def testRightOuterJoinEquiAndLocalPred(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addDataStream[(Int, Long, String)]("T", 'a, 'b, 'c)
+    val s = util.addDataStream[(Long, String, Int)]("S", 'x, 'y, 'z)
+
+    val joined = t.rightOuterJoin(s, 'a === 'z && 'x < 2).select('b, 'x)
+
+    util.verifyPlan(joined)
+  }
+
+  @Test
+  def testRightOuterJoinEquiAndNonEquiPred(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addDataStream[(Int, Long, String)]("T", 'a, 'b, 'c)
+    val s = util.addDataStream[(Long, String, Int)]("S", 'x, 'y, 'z)
+
+    val joined = t.rightOuterJoin(s, 'a === 'z && 'b < 'x).select('b, 'y)
+
+    util.verifyPlan(joined)
+  }
+}
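
The windowed joins above are all variations of a single predicate shape: an
equi-join key plus a bounded comparison of the two inputs' time attributes.
A sketch using the field names from the tests:

    val joined = left.join(right)
      .where('a === 'd &&                    // equi-join key
        'lrtime >= 'rrtime - 5.minutes &&    // lower time bound
        'lrtime < 'rrtime + 3.seconds)       // upper time bound
      .select('a, 'e, 'lrtime)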
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/OverWindowTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/OverWindowTest.scala
new file mode 100644
index 0000000..91721ee
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/OverWindowTest.scala
@@ -0,0 +1,222 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.flink.table.plan.stream.table
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.api.{Over, Table}
+import org.apache.flink.table.expressions.utils.Func1
+import org.apache.flink.table.plan.util.JavaUserDefinedAggFunctions.WeightedAvgWithRetract
+import org.apache.flink.table.util.{StreamTableTestUtil, TableTestBase}
+
+import org.junit.Test
+
+class OverWindowTest extends TableTestBase {
+  private val streamUtil: StreamTableTestUtil = streamTestUtil()
+  val table: Table = streamUtil.addDataStream[(Int, String, Long)]("MyTable",
+    'a, 'b, 'c, 'proctime.proctime, 'rowtime.rowtime)
+
+  @Test
+  def testScalarFunctionsOnOverWindow() = {
+    val weightedAvg = new WeightedAvgWithRetract
+    val plusOne = Func1
+
+    val result = table
+      .window(Over partitionBy 'b orderBy 'proctime preceding UNBOUNDED_ROW as 'w)
+      .select(
+        plusOne('a.sum over 'w as 'wsum) as 'd,
+        ('a.count over 'w).exp(),
+        (weightedAvg('c, 'a) over 'w) + 1,
+        "AVG:".toExpr + (weightedAvg('c, 'a) over 'w),
+        array(weightedAvg('c, 'a) over 'w, 'a.count over 'w))
+    streamUtil.verifyPlan(result)
+  }
+
+  @Test
+  def testProcTimeBoundedPartitionedRowsOver() = {
+    val weightedAvg = new WeightedAvgWithRetract
+
+    val result = table
+      .window(Over partitionBy 'b orderBy 'proctime preceding 2.rows following CURRENT_ROW as 'w)
+      .select('c, weightedAvg('c, 'a) over 'w)
+    streamUtil.verifyPlan(result)
+  }
+
+  @Test
+  def testProcTimeBoundedPartitionedRangeOver() = {
+    val weightedAvg = new WeightedAvgWithRetract
+
+    val result = table
+      .window(
+        Over partitionBy 'a orderBy 'proctime preceding 2.hours following CURRENT_RANGE as 'w)
+      .select('a, weightedAvg('c, 'a) over 'w as 'myAvg)
+    streamUtil.verifyPlan(result)
+  }
+
+  @Test
+  def testProcTimeBoundedNonPartitionedRangeOver() = {
+    val result = table
+      .window(Over orderBy 'proctime preceding 10.second as 'w)
+      .select('a, 'c.count over 'w)
+    streamUtil.verifyPlan(result)
+  }
+
+  @Test
+  def testProcTimeBoundedNonPartitionedRowsOver() = {
+    val result = table
+      .window(Over orderBy 'proctime preceding 2.rows as 'w)
+      .select('c, 'a.count over 'w)
+    streamUtil.verifyPlan(result)
+  }
+
+  @Test
+  def testProcTimeUnboundedPartitionedRangeOver() = {
+    val weightedAvg = new WeightedAvgWithRetract
+
+    val result = table
+      .window(Over partitionBy 'c orderBy 'proctime preceding UNBOUNDED_RANGE as 'w)
+      .select('a, 'c, 'a.count over 'w, weightedAvg('c, 'a) over 'w)
+
+    val result2 = table
+      .window(Over partitionBy 'c orderBy 'proctime as 'w)
+      .select('a, 'c, 'a.count over 'w, weightedAvg('c, 'a) over 'w)
+
+    verifyTableEquals(result, result2)
+    streamUtil.verifyPlan(result)
+  }
+
+  @Test
+  def testProcTimeUnboundedPartitionedRowsOver() = {
+    val weightedAvg = new WeightedAvgWithRetract
+
+    val result = table
+      .window(
+        Over partitionBy 'c orderBy 'proctime preceding UNBOUNDED_ROW following CURRENT_ROW as 'w)
+      .select('c, 'a.count over 'w, weightedAvg('c, 'a) over 'w)
+    streamUtil.verifyPlan(result)
+  }
+
+  @Test
+  def testProcTimeUnboundedNonPartitionedRangeOver() = {
+    val result = table
+      .window(
+        Over orderBy 'proctime preceding UNBOUNDED_RANGE as 'w)
+      .select('a, 'c, 'a.count over 'w, 'a.sum over 'w)
+    streamUtil.verifyPlan(result)
+  }
+
+  @Test
+  def testProcTimeUnboundedNonPartitionedRowsOver() = {
+    val result = table
+      .window(Over orderBy 'proctime preceding UNBOUNDED_ROW as 'w)
+      .select('c, 'a.count over 'w)
+
+    streamUtil.verifyPlan(result)
+  }
+
+  @Test
+  def testRowTimeBoundedPartitionedRowsOver() = {
+    val weightedAvg = new WeightedAvgWithRetract
+
+    val result = table
+      .window(
+        Over partitionBy 'b orderBy 'rowtime preceding 2.rows following CURRENT_ROW as 'w)
+      .select('c, 'b.count over 'w, weightedAvg('c, 'a) over 'w as 'wAvg)
+
+    streamUtil.verifyPlan(result)
+  }
+
+  @Test
+  def testRowTimeBoundedPartitionedRangeOver() = {
+    val weightedAvg = new WeightedAvgWithRetract
+
+    val result = table
+      .window(
+        Over partitionBy 'a orderBy 'rowtime preceding 2.hours following CURRENT_RANGE as 'w)
+      .select('a, 'c.avg over 'w, weightedAvg('c, 'a) over 'w as 'wAvg)
+
+    streamUtil.verifyPlan(result)
+  }
+
+  @Test
+  def testRowTimeBoundedNonPartitionedRangeOver() = {
+    val result = table
+      .window(Over orderBy 'rowtime preceding 10.second as 'w)
+      .select('a, 'c.count over 'w)
+
+    streamUtil.verifyPlan(result)
+  }
+
+  @Test
+  def testRowTimeBoundedNonPartitionedRowsOver() = {
+    val result = table
+      .window(Over orderBy 'rowtime preceding 2.rows as 'w)
+      .select('c, 'a.count over 'w)
+
+    streamUtil.verifyPlan(result)
+  }
+
+  @Test
+  def testRowTimeUnboundedPartitionedRangeOver() = {
+    val weightedAvg = new WeightedAvgWithRetract
+
+    val result = table
+      .window(Over partitionBy 'c orderBy 'rowtime preceding UNBOUNDED_RANGE following
+         CURRENT_RANGE as 'w)
+      .select('a, 'c, 'a.count over 'w, weightedAvg('c, 'a) over 'w as 'wAvg)
+
+    val result2 = table
+      .window(Over partitionBy 'c orderBy 'rowtime as 'w)
+      .select('a, 'c, 'a.count over 'w, weightedAvg('c, 'a) over 'w as 'wAvg)
+
+    verifyTableEquals(result, result2)
+
+    streamUtil.verifyPlan(result)
+  }
+
+  @Test
+  def testRowTimeUnboundedPartitionedRowsOver() = {
+    val weightedAvg = new WeightedAvgWithRetract
+
+    val result = table
+      .window(Over partitionBy 'c orderBy 'rowtime preceding UNBOUNDED_ROW following
+         CURRENT_ROW as 'w)
+      .select('c, 'a.count over 'w, weightedAvg('c, 'a) over 'w as 'wAvg)
+    streamUtil.verifyPlan(result)
+  }
+
+  @Test
+  def testRowTimeUnboundedNonPartitionedRangeOver() = {
+    val result = table
+      .window(
+        Over orderBy 'rowtime preceding UNBOUNDED_RANGE as 'w)
+      .select('a, 'c, 'a.count over 'w, 'a.sum over 'w)
+    streamUtil.verifyPlan(result)
+  }
+
+  @Test
+  def testRowTimeUnboundedNonPartitionedRowsOver() = {
+    val result = table
+      .window(Over orderBy 'rowtime preceding UNBOUNDED_ROW as 'w)
+      .select('c, 'a.count over 'w)
+
+    streamUtil.verifyPlan(result)
+  }
+}
+
+
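
For reference, the over-window tests combine up to four clauses; a sketch of
the full form with the optional parts marked (names as in the tests above):

    val result = table
      .window(
        Over partitionBy 'b             // optional: omit for a non-partitioned window
          orderBy 'proctime             // required: a proctime or rowtime attribute
          preceding UNBOUNDED_ROW       // or bounded, e.g. 2.rows or 2.hours
          following CURRENT_ROW as 'w)  // optional: defaults to the current row/range
      .select('c, 'a.count over 'w)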
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/SetOperatorsTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/SetOperatorsTest.scala
new file mode 100644
index 0000000..587d2f4
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/SetOperatorsTest.scala
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.stream.table
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit.Test
+
+class SetOperatorsTest extends TableTestBase {
+
+  @Test
+  def testFilterUnionTranspose(): Unit = {
+    val util = streamTestUtil()
+    val left = util.addTableSource[(Int, Long, String)]("left", 'a, 'b, 'c)
+    val right = util.addTableSource[(Int, Long, String)]("right", 'a, 'b, 'c)
+
+    val result = left.unionAll(right)
+      .where('a > 0)
+      .groupBy('b)
+      .select('a.sum as 'a, 'b as 'b, 'c.count as 'c)
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testProjectUnionTranspose(): Unit = {
+    val util = streamTestUtil()
+    val left = util.addTableSource[(Int, Long, String)]("left", 'a, 'b, 'c)
+    val right = util.addTableSource[(Int, Long, String)]("right", 'a, 'b, 'c)
+
+    val result = left.select('a, 'b, 'c)
+      .unionAll(right.select('a, 'b, 'c))
+      .select('b, 'c)
+
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testInUncorrelated(): Unit = {
+    val util = streamTestUtil()
+    val tableA = util.addTableSource[(Int, Long, String)]('a, 'b, 'c)
+    val tableB = util.addTableSource[(Int, String)]('x, 'y)
+
+    val result = tableA.where('a.in(tableB.select('x)))
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testInUncorrelatedWithConditionAndAgg(): Unit = {
+    val util = streamTestUtil()
+    val tableA = util.addTableSource[(Int, Long, String)]("tableA", 'a, 'b, 'c)
+    val tableB = util.addTableSource[(Int, String)]("tableB", 'x, 'y)
+
+    val result = tableA
+      .where('a.in(tableB.where('y.like("%Hanoi%")).groupBy('y).select('x.sum)))
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testInWithMultiUncorrelatedCondition(): Unit = {
+    val util = streamTestUtil()
+    val tableA = util.addTableSource[(Int, Long, String)]("tableA", 'a, 'b, 'c)
+    val tableB = util.addTableSource[(Int, String)]("tableB", 'x, 'y)
+    val tableC = util.addTableSource[(Long, Int)]("tableC", 'w, 'z)
+
+    val result = tableA
+      .where('a.in(tableB.select('x)) && 'b.in(tableC.select('w)))
+    util.verifyPlan(result)
+  }
+}
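
testFilterUnionTranspose and testProjectUnionTranspose above verify that the
planner pushes filters and projections below a UnionAll; informally, the
rewrite they expect corresponds to this hand-transposed equivalent:

    // where('a > 0) applied after the union is pushed into both inputs,
    // so each branch filters before the union runs.
    val pushed = left.where('a > 0).unionAll(right.where('a > 0))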
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/TableSourceTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/TableSourceTest.scala
new file mode 100644
index 0000000..bf0d1940
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/TableSourceTest.scala
@@ -0,0 +1,302 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.stream.table
+
+import org.apache.flink.api.common.typeinfo.TypeInformation
+import org.apache.flink.api.java.typeutils.RowTypeInfo
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.api.{Over, TableSchema, Tumble, Types}
+import org.apache.flink.table.util.{TableTestBase, TestNestedProjectableTableSource, TestProjectableTableSource, TestTableSourceWithTime}
+import org.apache.flink.types.Row
+
+import org.junit.Test
+
+class TableSourceTest extends TableTestBase {
+
+  @Test
+  def testTableSourceWithLongRowTimeField(): Unit = {
+
+    val tableSchema = new TableSchema(
+      Array("id", "rowtime", "val", "name"),
+      Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.STRING))
+    val returnType = new RowTypeInfo(
+      Array(Types.INT, Types.LONG, Types.LONG, Types.STRING)
+        .asInstanceOf[Array[TypeInformation[_]]],
+      Array("id", "rowtime", "val", "name"))
+
+    val util = streamTestUtil()
+    util.tableEnv.registerTableSource(
+      "rowTimeT",
+      new TestTableSourceWithTime[Row](
+        false, tableSchema, returnType, Seq(), rowtime = "rowtime"))
+
+    val t = util.tableEnv.scan("rowTimeT").select("rowtime, id, name, val")
+    util.verifyPlan(t)
+  }
+
+  @Test
+  def testTableSourceWithTimestampRowTimeField(): Unit = {
+
+    val tableSchema = new TableSchema(
+      Array("id", "rowtime", "val", "name"),
+      Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.STRING))
+    val returnType = new RowTypeInfo(
+      Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.STRING)
+        .asInstanceOf[Array[TypeInformation[_]]],
+      Array("id", "rowtime", "val", "name"))
+
+    val util = streamTestUtil()
+    util.tableEnv.registerTableSource(
+      "rowTimeT",
+      new TestTableSourceWithTime[Row](
+        false, tableSchema, returnType, Seq(), rowtime = "rowtime"))
+
+    val t = util.tableEnv.scan("rowTimeT").select("rowtime, id, name, val")
+    util.verifyPlan(t)
+  }
+
+  @Test
+  def testRowTimeTableSourceGroupWindow(): Unit = {
+
+    val tableSchema = new TableSchema(
+      Array("id", "rowtime", "val", "name"),
+      Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.STRING))
+    val returnType = new RowTypeInfo(
+      Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.STRING)
+        .asInstanceOf[Array[TypeInformation[_]]],
+      Array("id", "rowtime", "val", "name"))
+
+    val util = streamTestUtil()
+    util.tableEnv.registerTableSource(
+      "rowTimeT",
+      new TestTableSourceWithTime[Row](
+        false, tableSchema, returnType, Seq(), rowtime = "rowtime"))
+
+    val t = util.tableEnv.scan("rowTimeT")
+      .filter("val > 100")
+      .window(Tumble over 10.minutes on 'rowtime as 'w)
+      .groupBy('name, 'w)
+      .select('name, 'w.end, 'val.avg)
+    util.verifyPlan(t)
+  }
+
+  @Test
+  def testProcTimeTableSourceSimple(): Unit = {
+
+    val tableSchema = new TableSchema(
+      Array("id", "proctime", "val", "name"),
+      Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.STRING))
+    val returnType = new RowTypeInfo(
+      Array(Types.INT, Types.LONG, Types.STRING).asInstanceOf[Array[TypeInformation[_]]],
+      Array("id", "val", "name"))
+
+    val util = streamTestUtil()
+    util.tableEnv.registerTableSource(
+      "procTimeT",
+      new TestTableSourceWithTime[Row](
+        false, tableSchema, returnType, Seq(), proctime = "proctime"))
+
+    val t = util.tableEnv.scan("procTimeT").select("proctime, id, name, val")
+    util.verifyPlan(t)
+  }
+
+  @Test
+  def testProcTimeTableSourceOverWindow(): Unit = {
+
+    val tableSchema = new TableSchema(
+      Array("id", "proctime", "val", "name"),
+      Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.STRING))
+    val returnType = new RowTypeInfo(
+      Array(Types.INT, Types.LONG, Types.STRING).asInstanceOf[Array[TypeInformation[_]]],
+      Array("id", "val", "name"))
+
+    val util = streamTestUtil()
+    util.tableEnv.registerTableSource(
+      "procTimeT",
+      new TestTableSourceWithTime[Row](
+        false, tableSchema, returnType, Seq(), proctime = "proctime"))
+
+    val t = util.tableEnv.scan("procTimeT")
+      .window(Over partitionBy 'id orderBy 'proctime preceding 2.hours as 'w)
+      .select('id, 'name, 'val.sum over 'w as 'valSum)
+      .filter('valSum > 100)
+    util.verifyPlan(t)
+  }
+
+  @Test
+  def testProjectWithRowtimeProctime(): Unit = {
+    val tableSchema = new TableSchema(
+      Array("id", "rtime", "val", "ptime", "name"),
+      Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
+    val returnType = new RowTypeInfo(
+      Array(Types.INT, Types.STRING, Types.LONG, Types.LONG)
+        .asInstanceOf[Array[TypeInformation[_]]],
+      Array("id", "name", "val", "rtime"))
+
+    val util = streamTestUtil()
+    util.tableEnv.registerTableSource(
+      "T",
+      new TestProjectableTableSource(
+        false, tableSchema, returnType, Seq(), "rtime", "ptime"))
+
+    val t = util.tableEnv.scan("T").select('name, 'val, 'id)
+    util.verifyPlan(t)
+  }
+
+  @Test
+  def testProjectWithoutRowtime(): Unit = {
+    val tableSchema = new TableSchema(
+      Array("id", "rtime", "val", "ptime", "name"),
+      Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
+    val returnType = new RowTypeInfo(
+      Array(Types.INT, Types.STRING, Types.LONG, Types.LONG)
+        .asInstanceOf[Array[TypeInformation[_]]],
+      Array("id", "name", "val", "rtime"))
+
+    val util = streamTestUtil()
+    util.tableEnv.registerTableSource(
+      "T",
+      new TestProjectableTableSource(
+        false, tableSchema, returnType, Seq(), "rtime", "ptime"))
+
+    val t = util.tableEnv.scan("T").select('ptime, 'name, 'val, 'id)
+    util.verifyPlan(t)
+  }
+
+  @Test
+  def testProjectWithoutProctime(): Unit = {
+    val tableSchema = new TableSchema(
+      Array("id", "rtime", "val", "ptime", "name"),
+      Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
+    val returnType = new RowTypeInfo(
+      Array(Types.INT, Types.LONG, Types.LONG, Types.STRING)
+        .asInstanceOf[Array[TypeInformation[_]]],
+      Array("id", "rtime", "val", "name"))
+
+    val util = streamTestUtil()
+    util.tableEnv.registerTableSource(
+      "T",
+      new TestProjectableTableSource(
+        false, tableSchema, returnType, Seq(), "rtime", "ptime"))
+
+    val t = util.tableEnv.scan("T").select('name, 'val, 'rtime, 'id)
+    util.verifyPlan(t)
+  }
+
+  @Test
+  def testProjectOnlyProctime(): Unit = {
+    val tableSchema = new TableSchema(
+      Array("id", "rtime", "val", "ptime", "name"),
+      Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
+    val returnType = new RowTypeInfo(
+      Array(Types.INT, Types.LONG, Types.LONG, Types.STRING)
+        .asInstanceOf[Array[TypeInformation[_]]],
+      Array("id", "rtime", "val", "name"))
+
+    val util = streamTestUtil()
+    util.tableEnv.registerTableSource(
+      "T",
+      new TestProjectableTableSource(
+        false, tableSchema, returnType, Seq(), "rtime", "ptime"))
+
+    val t = util.tableEnv.scan("T").select('ptime)
+    util.verifyPlan(t)
+  }
+
+  @Test
+  def testProjectOnlyRowtime(): Unit = {
+    val tableSchema = new TableSchema(
+      Array("id", "rtime", "val", "ptime", "name"),
+      Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
+    val returnType = new RowTypeInfo(
+      Array(Types.INT, Types.LONG, Types.LONG, Types.STRING)
+        .asInstanceOf[Array[TypeInformation[_]]],
+      Array("id", "rtime", "val", "name"))
+
+    val util = streamTestUtil()
+    util.tableEnv.registerTableSource(
+      "T",
+      new TestProjectableTableSource(
+        false, tableSchema, returnType, Seq(), "rtime", "ptime"))
+
+    val t = util.tableEnv.scan("T").select('rtime)
+    util.verifyPlan(t)
+  }
+
+  @Test
+  def testProjectWithMapping(): Unit = {
+    val tableSchema = new TableSchema(
+      Array("id", "rtime", "val", "ptime", "name"),
+      Array(Types.INT, Types.SQL_TIMESTAMP, Types.LONG, Types.SQL_TIMESTAMP, Types.STRING))
+    val returnType = new RowTypeInfo(
+      Array(Types.LONG, Types.INT, Types.STRING, Types.LONG)
+        .asInstanceOf[Array[TypeInformation[_]]],
+      Array("p-rtime", "p-id", "p-name", "p-val"))
+    val mapping = Map("rtime" -> "p-rtime", "id" -> "p-id", "val" -> "p-val", "name" -> "p-name")
+
+    val util = streamTestUtil()
+    util.tableEnv.registerTableSource(
+      "T",
+      new TestProjectableTableSource(
+        false, tableSchema, returnType, Seq(), "rtime", "ptime", mapping))
+
+    val t = util.tableEnv.scan("T").select('name, 'rtime, 'val)
+    util.verifyPlan(t)
+  }
+
+  @Test
+  def testNestedProject(): Unit = {
+
+    val nested1 = new RowTypeInfo(
+      Array(Types.STRING, Types.INT).asInstanceOf[Array[TypeInformation[_]]],
+      Array("name", "value")
+    )
+
+    val nested2 = new RowTypeInfo(
+      Array(Types.INT, Types.BOOLEAN).asInstanceOf[Array[TypeInformation[_]]],
+      Array("num", "flag")
+    )
+
+    val deepNested = new RowTypeInfo(
+      Array(nested1, nested2).asInstanceOf[Array[TypeInformation[_]]],
+      Array("nested1", "nested2")
+    )
+
+    val tableSchema = new TableSchema(
+      Array("id", "deepNested", "nested", "name"),
+      Array(Types.INT, deepNested, nested1, Types.STRING))
+
+    val returnType = new RowTypeInfo(
+      Array(Types.INT, deepNested, nested1, Types.STRING).asInstanceOf[Array[TypeInformation[_]]],
+        Array("id", "deepNested", "nested", "name"))
+
+    val util = streamTestUtil()
+    util.tableEnv.registerTableSource(
+      "T",
+      new TestNestedProjectableTableSource(
+        false, tableSchema, returnType, Seq()))
+
+    val t = util.tableEnv
+      .scan("T")
+      .select('id,
+        'deepNested.get("nested1").get("name") as 'nestedName,
+        'nested.get("value") as 'nestedValue,
+        'deepNested.get("nested2").get("flag") as 'nestedFlag,
+        'deepNested.get("nested2").get("num") as 'nestedNum)
+    util.verifyPlan(t)
+  }
+
+}
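
The projection tests above exercise push-down into TestProjectableTableSource.
The hook they rely on is sketched below, simplified from Flink's
ProjectableTableSource interface (treat the exact signature as an assumption):

    trait ProjectableTableSource[T] {
      // 'fields' holds the 0-based indices of the columns the query needs;
      // the source returns a narrowed copy that reads only those columns.
      def projectFields(fields: Array[Int]): TableSource[T]
    }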
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/TemporalTableJoinTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/TemporalTableJoinTest.scala
new file mode 100644
index 0000000..385ffb7
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/TemporalTableJoinTest.scala
@@ -0,0 +1,191 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.stream.table
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.api.{DataTypes, TableSchema, ValidationException}
+import org.apache.flink.table.expressions.{Expression, FieldReferenceExpression}
+import org.apache.flink.table.functions.{TemporalTableFunction, TemporalTableFunctionImpl}
+import org.apache.flink.table.types.utils.TypeConversions.fromLegacyInfoToDataType
+import org.apache.flink.table.typeutils.TimeIndicatorTypeInfo.{PROCTIME_INDICATOR, ROWTIME_INDICATOR}
+import org.apache.flink.table.util.{TableTestBase, TableTestUtil}
+
+import org.hamcrest.Matchers.{equalTo, startsWith}
+import org.junit.Assert.{assertEquals, assertThat}
+import org.junit.{Ignore, Test}
+
+import java.sql.Timestamp
+
+class TemporalTableJoinTest extends TableTestBase {
+
+  val util: TableTestUtil = streamTestUtil()
+
+  val orders = util.addDataStream[(Long, String, Timestamp)](
+    "Orders", 'o_amount, 'o_currency, 'o_rowtime.rowtime)
+
+  val ratesHistory = util.addDataStream[(String, Int, Timestamp)](
+    "RatesHistory", 'currency, 'rate, 'rowtime.rowtime)
+
+  val rates = ratesHistory.createTemporalTableFunction('rowtime, 'currency)
+  util.addFunction("Rates", rates)
+
+  val proctimeOrders = util.addDataStream[(Long, String)](
+    "ProctimeOrders", 'o_amount, 'o_currency, 'o_proctime.proctime)
+
+  val proctimeRatesHistory = util.addDataStream[(String, Int)](
+    "ProctimeRatesHistory", 'currency, 'rate, 'proctime.proctime)
+
+  val proctimeRates = proctimeRatesHistory.createTemporalTableFunction('proctime, 'currency)
+
+  // TODO
+  @Ignore("Fix bug in LogicalCorrelateToTemporalTableJoinRule")
+  @Test
+  def testSimpleJoin(): Unit = {
+    val result = orders
+      .joinLateral(rates('o_rowtime), 'currency === 'o_currency)
+      .select("o_amount * rate").as("rate")
+
+    util.verifyPlan(result)
+  }
+
+  // TODO
+  @Ignore("Fix bug in LogicalCorrelateToTemporalTableJoinRule")
+  @Test
+  def testSimpleJoin2(): Unit = {
+    val resultJava = orders
+      .joinLateral("Rates(o_rowtime)", "currency = o_currency")
+      .select("o_amount * rate").as("rate")
+
+    util.verifyPlan(resultJava)
+  }
+
+  // TODO
+  @Ignore("Fix bug in LogicalCorrelateToTemporalTableJoinRule")
+  @Test
+  def testSimpleProctimeJoin(): Unit = {
+    val result = proctimeOrders
+      .joinLateral(proctimeRates('o_proctime), 'currency === 'o_currency)
+      .select("o_amount * rate").as("rate")
+
+    util.verifyPlan(result)
+  }
+
+  /**
+    * Tests versioned joins with a more complicated query.
+    * The important aspects are a complex OR join condition and the
+    * presence of unused columns, which should be pruned.
+    */
+  // TODO
+  @Ignore("Fix bug in LogicalCorrelateToTemporalTableJoinRule")
+  @Test
+  def testComplexJoin(): Unit = {
+    val util = streamTestUtil()
+    val thirdTable = util.addDataStream[(String, Int)]("ThirdTable", 't3_comment, 't3_secondary_key)
+    val orders = util.addDataStream[(Timestamp, String, Long, String, Int)](
+      "Orders", 'rowtime, 'o_comment, 'o_amount, 'o_currency, 'o_secondary_key)
+      .as('o_rowtime, 'o_comment, 'o_amount, 'o_currency, 'o_secondary_key)
+
+    val ratesHistory = util.addDataStream[(Timestamp, String, String, Int, Int)](
+      "RatesHistory", 'rowtime, 'comment, 'currency, 'rate, 'secondary_key)
+    val rates = ratesHistory
+      .filter('rate > 110L)
+      .createTemporalTableFunction('rowtime, 'currency)
+    util.addFunction("Rates", rates)
+
+    val result = orders
+      .joinLateral(rates('o_rowtime))
+      .filter('currency === 'o_currency || 'secondary_key === 'o_secondary_key)
+      .select('o_amount * 'rate, 'secondary_key).as('rate, 'secondary_key)
+      .join(thirdTable, 't3_secondary_key === 'secondary_key)
+
+    util.verifyPlan(result)
+  }
+
+  // TODO: remove @Ignore once LogicalCorrelateToTemporalTableJoinRule is fixed
+  @Ignore("Fix bug in LogicalCorrelateToTemporalTableJoinRule")
+  @Test
+  def testTemporalTableFunctionOnTopOfQuery(): Unit = {
+    val filteredRatesHistory = ratesHistory
+      .filter('rate > 100)
+      .select('currency, 'rate * 2, 'rowtime)
+      .as('currency, 'rate, 'rowtime)
+
+    val filteredRates = filteredRatesHistory.createTemporalTableFunction('rowtime, 'currency)
+    util.addFunction("FilteredRates", filteredRates)
+
+    val result = orders
+      .joinLateral(filteredRates('o_rowtime), 'currency === 'o_currency)
+      .select("o_amount * rate")
+      .as('rate)
+
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testUncorrelatedJoin(): Unit = {
+    expectedException.expect(classOf[ValidationException])
+    expectedException.expectMessage(startsWith("Unsupported argument"))
+
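+    // A temporal table function must be parameterized with a time attribute of the
+    // probe side; a constant timestamp does not correlate the two tables, so the
+    // call is rejected during validation.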
+    val result = orders
+      .joinLateral(rates(
+        java.sql.Timestamp.valueOf("2016-06-27 10:10:42.123")),
+        'o_currency === 'currency)
+      .select("o_amount * rate")
+
+    util.verifyPlan(result)
+  }
+
+  @Test
+  def testProcessingTimeIndicatorVersion(): Unit = {
+    assertRatesFunction(proctimeRatesHistory.getSchema, proctimeRates, proctime = true)
+  }
+
+  @Test
+  def testValidStringFieldReference(): Unit = {
+    val rates = ratesHistory.createTemporalTableFunction("rowtime", "currency")
+    assertRatesFunction(ratesHistory.getSchema, rates)
+  }
+
+  private def assertRatesFunction(
+      expectedSchema: TableSchema,
+      inputRates: TemporalTableFunction,
+      proctime: Boolean = false): Unit = {
+    val rates = inputRates.asInstanceOf[TemporalTableFunctionImpl]
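+    // The trailing arguments of FieldReferenceExpression are the input index and the
+    // field index: 'currency' is field 0 of input 0.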
+    assertThat(rates.getPrimaryKey,
+      equalTo[Expression](new FieldReferenceExpression("currency", DataTypes.STRING(), 0, 0)))
+
+    val (timeFieldName, timeFieldType) =
+      if (proctime) {
+        ("proctime", fromLegacyInfoToDataType(PROCTIME_INDICATOR))
+      } else {
+        ("rowtime", fromLegacyInfoToDataType(ROWTIME_INDICATOR))
+      }
+
+    assertThat(rates.getTimeAttribute,
+      equalTo[Expression](new FieldReferenceExpression(timeFieldName, timeFieldType, 0, 2)))
+
+    assertEquals(
+      expectedSchema.toRowType,
+      rates.getResultType)
+  }
+
+}
+
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/TwoStageAggregateTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/TwoStageAggregateTest.scala
new file mode 100644
index 0000000..902387f
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/TwoStageAggregateTest.scala
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.stream.table
+
+import org.apache.flink.api.common.time.Time
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.api.{DataTypes, ExecutionConfigOptions, OptimizerConfigOptions}
+import org.apache.flink.table.util.{AggregatePhaseStrategy, StreamTableTestUtil, TableTestBase}
+
+import org.junit.{Before, Test}
+
+class TwoStageAggregateTest extends TableTestBase {
+
+  private var util: StreamTableTestUtil = _
+
+  @Before
+  def before(): Unit = {
+    util = streamTestUtil()
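+    // The settings below enable mini-batch execution (required for two-phase
+    // aggregation on streams) and force the TWO_PHASE strategy, so each aggregate
+    // is split into a local and a global stage.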
+    util.tableEnv.getConfig
+      .setIdleStateRetentionTime(Time.hours(1), Time.hours(2))
+    util.tableEnv.getConfig.getConfiguration
+      .setString(ExecutionConfigOptions.SQL_EXEC_MINIBATCH_ALLOW_LATENCY, "1 s")
+    util.tableEnv.getConfig.getConfiguration
+      .setBoolean(ExecutionConfigOptions.SQL_EXEC_MINIBATCH_ENABLED, true)
+    util.tableEnv.getConfig.getConfiguration
+      .setLong(ExecutionConfigOptions.SQL_EXEC_MINIBATCH_SIZE, 3)
+    util.tableEnv.getConfig.getConfiguration.setString(
+      OptimizerConfigOptions.SQL_OPTIMIZER_AGG_PHASE_STRATEGY,
+      AggregatePhaseStrategy.TWO_PHASE.toString)
+  }
+
+  @Test
+  def testGroupAggregate(): Unit = {
+    val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
+    val resultTable = table
+      .groupBy('b)
+      .select('a.count)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testGroupAggregateWithConstant1(): Unit = {
+    val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
+    val resultTable = table
+      .select('a, 4 as 'four, 'b)
+      .groupBy('four, 'a)
+      .select('four, 'b.sum)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testGroupAggregateWithConstant2(): Unit = {
+    val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
+    val resultTable = table
+      .select('b, 4 as 'four, 'a)
+      .groupBy('b, 'four)
+      .select('four, 'a.sum)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testGroupAggregateWithExpressionInSelect(): Unit = {
+    val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
+    val resultTable = table
+      .select('a as 'a, 'b % 3 as 'd, 'c as 'c)
+      .groupBy('d)
+      .select('c.min, 'a.avg)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testGroupAggregateWithFilter(): Unit = {
+    val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
+    val resultTable = table
+      .groupBy('b)
+      .select('b, 'a.sum)
+      .where('b === 2)
+
+    util.verifyPlan(resultTable)
+  }
+
+  @Test
+  def testGroupAggregateWithAverage(): Unit = {
+    val table = util.addTableSource[(Long, Int, String)]('a, 'b, 'c)
+    val resultTable = table
+      .groupBy('b)
+      .select('b, 'a.cast(DataTypes.DOUBLE()).avg)
+
+    util.verifyPlan(resultTable)
+  }
+}
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/stringexpr/AggregateStringExpressionTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/stringexpr/AggregateStringExpressionTest.scala
new file mode 100644
index 0000000..5d0e8fb
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/stringexpr/AggregateStringExpressionTest.scala
@@ -0,0 +1,246 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.stream.table.stringexpr
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.Tumble
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.plan.util.JavaUserDefinedAggFunctions.{WeightedAvg, WeightedAvgWithMergeAndReset}
+import org.apache.flink.table.util.{CountAggFunction, CountMinMax, TableTestBase}
+
+import org.junit.Test
+
+class AggregateStringExpressionTest extends TableTestBase {
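+  // Each test builds the same query twice, once with the Scala expression DSL and
+  // once with Java-style string expressions, and checks that both variants produce
+  // an equivalent table (verifyTableEquals compares the underlying logical plans).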
+
+  @Test
+  def testDistinctNonGroupedAggregate(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3")
+
+    val t1 = t.select('_1.sum.distinct, '_1.count.distinct, '_1.avg.distinct)
+    val t2 = t.select("_1.sum.distinct, _1.count.distinct, _1.avg.distinct")
+    val t3 = t.select("sum.distinct(_1), count.distinct(_1), avg.distinct(_1)")
+
+    verifyTableEquals(t1, t2)
+    verifyTableEquals(t1, t3)
+  }
+
+  @Test
+  def testDistinctGroupedAggregate(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val t1 = t.groupBy('b).select('b, 'a.sum.distinct, 'a.sum)
+    val t2 = t.groupBy("b").select("b, a.sum.distinct, a.sum")
+    val t3 = t.groupBy("b").select("b, sum.distinct(a), sum(a)")
+
+    verifyTableEquals(t1, t2)
+    verifyTableEquals(t1, t3)
+  }
+
+  @Test
+  def testDistinctNonGroupAggregateWithUDAGG(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val myCnt = new CountAggFunction
+   util.addFunction("myCnt", myCnt)
+    val myWeightedAvg = new WeightedAvgWithMergeAndReset
+   util.addFunction("myWeightedAvg", myWeightedAvg)
+
+    val t1 = t.select(myCnt.distinct('a) as 'aCnt, myWeightedAvg.distinct('b, 'a) as 'wAvg)
+    val t2 = t.select("myCnt.distinct(a) as aCnt, myWeightedAvg.distinct(b, a) as wAvg")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testDistinctGroupedAggregateWithUDAGG(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3", 'a, 'b, 'c)
+
+    val myCnt = new CountAggFunction
+   util.addFunction("myCnt", myCnt)
+    val myWeightedAvg = new WeightedAvgWithMergeAndReset
+   util.addFunction("myWeightedAvg", myWeightedAvg)
+
+    val t1 = t.groupBy('b)
+      .select('b,
+        myCnt.distinct('a) + 9 as 'aCnt,
+        myWeightedAvg.distinct('b, 'a) * 2 as 'wAvg,
+        myWeightedAvg.distinct('a, 'a) as 'distAgg,
+        myWeightedAvg('a, 'a) as 'agg)
+    val t2 = t.groupBy("b")
+      .select("b, myCnt.distinct(a) + 9 as aCnt, myWeightedAvg.distinct(b, a) * 2 as wAvg, " +
+        "myWeightedAvg.distinct(a, a) as distAgg, myWeightedAvg(a, a) as agg")
+
+    verifyTableEquals(t1, t2)
+  }
+
+  @Test
+  def testGroupedAggregate(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]('int, 'long, 'string)
+
+    val weightAvgFun = new WeightedAvg
+   util.addFunction("weightAvgFun", weightAvgFun)
+
+    // Expression / Scala API
+    val resScala = t
+      .groupBy('string)
+      .select('int.count as 'cnt, weightAvgFun('long, 'int))
+
+    // String / Java API
+    val resJava = t
+      .groupBy("string")
+      .select("int.count as cnt, weightAvgFun(long, int)")
+
+    verifyTableEquals(resJava, resScala)
+  }
+
+  @Test
+  def testNonGroupedAggregate(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]('int, 'long, 'string)
+
+    // Expression / Scala API
+    val resScala = t.select('int.count as 'cnt, 'long.sum)
+
+    // String / Java API
+    val resJava = t.select("int.count as cnt, long.sum")
+
+    verifyTableEquals(resJava, resScala)
+  }
+
+  @Test
+  def testProctimeRename(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addDataStream[(Int, Long, String)](
+      "T1", 'int, 'long, 'string, 'proctime.proctime)
+
+    // Expression / Scala API
+    val resScala = t
+      .window(Tumble over 50.milli on 'proctime as 'w1)
+      .groupBy('w1, 'string)
+      .select('w1.proctime as 'proctime, 'w1.start as 'start, 'w1.end as 'end, 'string, 'int.count)
+
+    // String / Java API
+    val resJava = t
+      .window(Tumble.over("50.milli").on("proctime").as("w1"))
+      .groupBy("w1, string")
+      .select("w1.proctime as proctime, w1.start as start, w1.end as end, string, int.count")
+
+    verifyTableEquals(resJava, resScala)
+  }
+
+  @Test
+  def testRowtimeRename(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addDataStream[TestPojo](
+      "T1",'int, 'rowtime.rowtime, 'string)
+
+    // Expression / Scala API
+    val resScala = t
+      .window(Tumble over 50.milli on 'rowtime as 'w1)
+      .groupBy('w1, 'string)
+      .select('w1.rowtime as 'rowtime, 'string, 'int.count)
+
+    // String / Java API
+    val resJava = t
+      .window(Tumble.over("50.milli").on("rowtime").as("w1"))
+      .groupBy("w1, string")
+      .select("w1.rowtime as rowtime, string, int.count")
+
+    verifyTableEquals(resJava, resScala)
+  }
+
+  @Test
+  def testNonGroupedRowBasedAggregate(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]('a, 'b, 'c)
+
+    val testAgg = new CountMinMax
+   util.addFunction("testAgg", testAgg)
+
+    // Expression / Scala API
+    val resScala = t
+      .aggregate(testAgg('a))
+      .select('f0, 'f1)
+
+    // String / Java API
+    val resJava = t
+      .aggregate("testAgg(a)")
+      .select("f0, f1")
+
+    verifyTableEquals(resScala, resJava)
+  }
+
+  @Test
+  def testGroupedRowBasedAggregate(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]('a, 'b, 'c)
+
+    val testAgg = new CountMinMax
+   util.addFunction("testAgg", testAgg)
+
+    // Expression / Scala API
+    val resScala = t
+      .groupBy('b)
+      .aggregate(testAgg('a))
+      .select('b, 'f0, 'f1)
+
+    // String / Java API
+    val resJava = t
+      .groupBy("b")
+      .aggregate("testAgg(a)")
+      .select("b, f0, f1")
+
+    verifyTableEquals(resScala, resJava)
+  }
+
+  @Test
+  def testAggregateWithAlias(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]('a, 'b, 'c)
+
+    val testAgg = new CountMinMax
+   util.addFunction("testAgg", testAgg)
+
+    // Expression / Scala API
+    val resScala = t
+      .groupBy('b)
+      .aggregate(testAgg('a) as ('x, 'y, 'z))
+      .select('b, 'x, 'y)
+
+    // String / Java API
+    val resJava = t
+      .groupBy("b")
+      .aggregate("testAgg(a) as (x, y, z)")
+      .select("b, x, y")
+
+    verifyTableEquals(resScala, resJava)
+  }
+}
+
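+// Simple POJO input type used by testRowtimeRename.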
+class TestPojo() {
+  var int: Int = _
+  var long: Long = _
+  var string: String = _
+}
diff --git a/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/stringexpr/CalcStringExpressionTest.scala b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/stringexpr/CalcStringExpressionTest.scala
new file mode 100644
index 0000000..755ba5b
--- /dev/null
+++ b/flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/stream/table/stringexpr/CalcStringExpressionTest.scala
@@ -0,0 +1,183 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.plan.stream.table.stringexpr
+
+import org.apache.flink.api.scala._
+import org.apache.flink.table.api.scala._
+import org.apache.flink.table.expressions.utils.Func23
+import org.apache.flink.table.util.TableTestBase
+
+import org.junit.Test
+
+class CalcStringExpressionTest extends TableTestBase {
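+  // Like AggregateStringExpressionTest, each test expresses the same projection or
+  // filter in both the Scala expression DSL and the Java string syntax and verifies
+  // that the two variants are equivalent.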
+
+  @Test
+  def testSimpleSelect(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]()
+
+    val resScala = t.select('_1, '_2)
+    val resJava = t.select("_1, _2")
+    verifyTableEquals(resJava, resScala)
+  }
+
+  @Test
+  def testSelectStar(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]('int, 'long, 'string)
+
+    val resScala = t.select('*)
+    val resJava = t.select("*")
+    verifyTableEquals(resJava, resScala)
+  }
+
+  @Test
+  def testSelectWithWhere(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]('int, 'long, 'string)
+    val resScala = t.where('string === "true").select('int)
+    val resJava = t.where("string === 'true'").select("int")
+    verifyTableEquals(resJava, resScala)
+  }
+
+  @Test
+  def testSimpleSelectWithNaming(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]('int, 'long, 'string)
+
+    val resScala = t.select('int, 'string)
+    val resJava = t.select("int, string")
+    verifyTableEquals(resJava, resScala)
+  }
+
+  @Test
+  def testSimpleSelectWithAlias(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]('int, 'long, 'string)
+    val resScala = t.select('int as 'myInt, 'string as 'myString)
+    val resJava = t.select("int as myInt, string as myString")
+    verifyTableEquals(resJava, resScala)
+  }
+
+  @Test
+  def testSimpleFilter(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]('int, 'long, 'string)
+
+    val resScala = t.filter('int === 3).select('int as 'myInt, 'string)
+    val resJava = t.filter("int === 3").select("int as myInt, string")
+    verifyTableEquals(resJava, resScala)
+  }
+
+  @Test
+  def testAllRejectingFilter(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]('int, 'long, 'string)
+
+    val resScala = t.filter(false).select('int as 'myInt, 'string)
+    val resJava = t.filter("false").select("int as myInt, string")
+    verifyTableEquals(resJava, resScala)
+  }
+
+  @Test
+  def testAllPassingFilter(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]('int, 'long, 'string)
+
+    val resScala = t.filter(true).select('int as 'myInt, 'string)
+    val resJava = t.filter("true").select("int as myInt, string")
+    verifyTableEquals(resJava, resScala)
+  }
+
+  @Test
+  def testNotEqualsFilter(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]('int, 'long, 'string)
+
+    val resScala = t.filter('int !== 2).filter('string.like("%world%")).select('int, 'string)
+    val resJava = t.filter("int !== 2").filter("string.like('%world%')").select("int, string")
+    verifyTableEquals(resJava, resScala)
+  }
+
+  @Test
+  def testFilterWithExpression(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]('int, 'long, 'string)
+
+    val resScala = t.filter('int % 2 === 0).select('int, 'string)
+    val resJava = t.filter("int % 2 === 0").select("int, string")
+    verifyTableEquals(resJava, resScala)
+  }
+
+  @Test
+  def testAddColumns(): Unit = {
+    val util = streamTestUtil()
+    val t = util.addTableSource[(Int, Long, String)]("Table3",'a, 'b, 'c)
... 3771 lines suppressed ...