Posted to commits@kylin.apache.org by xx...@apache.org on 2022/12/13 10:25:29 UTC

[kylin] 22/25: Update kylin-it

This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit cc2b7df1a7f4880737d9fa05b163996f0100e183
Author: Tengting Xu <34...@users.noreply.github.com>
AuthorDate: Wed Oct 19 14:55:11 2022 +0800

    Update kylin-it
---
 pom.xml                                            |     2 +-
 .../data/XXXXXXXXX_XXXXXXXXX.X_XXXXXXXX_XX_XX.csv  |     5 +
 .../XXXXXXXXX_XXXXXXXXX.X_XXXXXXXX_XX_XX.json      |   306 +
 .../org/apache/calcite/test/DiffRepository.java    |   779 ++
 .../java/org/apache/kylin/SparderCoverTest.java    |    94 +
 .../org/apache/kylin/event/HAMetadataTest.java     |   215 +
 .../apache/kylin/event/ITStorageCleanerTest.java   |   134 +
 .../org/apache/kylin/event/ModelSemanticTest.java  |   308 +
 .../org/apache/kylin/event/SchemaChangeTest.java   |   360 +
 .../org/apache/kylin/metadata/MetaUtilsTest.java   |    49 +
 .../apache/kylin/metadata/MetadataPerfTest.java    |   467 +
 .../org/apache/kylin/metadata/SQLConfTest.java     |    60 +
 .../org/apache/kylin/newten/AutoMergeTest.java     |   957 ++
 .../newten/BuildAndQueryEmptySegmentsTest.java     |   183 +
 .../apache/kylin/newten/CalciteDynamicTest.java    |    92 +
 .../org/apache/kylin/newten/CharNColumnTest.java   |    74 +
 .../apache/kylin/newten/ExtractLimitInfoTest.java  |   126 +
 .../kylin/newten/IndexDependencyParserTest.java    |   172 +
 .../kylin/newten/MultiPartitionPruningTest.java    |   545 +
 .../org/apache/kylin/newten/NAggPushDownTest.java  |   103 +
 .../kylin/newten/NBadQueryAndPushDownTest.java     |   269 +
 .../newten/NBitmapFunctionForCalciteExecTest.java  |    94 +
 .../apache/kylin/newten/NBitmapFunctionTest.java   |   369 +
 .../kylin/newten/NBuildAndQuerySnapshotTest.java   |   147 +
 .../apache/kylin/newten/NComputedColumnTest.java   |   116 +
 .../newten/NCountDistinctWithoutEncodeTest.java    |   114 +
 .../org/apache/kylin/newten/NFilePruningTest.java  |   679 ++
 .../apache/kylin/newten/NFilePruningV2Test.java    |   456 +
 .../newten/NFlattableJoinWithoutLookupTest.java    |   115 +
 .../java/org/apache/kylin/newten/NJoinOptTest.java |   210 +
 .../newten/NManualBuildAndQueryCuboidTest.java     |   233 +
 .../kylin/newten/NManualBuildAndQueryTest.java     |   285 +
 .../org/apache/kylin/newten/NMatchingTest.java     |   100 +
 .../kylin/newten/NMultiPartitionJobTest.java       |   207 +
 .../kylin/newten/NMultipleColumnsInTest.java       |   110 +
 .../kylin/newten/NOptIntersectCountTest.java       |   201 +
 .../apache/kylin/newten/NPartitionColumnTest.java  |   123 +
 .../kylin/newten/NQueryPartialMatchIndexTest.java  |   118 +
 .../kylin/newten/NSpanningTreeCubingJobTest.java   |   103 +
 .../org/apache/kylin/newten/NTopNResultTest.java   |    86 +
 .../apache/kylin/newten/NTopNWithChineseTest.java  |    81 +
 .../org/apache/kylin/newten/RetentionTest.java     |   305 +
 .../apache/kylin/newten/ReuseFlatTableTest.java    |    97 +
 .../kylin/newten/SimilarToEscapeFunctionTest.java  |   126 +
 .../apache/kylin/newten/SlowQueryDetectorTest.java |   196 +
 .../apache/kylin/newten/SupportTypeAnyTest.java    |    60 +
 .../org/apache/kylin/newten/TableIndexTest.java    |   175 +
 .../org/apache/kylin/newten/TimeZoneQueryTest.java |   274 +
 .../apache/kylin/query/CompareQueryBySuffix.java   |    37 +
 .../org/apache/kylin/query/HackedDbUnitAssert.java |   259 +
 .../kylin/query/ICompareQueryTranslator.java       |    29 +
 .../java/org/apache/kylin/query/KylinTestBase.java |   170 +
 .../java/org/apache/kylin/query/NKapQueryTest.java |   167 +
 .../query/engine/CountDistinctExprPlannerTest.java |    93 +
 .../apache/kylin/query/engine/QueryExecTest.java   |   192 +
 .../kylin/query/engine/SparderInitSQLConfTest.java |   106 +
 .../kylin/query/engine/SqlToRelNodeTest.java       |   118 +
 .../query/engine/SumCastDoublePlannerTest.java     |    76 +
 .../kylin/query/engine/SumExprPlannerTest.java     |   301 +
 .../kylin/query/rules/AggPushdownRuleTest.java     |   158 +
 .../kylin/query/rules/CalciteRuleTestBase.java     |   249 +
 .../kylin/query/rules/FilterSimplifyRuleTest.java  |    69 +
 .../rest/controller/NBuildAndQueryMetricsTest.java |   313 +
 .../rest/controller/NModelControllerTest.java      |   182 +
 .../rest/controller/NProjectControllerTest.java    |   108 +
 .../rest/controller/NQueryControllerTest.java      |   224 +
 .../kylin/rest/controller/NUserControllerTest.java |   157 +
 .../server/AbstractMVCIntegrationTestCase.java     |    88 +
 .../org/apache/kylin/server/IntegrationConfig.java |    31 +
 .../NModelControllerWithRealServiceTest.java       |    55 +
 .../apache/kylin/server/NQueryControllerTest.java  |   190 +
 .../kylin/streaming/StreamingMergeEntryTest.java   |   782 ++
 .../java/org/apache/kylin/util/ExecAndComp.java    |     7 +
 .../org/apache/kylin/util/JobFinishHelper.java     |    50 +
 .../apache/kylin/util/SegmentInitializeUtil.java   |    67 +
 .../query/engine/CountDistinctExprPlannerTest.xml  |   394 +
 .../apache/kylin/query/engine/SqlToRelNodeTest.xml |   150 +
 .../query/engine/SumCastDoublePlannerTest.xml      |   207 +
 .../kylin/query/engine/SumExprPlannerTest.xml      |  1418 +++
 .../kylin/query/rules/AggPushdownRuleTest.xml      |   583 ++
 .../kylin/query/rules/FilterSimplifyRuleTest.xml   |    87 +
 .../src/test/resources/query/h2/query07.sql        |    18 -
 .../src/test/resources/query/h2/query09.sql        |    32 -
 .../src/test/resources/query/h2/query10.sql        |    34 -
 .../query/h2/result-DEFAULT/query07.sql.json       |     1 -
 .../query/h2/result-DEFAULT/query07.sql.schema     |     1 -
 .../query/h2/result-DEFAULT/query09.sql.json       |     1 -
 .../query/h2/result-DEFAULT/query09.sql.schema     |     1 -
 .../query/h2/result-DEFAULT/query10.sql.json       |    13 -
 .../query/h2/result-DEFAULT/query10.sql.schema     |     1 -
 .../test/resources/query/sql-replace/query00.sql   |    25 -
 .../test/resources/query/sql-replace/query01.sql   |    25 -
 .../test/resources/query/sql-replace/query02.sql   |    26 -
 .../test/resources/query/sql-replace/query03.sql   |    26 -
 .../src/test/resources/query/sql/query00.sql       |    25 +-
 .../src/test/resources/query/sql/query01.sql       |    20 +-
 .../src/test/resources/query/sql/query02.sql       |    20 +-
 .../src/test/resources/query/sql/query03.sql       |    22 +-
 .../src/test/resources/query/sql/query04.sql       |    24 +-
 .../src/test/resources/query/sql/query05.sql       |    26 +-
 .../src/test/resources/query/sql/query06.sql       |    28 +-
 .../src/test/resources/query/sql/query07.sql       |    26 +-
 .../src/test/resources/query/sql/query08.sql       |    26 +-
 .../src/test/resources/query/sql/query09.sql       |    22 +-
 .../src/test/resources/query/sql/query10.sql       |    26 +-
 .../src/test/resources/query/sql/query100.sql      |    18 +-
 .../src/test/resources/query/sql/query101.sql      |    22 +-
 .../src/test/resources/query/sql/query102.sql      |    22 +-
 .../src/test/resources/query/sql/query103.sql      |    22 +-
 .../src/test/resources/query/sql/query104.sql      |    18 +-
 .../src/test/resources/query/sql/query105.sql      |    32 +-
 .../src/test/resources/query/sql/query106.sql      |    40 +-
 .../src/test/resources/query/sql/query107.sql      |    40 +-
 .../src/test/resources/query/sql/query108.sql      |    52 +-
 .../src/test/resources/query/sql/query109.sql      |    20 +-
 .../src/test/resources/query/sql/query11.sql       |    30 +-
 .../src/test/resources/query/sql/query110.sql      |    18 +-
 .../src/test/resources/query/sql/query111.sql      |    18 +-
 .../src/test/resources/query/sql/query112.sql      |    18 +-
 .../src/test/resources/query/sql/query113.sql      |    18 +-
 .../src/test/resources/query/sql/query114.sql      |    18 +-
 .../src/test/resources/query/sql/query115.sql      |    18 +-
 .../src/test/resources/query/sql/query116.sql      |    18 +-
 .../src/test/resources/query/sql/query117.sql      |    18 +-
 .../src/test/resources/query/sql/query118.sql      |    18 +-
 .../src/test/resources/query/sql/query119.sql      |    18 +-
 .../src/test/resources/query/sql/query12.sql       |    28 +-
 .../src/test/resources/query/sql/query120.sql      |    18 +-
 .../src/test/resources/query/sql/query121.sql      |    18 +-
 .../src/test/resources/query/sql/query122.sql      |    18 +-
 .../src/test/resources/query/sql/query123.sql      |    18 +-
 .../src/test/resources/query/sql/query13.sql       |    26 +-
 .../src/test/resources/query/sql/query130.sql      |    18 +-
 .../src/test/resources/query/sql/query131.sql      |    18 +-
 .../src/test/resources/query/sql/query132.sql      |    18 +-
 .../src/test/resources/query/sql/query133.sql      |    18 +-
 .../src/test/resources/query/sql/query134.sql      |    17 +-
 .../src/test/resources/query/sql/query135.sql      |    19 +-
 .../src/test/resources/query/sql/query136.sql      |    18 +-
 .../src/test/resources/query/sql/query138.sql      |    18 +-
 .../src/test/resources/query/sql/query139.sql      |    19 +-
 .../src/test/resources/query/sql/query14.sql       |    42 +-
 .../src/test/resources/query/sql/query15.sql       |    34 +-
 .../src/test/resources/query/sql/query16.sql       |    40 +-
 .../src/test/resources/query/sql/query17.sql       |    46 +-
 .../src/test/resources/query/sql/query18.sql       |    38 +-
 .../src/test/resources/query/sql/query19.sql       |    46 +-
 .../src/test/resources/query/sql/query20.sql       |    36 +-
 .../src/test/resources/query/sql/query21.sql       |    50 +-
 .../src/test/resources/query/sql/query22.sql       |    54 +-
 .../src/test/resources/query/sql/query23.sql       |    52 +-
 .../src/test/resources/query/sql/query24.sql       |    70 +-
 .../src/test/resources/query/sql/query25.sql       |    58 +-
 .../src/test/resources/query/sql/query26.sql       |    62 +-
 .../src/test/resources/query/sql/query27.sql       |    60 +-
 .../src/test/resources/query/sql/query28.sql       |    26 +-
 .../src/test/resources/query/sql/query29.sql       |    42 +-
 .../src/test/resources/query/sql/query30.sql       |    20 +-
 .../src/test/resources/query/sql/query31.sql       |    50 +-
 .../src/test/resources/query/sql/query32.sql       |    48 +-
 .../src/test/resources/query/sql/query33.sql       |    56 +-
 .../src/test/resources/query/sql/query34.sql       |    52 +-
 .../src/test/resources/query/sql/query35.sql       |    52 +-
 .../src/test/resources/query/sql/query36.sql       |    38 +-
 .../src/test/resources/query/sql/query37.sql       |    50 +-
 .../src/test/resources/query/sql/query38.sql       |    26 +-
 .../src/test/resources/query/sql/query39.sql       |    22 +-
 .../src/test/resources/query/sql/query40.sql       |    24 +-
 .../src/test/resources/query/sql/query41.sql       |    36 +-
 .../src/test/resources/query/sql/query42.sql       |    24 +-
 .../src/test/resources/query/sql/query43.sql       |    28 +-
 .../src/test/resources/query/sql/query44.sql       |    32 +-
 .../src/test/resources/query/sql/query49.sql       |    46 +-
 .../src/test/resources/query/sql/query50.sql       |    22 +-
 .../src/test/resources/query/sql/query51.sql       |    26 +-
 .../src/test/resources/query/sql/query52.sql       |    24 +-
 .../src/test/resources/query/sql/query53.sql       |    20 +-
 .../src/test/resources/query/sql/query54.sql       |    42 +-
 .../src/test/resources/query/sql/query55.sql       |    24 +-
 .../src/test/resources/query/sql/query56.sql       |    40 +-
 .../src/test/resources/query/sql/query57.sql       |    24 +-
 .../src/test/resources/query/sql/query58.sql       |    18 +-
 .../src/test/resources/query/sql/query59.sql       |    18 +-
 .../src/test/resources/query/sql/query60.sql       |    24 +-
 .../src/test/resources/query/sql/query61.sql       |    24 +-
 .../src/test/resources/query/sql/query62.sql       |    18 +-
 .../src/test/resources/query/sql/query63.sql       |    18 +-
 .../src/test/resources/query/sql/query64.sql       |    18 +-
 .../src/test/resources/query/sql/query65.sql       |    18 +-
 .../src/test/resources/query/sql/query66.sql       |    18 +-
 .../src/test/resources/query/sql/query67.sql       |    18 +-
 .../src/test/resources/query/sql/query68.sql       |    18 +-
 .../src/test/resources/query/sql/query69.sql       |    28 +-
 .../src/test/resources/query/sql/query70.sql       |    26 +-
 .../src/test/resources/query/sql/query71.sql       |    28 +-
 .../src/test/resources/query/sql/query72.sql       |    22 +-
 .../src/test/resources/query/sql/query73.sql       |    18 +-
 .../src/test/resources/query/sql/query74.sql       |    18 +-
 .../src/test/resources/query/sql/query75.sql       |    24 +-
 .../src/test/resources/query/sql/query76.sql       |    22 +-
 .../src/test/resources/query/sql/query77.sql       |    18 +-
 .../src/test/resources/query/sql/query78.sql       |    18 +-
 .../src/test/resources/query/sql/query79.sql       |    18 +-
 .../src/test/resources/query/sql/query80.sql       |    18 +-
 .../src/test/resources/query/sql/query83.sql       |    46 +-
 .../src/test/resources/query/sql/query84.sql       |    46 +-
 .../src/test/resources/query/sql/query85.sql       |    20 +-
 .../src/test/resources/query/sql/query86.sql       |    18 +-
 .../src/test/resources/query/sql/query87.sql       |    22 +-
 .../src/test/resources/query/sql/query88.sql       |    22 +-
 .../src/test/resources/query/sql/query89.sql       |    22 +-
 .../src/test/resources/query/sql/query90.sql       |    28 +-
 .../src/test/resources/query/sql/query91.sql       |    28 +-
 .../src/test/resources/query/sql/query92.sql       |    24 +-
 .../src/test/resources/query/sql/query93.sql       |    24 +-
 .../src/test/resources/query/sql/query94.sql       |    24 +-
 .../src/test/resources/query/sql/query95.sql       |    24 +-
 .../src/test/resources/query/sql/query96.sql       |    18 +-
 .../src/test/resources/query/sql/query97.sql       |    26 +-
 .../src/test/resources/query/sql/query98.sql       |    22 +-
 .../src/test/resources/query/sql/query99.sql       |    36 +-
 .../resources/query/sql_agg_pushdown/query00.sql   |    18 +-
 .../resources/query/sql_agg_pushdown/query01.sql   |    18 +-
 .../src/test/resources/query/sql_array/query00.sql |    18 +-
 .../src/test/resources/query/sql_array/query01.sql |    19 +-
 .../test/resources/query/sql_boolean/query00.sql   |    18 -
 .../test/resources/query/sql_boolean/query01.sql   |    21 -
 .../query/sql_boolean/query01.sql.expected         |     7 -
 .../test/resources/query/sql_boolean/query02.sql   |    20 -
 .../test/resources/query/sql_boolean/query03.sql   |    20 -
 .../src/test/resources/query/sql_cache/query01.sql |    18 +-
 .../src/test/resources/query/sql_cache/query02.sql |    18 +-
 .../src/test/resources/query/sql_cache/query03.sql |    18 +-
 .../src/test/resources/query/sql_cache/query04.sql |    18 +-
 .../src/test/resources/query/sql_cache/query05.sql |    18 +-
 .../src/test/resources/query/sql_cache/query06.sql |    18 +-
 .../src/test/resources/query/sql_cache/query07.sql |    18 +-
 .../src/test/resources/query/sql_cache/query08.sql |    18 +-
 .../src/test/resources/query/sql_cache/query09.sql |    18 +-
 .../test/resources/query/sql_casewhen/query00.sql  |    28 +-
 .../test/resources/query/sql_casewhen/query01.sql  |    32 +-
 .../test/resources/query/sql_casewhen/query02.sql  |    24 +-
 .../test/resources/query/sql_casewhen/query03.sql  |    30 +-
 .../test/resources/query/sql_casewhen/query04.sql  |    18 +-
 .../test/resources/query/sql_casewhen/query05.sql  |    18 +-
 .../test/resources/query/sql_casewhen/query06.sql  |    18 +-
 .../test/resources/query/sql_casewhen/query07.sql  |    18 +-
 .../test/resources/query/sql_casewhen/query08.sql  |    18 +-
 .../test/resources/query/sql_casewhen/query09.sql  |    18 +-
 .../test/resources/query/sql_casewhen/query55.sql  |    18 +-
 .../test/resources/query/sql_casewhen/query56.sql  |    18 +-
 .../resources/query/sql_computedcolumn/query01.sql |    20 +-
 .../resources/query/sql_computedcolumn/query02.sql |    20 +-
 .../resources/query/sql_computedcolumn/query03.sql |    20 +-
 .../resources/query/sql_computedcolumn/query04.sql |    20 +-
 .../query/sql_computedcolumn/sql_ccv2/01.sql       |    18 +-
 .../query/sql_computedcolumn/sql_ccv2/02.sql       |    18 +-
 .../query/sql_computedcolumn/sql_ccv2/03.sql       |    18 +-
 .../query/sql_computedcolumn/sql_ccv2/04.sql       |    18 +-
 .../query/sql_computedcolumn/sql_ccv2/05.sql       |    18 +-
 .../query/sql_computedcolumn/sql_ccv2/06.sql       |    18 +-
 .../query/sql_computedcolumn/sql_ccv2/07.sql       |    18 +-
 .../query/sql_computedcolumn/sql_ccv2/08.sql       |    18 +-
 .../query/sql_computedcolumn/sql_ccv2/09.sql       |    18 +-
 .../query/sql_computedcolumn/sql_ccv2/10.sql       |    18 +-
 .../query/sql_computedcolumn/sql_ccv2/11.sql       |    18 +-
 .../sql_computedcolumn_DateUDF/query00.sql         |    18 +-
 .../sql_computedcolumn_DateUDF/query02.sql         |    18 +-
 .../sql_computedcolumn_DateUDF/query03.sql         |    18 +-
 .../sql_computedcolumn_DateUDF/query04.sql         |    18 +-
 .../sql_computedcolumn_OtherUDF/query00.sql        |    18 +-
 .../sql_computedcolumn_OtherUDF/query01.sql        |    18 +-
 .../sql_computedcolumn_OtherUDF/query02.sql        |    20 +-
 .../sql_computedcolumn_OtherUDF/query03.sql        |    18 +-
 .../sql_computedcolumn_OtherUDF/query04.sql        |    18 +-
 .../sql_computedcolumn_StringUDF/query00.sql       |    18 +-
 .../sql_computedcolumn_StringUDF/query01.sql       |    18 +-
 .../sql_computedcolumn_StringUDF/query02.sql       |    18 +-
 .../sql_computedcolumn_StringUDF/query03.sql       |    18 +-
 .../sql_computedcolumn_StringUDF/query04.sql       |    18 +-
 .../sql_computedcolumn_StringUDF/query05.sql       |    18 +-
 .../sql_computedcolumn_StringUDF/query06.sql       |    18 +-
 .../sql_computedcolumn_common/query01.sql          |    20 +-
 .../sql_computedcolumn_common/query02.sql          |    20 +-
 .../sql_computedcolumn_common/query03.sql          |    20 +-
 .../sql_computedcolumn_formatUDF/query00.sql       |    18 +-
 .../sql_computedcolumn_formatUDF/query01.sql       |    18 +-
 .../sql_computedcolumn_formatUDF/query04.sql       |    18 +-
 .../sql_computedcolumn_formatUDF/query05.sql       |    18 +-
 .../sql_computedcolumn_leftjoin/query01.sql        |    20 +-
 .../sql_computedcolumn_leftjoin/query02.sql        |    20 +-
 .../sql_computedcolumn_leftjoin/query03.sql        |    20 +-
 .../sql_computedcolumn_leftjoin/query04.sql        |    20 +-
 .../sql_computedcolumn_nullHandling/query00.sql    |    18 +-
 .../sql_computedcolumn_nullHandling/query01.sql    |    18 +-
 .../sql_computedcolumn_nullHandling/query02.sql    |    18 +-
 .../sql_computedcolumn_nullHandling/query03.sql    |    18 +-
 .../query01.sql                                    |    18 +-
 .../query02.sql                                    |    18 +-
 .../query03.sql                                    |    18 +-
 .../query04.sql                                    |    18 +-
 .../sql_computedcolumn/sql_expression/query01.sql  |    18 +-
 .../sql_computedcolumn/sql_expression/query02.sql  |    18 +-
 .../sql_computedcolumn/sql_expression/query03.sql  |    18 +-
 .../sql_select_group_same_column/query01.sql       |    18 +-
 .../sql_select_group_same_column/query02.sql       |    18 +-
 .../sql_select_group_same_column/query03.sql       |    18 +-
 .../sql_select_group_same_column/query04.sql       |    18 +-
 .../sql_select_group_same_column/query05.sql       |    18 +-
 .../sql_select_group_same_column/query06.sql       |    18 +-
 .../resources/query/sql_conformance/query01.sql    |    17 -
 .../resources/query/sql_conformance/query02.sql    |    21 -
 .../test/resources/query/sql_constant/query01.sql  |    15 -
 .../query/sql_constant/query01.sql.expected        |     1 -
 .../test/resources/query/sql_constant/query02.sql  |    26 -
 .../sql_constant/result-DEFAULT/query01.sql.json   |     1 -
 .../sql_constant/result-DEFAULT/query01.sql.schema |     1 -
 .../sql_constant/result-DEFAULT/query02.sql.json   |     1 -
 .../sql_constant/result-DEFAULT/query02.sql.schema |     1 -
 .../src/test/resources/query/sql_corr/query01.sql  |    15 -
 .../src/test/resources/query/sql_corr/query02.sql  |    15 -
 .../resources/query/sql_cross_join/query01.sql     |    18 +-
 .../resources/query/sql_cross_join/query02.sql     |    18 +-
 .../resources/query/sql_cross_join/query03.sql     |    20 +-
 .../resources/query/sql_cross_join/query04.sql     |    18 +-
 .../resources/query/sql_cross_join/query05.sql     |    18 +-
 .../resources/query/sql_current_date/query01.sql   |    20 +-
 .../resources/query/sql_current_date/query02.sql   |    20 +-
 .../resources/query/sql_current_date/query03.sql   |    20 +-
 .../resources/query/sql_current_date/query05.sql   |    18 +-
 .../test/resources/query/sql_datetime/query00.sql  |    20 +-
 .../test/resources/query/sql_datetime/query02.sql  |    18 +-
 .../test/resources/query/sql_datetime/query03.sql  |    18 +-
 .../resources/query/sql_day_of_week/query01.sql    |    18 -
 .../resources/query/sql_day_of_week/query02.sql    |    19 -
 .../resources/query/sql_day_of_week/query03.sql    |    19 -
 .../resources/query/sql_day_of_week/query04.sql    |    18 -
 .../resources/query/sql_day_of_week/query05.sql    |    18 -
 .../resources/query/sql_day_of_week/query06.sql.bk |     5 -
 .../resources/query/sql_day_of_week/query07.sql    |    32 -
 .../resources/query/sql_day_of_week/query08.sql    |    32 -
 .../resources/query/sql_day_of_week/query09.sql    |    32 -
 .../resources/query/sql_day_of_week/query10.sql    |    39 -
 .../result-DEFAULT/query01.sql.json                |   731 --
 .../result-DEFAULT/query01.sql.schema              |     1 -
 .../result-DEFAULT/query02.sql.json                |   731 --
 .../result-DEFAULT/query02.sql.schema              |     1 -
 .../result-DEFAULT/query03.sql.json                |   731 --
 .../result-DEFAULT/query03.sql.schema              |     1 -
 .../result-DEFAULT/query04.sql.json                |   731 --
 .../result-DEFAULT/query04.sql.schema              |     1 -
 .../result-DEFAULT/query05.sql.json                |   731 --
 .../result-DEFAULT/query05.sql.schema              |     1 -
 .../result-DEFAULT/query07.sql.json                |  4289 --------
 .../result-DEFAULT/query07.sql.schema              |     1 -
 .../result-DEFAULT/query08.sql.json                |  4289 --------
 .../result-DEFAULT/query08.sql.schema              |     1 -
 .../result-DEFAULT/query09.sql.json                |  4289 --------
 .../result-DEFAULT/query09.sql.schema              |     1 -
 .../result-DEFAULT/query10.sql.json                | 10000 -------------------
 .../result-DEFAULT/query10.sql.schema              |     1 -
 .../test/resources/query/sql_derived/query01.sql   |    64 +-
 .../test/resources/query/sql_derived/query02.sql   |    64 +-
 .../test/resources/query/sql_derived/query03.sql   |    24 +-
 .../test/resources/query/sql_derived/query04.sql   |    22 +-
 .../test/resources/query/sql_derived/query05.sql   |    26 +-
 .../test/resources/query/sql_derived/query06.sql   |    22 +-
 .../test/resources/query/sql_derived/query07.sql   |    26 +-
 .../test/resources/query/sql_derived/query08.sql   |    20 +-
 .../test/resources/query/sql_derived/query09.sql   |    20 +-
 .../test/resources/query/sql_derived/query10.sql   |    56 +-
 .../test/resources/query/sql_derived/query11.sql   |    26 +-
 .../test/resources/query/sql_derived/query12.sql   |    18 +-
 .../test/resources/query/sql_derived/query13.sql   |    18 +-
 .../query/sql_derived_equi_join/query01.sql        |    18 -
 .../query/sql_derived_equi_join/query02.sql        |    20 -
 .../query/sql_derived_equi_join/query03.sql        |    17 -
 .../query/sql_derived_equi_join/query04.sql        |    19 -
 .../query/sql_derived_equi_join/query05.sql        |    17 -
 .../query/sql_derived_equi_join/query06.sql        |    18 -
 .../query/sql_derived_non_equi_join/query01.sql    |    18 -
 .../query/sql_derived_non_equi_join/query02.sql    |    18 -
 .../query/sql_derived_non_equi_join/query03.sql    |    18 -
 .../query/sql_derived_non_equi_join/query04.sql    |    17 -
 .../query/sql_derived_non_equi_join/query05.sql    |    17 -
 .../query/sql_derived_non_equi_join/query06.sql    |    18 -
 .../query/sql_derived_non_equi_join/query07.sql    |    19 -
 .../result-INNER/query02.sql.json                  |     0
 .../result-INNER/query02.sql.schema                |     1 -
 .../result-INNER/query07.sql.json                  |     0
 .../result-INNER/query07.sql.schema                |     1 -
 .../result-LEFT/query02.sql.json                   |  5000 ----------
 .../result-LEFT/query02.sql.schema                 |     1 -
 .../result-LEFT/query07.sql.json                   |     1 -
 .../result-LEFT/query07.sql.schema                 |     1 -
 .../query/sql_dimension_as_measure/query01.sql     |    22 -
 .../query/sql_dimension_as_measure/query02.sql     |    22 -
 .../query/sql_dimension_as_measure/query03.sql     |    21 -
 .../query/sql_dimension_as_measure/query04.sql     |    21 -
 .../test/resources/query/sql_distinct/query00.sql  |    26 +-
 .../test/resources/query/sql_distinct/query01.sql  |    32 +-
 .../test/resources/query/sql_distinct/query02.sql  |    34 +-
 .../test/resources/query/sql_distinct/query03.sql  |    32 +-
 .../test/resources/query/sql_distinct/query04.sql  |    34 +-
 .../test/resources/query/sql_distinct/query05.sql  |    32 +-
 .../test/resources/query/sql_distinct/query06.sql  |    34 +-
 .../test/resources/query/sql_distinct/query07.sql  |    30 +-
 .../test/resources/query/sql_distinct/query08.sql  |    18 +-
 .../sql_distinct/sql_distinct_hllc/query05.sql     |    24 +-
 .../resources/query/sql_distinct_dim/query100.sql  |    18 +-
 .../resources/query/sql_distinct_dim/query101.sql  |    18 +-
 .../resources/query/sql_distinct_dim/query102.sql  |    18 +-
 .../query/sql_distinct_precisely/query00.sql       |    21 -
 .../query/sql_distinct_precisely/query01.sql       |    22 -
 .../query/sql_distinct_precisely/query02.sql       |    23 -
 .../query/sql_distinct_precisely/query03.sql       |    37 -
 .../query/sql_distinct_precisely/query04.sql       |    38 -
 .../query/sql_distinct_precisely/query05.sql       |    19 -
 .../result-LEFT/query00.sql.json                   |  3392 -------
 .../result-LEFT/query00.sql.schema                 |     1 -
 .../result-LEFT/query01.sql.json                   |     1 -
 .../result-LEFT/query01.sql.schema                 |     1 -
 .../result-LEFT/query02.sql.json                   |     1 -
 .../result-LEFT/query02.sql.schema                 |     1 -
 .../result-LEFT/query03.sql.json                   |    13 -
 .../result-LEFT/query03.sql.schema                 |     1 -
 .../result-LEFT/query04.sql.json                   |    13 -
 .../result-LEFT/query04.sql.schema                 |     1 -
 .../result-LEFT/query05.sql.json                   |     1 -
 .../result-LEFT/query05.sql.schema                 |     1 -
 .../test/resources/query/sql_dynamic/query01.dat   |     2 -
 .../test/resources/query/sql_dynamic/query01.sql   |    25 -
 .../query/sql_dynamic/query01.sql.expected         |    28 -
 .../test/resources/query/sql_dynamic/query02.dat   |     2 -
 .../test/resources/query/sql_dynamic/query02.sql   |    27 -
 .../query/sql_dynamic/query02.sql.expected         |    12 -
 .../test/resources/query/sql_dynamic/query03.dat   |     2 -
 .../test/resources/query/sql_dynamic/query03.sql   |    30 -
 .../query/sql_dynamic/query03.sql.expected         |    16 -
 .../test/resources/query/sql_dynamic/query04.dat   |     2 -
 .../test/resources/query/sql_dynamic/query04.sql   |    17 -
 .../query/sql_dynamic/query04.sql.expected         |     2 -
 .../test/resources/query/sql_dynamic/query05.dat   |     2 -
 .../test/resources/query/sql_dynamic/query05.sql   |    26 -
 .../query/sql_dynamic/query05.sql.expected         |    11 -
 .../test/resources/query/sql_dynamic/query06.dat   |     2 -
 .../test/resources/query/sql_dynamic/query06.sql   |    25 -
 .../query/sql_dynamic/query06.sql.expected         |    10 -
 .../test/resources/query/sql_dynamic/query07.dat   |     2 -
 .../test/resources/query/sql_dynamic/query07.sql   |    30 -
 .../query/sql_dynamic/query07.sql.expected         |     8 -
 .../sql_dynamic/result-DEFAULT/query01.sql.json    |    16 -
 .../sql_dynamic/result-DEFAULT/query01.sql.schema  |     1 -
 .../sql_dynamic/result-DEFAULT/query02.sql.json    |     1 -
 .../sql_dynamic/result-DEFAULT/query02.sql.schema  |     1 -
 .../sql_dynamic/result-DEFAULT/query03.sql.json    |    13 -
 .../sql_dynamic/result-DEFAULT/query03.sql.schema  |     1 -
 .../sql_dynamic/result-DEFAULT/query04.sql.json    |     1 -
 .../sql_dynamic/result-DEFAULT/query04.sql.schema  |     1 -
 .../sql_dynamic/result-DEFAULT/query05.sql.json    |    16 -
 .../sql_dynamic/result-DEFAULT/query05.sql.schema  |     1 -
 .../sql_dynamic/result-DEFAULT/query06.sql.json    |    33 -
 .../sql_dynamic/result-DEFAULT/query06.sql.schema  |     1 -
 .../sql_dynamic/result-DEFAULT/query07.sql.json    |     1 -
 .../sql_dynamic/result-DEFAULT/query07.sql.schema  |     1 -
 .../test/resources/query/sql_escaped/query24.sql   |    16 -
 .../test/resources/query/sql_escaped/query25.sql   |    28 -
 .../test/resources/query/sql_except/query01.sql    |    23 -
 .../test/resources/query/sql_except/query02.sql    |    27 -
 .../test/resources/query/sql_except/query03.sql    |    25 -
 .../test/resources/query/sql_except/query04.sql    |    27 -
 .../test/resources/query/sql_except/query05.sql    |    29 -
 .../test/resources/query/sql_except/query06.sql    |    26 -
 .../test/resources/query/sql_except/query07.sql    |    28 -
 .../test/resources/query/sql_except/query08.sql    |    28 -
 .../test/resources/query/sql_except/query09.sql    |    23 -
 .../test/resources/query/sql_except/query10.sql    |    27 -
 .../test/resources/query/sql_except/query11.sql    |    26 -
 .../test/resources/query/sql_except/query12.sql    |    23 -
 .../test/resources/query/sql_except/query13.sql    |    37 -
 .../test/resources/query/sql_except/query14.sql    |    61 -
 .../test/resources/query/sql_except/query15.sql    |    20 -
 .../test/resources/query/sql_except/query16.sql    |    33 -
 .../sql_except/result-DEFAULT/query01.sql.json     |   237 -
 .../sql_except/result-DEFAULT/query01.sql.schema   |     1 -
 .../sql_except/result-DEFAULT/query02.sql.json     |   237 -
 .../sql_except/result-DEFAULT/query02.sql.schema   |     1 -
 .../sql_except/result-DEFAULT/query03.sql.json     |     5 -
 .../sql_except/result-DEFAULT/query03.sql.schema   |     1 -
 .../sql_except/result-DEFAULT/query04.sql.json     |     4 -
 .../sql_except/result-DEFAULT/query04.sql.schema   |     1 -
 .../sql_except/result-DEFAULT/query05.sql.json     |   237 -
 .../sql_except/result-DEFAULT/query05.sql.schema   |     1 -
 .../sql_except/result-DEFAULT/query06.sql.json     |     0
 .../sql_except/result-DEFAULT/query06.sql.schema   |     1 -
 .../sql_except/result-DEFAULT/query07.sql.json     |     4 -
 .../sql_except/result-DEFAULT/query07.sql.schema   |     1 -
 .../sql_except/result-DEFAULT/query08.sql.json     |   858 --
 .../sql_except/result-DEFAULT/query08.sql.schema   |     1 -
 .../sql_except/result-DEFAULT/query09.sql.json     |   237 -
 .../sql_except/result-DEFAULT/query09.sql.schema   |     1 -
 .../sql_except/result-DEFAULT/query10.sql.json     |   237 -
 .../sql_except/result-DEFAULT/query10.sql.schema   |     1 -
 .../sql_except/result-DEFAULT/query11.sql.json     |   997 --
 .../sql_except/result-DEFAULT/query11.sql.schema   |     1 -
 .../sql_except/result-DEFAULT/query12.sql.json     |   997 --
 .../sql_except/result-DEFAULT/query12.sql.schema   |     1 -
 .../sql_except/result-DEFAULT/query13.sql.json     |   954 --
 .../sql_except/result-DEFAULT/query13.sql.schema   |     1 -
 .../sql_except/result-DEFAULT/query14.sql.json     |     8 -
 .../sql_except/result-DEFAULT/query14.sql.schema   |     1 -
 .../sql_except/result-DEFAULT/query15.sql.json     |     0
 .../sql_except/result-DEFAULT/query15.sql.schema   |     1 -
 .../sql_except/result-DEFAULT/query16.sql.json     |     5 -
 .../sql_except/result-DEFAULT/query16.sql.schema   |     1 -
 .../query/sql_extended_column/query00.sql          |    36 -
 .../result-DEFAULT/query00.sql.json                |  4289 --------
 .../result-DEFAULT/query00.sql.schema              |     1 -
 .../query/sql_filter_simplify/query01.sql          |    18 +-
 .../query/sql_filter_simplify/query02.sql          |    18 +-
 .../query/sql_function/oracle_function/query01.sql |    18 +-
 .../test/resources/query/sql_function/query01.sql  |    18 +-
 .../test/resources/query/sql_function/query02.sql  |    18 +-
 .../test/resources/query/sql_function/query03.sql  |    18 +-
 .../test/resources/query/sql_function/query04.sql  |    20 +-
 .../test/resources/query/sql_function/query05.sql  |    20 +-
 .../test/resources/query/sql_function/query06.sql  |    18 +-
 .../test/resources/query/sql_function/query07.sql  |    18 +-
 .../test/resources/query/sql_function/query08.sql  |    18 +-
 .../test/resources/query/sql_function/query09.sql  |    18 +-
 .../test/resources/query/sql_function/query10.sql  |    18 +-
 .../test/resources/query/sql_function/query11.sql  |    18 +-
 .../test/resources/query/sql_function/query12.sql  |    18 +-
 .../test/resources/query/sql_function/query13.sql  |    18 +-
 .../test/resources/query/sql_function/query14.sql  |    18 +-
 .../test/resources/query/sql_function/query15.sql  |    18 +-
 .../test/resources/query/sql_function/query16.sql  |    18 +-
 .../test/resources/query/sql_function/query17.sql  |    18 +-
 .../test/resources/query/sql_function/query18.sql  |    18 +-
 .../test/resources/query/sql_function/query19.sql  |    18 +-
 .../test/resources/query/sql_function/query20.sql  |    18 +-
 .../test/resources/query/sql_function/query21.sql  |    18 +-
 .../test/resources/query/sql_function/query22.sql  |    18 +-
 .../test/resources/query/sql_function/query23.sql  |    18 +-
 .../test/resources/query/sql_function/query24.sql  |    18 +-
 .../test/resources/query/sql_function/query25.sql  |    18 +-
 .../test/resources/query/sql_function/query26.sql  |    18 +-
 .../test/resources/query/sql_function/query27.sql  |    18 +-
 .../test/resources/query/sql_function/query28.sql  |    18 +-
 .../test/resources/query/sql_function/query29.sql  |    18 +-
 .../test/resources/query/sql_function/query30.sql  |    18 +-
 .../test/resources/query/sql_function/query31.sql  |    18 +-
 .../test/resources/query/sql_function/query32.sql  |    18 +-
 .../test/resources/query/sql_function/query33.sql  |    18 +-
 .../test/resources/query/sql_function/query34.sql  |    18 +-
 .../test/resources/query/sql_function/query35.sql  |    18 +-
 .../test/resources/query/sql_function/query36.sql  |    18 +-
 .../test/resources/query/sql_function/query37.sql  |    18 +-
 .../test/resources/query/sql_function/query38.sql  |    18 +-
 .../sql_function/sql_function_DateUDF/query00.sql  |    18 +-
 .../sql_function/sql_function_DateUDF/query02.sql  |    18 +-
 .../sql_function/sql_function_DateUDF/query03.sql  |    18 +-
 .../sql_function/sql_function_DateUDF/query04.sql  |    18 +-
 .../sql_function/sql_function_DateUDF/query05.sql  |    18 +-
 .../sql_function_DateUDF/query05_1.sql             |    18 +-
 .../sql_function_DateUDF/query05_2.sql             |    18 +-
 .../sql_function/sql_function_DateUDF/query06.sql  |    18 +-
 .../sql_function/sql_function_OtherUDF/query00.sql |    18 +-
 .../sql_function/sql_function_OtherUDF/query01.sql |    18 +-
 .../sql_function/sql_function_OtherUDF/query02.sql |    18 +-
 .../sql_function/sql_function_OtherUDF/query03.sql |    18 +-
 .../sql_function/sql_function_OtherUDF/query04.sql |    18 +-
 .../sql_function/sql_function_OtherUDF/query05.sql |    18 +-
 .../constant_query-01.sql                          |    18 +-
 .../sql_function_formatUDF/query00.sql             |    18 +-
 .../sql_function_formatUDF/query01.sql             |    18 +-
 .../sql_function_formatUDF/query02.sql             |    18 +-
 .../sql_function_formatUDF/query03.sql             |    18 +-
 .../sql_function_ifnull_timestamp/query00.sql      |    18 +-
 .../sql_function_ifnull_timestamp/query01.sql      |    18 +-
 .../sql_function_ifnull_timestamp/query02.sql      |    18 +-
 .../sql_function_nullHandling/query00.sql          |    18 +-
 .../sql_function_nullHandling/query01.sql          |    18 +-
 .../sql_function_nullHandling/query02.sql          |    18 +-
 .../sql_function_nullHandling/query03.sql          |    18 +-
 .../sql_function_nullHandling/query04.sql          |    18 +-
 .../sql_function/sql_function_round/query01.sql    |    18 +-
 .../sql_function/sql_function_round/query02.sql    |    18 +-
 .../test/resources/query/sql_grouping/query00.sql  |    18 +-
 .../test/resources/query/sql_grouping/query01.sql  |    18 +-
 .../test/resources/query/sql_grouping/query02.sql  |    18 +-
 .../test/resources/query/sql_grouping/query03.sql  |    18 +-
 .../test/resources/query/sql_grouping/query04.sql  |    18 +-
 .../test/resources/query/sql_grouping/query05.sql  |    18 +-
 .../test/resources/query/sql_grouping/query06.sql  |    18 +-
 .../test/resources/query/sql_grouping/query07.sql  |    18 +-
 .../test/resources/query/sql_grouping/query08.sql  |    18 +-
 .../resources/query/sql_h2_uncapable/query01.sql   |    20 +-
 .../resources/query/sql_h2_uncapable/query02.sql   |    20 +-
 .../resources/query/sql_h2_uncapable/query03.sql   |    18 +-
 .../resources/query/sql_h2_uncapable/query04.sql   |    18 +-
 .../resources/query/sql_h2_uncapable/query06.sql   |    20 +-
 .../resources/query/sql_h2_uncapable/query07.sql   |    20 +-
 .../resources/query/sql_h2_uncapable/query08.sql   |    20 +-
 .../resources/query/sql_h2_uncapable/query09.sql   |    20 +-
 .../resources/query/sql_h2_uncapable/query10.sql   |    18 +-
 .../resources/query/sql_h2_uncapable/query11.sql   |    18 +-
 .../resources/query/sql_h2_uncapable/query12.sql   |    18 +-
 .../resources/query/sql_h2_uncapable/query13.sql   |    18 +-
 .../resources/query/sql_h2_uncapable/query14.sql   |    18 +-
 .../resources/query/sql_h2_uncapable/query15.sql   |    18 +-
 .../resources/query/sql_h2_uncapable/query16.sql   |    18 +-
 .../src/test/resources/query/sql_hive/query01.sql  |    22 +-
 .../src/test/resources/query/sql_hive/query10.sql  |    28 +-
 .../src/test/resources/query/sql_in/query01.sql    |    18 +-
 .../src/test/resources/query/sql_in/query02.sql    |    18 +-
 .../resources/query/sql_inner_column/query01.sql   |    36 -
 .../resources/query/sql_inner_column/query02.sql   |    40 -
 .../resources/query/sql_inner_column/query03.sql   |    18 -
 .../result-DEFAULT/query01.sql.json                |     2 -
 .../result-DEFAULT/query01.sql.schema              |     1 -
 .../result-DEFAULT/query02.sql.json                |   731 --
 .../result-DEFAULT/query02.sql.schema              |     1 -
 .../result-DEFAULT/query03.sql.json                |     1 -
 .../result-DEFAULT/query03.sql.schema              |     1 -
 .../query/sql_intersect_count/query00.sql          |    29 -
 .../query/sql_intersect_count/query01.sql          |    24 -
 .../query/sql_intersect_count/query02.sql          |    23 -
 .../query/sql_intersect_count/query03.sql          |    20 -
 .../query/sql_intersect_count/query04.sql          |    33 -
 .../query/sql_intersect_count/query05.sql          |    17 -
 .../query/sql_invalid/query29_invalid_SQL.sql      |    92 +-
 .../query/sql_invalid/query30_invalid_SQL.sql      |    90 +-
 .../query/sql_invalid/query31_invalid_SQL.sql      |    92 +-
 .../query/sql_invalid/query32_invalid_SQL.sql      |    94 +-
 .../query/sql_invalid/query33_invalid_SQL.sql      |    94 +-
 .../query/sql_invalid/query34_invalid_SQL.sql      |    30 +-
 .../src/test/resources/query/sql_join/query_00.sql |    19 +-
 .../src/test/resources/query/sql_join/query_01.sql |    24 +-
 .../src/test/resources/query/sql_join/query_02.sql |    24 +-
 .../src/test/resources/query/sql_join/query_03.sql |    22 +-
 .../src/test/resources/query/sql_join/query_05.sql |    18 +-
 .../src/test/resources/query/sql_join/query_08.sql |    18 +-
 .../src/test/resources/query/sql_join/query_09.sql |    19 +-
 .../src/test/resources/query/sql_join/query_10.sql |    19 +-
 .../src/test/resources/query/sql_join/query_11.sql |    19 +-
 .../src/test/resources/query/sql_join/query_13.sql |    19 +-
 .../src/test/resources/query/sql_join/query_14.sql |    18 +-
 .../src/test/resources/query/sql_join/query_19.sql |    19 +-
 .../src/test/resources/query/sql_join/query_20.sql |    18 +-
 .../src/test/resources/query/sql_join/query_21.sql |    18 +-
 .../src/test/resources/query/sql_join/query_22.sql |    18 +-
 .../src/test/resources/query/sql_join/query_23.sql |    18 +-
 .../src/test/resources/query/sql_join/query_25.sql |    18 +-
 .../sql_join/sql_is_not_distinct_from/query_01.sql |    18 +-
 .../sql_join/sql_is_not_distinct_from/query_02.sql |    18 +-
 .../sql_join/sql_is_not_distinct_from/query_03.sql |    18 +-
 .../sql_join/sql_is_not_distinct_from/query_04.sql |    18 +-
 .../sql_join/sql_is_not_distinct_from/query_05.sql |    18 +-
 .../query/sql_join/sql_right_join/query_01.sql     |    18 +-
 .../test/resources/query/sql_joinorder/query01.sql |    26 -
 .../test/resources/query/sql_joinorder/query02.sql |    26 -
 .../test/resources/query/sql_joinorder/query03.sql |    26 -
 .../test/resources/query/sql_joinorder/query04.sql |    26 -
 .../test/resources/query/sql_joinorder/query05.sql |    26 -
 .../src/test/resources/query/sql_kap/query01.sql   |    18 +-
 .../src/test/resources/query/sql_kap/query01_a.sql |    18 +-
 .../src/test/resources/query/sql_kap/query03.sql   |    26 +-
 .../src/test/resources/query/sql_kap/query07.sql   |    18 +-
 .../src/test/resources/query/sql_kap/query08.sql   |    18 +-
 .../src/test/resources/query/sql_like/query01.sql  |    28 +-
 .../src/test/resources/query/sql_like/query02.sql  |    18 +-
 .../src/test/resources/query/sql_like/query03.sql  |    28 +-
 .../src/test/resources/query/sql_like/query04.sql  |    18 +-
 .../src/test/resources/query/sql_like/query05.sql  |    26 +-
 .../src/test/resources/query/sql_like/query06.sql  |    28 +-
 .../src/test/resources/query/sql_like/query10.sql  |    27 +-
 .../src/test/resources/query/sql_like/query15.sql  |    26 +-
 .../src/test/resources/query/sql_like/query16.sql  |    27 +-
 .../src/test/resources/query/sql_like/query17.sql  |    26 +-
 .../src/test/resources/query/sql_like/query18.sql  |    22 +-
 .../src/test/resources/query/sql_like/query20.sql  |    28 +-
 .../src/test/resources/query/sql_like/query21.sql  |    28 +-
 .../src/test/resources/query/sql_like/query22.sql  |    26 +-
 .../src/test/resources/query/sql_like/query23.sql  |    28 +-
 .../src/test/resources/query/sql_like/query24.sql  |    18 +-
 .../src/test/resources/query/sql_like/query25.sql  |    18 +-
 .../src/test/resources/query/sql_like/query26.sql  |    18 +-
 .../src/test/resources/query/sql_limit/query00.sql |    25 -
 .../src/test/resources/query/sql_limit/query01.sql |    34 -
 .../src/test/resources/query/sql_limit/query02.sql |    20 -
 .../src/test/resources/query/sql_limit/query03.sql |    36 -
 .../src/test/resources/query/sql_limit/query04.sql |    22 -
 .../src/test/resources/query/sql_limit/query05.sql |    18 -
 .../src/test/resources/query/sql_limit/query06.sql |    25 -
 .../sql_limit/result-DEFAULT/query00.sql.json      |    13 -
 .../sql_limit/result-DEFAULT/query00.sql.schema    |     1 -
 .../sql_limit/result-DEFAULT/query02.sql.json      |    20 -
 .../sql_limit/result-DEFAULT/query02.sql.schema    |     1 -
 .../sql_limit/result-DEFAULT/query03.sql.json      |     4 -
 .../sql_limit/result-DEFAULT/query03.sql.schema    |     1 -
 .../sql_limit/result-DEFAULT/query04.sql.json      |    20 -
 .../sql_limit/result-DEFAULT/query04.sql.schema    |     1 -
 .../sql_limit/result-DEFAULT/query05.sql.json      |     1 -
 .../sql_limit/result-DEFAULT/query05.sql.schema    |     1 -
 .../sql_limit/result-DEFAULT/query06.sql.json      |    20 -
 .../sql_limit/result-DEFAULT/query06.sql.schema    |     1 -
 .../resources/query/sql_limit_offset/query00.sql   |    24 -
 .../resources/query/sql_limit_offset/query01.sql   |    36 -
 .../resources/query/sql_limit_offset/query02.sql   |    20 -
 .../resources/query/sql_limit_offset/query03.sql   |    37 -
 .../resources/query/sql_limit_offset/query04.sql   |    23 -
 .../resources/query/sql_limit_offset/query05.sql   |    19 -
 .../resources/query/sql_limit_offset/query06.sql   |    26 -
 .../resources/query/sql_limit_offset/query07.sql   |    23 -
 .../test/resources/query/sql_lookup/query01.sql    |    18 +-
 .../test/resources/query/sql_lookup/query02.sql    |    18 +-
 .../test/resources/query/sql_lookup/query03.sql    |    18 +-
 .../test/resources/query/sql_lookup/query04.sql    |    18 +-
 .../test/resources/query/sql_lookup/query05.sql    |    18 +-
 .../test/resources/query/sql_lookup/query06.sql    |    18 +-
 .../test/resources/query/sql_lookup/query07.sql    |    18 +-
 .../test/resources/query/sql_lookup/query08.sql    |    18 +-
 .../test/resources/query/sql_lookup/query09.sql    |    18 +-
 .../test/resources/query/sql_lookup/query10.sql    |    18 +-
 .../test/resources/query/sql_lookup/query11.sql    |    18 +-
 .../test/resources/query/sql_lookup/query45.sql    |    20 +-
 .../test/resources/query/sql_lookup/query46.sql    |    18 +-
 .../test/resources/query/sql_lookup/query47.sql    |    18 +-
 .../test/resources/query/sql_lookup/query48.sql    |    18 +-
 .../test/resources/query/sql_lookup/query55.sql    |    18 +-
 .../test/resources/query/sql_magine/query01.sql    |    18 +-
 .../test/resources/query/sql_magine/query03.sql    |    18 +-
 .../test/resources/query/sql_magine/query04.sql    |    18 +-
 .../test/resources/query/sql_magine/query05.sql    |    18 +-
 .../test/resources/query/sql_magine/query06.sql    |    18 +-
 .../test/resources/query/sql_magine/query07.sql    |    18 +-
 .../test/resources/query/sql_magine/query09.sql    |    18 +-
 .../test/resources/query/sql_magine/query10.sql    |    18 +-
 .../test/resources/query/sql_magine/query11.sql    |    18 +-
 .../test/resources/query/sql_magine/query12.sql    |    18 +-
 .../test/resources/query/sql_magine/query13.sql    |    18 +-
 .../test/resources/query/sql_magine/query14.sql    |    18 +-
 .../test/resources/query/sql_magine/query15.sql    |    18 +-
 .../resources/query/sql_magine_inner/query03.sql   |    35 -
 .../resources/query/sql_magine_inner/query04.sql   |    40 -
 .../resources/query/sql_magine_inner/query05.sql   |    40 -
 .../resources/query/sql_magine_inner/query06.sql   |    25 -
 .../query/sql_magine_inner/query07.sql.disabled    |    40 -
 .../resources/query/sql_magine_inner/query09.sql   |    35 -
 .../result-DEFAULT/query03.sql.json                |  7161 -------------
 .../result-DEFAULT/query03.sql.schema              |     1 -
 .../result-DEFAULT/query04.sql.json                |  9896 ------------------
 .../result-DEFAULT/query04.sql.schema              |     1 -
 .../result-DEFAULT/query05.sql.json                |  9896 ------------------
 .../result-DEFAULT/query05.sql.schema              |     1 -
 .../result-DEFAULT/query06.sql.json                |  1000 --
 .../result-DEFAULT/query06.sql.schema              |     1 -
 .../result-DEFAULT/query09.sql.json                |     2 -
 .../result-DEFAULT/query09.sql.schema              |     1 -
 .../resources/query/sql_magine_left/query01.sql    |    18 +-
 .../resources/query/sql_magine_left/query02.sql    |    18 +-
 .../resources/query/sql_magine_left/query03.sql    |    18 +-
 .../test/resources/query/sql_massin/massin/and.sql |    16 -
 .../resources/query/sql_massin/massin/base.sql     |    16 -
 .../test/resources/query/sql_massin/massin/filter1 |     1 -
 .../test/resources/query/sql_massin/massin/filter2 |     1 -
 .../test/resources/query/sql_massin/massin/not.sql |    16 -
 .../test/resources/query/sql_massin/massin/or.sql  |    16 -
 .../test/resources/query/sql_massin/query01.sql    |    27 -
 .../test/resources/query/sql_massin/query02.sql    |    27 -
 .../test/resources/query/sql_massin/query03.sql    |    27 -
 .../test/resources/query/sql_massin/query04.sql    |    24 -
 .../query/sql_massin_distinct/query01.sql          |    28 -
 .../query/sql_massin_distinct/query02.sql          |    30 -
 .../query/sql_massin_distinct/query03.sql          |    28 -
 .../resources/query/sql_measures/count_col.sql     |    17 -
 .../query/sql_measures/inaccurate_sql/hllc.sql     |    17 -
 .../resources/query/sql_measures/sql_measure.sql   |    18 -
 .../resources/query/sql_multi_model/query01.sql    |    46 -
 .../result-DEFAULT/query01.sql.json                |   731 --
 .../result-DEFAULT/query01.sql.schema              |     1 -
 .../resources/query/sql_non_equi_join/query_00.sql |    30 -
 .../resources/query/sql_non_equi_join/query_01.sql |    34 -
 .../resources/query/sql_non_equi_join/query_02.sql |    42 -
 .../resources/query/sql_non_equi_join/query_03.sql |    33 -
 .../resources/query/sql_non_equi_join/query_04.sql |    22 -
 .../resources/query/sql_non_equi_join/query_05.sql |    23 -
 .../resources/query/sql_non_equi_join/query_06.sql |    28 -
 .../resources/query/sql_non_equi_join/query_07.sql |    26 -
 .../resources/query/sql_non_equi_join/query_08.sql |    30 -
 .../resources/query/sql_non_equi_join/query_09.sql |    30 -
 .../resources/query/sql_non_equi_join/query_10.sql |    30 -
 .../resources/query/sql_non_equi_join/query_11.sql |    30 -
 .../resources/query/sql_non_equi_join/query_12.sql |    27 -
 .../resources/query/sql_non_equi_join/query_13.sql |    27 -
 .../resources/query/sql_non_equi_join/query_14.sql |    28 -
 .../resources/query/sql_non_equi_join/query_15.sql |    28 -
 .../resources/query/sql_non_equi_join/query_16.sql |    28 -
 .../resources/query/sql_non_equi_join/query_17.sql |    20 -
 .../resources/query/sql_non_equi_join/query_18.sql |    27 -
 .../resources/query/sql_non_equi_join/query_19.sql |    28 -
 .../resources/query/sql_non_equi_join/query_20.sql |    28 -
 .../resources/query/sql_non_equi_join/query_21.sql |    28 -
 .../resources/query/sql_non_equi_join/query_22.sql |    32 -
 .../resources/query/sql_non_equi_join/query_23.sql |    32 -
 .../resources/query/sql_non_equi_join/query_24.sql |    32 -
 .../resources/query/sql_non_equi_join/query_25.sql |    22 -
 .../resources/query/sql_non_equi_join/query_26.sql |    21 -
 .../resources/query/sql_non_equi_join/query_27.sql |    18 -
 .../resources/query/sql_non_equi_join/query_28.sql |    23 -
 .../resources/query/sql_non_equi_join/query_29.sql |    23 -
 .../resources/query/sql_non_equi_join/query_30.sql |    23 -
 .../resources/query/sql_non_equi_join/query_31.sql |    21 -
 .../resources/query/sql_non_equi_join/query_32.sql |    21 -
 .../resources/query/sql_non_equi_join/query_33.sql |    27 -
 .../resources/query/sql_non_equi_join/query_34.sql |    28 -
 .../test/resources/query/sql_orderby/query01.sql   |    28 +-
 .../test/resources/query/sql_orderby/query02.sql   |    32 +-
 .../test/resources/query/sql_orderby/query03.sql   |    28 +-
 .../test/resources/query/sql_orderby/query06.sql   |    18 +-
 .../test/resources/query/sql_orderby/query07.sql   |    18 +-
 .../test/resources/query/sql_orderby/query08.sql   |    18 +-
 .../test/resources/query/sql_orderby/query09.sql   |    18 +-
 .../test/resources/query/sql_orderby/query10.sql   |    18 +-
 .../test/resources/query/sql_orderby/query13.sql   |    18 +-
 .../test/resources/query/sql_ordinal/query01.sql   |    28 -
 .../test/resources/query/sql_ordinal/query02.sql   |    25 -
 .../query/sql_parentheses_escape/query05.sql       |    34 -
 .../sql_parentheses_escape/query05.sql.expected    |    20 -
 .../query/sql_parentheses_escape/query06.sql       |    15 -
 .../sql_parentheses_escape/query06.sql.expected    |     1 -
 .../resources/query/sql_percentile/query01.sql     |    18 +-
 .../resources/query/sql_percentile/query03.sql     |    18 +-
 .../resources/query/sql_percentile/query04.sql     |    18 +-
 .../resources/query/sql_percentile/query05.sql     |    18 +-
 .../resources/query/sql_percentile/query06.sql     |    18 +-
 .../test/resources/query/sql_powerbi/query00.sql   |    18 +-
 .../test/resources/query/sql_powerbi/query01.sql   |    18 +-
 .../src/test/resources/query/sql_probe/query00.sql |    19 -
 .../src/test/resources/query/sql_probe/query01.sql |    24 -
 .../src/test/resources/query/sql_probe/query02.sql |    19 -
 .../test/resources/query/sql_pushdown/query01.sql  |    18 +-
 .../test/resources/query/sql_pushdown/query02.sql  |    18 +-
 .../test/resources/query/sql_pushdown/query03.sql  |    18 +-
 .../test/resources/query/sql_pushdown/query04.sql  |    20 +-
 .../test/resources/query/sql_pushdown/query05.sql  |    18 +-
 .../test/resources/query/sql_pushdown/query06.sql  |    18 +-
 .../test/resources/query/sql_pushdown/query07.sql  |    18 +-
 .../test/resources/query/sql_pushdown/query08.sql  |    18 +-
 .../test/resources/query/sql_pushdown/query09.sql  |    18 +-
 .../test/resources/query/sql_pushdown/query10.sql  |    18 +-
 .../test/resources/query/sql_pushdown/query11.sql  |    18 +-
 .../test/resources/query/sql_pushdown/query12.sql  |    18 +-
 .../src/test/resources/query/sql_raw/query02.sql   |    18 +-
 .../src/test/resources/query/sql_raw/query05.sql   |    18 +-
 .../src/test/resources/query/sql_raw/query21.sql   |    18 +-
 .../src/test/resources/query/sql_raw/query25.sql   |    18 +-
 .../test/resources/query/sql_rawtable/query01.sql  |    18 +-
 .../test/resources/query/sql_rawtable/query02.sql  |    18 +-
 .../test/resources/query/sql_rawtable/query03.sql  |    38 +-
 .../test/resources/query/sql_rawtable/query09.sql  |    18 +-
 .../test/resources/query/sql_rawtable/query10.sql  |    18 +-
 .../test/resources/query/sql_rawtable/query11.sql  |    18 +-
 .../test/resources/query/sql_rawtable/query12.sql  |    32 +-
 .../test/resources/query/sql_rawtable/query14.sql  |    22 +-
 .../test/resources/query/sql_rawtable/query26.sql  |    18 +-
 .../test/resources/query/sql_rawtable/query27.sql  |    30 +-
 .../test/resources/query/sql_rawtable/query28.sql  |    38 +-
 .../test/resources/query/sql_rawtable/query29.sql  |    38 +-
 .../test/resources/query/sql_rawtable/query30.sql  |    32 +-
 .../test/resources/query/sql_rawtable/query31.sql  |    18 +-
 .../test/resources/query/sql_rawtable/query32.sql  |    18 +-
 .../test/resources/query/sql_rawtable/query33.sql  |    18 +-
 .../test/resources/query/sql_rawtable/query36.sql  |    18 +-
 .../test/resources/query/sql_rawtable/query38.sql  |    18 +-
 .../query/sql_replace_special_symbol/query01.sql   |    19 -
 .../query/sql_replace_special_symbol/query02.sql   |    19 -
 .../query/sql_replace_special_symbol/query03.sql   |    19 -
 .../query/sql_replace_special_symbol/query04.sql   |    19 -
 .../query/sql_replace_special_symbol/query05.sql   |    19 -
 .../query/sql_replace_special_symbol/query06.sql   |    19 -
 .../query/sql_replace_special_symbol/query07.sql   |    19 -
 .../query/sql_replace_special_symbol/query08.sql   |    19 -
 .../query/sql_replace_special_symbol/query09.sql   |    19 -
 .../query/sql_replace_special_symbol/query10.sql   |    19 -
 .../query/sql_replace_special_symbol/query11.sql   |    19 -
 .../query/sql_replace_special_symbol/query12.sql   |    19 -
 .../query/sql_replace_special_symbol/query13.sql   |    19 -
 .../query/sql_replace_special_symbol/query14.sql   |    19 -
 .../query/sql_select_subquery/query01.sql          |    18 +-
 .../query/sql_select_subquery/query02.sql          |    18 +-
 .../query/sql_select_subquery/query03.sql          |    18 +-
 .../query/sql_select_subquery/query04.sql          |    18 +-
 .../query/sql_select_subquery/query05.sql          |    18 +-
 .../query/sql_select_subquery/query06.sql          |    18 +-
 .../query/sql_select_subquery/query07.sql          |    18 +-
 .../test/resources/query/sql_similar/query01.sql   |    28 -
 .../test/resources/query/sql_similar/query02.sql   |    19 -
 .../test/resources/query/sql_similar/query03.sql   |    30 -
 .../test/resources/query/sql_similar/query04.sql   |    19 -
 .../test/resources/query/sql_similar/query05.sql   |    30 -
 .../test/resources/query/sql_similar/query06.sql   |    30 -
 .../test/resources/query/sql_similar/query10.sql   |    26 -
 .../test/resources/query/sql_similar/query15.sql   |    27 -
 .../test/resources/query/sql_similar/query16.sql   |    26 -
 .../test/resources/query/sql_similar/query17.sql   |    27 -
 .../test/resources/query/sql_similar/query18.sql   |    27 -
 .../test/resources/query/sql_similar/query20.sql   |    28 -
 .../test/resources/query/sql_similar/query21.sql   |    28 -
 .../test/resources/query/sql_similar/query22.sql   |    28 -
 .../test/resources/query/sql_similar/query23.sql   |    28 -
 .../test/resources/query/sql_similar/query24.sql   |    16 -
 .../test/resources/query/sql_similar/query25.sql   |    28 -
 .../sql_similar/result-DEFAULT/query01.sql.json    |     4 -
 .../sql_similar/result-DEFAULT/query01.sql.schema  |     1 -
 .../sql_similar/result-DEFAULT/query02.sql.json    |     0
 .../sql_similar/result-DEFAULT/query02.sql.schema  |     1 -
 .../sql_similar/result-DEFAULT/query03.sql.json    |     1 -
 .../sql_similar/result-DEFAULT/query03.sql.schema  |     1 -
 .../sql_similar/result-DEFAULT/query04.sql.json    |     1 -
 .../sql_similar/result-DEFAULT/query04.sql.schema  |     1 -
 .../sql_similar/result-DEFAULT/query05.sql.json    |     1 -
 .../sql_similar/result-DEFAULT/query05.sql.schema  |     1 -
 .../sql_similar/result-DEFAULT/query06.sql.json    |     4 -
 .../sql_similar/result-DEFAULT/query06.sql.schema  |     1 -
 .../sql_similar/result-DEFAULT/query10.sql.json    |   153 -
 .../sql_similar/result-DEFAULT/query10.sql.schema  |     1 -
 .../sql_similar/result-DEFAULT/query15.sql.json    |     0
 .../sql_similar/result-DEFAULT/query15.sql.schema  |     1 -
 .../sql_similar/result-DEFAULT/query16.sql.json    |   153 -
 .../sql_similar/result-DEFAULT/query16.sql.schema  |     1 -
 .../sql_similar/result-DEFAULT/query17.sql.json    |   153 -
 .../sql_similar/result-DEFAULT/query17.sql.schema  |     1 -
 .../sql_similar/result-DEFAULT/query18.sql.json    |     2 -
 .../sql_similar/result-DEFAULT/query18.sql.schema  |     1 -
 .../sql_similar/result-DEFAULT/query20.sql.json    |     4 -
 .../sql_similar/result-DEFAULT/query20.sql.schema  |     1 -
 .../sql_similar/result-DEFAULT/query21.sql.json    |     1 -
 .../sql_similar/result-DEFAULT/query21.sql.schema  |     1 -
 .../sql_similar/result-DEFAULT/query22.sql.json    |    40 -
 .../sql_similar/result-DEFAULT/query22.sql.schema  |     1 -
 .../sql_similar/result-DEFAULT/query23.sql.json    |     4 -
 .../sql_similar/result-DEFAULT/query23.sql.schema  |     1 -
 .../sql_similar/result-DEFAULT/query24.sql.json    |  1964 ----
 .../sql_similar/result-DEFAULT/query24.sql.schema  |     1 -
 .../sql_similar/result-DEFAULT/query25.sql.json    |     0
 .../sql_similar/result-DEFAULT/query25.sql.schema  |     1 -
 .../resources/query/sql_single_value/query00.sql   |    16 -
 .../resources/query/sql_single_value/query01.sql   |    17 -
 .../resources/query/sql_single_value/query02.sql   |    16 -
 .../resources/query/sql_single_value/query03.sql   |    15 -
 .../resources/query/sql_single_value/query04.sql   |    17 -
 .../test/resources/query/sql_snowflake/query01.sql |    37 -
 .../test/resources/query/sql_snowflake/query02.sql |    35 -
 .../test/resources/query/sql_snowflake/query03.sql |    37 -
 .../sql_snowflake/result-DEFAULT/query01.sql.json  |     1 -
 .../result-DEFAULT/query01.sql.schema              |     1 -
 .../sql_snowflake/result-DEFAULT/query02.sql.json  |     8 -
 .../result-DEFAULT/query02.sql.schema              |     1 -
 .../sql_snowflake/result-DEFAULT/query03.sql.json  |     8 -
 .../result-DEFAULT/query03.sql.schema              |     1 -
 .../constant_query/constant_query-01.sql           |    44 -
 .../result-DEFAULT/constant_query-01.sql.json      |     1 -
 .../result-DEFAULT/constant_query-01.sql.schema    |     1 -
 .../query/sql_spark_func/math/bround-01.sql        |    20 -
 .../query/sql_spark_func/math/bround-02.sql        |    20 -
 .../query/sql_spark_func/math/bround-03.sql        |    20 -
 .../query/sql_spark_func/math/cbrt-01.sql          |    20 -
 .../query/sql_spark_func/math/cbrt-02.sql          |    20 -
 .../query/sql_spark_func/math/cbrt-03.sql          |    20 -
 .../query/sql_spark_func/math/conv-01.sql          |    20 -
 .../query/sql_spark_func/math/conv-02.sql          |    20 -
 .../query/sql_spark_func/math/conv-03.sql          |    21 -
 .../query/sql_spark_func/math/conv-04.sql          |    21 -
 .../query/sql_spark_func/math/cosh-01.sql          |    23 -
 .../query/sql_spark_func/math/cosh-02.sql          |    26 -
 .../query/sql_spark_func/math/cosh-03.sql          |    24 -
 .../query/sql_spark_func/math/expm1-01.sql         |    21 -
 .../query/sql_spark_func/math/expm1-02.sql         |    23 -
 .../query/sql_spark_func/math/expm1-03.sql         |    23 -
 .../query/sql_spark_func/math/factorial-01.sql     |    24 -
 .../query/sql_spark_func/math/factorial-02.sql     |    30 -
 .../query/sql_spark_func/math/factorial-03.sql     |    16 -
 .../query/sql_spark_func/math/hypot-01.sql         |    20 -
 .../query/sql_spark_func/math/hypot-02.sql         |    19 -
 .../resources/query/sql_spark_func/math/log-01.sql |    21 -
 .../resources/query/sql_spark_func/math/log-02.sql |    21 -
 .../resources/query/sql_spark_func/math/log-03.sql |    22 -
 .../query/sql_spark_func/math/log1p-01.sql         |    21 -
 .../query/sql_spark_func/math/log1p-02.sql         |    23 -
 .../query/sql_spark_func/math/log1p-03.sql         |    25 -
 .../query/sql_spark_func/math/log2-01.sql          |    21 -
 .../query/sql_spark_func/math/log2-02.sql          |    23 -
 .../query/sql_spark_func/math/log2-03.sql          |    24 -
 .../math/result-DEFAULT/bround-01.sql.schema       |     1 -
 .../math/result-DEFAULT/bround-02.sql.json         |  9999 ------------------
 .../math/result-DEFAULT/bround-02.sql.schema       |     1 -
 .../math/result-DEFAULT/bround-03.sql.json         |     1 -
 .../math/result-DEFAULT/bround-03.sql.schema       |     1 -
 .../math/result-DEFAULT/cbrt-01.sql.json           |     2 -
 .../math/result-DEFAULT/cbrt-01.sql.schema         |     1 -
 .../math/result-DEFAULT/cbrt-02.sql.json           |     2 -
 .../math/result-DEFAULT/cbrt-02.sql.schema         |     1 -
 .../math/result-DEFAULT/cbrt-03.sql.json           |    10 -
 .../math/result-DEFAULT/cbrt-03.sql.schema         |     1 -
 .../math/result-DEFAULT/conv-01.sql.json           |     2 -
 .../math/result-DEFAULT/conv-01.sql.schema         |     1 -
 .../math/result-DEFAULT/conv-02.sql.json           |     2 -
 .../math/result-DEFAULT/conv-02.sql.schema         |     1 -
 .../math/result-DEFAULT/conv-03.sql.json           |    10 -
 .../math/result-DEFAULT/conv-03.sql.schema         |     1 -
 .../math/result-DEFAULT/conv-04.sql.json           |    10 -
 .../math/result-DEFAULT/conv-04.sql.schema         |     1 -
 .../math/result-DEFAULT/cosh-01.sql.json           |    10 -
 .../math/result-DEFAULT/cosh-01.sql.schema         |     1 -
 .../math/result-DEFAULT/cosh-02.sql.json           |    12 -
 .../math/result-DEFAULT/cosh-02.sql.schema         |     1 -
 .../math/result-DEFAULT/cosh-03.sql.json           |    10 -
 .../math/result-DEFAULT/cosh-03.sql.schema         |     1 -
 .../math/result-DEFAULT/expm1-01.sql.json          |     1 -
 .../math/result-DEFAULT/expm1-01.sql.schema        |     1 -
 .../math/result-DEFAULT/expm1-02.sql.json          |    10 -
 .../math/result-DEFAULT/expm1-02.sql.schema        |     1 -
 .../math/result-DEFAULT/expm1-03.sql.json          |    10 -
 .../math/result-DEFAULT/expm1-03.sql.schema        |     1 -
 .../math/result-DEFAULT/factorial-01.sql.json      |     3 -
 .../math/result-DEFAULT/factorial-01.sql.schema    |     1 -
 .../math/result-DEFAULT/factorial-02.sql.json      |    10 -
 .../math/result-DEFAULT/factorial-02.sql.schema    |     1 -
 .../math/result-DEFAULT/factorial-03.sql.json      |    10 -
 .../math/result-DEFAULT/factorial-03.sql.schema    |     1 -
 .../math/result-DEFAULT/hypot-01.sql.json          |     1 -
 .../math/result-DEFAULT/hypot-01.sql.schema        |     1 -
 .../math/result-DEFAULT/hypot-02.sql.json          |     1 -
 .../math/result-DEFAULT/hypot-02.sql.schema        |     1 -
 .../math/result-DEFAULT/log-01.sql.json            |     2 -
 .../math/result-DEFAULT/log-01.sql.schema          |     1 -
 .../math/result-DEFAULT/log-02.sql.json            |     2 -
 .../math/result-DEFAULT/log-02.sql.schema          |     1 -
 .../math/result-DEFAULT/log-03.sql.json            |     2 -
 .../math/result-DEFAULT/log-03.sql.schema          |     1 -
 .../math/result-DEFAULT/log1p-01.sql.json          |    10 -
 .../math/result-DEFAULT/log1p-01.sql.schema        |     1 -
 .../math/result-DEFAULT/log1p-02.sql.json          |    10 -
 .../math/result-DEFAULT/log1p-02.sql.schema        |     1 -
 .../math/result-DEFAULT/log1p-03.sql.json          |    10 -
 .../math/result-DEFAULT/log1p-03.sql.schema        |     1 -
 .../math/result-DEFAULT/log2-01.sql.json           |     1 -
 .../math/result-DEFAULT/log2-01.sql.schema         |     1 -
 .../math/result-DEFAULT/log2-02.sql.json           |     1 -
 .../math/result-DEFAULT/log2-02.sql.schema         |     1 -
 .../math/result-DEFAULT/log2-03.sql.json           |    10 -
 .../math/result-DEFAULT/log2-03.sql.schema         |     1 -
 .../math/result-DEFAULT/rint-01.sql.json           |     2 -
 .../math/result-DEFAULT/rint-01.sql.schema         |     1 -
 .../math/result-DEFAULT/rint-02.sql.json           |    10 -
 .../math/result-DEFAULT/rint-02.sql.schema         |     1 -
 .../math/result-DEFAULT/rint-03.sql.json           |    10 -
 .../math/result-DEFAULT/rint-03.sql.schema         |     1 -
 .../math/result-DEFAULT/sinh-01.sql.json           |    10 -
 .../math/result-DEFAULT/sinh-01.sql.schema         |     1 -
 .../math/result-DEFAULT/sinh-02.sql.json           |     2 -
 .../math/result-DEFAULT/sinh-02.sql.schema         |     1 -
 .../math/result-DEFAULT/sinh-03.sql.json           |    10 -
 .../math/result-DEFAULT/sinh-03.sql.schema         |     1 -
 .../math/result-DEFAULT/tanh-01.sql.json           |     2 -
 .../math/result-DEFAULT/tanh-01.sql.schema         |     1 -
 .../math/result-DEFAULT/tanh-02.sql.json           |     2 -
 .../math/result-DEFAULT/tanh-02.sql.schema         |     1 -
 .../math/result-DEFAULT/tanh-03.sql.json           |    10 -
 .../math/result-DEFAULT/tanh-03.sql.schema         |     1 -
 .../query/sql_spark_func/math/rint-01.sql          |    21 -
 .../query/sql_spark_func/math/rint-02.sql          |    21 -
 .../query/sql_spark_func/math/rint-03.sql          |    21 -
 .../query/sql_spark_func/math/sinh-01.sql          |    20 -
 .../query/sql_spark_func/math/sinh-02.sql          |    24 -
 .../query/sql_spark_func/math/sinh-03.sql          |    24 -
 .../query/sql_spark_func/math/tanh-01.sql          |    18 -
 .../query/sql_spark_func/math/tanh-02.sql          |    24 -
 .../query/sql_spark_func/math/tanh-03.sql          |    24 -
 .../query/sql_spark_func/misc/crc32-01.sql         |    15 -
 .../query/sql_spark_func/misc/crc32-02.sql         |    15 -
 .../query/sql_spark_func/misc/crc32-03.sql         |    15 -
 .../resources/query/sql_spark_func/misc/md5-01.sql |    15 -
 .../resources/query/sql_spark_func/misc/md5-02.sql |    15 -
 .../resources/query/sql_spark_func/misc/md5-03.sql |    15 -
 .../misc/result-DEFAULT/crc32-01.sql.json          | 10000 -------------------
 .../misc/result-DEFAULT/crc32-01.sql.schema        |     1 -
 .../misc/result-DEFAULT/crc32-02.sql.json          | 10000 -------------------
 .../misc/result-DEFAULT/crc32-02.sql.schema        |     1 -
 .../misc/result-DEFAULT/crc32-03.sql.json          | 10000 -------------------
 .../misc/result-DEFAULT/crc32-03.sql.schema        |     1 -
 .../misc/result-DEFAULT/md5-01.sql.json            | 10000 -------------------
 .../misc/result-DEFAULT/md5-01.sql.schema          |     1 -
 .../misc/result-DEFAULT/md5-02.sql.json            | 10000 -------------------
 .../misc/result-DEFAULT/md5-02.sql.schema          |     1 -
 .../misc/result-DEFAULT/md5-03.sql.json            | 10000 -------------------
 .../misc/result-DEFAULT/md5-03.sql.schema          |     1 -
 .../misc/result-DEFAULT/sha-01.sql.json            | 10000 -------------------
 .../misc/result-DEFAULT/sha-01.sql.schema          |     1 -
 .../misc/result-DEFAULT/sha-02.sql.json            | 10000 -------------------
 .../misc/result-DEFAULT/sha-02.sql.schema          |     1 -
 .../misc/result-DEFAULT/sha-03.sql.json            | 10000 -------------------
 .../misc/result-DEFAULT/sha-03.sql.schema          |     1 -
 .../misc/result-DEFAULT/sha1-01.sql.json           | 10000 -------------------
 .../misc/result-DEFAULT/sha1-01.sql.schema         |     1 -
 .../misc/result-DEFAULT/sha1-02.sql.json           | 10000 -------------------
 .../misc/result-DEFAULT/sha1-02.sql.schema         |     1 -
 .../misc/result-DEFAULT/sha1-03.sql.json           | 10000 -------------------
 .../misc/result-DEFAULT/sha1-03.sql.schema         |     1 -
 .../misc/result-DEFAULT/sha2-01.sql.json           | 10000 -------------------
 .../misc/result-DEFAULT/sha2-01.sql.schema         |     1 -
 .../misc/result-DEFAULT/sha2-02.sql.json           | 10000 -------------------
 .../misc/result-DEFAULT/sha2-02.sql.schema         |     1 -
 .../misc/result-DEFAULT/sha2-03.sql.json           | 10000 -------------------
 .../misc/result-DEFAULT/sha2-03.sql.schema         |     1 -
 .../misc/result-DEFAULT/sha2-04.sql.json           |     5 -
 .../misc/result-DEFAULT/sha2-04.sql.schema         |     1 -
 .../resources/query/sql_spark_func/misc/sha-01.sql |    15 -
 .../resources/query/sql_spark_func/misc/sha-02.sql |    15 -
 .../resources/query/sql_spark_func/misc/sha-03.sql |    15 -
 .../query/sql_spark_func/misc/sha1-01.sql          |    15 -
 .../query/sql_spark_func/misc/sha1-02.sql          |    15 -
 .../query/sql_spark_func/misc/sha1-03.sql          |    15 -
 .../query/sql_spark_func/misc/sha2-01.sql          |    15 -
 .../query/sql_spark_func/misc/sha2-02.sql          |    15 -
 .../query/sql_spark_func/misc/sha2-03.sql          |    15 -
 .../query/sql_spark_func/misc/sha2-04.sql          |    17 -
 .../query/sql_spark_func/string/ascii-01.sql       |    15 -
 .../query/sql_spark_func/string/base64-01.sql      |    15 -
 .../query/sql_spark_func/string/base64-02.sql      |    15 -
 .../query/sql_spark_func/string/chr-01.sql         |    15 -
 .../query/sql_spark_func/string/decode-01.sql      |    15 -
 .../query/sql_spark_func/string/decode-02.sql      |    15 -
 .../query/sql_spark_func/string/decode-03.sql      |    17 -
 .../query/sql_spark_func/string/encode-01.sql      |    15 -
 .../query/sql_spark_func/string/encode-02.sql      |    15 -
 .../query/sql_spark_func/string/find_in_set-01.sql |    15 -
 .../query/sql_spark_func/string/find_in_set-02.sql |    15 -
 .../query/sql_spark_func/string/find_in_set-03.sql |    15 -
 .../query/sql_spark_func/string/find_in_set-04.sql |    17 -
 .../query/sql_spark_func/string/lcase-01.sql       |    15 -
 .../query/sql_spark_func/string/lcase-02.sql       |    15 -
 .../query/sql_spark_func/string/lcase-03.sql       |    15 -
 .../query/sql_spark_func/string/lcase-04.sql       |    17 -
 .../query/sql_spark_func/string/levenshtein-01.sql |    15 -
 .../query/sql_spark_func/string/levenshtein-02.sql |    15 -
 .../query/sql_spark_func/string/locate-01.sql      |    15 -
 .../query/sql_spark_func/string/locate-02.sql      |    15 -
 .../query/sql_spark_func/string/locate-03.sql      |    15 -
 .../query/sql_spark_func/string/locate-04.sql      |    15 -
 .../query/sql_spark_func/string/locate-05.sql      |    17 -
 .../query/sql_spark_func/string/lpad-01.sql        |    15 -
 .../query/sql_spark_func/string/lpad-02.sql        |    15 -
 .../query/sql_spark_func/string/lpad-03.sql        |    15 -
 .../query/sql_spark_func/string/lpad-04.sql        |    17 -
 .../query/sql_spark_func/string/replace-01.sql     |    15 -
 .../query/sql_spark_func/string/replace-02.sql     |    15 -
 .../string/result-DEFAULT/ascii-01.sql.json        | 10000 -------------------
 .../string/result-DEFAULT/ascii-01.sql.schema      |     1 -
 .../string/result-DEFAULT/base64-01.sql.json       | 10000 -------------------
 .../string/result-DEFAULT/base64-01.sql.schema     |     1 -
 .../string/result-DEFAULT/base64-02.sql.json       | 10000 -------------------
 .../string/result-DEFAULT/base64-02.sql.schema     |     1 -
 .../string/result-DEFAULT/chr-01.sql.json          | 10000 -------------------
 .../string/result-DEFAULT/chr-01.sql.schema        |     1 -
 .../string/result-DEFAULT/decode-01.sql.json       | 10000 -------------------
 .../string/result-DEFAULT/decode-01.sql.schema     |     1 -
 .../string/result-DEFAULT/decode-02.sql.json       | 10000 -------------------
 .../string/result-DEFAULT/decode-02.sql.schema     |     1 -
 .../string/result-DEFAULT/decode-03.sql.json       |     5 -
 .../string/result-DEFAULT/decode-03.sql.schema     |     1 -
 .../string/result-DEFAULT/encode-01.sql.json       | 10000 -------------------
 .../string/result-DEFAULT/encode-01.sql.schema     |     1 -
 .../string/result-DEFAULT/encode-02.sql.json       | 10000 -------------------
 .../string/result-DEFAULT/encode-02.sql.schema     |     1 -
 .../string/result-DEFAULT/find_in_set-01.sql.json  | 10000 -------------------
 .../result-DEFAULT/find_in_set-01.sql.schema       |     1 -
 .../string/result-DEFAULT/find_in_set-02.sql.json  | 10000 -------------------
 .../result-DEFAULT/find_in_set-02.sql.schema       |     1 -
 .../string/result-DEFAULT/find_in_set-03.sql.json  | 10000 -------------------
 .../result-DEFAULT/find_in_set-03.sql.schema       |     1 -
 .../string/result-DEFAULT/find_in_set-04.sql.json  |     5 -
 .../result-DEFAULT/find_in_set-04.sql.schema       |     1 -
 .../string/result-DEFAULT/lcase-01.sql.json        | 10000 -------------------
 .../string/result-DEFAULT/lcase-01.sql.schema      |     1 -
 .../string/result-DEFAULT/lcase-02.sql.json        | 10000 -------------------
 .../string/result-DEFAULT/lcase-02.sql.schema      |     1 -
 .../string/result-DEFAULT/lcase-03.sql.json        | 10000 -------------------
 .../string/result-DEFAULT/lcase-03.sql.schema      |     1 -
 .../string/result-DEFAULT/lcase-04.sql.json        |     4 -
 .../string/result-DEFAULT/lcase-04.sql.schema      |     1 -
 .../string/result-DEFAULT/levenshtein-01.sql.json  | 10000 -------------------
 .../result-DEFAULT/levenshtein-01.sql.schema       |     1 -
 .../string/result-DEFAULT/levenshtein-02.sql.json  | 10000 -------------------
 .../result-DEFAULT/levenshtein-02.sql.schema       |     1 -
 .../string/result-DEFAULT/locate-01.sql.json       | 10000 -------------------
 .../string/result-DEFAULT/locate-01.sql.schema     |     1 -
 .../string/result-DEFAULT/locate-02.sql.json       | 10000 -------------------
 .../string/result-DEFAULT/locate-02.sql.schema     |     1 -
 .../string/result-DEFAULT/locate-03.sql.json       | 10000 -------------------
 .../string/result-DEFAULT/locate-03.sql.schema     |     1 -
 .../string/result-DEFAULT/locate-04.sql.json       | 10000 -------------------
 .../string/result-DEFAULT/locate-04.sql.schema     |     1 -
 .../string/result-DEFAULT/locate-05.sql.json       |     5 -
 .../string/result-DEFAULT/locate-05.sql.schema     |     1 -
 .../string/result-DEFAULT/lpad-01.sql.json         | 10000 -------------------
 .../string/result-DEFAULT/lpad-01.sql.schema       |     1 -
 .../string/result-DEFAULT/lpad-02.sql.json         | 10000 -------------------
 .../string/result-DEFAULT/lpad-02.sql.schema       |     1 -
 .../string/result-DEFAULT/lpad-03.sql.json         | 10000 -------------------
 .../string/result-DEFAULT/lpad-03.sql.schema       |     1 -
 .../string/result-DEFAULT/lpad-04.sql.json         |     5 -
 .../string/result-DEFAULT/lpad-04.sql.schema       |     1 -
 .../string/result-DEFAULT/replace-01.sql.json      | 10000 -------------------
 .../string/result-DEFAULT/replace-01.sql.schema    |     1 -
 .../string/result-DEFAULT/replace-02.sql.json      | 10000 -------------------
 .../string/result-DEFAULT/replace-02.sql.schema    |     1 -
 .../string/result-DEFAULT/rpad-01.sql.json         | 10000 -------------------
 .../string/result-DEFAULT/rpad-01.sql.schema       |     1 -
 .../string/result-DEFAULT/rpad-02.sql.json         | 10000 -------------------
 .../string/result-DEFAULT/rpad-02.sql.schema       |     1 -
 .../string/result-DEFAULT/rpad-03.sql.json         | 10000 -------------------
 .../string/result-DEFAULT/rpad-03.sql.schema       |     1 -
 .../string/result-DEFAULT/rpad-04.sql.json         |     5 -
 .../string/result-DEFAULT/rpad-04.sql.schema       |     1 -
 .../string/result-DEFAULT/rtrim-01.sql.json        | 10000 -------------------
 .../string/result-DEFAULT/rtrim-01.sql.schema      |     1 -
 .../string/result-DEFAULT/rtrim-02.sql.json        | 10000 -------------------
 .../string/result-DEFAULT/rtrim-02.sql.schema      |     1 -
 .../string/result-DEFAULT/sentences-01.sql.json    | 10000 -------------------
 .../string/result-DEFAULT/sentences-01.sql.schema  |     1 -
 .../string/result-DEFAULT/space-01.sql.json        | 10000 -------------------
 .../string/result-DEFAULT/space-01.sql.schema      |     1 -
 .../string/result-DEFAULT/space-02.sql.json        |     1 -
 .../string/result-DEFAULT/space-02.sql.schema      |     1 -
 .../string/result-DEFAULT/split_part-01.sql.json   | 10000 -------------------
 .../string/result-DEFAULT/split_part-01.sql.schema |     1 -
 .../result-DEFAULT/substring_index-01.sql.json     | 10000 -------------------
 .../result-DEFAULT/substring_index-01.sql.schema   |     1 -
 .../result-DEFAULT/substring_index-02.sql.json     | 10000 -------------------
 .../result-DEFAULT/substring_index-02.sql.schema   |     1 -
 .../result-DEFAULT/substring_index-03.sql.json     | 10000 -------------------
 .../result-DEFAULT/substring_index-03.sql.schema   |     1 -
 .../result-DEFAULT/substring_index-04.sql.json     |     4 -
 .../result-DEFAULT/substring_index-04.sql.schema   |     1 -
 .../string/result-DEFAULT/trim_both-01.sql.json    |     1 -
 .../string/result-DEFAULT/trim_both-01.sql.schema  |     1 -
 .../string/result-DEFAULT/trim_both-02.sql.json    |   244 -
 .../string/result-DEFAULT/trim_both-02.sql.schema  |     1 -
 .../string/result-DEFAULT/trim_both-03.sql.json    |     1 -
 .../string/result-DEFAULT/trim_both-03.sql.schema  |     1 -
 .../trim_leading_trailing-01.sql.json              |     1 -
 .../trim_leading_trailing-01.sql.schema            |     1 -
 .../trim_leading_trailing-02.sql.json              |   244 -
 .../trim_leading_trailing-02.sql.schema            |     1 -
 .../string/result-DEFAULT/ucase-01.sql.json        | 10000 -------------------
 .../string/result-DEFAULT/ucase-01.sql.schema      |     1 -
 .../string/result-DEFAULT/ucase-02.sql.json        | 10000 -------------------
 .../string/result-DEFAULT/ucase-02.sql.schema      |     1 -
 .../string/result-DEFAULT/ucase-03.sql.json        | 10000 -------------------
 .../string/result-DEFAULT/ucase-03.sql.schema      |     1 -
 .../string/result-DEFAULT/ucase-04.sql.json        |     4 -
 .../string/result-DEFAULT/ucase-04.sql.schema      |     1 -
 .../string/result-DEFAULT/unbase64-01.sql.json     | 10000 -------------------
 .../string/result-DEFAULT/unbase64-01.sql.schema   |     1 -
 .../string/result-DEFAULT/unbase64-02.sql.json     | 10000 -------------------
 .../string/result-DEFAULT/unbase64-02.sql.schema   |     1 -
 .../string/result-DEFAULT/unbase64-03.sql.json     |     5 -
 .../string/result-DEFAULT/unbase64-03.sql.schema   |     1 -
 .../query/sql_spark_func/string/rpad-01.sql        |    15 -
 .../query/sql_spark_func/string/rpad-02.sql        |    15 -
 .../query/sql_spark_func/string/rpad-03.sql        |    15 -
 .../query/sql_spark_func/string/rpad-04.sql        |    17 -
 .../query/sql_spark_func/string/rtrim-01.sql       |    15 -
 .../query/sql_spark_func/string/rtrim-02.sql       |    15 -
 .../query/sql_spark_func/string/sentences-01.sql   |    15 -
 .../query/sql_spark_func/string/space-01.sql       |    15 -
 .../sql_spark_func/string/space-01.sql.expected    |     1 -
 .../query/sql_spark_func/string/space-02.sql       |    16 -
 .../sql_spark_func/string/space-02.sql.expected    |     1 -
 .../query/sql_spark_func/string/split_part-01.sql  |    16 -
 .../sql_spark_func/string/substring_index-01.sql   |    15 -
 .../sql_spark_func/string/substring_index-02.sql   |    15 -
 .../sql_spark_func/string/substring_index-03.sql   |    15 -
 .../sql_spark_func/string/substring_index-04.sql   |    17 -
 .../query/sql_spark_func/string/trim_both-01.sql   |    15 -
 .../query/sql_spark_func/string/trim_both-02.sql   |    16 -
 .../query/sql_spark_func/string/trim_both-03.sql   |    77 -
 .../string/trim_leading_trailing-01.sql            |    15 -
 .../string/trim_leading_trailing-02.sql            |    16 -
 .../query/sql_spark_func/string/ucase-01.sql       |    15 -
 .../query/sql_spark_func/string/ucase-02.sql       |    15 -
 .../query/sql_spark_func/string/ucase-03.sql       |    15 -
 .../query/sql_spark_func/string/ucase-04.sql       |    17 -
 .../query/sql_spark_func/string/unbase64-01.sql    |    15 -
 .../query/sql_spark_func/string/unbase64-02.sql    |    15 -
 .../query/sql_spark_func/string/unbase64-03.sql    |    17 -
 .../query/sql_spark_func/time/add_months-01.sql    |    15 -
 .../query/sql_spark_func/time/add_months-02.sql    |    15 -
 .../query/sql_spark_func/time/date_add-01.sql      |    15 -
 .../query/sql_spark_func/time/date_add-02.sql      |    15 -
 .../query/sql_spark_func/time/date_sub-01.sql      |    15 -
 .../query/sql_spark_func/time/date_sub-02.sql      |    15 -
 .../query/sql_spark_func/time/from_unixtime-01.sql |    15 -
 .../query/sql_spark_func/time/from_unixtime-02.sql |    15 -
 .../query/sql_spark_func/time/from_unixtime-03.sql |    15 -
 .../query/sql_spark_func/time/from_unixtime-04.sql |    15 -
 .../sql_spark_func/time/from_utc_timestamp-01.sql  |    15 -
 .../sql_spark_func/time/from_utc_timestamp-02.sql  |    15 -
 .../sql_spark_func/time/months_between-01.sql      |    15 -
 .../sql_spark_func/time/months_between-02.sql      |    15 -
 .../sql_spark_func/time/months_between-03.sql      |    15 -
 .../time/result-DEFAULT/add_months-01.sql.json     | 10000 -------------------
 .../time/result-DEFAULT/add_months-01.sql.schema   |     1 -
 .../time/result-DEFAULT/add_months-02.sql.json     | 10000 -------------------
 .../time/result-DEFAULT/add_months-02.sql.schema   |     1 -
 .../time/result-DEFAULT/date_add-01.sql.json       | 10000 -------------------
 .../time/result-DEFAULT/date_add-01.sql.schema     |     1 -
 .../time/result-DEFAULT/date_add-02.sql.json       | 10000 -------------------
 .../time/result-DEFAULT/date_add-02.sql.schema     |     1 -
 .../time/result-DEFAULT/date_sub-01.sql.json       | 10000 -------------------
 .../time/result-DEFAULT/date_sub-01.sql.schema     |     1 -
 .../time/result-DEFAULT/date_sub-02.sql.json       | 10000 -------------------
 .../time/result-DEFAULT/date_sub-02.sql.schema     |     1 -
 .../time/result-DEFAULT/from_unixtime-01.sql.json  | 10000 -------------------
 .../result-DEFAULT/from_unixtime-01.sql.schema     |     1 -
 .../time/result-DEFAULT/from_unixtime-02.sql.json  | 10000 -------------------
 .../result-DEFAULT/from_unixtime-02.sql.schema     |     1 -
 .../time/result-DEFAULT/from_unixtime-03.sql.json  | 10000 -------------------
 .../result-DEFAULT/from_unixtime-03.sql.schema     |     1 -
 .../time/result-DEFAULT/from_unixtime-04.sql.json  | 10000 -------------------
 .../result-DEFAULT/from_unixtime-04.sql.schema     |     1 -
 .../result-DEFAULT/from_utc_timestamp-01.sql.json  | 10000 -------------------
 .../from_utc_timestamp-01.sql.schema               |     1 -
 .../result-DEFAULT/from_utc_timestamp-02.sql.json  |  5000 ----------
 .../from_utc_timestamp-02.sql.schema               |     1 -
 .../time/result-DEFAULT/months_between-01.sql.json | 10000 -------------------
 .../result-DEFAULT/months_between-01.sql.schema    |     1 -
 .../time/result-DEFAULT/months_between-02.sql.json | 10000 -------------------
 .../result-DEFAULT/months_between-02.sql.schema    |     1 -
 .../time/result-DEFAULT/months_between-03.sql.json |  5000 ----------
 .../result-DEFAULT/months_between-03.sql.schema    |     1 -
 .../result-DEFAULT/to_utc_timestamp-01.sql.json    | 10000 -------------------
 .../result-DEFAULT/to_utc_timestamp-01.sql.schema  |     1 -
 .../result-DEFAULT/to_utc_timestamp-02.sql.json    |  5000 ----------
 .../result-DEFAULT/to_utc_timestamp-02.sql.schema  |     1 -
 .../time/result-DEFAULT/trunc-01.sql.json          | 10000 -------------------
 .../time/result-DEFAULT/trunc-01.sql.schema        |     1 -
 .../time/result-DEFAULT/trunc-02.sql.json          | 10000 -------------------
 .../time/result-DEFAULT/trunc-02.sql.schema        |     1 -
 .../time/result-DEFAULT/weekofyear-01.sql.json     |    10 -
 .../time/result-DEFAULT/weekofyear-01.sql.schema   |     1 -
 .../time/result-DEFAULT/weekofyear-02.sql.json     | 10000 -------------------
 .../time/result-DEFAULT/weekofyear-02.sql.schema   |     1 -
 .../time/result-DEFAULT/weekofyear-03.sql.json     |    10 -
 .../time/result-DEFAULT/weekofyear-03.sql.schema   |     1 -
 .../time/result-DEFAULT/weekofyear-04.sql.json     |    10 -
 .../time/result-DEFAULT/weekofyear-04.sql.schema   |     1 -
 .../sql_spark_func/time/to_utc_timestamp-01.sql    |    15 -
 .../sql_spark_func/time/to_utc_timestamp-02.sql    |    15 -
 .../query/sql_spark_func/time/trunc-01.sql         |    15 -
 .../query/sql_spark_func/time/trunc-02.sql         |    15 -
 .../query/sql_spark_func/time/weekofyear-01.sql    |    17 -
 .../query/sql_spark_func/time/weekofyear-02.sql    |    15 -
 .../query/sql_spark_func/time/weekofyear-03.sql    |    19 -
 .../query/sql_spark_func/time/weekofyear-04.sql    |    15 -
 .../resources/query/sql_special_join/query001.sql  |    28 -
 .../resources/query/sql_special_join/query002.sql  |    18 -
 .../resources/query/sql_special_join/query003.sql  |    18 -
 .../resources/query/sql_special_join/query004.sql  |    22 -
 .../resources/query/sql_special_join/query005.sql  |    24 -
 .../resources/query/sql_special_join/query006.sql  |    36 -
 .../resources/query/sql_special_join/query007.sql  |    30 -
 .../resources/query/sql_special_join/query008.sql  |    33 -
 .../resources/query/sql_special_join/query009.sql  |    32 -
 .../resources/query/sql_special_join/query010.sql  |    27 -
 .../resources/query/sql_special_join/query011.sql  |    27 -
 .../resources/query/sql_special_join/query012.sql  |    29 -
 .../resources/query/sql_special_join/query013.sql  |    17 -
 .../resources/query/sql_special_join/query014.sql  |    37 -
 .../resources/query/sql_special_join/query015.sql  |    32 -
 .../resources/query/sql_special_join/query016.sql  |    30 -
 .../resources/query/sql_special_join/query017.sql  |    25 -
 .../resources/query/sql_special_join/query018.sql  |    51 -
 .../resources/query/sql_special_join/query019.sql  |    28 -
 .../query/sql_special_join/query020.sql.disable    |    39 -
 .../query/sql_special_join/query021.sql.disable    |    48 -
 .../resources/query/sql_special_join/query022.sql  |    36 -
 .../resources/query/sql_special_join/query023.sql  |    42 -
 .../query/sql_special_join/query024.sql.disable    |    44 -
 .../query/sql_special_join/query025.sql.disable    |    44 -
 .../resources/query/sql_special_join/query026.sql  |    48 -
 .../query/sql_special_join_condition/query001.sql  |    33 -
 .../query/sql_special_join_condition/query002.sql  |    34 -
 .../query/sql_special_join_condition/query003.sql  |    35 -
 .../query/sql_special_join_condition/query004.sql  |    21 -
 .../query/sql_special_join_condition/query005.sql  |    22 -
 .../query/sql_special_join_condition/query006.sql  |    22 -
 .../test/resources/query/sql_streaming/query01.sql |    16 -
 .../test/resources/query/sql_streaming/query02.sql |    16 -
 .../test/resources/query/sql_streaming/query03.sql |    16 -
 .../test/resources/query/sql_streaming/query04.sql |    16 -
 .../test/resources/query/sql_streaming/query05.sql |    16 -
 .../test/resources/query/sql_streaming/query06.sql |    16 -
 .../test/resources/query/sql_streaming/query07.sql |    16 -
 .../test/resources/query/sql_streaming/query08.sql |    16 -
 .../test/resources/query/sql_streaming/query09.sql |    16 -
 .../test/resources/query/sql_streaming/query10.sql |    16 -
 .../test/resources/query/sql_subquery/query00.sql  |    20 +-
 .../test/resources/query/sql_subquery/query01.sql  |    30 +-
 .../test/resources/query/sql_subquery/query02.sql  |    24 +-
 .../test/resources/query/sql_subquery/query04.sql  |    20 +-
 .../test/resources/query/sql_subquery/query05.sql  |    22 +-
 .../test/resources/query/sql_subquery/query07.sql  |    20 +-
 .../test/resources/query/sql_subquery/query08.sql  |    18 +-
 .../test/resources/query/sql_subquery/query10.sql  |    18 +-
 .../test/resources/query/sql_subquery/query12.sql  |    20 +-
 .../test/resources/query/sql_subquery/query13.sql  |    18 +-
 .../test/resources/query/sql_subquery/query19.sql  |    32 +-
 .../test/resources/query/sql_subquery/query25.sql  |    34 +-
 .../test/resources/query/sql_subquery/query26.sql  |    32 +-
 .../test/resources/query/sql_subquery/query27.sql  |    30 +-
 .../test/resources/query/sql_subquery/query28.sql  |    28 +-
 .../test/resources/query/sql_subquery/query29.sql  |    28 +-
 .../test/resources/query/sql_subquery/query31.sql  |    22 +-
 .../test/resources/query/sql_subquery/query32.sql  |    26 +-
 .../test/resources/query/sql_subquery/query33.sql  |    20 +-
 .../test/resources/query/sql_subquery/query34.sql  |    26 +-
 .../test/resources/query/sql_subquery/query37.sql  |    18 +-
 .../test/resources/query/sql_subquery/query38.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query00.sql  |    26 +-
 .../test/resources/query/sql_sum_expr/query01.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query02.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query03.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query04.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query05.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query06.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query07.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query10.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query11.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query12.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query13.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query14.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query15.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query21.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query22.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query25.sql  |    24 +-
 .../test/resources/query/sql_sum_expr/query26.sql  |    22 +-
 .../test/resources/query/sql_sum_expr/query27.sql  |    22 +-
 .../test/resources/query/sql_sum_expr/query28.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query29.sql  |    22 +-
 .../test/resources/query/sql_sum_expr/query30.sql  |    20 +-
 .../test/resources/query/sql_sum_expr/query31.sql  |    20 +-
 .../test/resources/query/sql_sum_expr/query32.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query33.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query34.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query35.sql  |    20 +-
 .../test/resources/query/sql_sum_expr/query36.sql  |    18 +-
 .../test/resources/query/sql_sum_expr/query37.sql  |    26 +-
 .../test/resources/query/sql_sum_expr/query41.sql  |    18 +-
 .../query00.sql                                    |    48 -
 .../query01.sql                                    |    62 -
 .../test/resources/query/sql_tableau/query00.sql   |    22 +-
 .../test/resources/query/sql_tableau/query01.sql   |    44 +-
 .../test/resources/query/sql_tableau/query02.sql   |    38 +-
 .../test/resources/query/sql_tableau/query03.sql   |    42 +-
 .../test/resources/query/sql_tableau/query04.sql   |    50 +-
 .../test/resources/query/sql_tableau/query05.sql   |    52 +-
 .../test/resources/query/sql_tableau/query06.sql   |    52 +-
 .../test/resources/query/sql_tableau/query07.sql   |    30 +-
 .../test/resources/query/sql_tableau/query08.sql   |    18 +-
 .../test/resources/query/sql_tableau/query09.sql   |    18 +-
 .../test/resources/query/sql_tableau/query10.sql   |    26 +-
 .../test/resources/query/sql_tableau/query11.sql   |    28 +-
 .../test/resources/query/sql_tableau/query12.sql   |    26 +-
 .../test/resources/query/sql_tableau/query13.sql   |    24 +-
 .../test/resources/query/sql_tableau/query14.sql   |    24 +-
 .../test/resources/query/sql_tableau/query15.sql   |    24 +-
 .../test/resources/query/sql_tableau/query16.sql   |    26 +-
 .../test/resources/query/sql_tableau/query17.sql   |    24 +-
 .../test/resources/query/sql_tableau/query18.sql   |    24 +-
 .../test/resources/query/sql_tableau/query19.sql   |    32 +-
 .../test/resources/query/sql_tableau/query20.sql   |    32 +-
 .../test/resources/query/sql_tableau/query21.sql   |    28 +-
 .../test/resources/query/sql_tableau/query23.sql   |    20 +-
 .../test/resources/query/sql_tableau/query24.sql   |    26 +-
 .../test/resources/query/sql_tableau/query25.sql   |    22 +-
 .../test/resources/query/sql_tableau/query27.sql   |    22 +-
 .../test/resources/query/sql_tableau/query28.sql   |    22 +-
 .../test/resources/query/sql_tableau/query29.sql   |    36 +-
 .../test/resources/query/sql_tableau/query30.sql   |    18 +-
 .../test/resources/query/sql_tableau/query31.sql   |    18 +-
 .../test/resources/query/sql_tableau/query32.sql   |    18 +-
 .../test/resources/query/sql_tableau/query67.sql   |    18 +-
 .../test/resources/query/sql_timeout/query01.sql   |    18 +-
 .../test/resources/query/sql_timeout/query02.sql   |    18 +-
 .../test/resources/query/sql_timestamp/query01.sql |    22 +-
 .../test/resources/query/sql_timestamp/query02.sql |    22 +-
 .../resources/query/sql_timestamp/query02_a.sql    |    18 +-
 .../test/resources/query/sql_timestamp/query03.sql |    22 +-
 .../resources/query/sql_timestamp/query03_b.sql    |    22 +-
 .../resources/query/sql_timestamp/query03_c.sql    |    22 +-
 .../resources/query/sql_timestamp/query03_d.sql    |    22 +-
 .../test/resources/query/sql_timestamp/query04.sql |    22 +-
 .../test/resources/query/sql_timestamp/query05.sql |    22 +-
 .../test/resources/query/sql_timestamp/query11.sql |    20 +-
 .../test/resources/query/sql_timestamp/query12.sql |    20 +-
 .../test/resources/query/sql_timestamp/query13.sql |    20 +-
 .../query/sql_timestamp/query14_spark.sql          |    18 +-
 .../test/resources/query/sql_timestamp/query21.sql |    22 +-
 .../resources/query/sql_timestamp/query21_a.sql    |    22 +-
 .../test/resources/query/sql_timestamp/query22.sql |    22 +-
 .../resources/query/sql_timestamp/query22_a.sql    |    18 +-
 .../resources/query/sql_timestamp/query22_b.sql    |    22 +-
 .../resources/query/sql_timestamp/query22_c.sql    |    22 +-
 .../resources/query/sql_timestamp/query22_d.sql    |    20 +-
 .../resources/query/sql_timestamp/query22_e.sql    |    22 +-
 .../test/resources/query/sql_timestamp/query23.sql |    22 +-
 .../resources/query/sql_timestamp/query23_a.sql    |    22 +-
 .../test/resources/query/sql_timestamp/query24.sql |    22 +-
 .../resources/query/sql_timestamp/query24_a.sql    |    20 +-
 .../test/resources/query/sql_timestamp/query25.sql |    22 +-
 .../resources/query/sql_timestamp/query25_a.sql    |    22 +-
 .../test/resources/query/sql_timestamp/query26.sql |    18 +-
 .../test/resources/query/sql_timestamp/query27.sql |    18 +-
 .../test/resources/query/sql_timestamp/query28.sql |    18 +-
 .../test/resources/query/sql_timestamp/query29.sql |    18 +-
 .../test/resources/query/sql_timestamp/query30.sql |    18 +-
 .../test/resources/query/sql_timestamp/query31.sql |    18 +-
 .../test/resources/query/sql_timestamp/query32.sql |    18 +-
 .../test/resources/query/sql_timestamp/query33.sql |    18 +-
 .../test/resources/query/sql_timestamp/query34.sql |    18 +-
 .../test/resources/query/sql_timestamp/query35.sql |    18 +-
 .../test/resources/query/sql_timestamp/query36.sql |    18 +-
 .../test/resources/query/sql_timestamp/query37.sql |    18 +-
 .../test/resources/query/sql_timestamp/query38.sql |    18 +-
 .../test/resources/query/sql_timestamp/query39.sql |    18 +-
 .../test/resources/query/sql_timestamp/query40.sql |    18 +-
 .../test/resources/query/sql_timestamp/query41.sql |    18 +-
 .../test/resources/query/sql_timestamp/query42.sql |    18 +-
 .../test/resources/query/sql_timestamp/query43.sql |    18 +-
 .../test/resources/query/sql_timestamp/query44.sql |    18 +-
 .../test/resources/query/sql_timestamp/query45.sql |    18 +-
 .../test/resources/query/sql_timestamp/query46.sql |    18 +-
 .../test/resources/query/sql_timestamp/query47.sql |    18 +-
 .../test/resources/query/sql_timestamp/query48.sql |    18 +-
 .../test/resources/query/sql_timestamp/query49.sql |    18 +-
 .../test/resources/query/sql_timestamp/query50.sql |    18 +-
 .../test/resources/query/sql_timestamp/query51.sql |    18 +-
 .../test/resources/query/sql_timestamp/query52.sql |    18 +-
 .../test/resources/query/sql_timestamp/query53.sql |    18 +-
 .../test/resources/query/sql_timestamp/query54.sql |    18 +-
 .../test/resources/query/sql_timestamp/query55.sql |    18 +-
 .../test/resources/query/sql_timestamp/query56.sql |    18 +-
 .../test/resources/query/sql_timestamp/query57.sql |    18 +-
 .../test/resources/query/sql_timestamp/query58.sql |    18 +-
 .../test/resources/query/sql_timestamp/query59.sql |    18 +-
 .../test/resources/query/sql_timestamp/query60.sql |    18 +-
 .../test/resources/query/sql_timestamp/query61.sql |    18 +-
 .../test/resources/query/sql_timestamp/query62.sql |    18 +-
 .../test/resources/query/sql_timestamp/query63.sql |    18 +-
 .../test/resources/query/sql_timestamp/query64.sql |    18 +-
 .../test/resources/query/sql_timestamp/query65.sql |    18 +-
 .../test/resources/query/sql_timestamp/query66.sql |    18 +-
 .../test/resources/query/sql_timestamp/query67.sql |    18 +-
 .../test/resources/query/sql_timestamp/query68.sql |    18 +-
 .../test/resources/query/sql_timestamp/query69.sql |    18 +-
 .../test/resources/query/sql_timestamp/query70.sql |    18 +-
 .../test/resources/query/sql_timestamp/query71.sql |    18 +-
 .../src/test/resources/query/sql_topn/query45.sql  |    20 +-
 .../src/test/resources/query/sql_topn/query81.sql  |    20 +-
 .../src/test/resources/query/sql_topn/query82.sql  |    20 +-
 .../src/test/resources/query/sql_topn/query83.sql  |    24 +-
 .../test/resources/query/sql_truncate/query00.sql  |    29 -
 .../test/resources/query/sql_truncate/query01.sql  |    30 -
 .../test/resources/query/sql_truncate/query02.sql  |    33 -
 .../test/resources/query/sql_truncate/query03.sql  |    36 -
 .../test/resources/query/sql_truncate/query04.sql  |    39 -
 .../test/resources/query/sql_truncate/query05.sql  |    38 -
 .../test/resources/query/sql_truncate/query06.sql  |    28 -
 .../test/resources/query/sql_truncate/query07.sql  |    43 -
 .../src/test/resources/query/sql_type/query01.sql  |    18 -
 .../resources/query/sql_type/query01.sql.expected  |     4 -
 .../src/test/resources/query/sql_udf/query02.sql   |    18 +-
 .../src/test/resources/query/sql_udf/query03.sql   |    18 +-
 .../src/test/resources/query/sql_udf/query05.sql   |    18 +-
 .../src/test/resources/query/sql_udf/query07.sql   |    18 +-
 .../src/test/resources/query/sql_union/query01.sql |    19 +-
 .../src/test/resources/query/sql_union/query02.sql |    19 +-
 .../src/test/resources/query/sql_union/query03.sql |    29 +-
 .../src/test/resources/query/sql_union/query04.sql |    18 +-
 .../src/test/resources/query/sql_union/query05.sql |    18 +-
 .../src/test/resources/query/sql_union/query06.sql |    18 +-
 .../src/test/resources/query/sql_union/query07.sql |    18 +-
 .../src/test/resources/query/sql_union/query08.sql |    18 +-
 .../src/test/resources/query/sql_union/query09.sql |    18 +-
 .../src/test/resources/query/sql_union/query10.sql |    18 +-
 .../resources/query/sql_union_cache/query01.sql    |    26 -
 .../resources/query/sql_union_cache/query02.sql    |    26 -
 .../resources/query/sql_union_cache/query03.sql    |    25 -
 .../resources/query/sql_union_cache/query04.sql    |    25 -
 .../resources/query/sql_union_cache/query05.sql    |    27 -
 .../resources/query/sql_union_cache/query06.sql    |    27 -
 .../resources/query/sql_union_cache/query07.sql    |    26 -
 .../resources/query/sql_union_cache/query08.sql    |    26 -
 .../resources/query/sql_union_cache/query09.sql    |    26 -
 .../resources/query/sql_union_cache/query10.sql    |    26 -
 .../resources/query/sql_union_cache/query11.sql    |    27 -
 .../resources/query/sql_union_cache/query12.sql    |    27 -
 .../resources/query/sql_union_cache/query13.sql    |    27 -
 .../resources/query/sql_union_cache/query14.sql    |    27 -
 .../src/test/resources/query/sql_value/query02.sql |    18 +-
 .../src/test/resources/query/sql_value/query03.sql |    18 +-
 .../src/test/resources/query/sql_value/query04.sql |    18 +-
 .../src/test/resources/query/sql_value/query05.sql |    18 +-
 .../query/sql_verifyContent/query01.sql.disabled   |    21 -
 .../resources/query/sql_verifyContent/query02.sql  |    33 -
 .../result-DEFAULT/query02.sql.json                |     1 -
 .../result-DEFAULT/query02.sql.schema              |     1 -
 .../resources/query/sql_verifyCount/query01.sql    |    16 -
 .../resources/query/sql_verifyCount/query02.sql    |    17 -
 .../resources/query/sql_verifyCount/query03.sql    |    19 -
 .../resources/query/sql_verifyCount/query04.sql    |    16 -
 .../resources/query/sql_verifyCount/query05.sql    |    16 -
 .../resources/query/sql_verifyCount/query06.sql    |    16 -
 .../resources/query/sql_verifyCount/query07.sql    |    16 -
 .../resources/query/sql_verifyCount/query08.sql    |    17 -
 .../resources/query/sql_verifyCount/query09.sql    |    31 -
 .../query/sql_verifyCount/query09.sql.expected     |    34 -
 .../resources/query/sql_verifyCount/query10.sql    |    31 -
 .../resources/query/sql_verifyCount/query11.sql    |    33 -
 .../resources/query/sql_verifyCount/query12.sql    |    19 -
 .../result-DEFAULT/query01.sql.json                | 10000 -------------------
 .../result-DEFAULT/query01.sql.schema              |     1 -
 .../result-DEFAULT/query02.sql.json                |  4558 ---------
 .../result-DEFAULT/query02.sql.schema              |     1 -
 .../result-DEFAULT/query04.sql.json                |   100 -
 .../result-DEFAULT/query04.sql.schema              |     1 -
 .../result-DEFAULT/query05.sql.json                |   100 -
 .../result-DEFAULT/query05.sql.schema              |     1 -
 .../result-DEFAULT/query06.sql.json                |   100 -
 .../result-DEFAULT/query06.sql.schema              |     1 -
 .../result-DEFAULT/query07.sql.json                |   100 -
 .../result-DEFAULT/query07.sql.schema              |     1 -
 .../result-DEFAULT/query08.sql.json                |     1 -
 .../result-DEFAULT/query08.sql.schema              |     1 -
 .../result-DEFAULT/query09.sql.json                | 10000 -------------------
 .../result-DEFAULT/query09.sql.schema              |     1 -
 .../result-DEFAULT/query10.sql.json                | 10000 -------------------
 .../result-DEFAULT/query10.sql.schema              |     1 -
 .../result-DEFAULT/query11.sql.json                |    10 -
 .../result-DEFAULT/query11.sql.schema              |     1 -
 .../result-DEFAULT/query12.sql.json                |     1 -
 .../result-DEFAULT/query12.sql.schema              |     1 -
 .../query/sql_window/new_sql_window/query00.sql    |    18 +-
 .../query/sql_window/new_sql_window/query01.sql    |    18 +-
 .../query/sql_window/new_sql_window/query02.sql    |    18 +-
 .../query/sql_window/new_sql_window/query03.sql    |    18 +-
 .../query/sql_window/new_sql_window/query04.sql    |    18 +-
 .../query/sql_window/new_sql_window/query05.sql    |    18 +-
 .../query/sql_window/new_sql_window/query06.sql    |    18 +-
 .../query/sql_window/new_sql_window/query07.sql    |    18 +-
 .../query/sql_window/new_sql_window/query08.sql    |    18 +-
 .../query/sql_window/new_sql_window/query11.sql    |    18 +-
 .../query/sql_window/new_sql_window/query12.sql    |    18 +-
 .../query/sql_window/new_sql_window/query14.sql    |    18 +-
 .../query/sql_window/new_sql_window/query15.sql    |    18 +-
 .../query/sql_window/new_sql_window/query16.sql    |    18 +-
 .../query/sql_window/new_sql_window/query17.sql    |    18 +-
 .../query/sql_window/new_sql_window/query18.sql    |    18 +-
 .../query/sql_window/new_sql_window/query19.sql    |    18 +-
 .../query/sql_window/new_sql_window/query20.sql    |    18 +-
 .../query/sql_window/new_sql_window/query21.sql    |    18 +-
 .../query/sql_window/new_sql_window/query22.sql    |    18 +-
 .../test/resources/query/sql_window/query00.sql    |    18 +-
 .../test/resources/query/sql_window/query01.sql    |    18 +-
 .../test/resources/query/sql_window/query02.sql    |    18 +-
 .../test/resources/query/sql_window/query03.sql    |    18 +-
 .../test/resources/query/sql_window/query04.sql    |    18 +-
 .../test/resources/query/sql_window/query05.sql    |    18 +-
 .../test/resources/query/sql_window/query06.sql    |    18 +-
 .../test/resources/query/sql_window/query07.sql    |    18 +-
 .../test/resources/query/sql_window/query08.sql    |    18 +-
 .../test/resources/query/sql_window/query09.sql    |    18 +-
 .../test/resources/query/sql_window/query11.sql    |    18 +-
 .../test/resources/query/sql_window/query12.sql    |    18 +-
 .../test/resources/query/sql_window/query13.sql    |    18 +-
 .../test/resources/query/sql_window/query14.sql    |    18 +-
 .../resources/query/sql_window/query14_spark.sql   |    18 +-
 .../test/resources/query/sql_window/query15.sql    |    18 +-
 .../src/test/resources/query/temp/.gitignore       |     1 -
 .../query/unchecked_layout_list/unchecked_list.txt |    26 -
 .../springframework/conf/applicationContext.xml    |    52 +
 .../springframework/conf/kylinSecurity.xml         |   713 ++
 .../src/test/resources/sql_sinai_poc/query15.sql   |    23 +
 .../metadata/_global/project/subquery.json         |     6 +
 .../a749e414-c40e-45b7-92e4-bbfe63af705d.json      |    38 +
 .../ce2057da-54c8-4e05-b0bf-d225a6bbb62c.json      |    59 +
 .../dd09e223-b013-418e-8038-e7c5d6347f10.json      |    34 +
 .../55d92193-cadf-4157-a3d8-f85ba51149a3.json      |    85 +
 .../a749e414-c40e-45b7-92e4-bbfe63af705d.json      |    38 +
 .../ce2057da-54c8-4e05-b0bf-d225a6bbb62c.json      |    63 +
 .../a749e414-c40e-45b7-92e4-bbfe63af705d.json      |   183 +
 .../ce2057da-54c8-4e05-b0bf-d225a6bbb62c.json      |   141 +
 .../subquery/table/DEFAULT.TEST_ACCOUNT.json       |    41 +
 .../subquery/table/DEFAULT.TEST_KYLIN_FACT.json    |    75 +
 .../subquery/table_exd/DEFAULT.TEST_ACCOUNT.json   |    18 +
 .../table_exd/DEFAULT.TEST_KYLIN_FACT.json         |    17 +
 .../metadata/_global/project/comput_column.json    |     6 +
 .../4a45dc4d-937e-43cc-8faa-34d59d4e11d3.json      |    18 +
 .../4a45dc4d-937e-43cc-8faa-34d59d4e11d3.json      |    72 +
 .../4a45dc4d-937e-43cc-8faa-34d59d4e11d3.json      |   257 +
 .../metadata/comput_column/table/SSB.CUSTOMER.json |    58 +
 .../comput_column/table/SSB.P_LINEORDER.json       |   113 +
 .../_global/project/count_distinct_no_encode.json  |     6 +
 .../b06eee9f-3e6d-41de-ac96-89dbf170b99b.json      |    13 +
 .../b06eee9f-3e6d-41de-ac96-89dbf170b99b.json      |    51 +
 .../b06eee9f-3e6d-41de-ac96-89dbf170b99b.json      |   169 +
 .../table/DEFAULT.TEST_COUNT_DISTINCT.json         |    18 +
 .../metadata/_global/project/file_pruning.json     |     6 +
 .../0da6c3d6-be35-4e91-b030-3b720b4043e1.json      |    18 +
 .../3f152495-44de-406c-9abf-b11d4132aaed.json      |    18 +
 .../8c670664-8d05-466a-802f-83c023b56c77.json      |    13 +
 .../8c670664-8d05-466a-802f-83c023b56c78.json      |    13 +
 .../8c670664-8d05-466a-802f-83c023b56c79.json      |    13 +
 .../8c670664-8d05-466a-802f-83c023b56c80.json      |    13 +
 .../9cde9d25-9334-4b92-b229-a00f49453757.json      |    13 +
 .../0da6c3d6-be35-4e91-b030-3b720b4043e1.json      |    56 +
 .../3f152495-44de-406c-9abf-b11d4132aaed.json      |    42 +
 .../8c670664-8d05-466a-802f-83c023b56c77.json      |   233 +
 .../8c670664-8d05-466a-802f-83c023b56c78.json      |   234 +
 .../8c670664-8d05-466a-802f-83c023b56c79.json      |   233 +
 .../8c670664-8d05-466a-802f-83c023b56c80.json      |   233 +
 .../9cde9d25-9334-4b92-b229-a00f49453757.json      |    43 +
 .../0da6c3d6-be35-4e91-b030-3b720b4043e1.json      |   169 +
 .../3f152495-44de-406c-9abf-b11d4132aaed.json      |   166 +
 .../8c670664-8d05-466a-802f-83c023b56c77.json      |   101 +
 .../8c670664-8d05-466a-802f-83c023b56c78.json      |   101 +
 .../8c670664-8d05-466a-802f-83c023b56c79.json      |   101 +
 .../8c670664-8d05-466a-802f-83c023b56c80.json      |   101 +
 .../9cde9d25-9334-4b92-b229-a00f49453757.json      |    38 +
 .../file_pruning/table/DEFAULT.TEST_ACCOUNT.json   |    43 +
 .../table/DEFAULT.TEST_KYLIN_FACT.json             |    74 +
 .../file_pruning/table/DEFAULT.TEST_MEASURE.json   |   112 +
 .../file_pruning/table/DEFAULT.TEST_ORDER.json     |    36 +
 .../table/DEFAULT.TEST_ORDER_STRING_TS.json        |    36 +
 .../project/flattable_without_join_lookup.json     |     6 +
 .../8c670664-8d05-466a-802f-83c023b56c77.json      |    13 +
 .../9cde9d25-9334-4b92-b229-a00f49453757.json      |    13 +
 .../8c670664-8d05-466a-802f-83c023b56c77.json      |    40 +
 .../9cde9d25-9334-4b92-b229-a00f49453757.json      |    42 +
 .../8c670664-8d05-466a-802f-83c023b56c77.json      |   140 +
 .../9cde9d25-9334-4b92-b229-a00f49453757.json      |   138 +
 .../table/DEFAULT.TEST_KYLIN_FACT.json             |    74 +
 .../table/DEFAULT.TEST_MEASURE.json                |   112 +
 .../table/DEFAULT.TEST_ORDER.json                  |    36 +
 .../table/DEFAULT.TEST_ORDER_STRING.json           |    36 +
 .../_global/project/heterogeneous_segment_2.json   |    35 +
 ...gment_with_snapshot_large_than_input_bytes.json |    35 +
 .../heterogeneous_segment_without_snapshot.json    |    35 +
 .../4802b471-fb69-4b08-a45e-ab3e314e2f6c.json      |   272 +
 .../3f2860d5-0a4c-4f52-b27b-2627caafe769.json      |   213 +
 .../2805396d-4fe5-4541-8bc0-944caacaa1a3.json      |    23 +
 .../2b0b5bb9-df78-5817-5245-2a28c451035d.json      |    21 +
 .../a7cef448-34e9-4acf-8632-0a3db101cef4.json      |    21 +
 .../da9b8d84-bd69-455f-ab1a-4718d9d5b5a1.json      |    21 +
 .../3f2860d5-0a4c-4f52-b27b-2627caafe769.json      |    42 +
 .../3f2860d5-0a4c-4f52-b27b-2627caafe769.json      |   216 +
 .../table/DEFAULT.KYLIN_ACCOUNT.json               |    53 +
 .../table/DEFAULT.KYLIN_CAL_DT.json                |   529 +
 .../table/DEFAULT.KYLIN_CATEGORY_GROUPINGS.json    |   213 +
 .../table/DEFAULT.KYLIN_COUNTRY.json               |    44 +
 .../table/DEFAULT.KYLIN_SALES.json                 |   107 +
 .../table_exd/DEFAULT.KYLIN_ACCOUNT.json           |    18 +
 .../3f2860d5-0a4c-4f52-b27b-2627caafe769.json      |   213 +
 .../2805396d-4fe5-4541-8bc0-944caacaa1a3.json      |    23 +
 .../2b0b5bb9-df78-5817-5245-2a28c451035d.json      |    21 +
 .../a7cef448-34e9-4acf-8632-0a3db101cef4.json      |    21 +
 .../da9b8d84-bd69-455f-ab1a-4718d9d5b5a1.json      |    21 +
 .../3f2860d5-0a4c-4f52-b27b-2627caafe769.json      |    42 +
 .../3f2860d5-0a4c-4f52-b27b-2627caafe769.json      |   216 +
 .../table/DEFAULT.KYLIN_ACCOUNT.json               |    53 +
 .../table/DEFAULT.KYLIN_CAL_DT.json                |   529 +
 .../table/DEFAULT.KYLIN_CATEGORY_GROUPINGS.json    |   213 +
 .../table/DEFAULT.KYLIN_COUNTRY.json               |    44 +
 .../table/DEFAULT.KYLIN_SALES.json                 |   107 +
 .../table_exd/DEFAULT.KYLIN_ACCOUNT.json           |    18 +
 .../31a71d6b-49ac-4607-9dc5-d419d32a821c.json      |   213 +
 .../2805396d-4fe5-4541-8bc0-944caacaa1a3.json      |    23 +
 .../2b0b5bb9-df78-5817-5245-2a28c451035d.json      |    21 +
 .../a7cef448-34e9-4acf-8632-0a3db101cef4.json      |    21 +
 .../da9b8d84-bd69-455f-ab1a-4718d9d5b5a1.json      |    21 +
 .../31a71d6b-49ac-4607-9dc5-d419d32a821c.json      |    42 +
 .../31a71d6b-49ac-4607-9dc5-d419d32a821c.json      |   216 +
 .../table/DEFAULT.KYLIN_ACCOUNT.json               |    53 +
 .../table/DEFAULT.KYLIN_CAL_DT.json                |   529 +
 .../table/DEFAULT.KYLIN_CATEGORY_GROUPINGS.json    |   213 +
 .../table/DEFAULT.KYLIN_COUNTRY.json               |    44 +
 .../table/DEFAULT.KYLIN_SALES.json                 |   107 +
 .../table_exd/DEFAULT.KYLIN_ACCOUNT.json           |    18 +
 .../metadata/_global/project/join_opt.json         |     6 +
 .../8c670664-8d05-466a-802f-83c023b56c77.json      |    13 +
 .../8c670664-8d05-466a-802f-83c023b56c77.json      |    98 +
 .../8c670664-8d05-466a-802f-83c023b56c77.json      |    69 +
 .../join_opt/table/DEFAULT.TEST_KYLIN_FACT.json    |    74 +
 .../metadata/_global/project/multi_partition.json  |     6 +
 .../0080e4e4-69af-449e-b09f-05c90dfa04b6.json      |    18 +
 .../b780e4e4-69af-449e-b09f-05c90dfa04b6.json      |   291 +
 .../0080e4e4-69af-449e-b09f-05c90dfa04b6.json      |    63 +
 .../b780e4e4-69af-449e-b09f-05c90dfa04b6.json      |    40 +
 .../0080e4e4-69af-449e-b09f-05c90dfa04b6.json      |   112 +
 .../b780e4e4-69af-449e-b09f-05c90dfa04b6.json      |   137 +
 .../table/DEFAULT.TEST_BANK_INCOME.json            |    37 +
 .../table/DEFAULT.TEST_BANK_LOCATION.json          |    32 +
 .../_global/project/multi_partition_pruning.json   |     6 +
 .../8c670664-8d05-466a-802f-83c023b56c76.json      |    13 +
 .../8c670664-8d05-466a-802f-83c023b56c77.json      |    13 +
 .../8c670664-8d05-466a-802f-83c023b56c78.json      |    13 +
 .../8c670664-8d05-466a-802f-83c023b56c79.json      |    13 +
 .../8c670664-8d05-466a-802f-83c023b56c80.json      |    13 +
 .../9cde9d25-9334-4b92-b229-a00f49453757.json      |    13 +
 .../8c670664-8d05-466a-802f-83c023b56c76.json      |   238 +
 .../8c670664-8d05-466a-802f-83c023b56c77.json      |   235 +
 .../8c670664-8d05-466a-802f-83c023b56c78.json      |   236 +
 .../8c670664-8d05-466a-802f-83c023b56c79.json      |   235 +
 .../8c670664-8d05-466a-802f-83c023b56c80.json      |    62 +
 .../9cde9d25-9334-4b92-b229-a00f49453757.json      |    42 +
 .../8c670664-8d05-466a-802f-83c023b56c76.json      |   148 +
 .../8c670664-8d05-466a-802f-83c023b56c77.json      |   142 +
 .../8c670664-8d05-466a-802f-83c023b56c78.json      |   142 +
 .../8c670664-8d05-466a-802f-83c023b56c79.json      |   142 +
 .../8c670664-8d05-466a-802f-83c023b56c80.json      |   142 +
 .../9cde9d25-9334-4b92-b229-a00f49453757.json      |    65 +
 .../table/DEFAULT.TEST_ACCOUNT.json                |    43 +
 .../table/DEFAULT.TEST_KYLIN_FACT.json             |    74 +
 .../table/DEFAULT.TEST_MEASURE.json                |   112 +
 .../table/DEFAULT.TEST_ORDER.json                  |    36 +
 .../table/DEFAULT.TEST_ORDER_STRING_TS.json        |    36 +
 .../_global/project/multiple_columns_in.json       |     6 +
 .../7c670664-8d05-466a-802f-83c023b56c77.json      |    13 +
 .../7c670664-8d05-466a-802f-83c023b56c77.json      |    42 +
 .../7c670664-8d05-466a-802f-83c023b56c77.json      |    40 +
 .../table/DEFAULT.TEST_KYLIN_FACT.json             |    74 +
 .../metadata/_global/project/multiple_cuboids.json |     6 +
 .../b58bfe46-78e7-4c67-9850-5abd6abd6cf4.json      |    13 +
 .../b58bfe46-78e7-4c67-9850-5abd6abd6cf4.json      |    61 +
 .../b58bfe46-78e7-4c67-9850-5abd6abd6cf4.json      |   102 +
 .../table/DEFAULT.TEST_KYLIN_FACT.json             |    74 +
 .../metadata/_global/project/multiple_topn.json    |     6 +
 .../c6381db2-802f-4a25-98f0-bfe021c304eg.json      |    22 +
 .../d9f564ce-bf63-498e-b346-db982fcf91f9.json      |    22 +
 .../c6381db2-802f-4a25-98f0-bfe021c304eg.json      |    40 +
 .../d9f564ce-bf63-498e-b346-db982fcf91f9.json      |    74 +
 .../c6381db2-802f-4a25-98f0-bfe021c304eg.json      |   128 +
 .../d9f564ce-bf63-498e-b346-db982fcf91f9.json      |   114 +
 .../table/DEFAULT.TEST_KYLIN_FACT.json             |    75 +
 .../_global/project/opt_intersect_count.json       |     6 +
 .../c9ddd37e-c870-4ccf-a131-5eef8fe6cb7e.json      |    13 +
 .../c9ddd37e-c870-4ccf-a131-5eef8fe6cb7e.json      |   110 +
 .../c9ddd37e-c870-4ccf-a131-5eef8fe6cb7e.json      |    80 +
 .../table/DEFAULT.TEST_INTERSECT_COUNT.json        |    40 +
 .../metadata/_global/project/kylin.json            |    35 +
 .../cce7b90d-c1ac-49ef-abc3-f8971eb91544.json      |    79 +
 .../5d450d1a-170e-4410-9c93-6a34903fe7af.json      |     8 +
 .../a1fc70d4-3640-4554-9d53-b0d67b98de13.json      |   188 +
 .../cce7b90d-c1ac-49ef-abc3-f8971eb91544.json      |    40 +
 .../cce7b90d-c1ac-49ef-abc3-f8971eb91544.json      |   113 +
 .../kylin/table/DEFAULT.TEST_KYLIN_FACT.json       |    95 +
 .../metadata/_global/project/partition_col.json    |     6 +
 .../partition_col/dataflow/INT_PAR_COL.json        |    13 +
 .../partition_col/dataflow/LONG_PAR_COL.json       |    13 +
 .../partition_col/dataflow/STR_PAR_COL1.json       |    13 +
 .../partition_col/dataflow/STR_PAR_COL2.json       |    13 +
 .../partition_col/dataflow/STR_PAR_COL3.json       |    13 +
 .../partition_col/dataflow/STR_PAR_COL4.json       |    13 +
 .../partition_col/dataflow/STR_PAR_COL5.json       |    13 +
 .../partition_col/dataflow/STR_PAR_COL6.json       |    13 +
 .../partition_col/index_plan/INT_PAR_COL.json      |    40 +
 .../partition_col/index_plan/LONG_PAR_COL.json     |    40 +
 .../partition_col/index_plan/STR_PAR_COL1.json     |    40 +
 .../partition_col/index_plan/STR_PAR_COL2.json     |    40 +
 .../partition_col/index_plan/STR_PAR_COL3.json     |    40 +
 .../partition_col/index_plan/STR_PAR_COL4.json     |    40 +
 .../partition_col/index_plan/STR_PAR_COL5.json     |    40 +
 .../partition_col/index_plan/STR_PAR_COL6.json     |    40 +
 .../partition_col/model_desc/INT_PAR_COL.json      |    75 +
 .../partition_col/model_desc/LONG_PAR_COL.json     |    75 +
 .../partition_col/model_desc/STR_PAR_COL1.json     |    75 +
 .../partition_col/model_desc/STR_PAR_COL2.json     |    75 +
 .../partition_col/model_desc/STR_PAR_COL3.json     |    75 +
 .../partition_col/model_desc/STR_PAR_COL4.json     |    75 +
 .../partition_col/model_desc/STR_PAR_COL5.json     |    75 +
 .../partition_col/model_desc/STR_PAR_COL6.json     |    75 +
 .../partition_col/table/DEFAULT.TEST_PAR_COL.json  |    57 +
 .../metadata/_global/project/reuse_flattable.json  |     6 +
 .../75080248-367e-4bac-9fd7-322517ee0227.json      |    13 +
 .../75080248-367e-4bac-9fd7-322517ee0227.json      |    42 +
 .../75080248-367e-4bac-9fd7-322517ee0227.json      |    86 +
 .../table/DEFAULT.TEST_KYLIN_FACT.json             |    74 +
 .../metadata/_global/project/spanning_tree.json    |     6 +
 .../75080248-367e-4bac-9fd7-322517ee0227.json      |    13 +
 .../75080248-367e-4bac-9fd7-322517ee0227.json      |   185 +
 .../75080248-367e-4bac-9fd7-322517ee0227.json      |    62 +
 .../table/DEFAULT.TEST_KYLIN_FACT.json             |    74 +
 .../metadata/_global/project/char_n_column.json    |     6 +
 .../c9ddd37e-c870-4ccf-a131-5eef8fe6cb7e.json      |    13 +
 .../c9ddd37e-c870-4ccf-a131-5eef8fe6cb7e.json      |   110 +
 .../c9ddd37e-c870-4ccf-a131-5eef8fe6cb7e.json      |    80 +
 .../char_n_column/table/DEFAULT.TEST_CHAR_N.json   |    40 +
 .../metadata/_global/project/timezone.json         |     6 +
 .../8c670664-8d05-466a-802f-83c023b56c77.json      |    13 +
 .../9cde9d25-9334-4b92-b229-a00f49453757.json      |    13 +
 .../8c670664-8d05-466a-802f-83c023b56c77.json      |    58 +
 .../9cde9d25-9334-4b92-b229-a00f49453757.json      |    43 +
 .../8c670664-8d05-466a-802f-83c023b56c77.json      |   140 +
 .../9cde9d25-9334-4b92-b229-a00f49453757.json      |    38 +
 .../timezone/table/DEFAULT.TEST_KYLIN_FACT.json    |    74 +
 .../timezone/table/DEFAULT.TEST_MEASURE.json       |   112 +
 .../timezone/table/DEFAULT.TEST_ORDER.json         |    36 +
 .../timezone/table/DEFAULT.TEST_ORDER_STRING.json  |    36 +
 .../_global/project/topn_with_chinese.json         |     6 +
 .../a6e11e79-40e1-40a6-927b-2c05cfbba81e.json      |    18 +
 .../a6e11e79-40e1-40a6-927b-2c05cfbba81e.json      |    39 +
 .../a6e11e79-40e1-40a6-927b-2c05cfbba81e.json      |    78 +
 .../table/DEFAULT.TOPN_WITH_CHINESE.json           |    25 +
 .../engine/spark/NLocalWithSparkSessionTest.java   |     2 +-
 1928 files changed, 44607 insertions(+), 883740 deletions(-)

diff --git a/pom.xml b/pom.xml
index 485b7b4a4d..0a68c32b53 100644
--- a/pom.xml
+++ b/pom.xml
@@ -65,7 +65,7 @@
 
         <!-- Spark versions -->
         <delta.version>1.2.1</delta.version>
-        <spark.version>3.2.0-kylin-4.5.20.0-SNAPSHOT</spark.version>
+        <spark.version>3.2.0-kylin-4.5.20.0</spark.version>
 
         <roaring.version>0.9.2-kylin-r4</roaring.version>
 
diff --git a/src/examples/test_case_data/localmeta/data/XXXXXXXXX_XXXXXXXXX.X_XXXXXXXX_XX_XX.csv b/src/examples/test_case_data/localmeta/data/XXXXXXXXX_XXXXXXXXX.X_XXXXXXXX_XX_XX.csv
new file mode 100755
index 0000000000..3344801a89
--- /dev/null
+++ b/src/examples/test_case_data/localmeta/data/XXXXXXXXX_XXXXXXXXX.X_XXXXXXXX_XX_XX.csv
@@ -0,0 +1,5 @@
+10,210,1022,19,754,5012,1284,532,41,48,432,421,532,532,67,43,643,754,345,25,64,34,64,34,1,432,532,1,23,65,23,54,43,64,23,53,64,25,75,24,53,64,23,53,65,35,64,23,64,54,67,dfg,f23,gfd,2008-11-09,gfd,d35,gfd,gfd,hf,male,gds,gds,hoboken,NJ,07030,2014252942,gms,Tom Smith,good,gs,gvc,ghd,gds
+11,211,1025,12,734,5013,1284,532,41,48,432,421,532,532,67,43,643,754,345,25,64,34,64,34,643,1,1,2,23,65,23,54,43,64,23,53,64,25,75,24,53,64,23,53,65,35,64,23,64,54,67,dfg,f23,gfd,2014-07-09,gfd,d35,gfd,gfd,hf,male,gds,gds,jersey city,NJ,07307,2014252942,gms,MUKVIN XU,good,gs,gvc,ghd,gds
+12,211,1023,11,744,5012,1284,532,41,48,432,421,532,532,67,43,643,754,345,25,64,37,64,34,2,432,532,2,2,65,23,54,43,64,23,53,64,25,75,24,53,64,23,53,65,35,64,23,64,54,67,dfg,f23,gfd,2010-09-18,gfd,d35,gfd,gfd,hf,male,gds,gds,jersey city,NJ,07307,2014252942,gms,MUKVIN XU,good,gs,gvc,ghd,gds
+13,214,1022,13,766,5012,1284,532,41,48,432,421,532,532,67,43,643,754,345,25,64,31,64,1,643,1,532,2,1,65,23,54,43,64,23,53,64,25,75,24,53,64,23,53,65,35,64,23,64,54,67,dfg,f23,gfd,2005-05-09,gfd,d35,gfd,gfd,hf,male,gds,gds,jersey city,NJ,07307,2014252942,gms,MUKVIN XU,good,gs,gvc,ghd,gds
+16,256,1027,16,754,5012,1284,532,41,48,432,421,532,532,67,43,643,754,345,25,64,30,64,34,1,432,532,1,23,65,23,54,43,64,23,53,64,25,75,24,53,64,23,53,65,35,64,23,64,54,67,dfg,f23,gfd,2011-09-11,gfd,d35,gfd,gfd,hf,male,gds,gds,hoboken,NJ,07030,2014252942,gms,Tom Smith,good,gs,gvc,ghd,gds
diff --git a/src/examples/test_case_data/localmeta/metadata/newten/table/XXXXXXXXX_XXXXXXXXX.X_XXXXXXXX_XX_XX.json b/src/examples/test_case_data/localmeta/metadata/newten/table/XXXXXXXXX_XXXXXXXXX.X_XXXXXXXX_XX_XX.json
new file mode 100755
index 0000000000..597be1e311
--- /dev/null
+++ b/src/examples/test_case_data/localmeta/metadata/newten/table/XXXXXXXXX_XXXXXXXXX.X_XXXXXXXX_XX_XX.json
@@ -0,0 +1,306 @@
+{
+  "uuid" : "44587432-95e9-48c7-8a24-23a9ea8b73xx",
+  "last_modified" : 1509549988629,
+  "version" : "2.3.0.20500",
+  "name" : "X_XXXXXXXX_XX_XX",
+  "columns" : [ {
+    "id" : "1",
+    "name" : "EFFMONTH",
+    "datatype" : "varchar(5)"
+  }, {
+    "id" : "2",
+    "name" : "EFFPER",
+    "datatype" : "varchar(10)"
+  }, {
+    "id" : "3",
+    "name" : "EFFYEAR",
+    "datatype" : "decimal(20,0)"
+  }, {
+    "id" : "4",
+    "name" : "MEMBER_MONTHS",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "5",
+    "name" : "TOT_MED_SPEND",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "6",
+    "name" : "TOT_IP_SPEND",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "7",
+    "name" : "IP_ADMITS",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "8",
+    "name" : "IP_HOS_ADMITS",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "9",
+    "name" : "IP_HOS_SPEND",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "10",
+    "name" : "TOT_ED_SPEND",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "11",
+    "name" : "ED_VISITS",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "12",
+    "name" : "PCP_VISITS",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "13",
+    "name" : "SPECIALTY_CONSULT_VISITS",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "14",
+    "name" : "TOT_LAB_SPEND",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "15",
+    "name" : "TOT_RAD_SPEND",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "16",
+    "name" : "TOT_RX_SPEND",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "17",
+    "name" : "TOT_RX_LINES",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "18",
+    "name" : "TOT_OUT_SPEND",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "19",
+    "name" : "TOT_PROF_SPEND",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "20",
+    "name" : "TOT_OTHER_SPEND",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "21",
+    "name" : "MARA_CONCURRENT_RISK",
+    "datatype" : "decimal(5,2)"
+  }, {
+    "id" : "22",
+    "name" : "MARA_PROSPECTIVE_RISK",
+    "datatype" : "decimal(5,2)"
+  }, {
+    "id" : "23",
+    "name" : "MARA_MEDICARE_RISK_SCORE",
+    "datatype" : "decimal(5,2)"
+  }, {
+    "id" : "24",
+    "name" : "DIABETES",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "25",
+    "name" : "VASCULAR_DISEASE",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "26",
+    "name" : "COPD",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "27",
+    "name" : "CHF",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "28",
+    "name" : "AMI",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "29",
+    "name" : "STROKE",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "30",
+    "name" : "CHRONIC_CONDITIONS_CNT",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "31",
+    "name" : "CLAIM_COUNT",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "32",
+    "name" : "PAR_DOMESTIC",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "33",
+    "name" : "NON_PAR_DOMESTIC",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "34",
+    "name" : "PAR_NON_DOMESTIC",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "35",
+    "name" : "NON_PAR_NON_DOMESTIC",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "36",
+    "name" : "PAR_DOMESTIC_SPEND",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "37",
+    "name" : "NON_PAR_DOMESTIC_SPEND",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "38",
+    "name" : "PAR_NON_DOMESTIC_SPEND",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "39",
+    "name" : "NON_PAR_NON_DOMESTIC_SPEND",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "40",
+    "name" : "PCP_PAR_DOMESTIC",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "41",
+    "name" : "PCP_NON_PAR_DOMESTIC",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "42",
+    "name" : "PCP_PAR_NON_DOMESTIC",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "43",
+    "name" : "PCP_NON_PAR_NON_DOMESTIC",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "44",
+    "name" : "SPEC_PAR_DOMESTIC",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "45",
+    "name" : "SPEC_NON_PAR_DOMESTIC",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "46",
+    "name" : "SPEC_PAR_NON_DOMESTIC",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "47",
+    "name" : "SPEC_NON_PAR_NON_DOMESTIC",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "48",
+    "name" : "OP_PAR_DOMESTIC",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "49",
+    "name" : "OP_NON_PAR_DOMESTIC",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "50",
+    "name" : "OP_PAR_NON_DOMESTIC",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "51",
+    "name" : "OP_NON_PAR_NON_DOMESTIC",
+    "datatype" : "decimal(10,0)"
+  }, {
+    "id" : "52",
+    "name" : "PAYER",
+    "datatype" : "varchar(20)"
+  }, {
+    "id" : "53",
+    "name" : "XXXXXX_XX",
+    "datatype" : "varchar(100)"
+  }, {
+    "id" : "54",
+    "name" : "LOB",
+    "datatype" : "varchar(50)"
+  }, {
+    "id" : "55",
+    "name" : "DOB",
+    "datatype" : "date"
+  }, {
+    "id" : "56",
+    "name" : "HICN",
+    "datatype" : "varchar(60)"
+  }, {
+    "id" : "57",
+    "name" : "SUBSCRIBER_ID",
+    "datatype" : "varchar(45)"
+  }, {
+    "id" : "58",
+    "name" : "FIRST_NAME",
+    "datatype" : "varchar(100)"
+  }, {
+    "id" : "59",
+    "name" : "LAST_NAME",
+    "datatype" : "varchar(100)"
+  }, {
+    "id" : "60",
+    "name" : "MEMBER_NAME",
+    "datatype" : "varchar(200)"
+  }, {
+    "id" : "61",
+    "name" : "GENDER",
+    "datatype" : "varchar(10)"
+  }, {
+    "id" : "62",
+    "name" : "ADDRESS1",
+    "datatype" : "varchar(100)"
+  }, {
+    "id" : "63",
+    "name" : "ADDRESS2",
+    "datatype" : "varchar(100)"
+  }, {
+    "id" : "64",
+    "name" : "CITY",
+    "datatype" : "varchar(50)"
+  }, {
+    "id" : "65",
+    "name" : "STATE",
+    "datatype" : "varchar(26)"
+  }, {
+    "id" : "66",
+    "name" : "ZIP",
+    "datatype" : "varchar(45)"
+  }, {
+    "id" : "67",
+    "name" : "PHONE",
+    "datatype" : "varchar(40)"
+  }, {
+    "id" : "68",
+    "name" : "NPI",
+    "datatype" : "varchar(20)"
+  }, {
+    "id" : "69",
+    "name" : "FULL_NAME",
+    "datatype" : "varchar(100)"
+  }, {
+    "id" : "70",
+    "name" : "EMP_STATUS",
+    "datatype" : "varchar(25)"
+  }, {
+    "id" : "71",
+    "name" : "DEFAULT_TIN",
+    "datatype" : "varchar(20)"
+  }, {
+    "id" : "72",
+    "name" : "DEFAULT_PRACTICE",
+    "datatype" : "varchar(100)"
+  }, {
+    "id" : "73",
+    "name" : "CLASSIFICATION",
+    "datatype" : "varchar(256)"
+  }, {
+    "id" : "74",
+    "name" : "SPECIALIZATION",
+    "datatype" : "varchar(256)"
+  } ],
+  "source_type" : 9,
+  "table_type" : "MANAGED_TABLE",
+  "database" : "XXXXXXXXX_XXXXXXXXX"
+}
diff --git a/src/kylin-it/src/test/java/org/apache/calcite/test/DiffRepository.java b/src/kylin-it/src/test/java/org/apache/calcite/test/DiffRepository.java
new file mode 100644
index 0000000000..a38b4d53b3
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/calcite/test/DiffRepository.java
@@ -0,0 +1,779 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to you under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.calcite.test;
+
+import org.apache.calcite.avatica.util.Spaces;
+import org.apache.calcite.util.Pair;
+import org.apache.calcite.util.Util;
+import org.apache.calcite.util.XmlOutput;
+import org.junit.Assert;
+import org.junit.ComparisonFailure;
+import org.w3c.dom.CDATASection;
+import org.w3c.dom.Comment;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.NamedNodeMap;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.w3c.dom.Text;
+import org.xml.sax.SAXException;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+import java.io.File;
+import java.io.IOException;
+import java.io.Writer;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A collection of resources used by tests.
+ *
+ * <p>Loads files containing test input and output into memory. If there are
+ * differences, writes out a log file containing the actual output.
+ *
+ * <p>Typical usage is as follows. A test case class defines a method
+ *
+ * <blockquote><pre><code>
+ * package com.acme.test;
+ * &nbsp;
+ * public class MyTest extends TestCase {
+ *   public DiffRepository getDiffRepos() {
+ *     return DiffRepository.lookup(MyTest.class);
+ *   }
+ * &nbsp;
+ *   &#64;Test public void testToUpper() {
+ *     getDiffRepos().assertEquals("${result}", "${string}");
+ *   }
+ * &nbsp;
+ *   &#64;Test public void testToLower() {
+ *     getDiffRepos().assertEquals("Multi-line\nstring", "${string}");
+ *   }
+ * }
+ * </code></pre></blockquote>
+ *
+ * <p>There is an accompanying reference file named after the class,
+ * <code>src/test/resources/com/acme/test/MyTest.xml</code>:</p>
+ *
+ * <blockquote><pre><code>
+ * &lt;Root&gt;
+ *     &lt;TestCase name="testToUpper"&gt;
+ *         &lt;Resource name="string"&gt;
+ *             &lt;![CDATA[String to be converted to upper case]]&gt;
+ *         &lt;/Resource&gt;
+ *         &lt;Resource name="result"&gt;
+ *             &lt;![CDATA[STRING TO BE CONVERTED TO UPPER CASE]]&gt;
+ *         &lt;/Resource&gt;
+ *     &lt;/TestCase&gt;
+ *     &lt;TestCase name="testToLower"&gt;
+ *         &lt;Resource name="result"&gt;
+ *             &lt;![CDATA[multi-line
+ * string]]&gt;
+ *         &lt;/Resource&gt;
+ *     &lt;/TestCase&gt;
+ * &lt;/Root&gt;
+ *
+ * </code></pre></blockquote>
+ *
+ * <p>If any of the test cases fails, a log file is generated, called
+ * <code>target/surefire/com/acme/test/MyTest.xml</code>, containing the actual
+ * output.</p>
+ *
+ * <p>(Maven sometimes removes this file; if it is not present, run maven with
+ * an extra {@code -X} flag.
+ * See <a href="http://jira.codehaus.org/browse/SUREFIRE-846">[SUREFIRE-846]</a>
+ * for details.)</p>
+ *
+ * <p>The log
+ * file is otherwise identical to the reference log, so once the log file has
+ * been verified, it can simply be copied over to become the new reference
+ * log:</p>
+ *
+ * <blockquote><code>cp target/surefire/com/acme/test/MyTest.xml
+ * src/test/resources/com/acme/test/MyTest.xml</code></blockquote>
+ *
+ * <p>If a resource or test case does not exist, <code>DiffRepository</code>
+ * creates them in the log file. Because DiffRepository is so forgiving, it is
+ * very easy to create new tests and test cases.</p>
+ *
+ * <p>The {@link #lookup} method ensures that all test cases share the same
+ * instance of the repository. This is important when more than one test case
+ * fails. The shared instance ensures that the generated
+ * <code>target/surefire/com/acme/test/MyTest.xml</code>
+ * file contains the actual output for <em>both</em> test cases.
+ */
+public class DiffRepository {
+  //~ Static fields/initializers ---------------------------------------------
+
+/*
+      Example XML document:
+
+      <Root>
+        <TestCase name="testFoo">
+          <Resource name="sql">
+            <![CDATA[select from emps]]>
+           </Resource>
+           <Resource name="plan">
+             <![CDATA[MockTableImplRel.FENNEL_EXEC(table=[SALES, EMP])]]>
+           </Resource>
+         </TestCase>
+         <TestCase name="testBar">
+           <Resource name="sql">
+             <![CDATA[select * from depts where deptno = 10]]>
+           </Resource>
+           <Resource name="output">
+             <![CDATA[10, 'Sales']]>
+           </Resource>
+         </TestCase>
+       </Root>
+*/
+  private static final String ROOT_TAG = "Root";
+  private static final String TEST_CASE_TAG = "TestCase";
+  private static final String TEST_CASE_NAME_ATTR = "name";
+  private static final String TEST_CASE_OVERRIDES_ATTR = "overrides";
+  private static final String RESOURCE_TAG = "Resource";
+  private static final String RESOURCE_NAME_ATTR = "name";
+
+  /**
+   * Holds one diff-repository per class. It is necessary for all test cases in
+   * the same class to share the same diff-repository: if the repository gets
+   * loaded once per test case, then only one diff is recorded.
+   */
+  private static final Map<Class, DiffRepository> MAP_CLASS_TO_REPOSITORY =
+      new HashMap<>();
+
+  //~ Instance fields --------------------------------------------------------
+
+  private final DiffRepository baseRepository;
+  private final int indent;
+  private Document doc;
+  private final Element root;
+  private final File logFile;
+  private final Filter filter;
+
+  //~ Constructors -----------------------------------------------------------
+
+  /**
+   * Creates a DiffRepository.
+   *
+   * @param refFile   Reference file
+   * @param logFile   Log file
+   * @param baseRepository Parent repository or null
+   * @param filter    Filter or null
+   */
+  private DiffRepository(
+      URL refFile,
+      File logFile,
+      DiffRepository baseRepository,
+      Filter filter) {
+    this.baseRepository = baseRepository;
+    this.filter = filter;
+    if (refFile == null) {
+      throw new IllegalArgumentException("url must not be null");
+    }
+    this.logFile = logFile;
+
+    // Load the document.
+    DocumentBuilderFactory fac = DocumentBuilderFactory.newInstance();
+    try {
+      DocumentBuilder docBuilder = fac.newDocumentBuilder();
+      try {
+        // Parse the reference file.
+        this.doc = docBuilder.parse(refFile.openStream());
+        // Don't write a log file yet -- as far as we know, it's still
+        // identical.
+      } catch (IOException e) {
+        // There's no reference file. Create and write a log file.
+        this.doc = docBuilder.newDocument();
+        this.doc.appendChild(
+            doc.createElement(ROOT_TAG));
+        flushDoc();
+      }
+      this.root = doc.getDocumentElement();
+      if (!root.getNodeName().equals(ROOT_TAG)) {
+        throw new RuntimeException("expected root element of type '" + ROOT_TAG
+            + "', but found '" + root.getNodeName() + "'");
+      }
+    } catch (ParserConfigurationException | SAXException e) {
+      throw new RuntimeException("error while creating xml parser", e);
+    }
+    indent = logFile.getPath().contains("RelOptRulesTest")
+        || logFile.getPath().contains("SqlToRelConverterTest")
+        || logFile.getPath().contains("SqlLimitsTest") ? 4 : 2;
+  }
+
+  //~ Methods ----------------------------------------------------------------
+
+  private static URL findFile(Class clazz, final String suffix) {
+    // The reference file for class "com.foo.Bar" is "com/foo/Bar.xml"
+    String rest = "/" + clazz.getName().replace('.', File.separatorChar)
+        + suffix;
+    return clazz.getResource(rest);
+  }
+
+  /**
+   * Expands a string containing one or more variables. (Currently only works
+   * if there is one variable.)
+   */
+  public synchronized String expand(String tag, String text) {
+    if (text == null) {
+      return null;
+    } else if (text.startsWith("${")
+        && text.endsWith("}")) {
+      final String testCaseName = getCurrentTestCaseName(true);
+      final String token = text.substring(2, text.length() - 1);
+      if (tag == null) {
+        tag = token;
+      }
+      assert token.startsWith(tag) : "token '" + token
+          + "' does not match tag '" + tag + "'";
+      String expanded = get(testCaseName, token);
+      if (expanded == null) {
+        // Token is not specified. Return the original text: this will
+        // cause a diff, and the actual value will be written to the
+        // log file.
+        return text;
+      }
+      if (filter != null) {
+        expanded =
+            filter.filter(this, testCaseName, tag, text, expanded);
+      }
+      return expanded;
+    } else {
+      // Make sure what appears in the resource file is consistent with
+      // what is in the Java. It helps to have a redundant copy in the
+      // resource file.
+      final String testCaseName = getCurrentTestCaseName(true);
+      if (baseRepository == null || baseRepository.get(testCaseName, tag) == null) {
+        set(tag, text);
+      }
+      return text;
+    }
+  }
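+
+  // Illustrative sketch of the expansion contract (resource names here are
+  // hypothetical): inside a test method named "testFoo", expand("plan", "${plan}")
+  // returns the CDATA recorded under <TestCase name="testFoo"><Resource name="plan">
+  // in the reference file; if no such resource has been recorded yet, the literal
+  // "${plan}" is returned, which later produces a comparison failure and writes the
+  // actual value to the log file.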
+
+  /**
+   * Sets the value of a given resource of the current test case.
+   *
+   * @param resourceName Name of the resource, e.g. "sql"
+   * @param value        Value of the resource
+   */
+  public synchronized void set(String resourceName, String value) {
+    assert resourceName != null;
+    final String testCaseName = getCurrentTestCaseName(true);
+    update(testCaseName, resourceName, value);
+  }
+
+  public void amend(String expected, String actual) {
+    if (expected.startsWith("${")
+        && expected.endsWith("}")) {
+      String token = expected.substring(2, expected.length() - 1);
+      set(token, actual);
+    }
+  }
+
+  /**
+   * Returns a given resource from a given test case.
+   *
+   * @param testCaseName Name of test case, e.g. "testFoo"
+   * @param resourceName Name of resource, e.g. "sql", "plan"
+   * @return The value of the resource, or null if not found
+   */
+  private synchronized String get(
+      final String testCaseName,
+      String resourceName) {
+    Element testCaseElement = getTestCaseElement(testCaseName, true, null);
+    if (testCaseElement == null) {
+      if (baseRepository != null) {
+        return baseRepository.get(testCaseName, resourceName);
+      } else {
+        return null;
+      }
+    }
+    final Element resourceElement =
+        getResourceElement(testCaseElement, resourceName);
+    if (resourceElement != null) {
+      return getText(resourceElement);
+    }
+    return null;
+  }
+
+  /**
+   * Returns the text under an element.
+   */
+  private static String getText(Element element) {
+    // If there is a <![CDATA[ ... ]]> child, return its text and ignore
+    // all other child elements.
+    final NodeList childNodes = element.getChildNodes();
+    for (int i = 0; i < childNodes.getLength(); i++) {
+      Node node = childNodes.item(i);
+      if (node instanceof CDATASection) {
+        return node.getNodeValue();
+      }
+    }
+
+    // Otherwise return all the text under this element (including
+    // whitespace).
+    StringBuilder buf = new StringBuilder();
+    for (int i = 0; i < childNodes.getLength(); i++) {
+      Node node = childNodes.item(i);
+      if (node instanceof Text) {
+        buf.append(((Text) node).getWholeText());
+      }
+    }
+    return buf.toString();
+  }
+
+  /**
+   * Returns the &lt;TestCase&gt; element corresponding to the current test
+   * case.
+   *
+   * @param testCaseName  Name of test case
+   * @param checkOverride Make sure that if an element overrides an element in
+   *                      a base repository, it has overrides="true"
+   * @return TestCase element, or null if not found
+   */
+  private synchronized Element getTestCaseElement(
+      final String testCaseName,
+      boolean checkOverride,
+      List<Pair<String, Element>> elements) {
+    final NodeList childNodes = root.getChildNodes();
+    for (int i = 0; i < childNodes.getLength(); i++) {
+      Node child = childNodes.item(i);
+      if (child.getNodeName().equals(TEST_CASE_TAG)) {
+        Element testCase = (Element) child;
+        final String name = testCase.getAttribute(TEST_CASE_NAME_ATTR);
+        if (testCaseName.equals(name)) {
+          if (checkOverride
+              && (baseRepository != null)
+              && (baseRepository.getTestCaseElement(testCaseName, false, null) != null)
+              && !"true".equals(
+                  testCase.getAttribute(TEST_CASE_OVERRIDES_ATTR))) {
+            throw new RuntimeException(
+                "TestCase  '" + testCaseName + "' overrides a "
+                + "test case in the base repository, but does "
+                + "not specify 'overrides=true'");
+          }
+          return testCase;
+        }
+        if (elements != null) {
+          elements.add(Pair.of(name, testCase));
+        }
+      }
+    }
+    return null;
+  }
+
+  /**
+   * Returns the name of the current test case by looking up the call stack for
+   * a method whose name starts with "test", for example "testFoo".
+   *
+   * @param fail Whether to fail if no method is found
+   * @return Name of current test case, or null if not found
+   */
+  private String getCurrentTestCaseName(boolean fail) {
+    // REVIEW jvs 12-Mar-2006: Too clever by half.  Someone might not know
+    // about this and use a private helper method whose name also starts
+    // with test. Perhaps just require them to pass in getName() from the
+    // calling TestCase's setUp method and store it in a thread-local,
+    // failing here if they forgot?
+
+    // Clever, this. Dump the stack and look up it for a method which
+    // looks like a test case name, e.g. "testFoo".
+    final StackTraceElement[] stackTrace;
+    Throwable runtimeException = new Throwable();
+    runtimeException.fillInStackTrace();
+    stackTrace = runtimeException.getStackTrace();
+    for (StackTraceElement stackTraceElement : stackTrace) {
+      final String methodName = stackTraceElement.getMethodName();
+      if (methodName.startsWith("test")) {
+        return methodName;
+      }
+    }
+    if (fail) {
+      throw new RuntimeException("no test case on current call stack");
+    } else {
+      return null;
+    }
+  }
+
+  public void assertEquals(String tag, String expected, String actual) {
+    final String testCaseName = getCurrentTestCaseName(true);
+    String expected2 = expand(tag, expected);
+    if (expected2 == null) {
+      update(testCaseName, expected, actual);
+      throw new AssertionError("reference file does not contain resource '"
+          + expected + "' for test case '" + testCaseName + "'");
+    } else {
+      try {
+        // TODO jvs 25-Apr-2006:  reuse bulk of
+        // DiffTestCase.diffTestLog here; besides newline
+        // insensitivity, it can report on the line
+        // at which the first diff occurs, which is useful
+        // for largish snippets
+        String expected2Canonical =
+            expected2.replace(Util.LINE_SEPARATOR, "\n");
+        String actualCanonical =
+            actual.replace(Util.LINE_SEPARATOR, "\n");
+        Assert.assertEquals(
+            tag,
+            expected2Canonical,
+            actualCanonical);
+      } catch (ComparisonFailure e) {
+        amend(expected, actual);
+        throw e;
+      }
+    }
+  }
+
+  /**
+   * Creates a new document with a given resource.
+   *
+   * <p>This method is synchronized, in case two threads are running test
+   * cases of this test at the same time.
+   *
+   * @param testCaseName Test case name
+   * @param resourceName Resource name
+   * @param value        New value of resource
+   */
+  private synchronized void update(
+      String testCaseName,
+      String resourceName,
+      String value) {
+    final List<Pair<String, Element>> map = new ArrayList<>();
+    Element testCaseElement = getTestCaseElement(testCaseName, true, map);
+    if (testCaseElement == null) {
+      testCaseElement = doc.createElement(TEST_CASE_TAG);
+      testCaseElement.setAttribute(TEST_CASE_NAME_ATTR, testCaseName);
+      Node refElement = ref(testCaseName, map);
+      root.insertBefore(testCaseElement, refElement);
+    }
+    Element resourceElement =
+        getResourceElement(testCaseElement, resourceName, true);
+    if (resourceElement == null) {
+      resourceElement = doc.createElement(RESOURCE_TAG);
+      resourceElement.setAttribute(RESOURCE_NAME_ATTR, resourceName);
+      testCaseElement.appendChild(resourceElement);
+    } else {
+      removeAllChildren(resourceElement);
+    }
+    if (!value.equals("")) {
+      resourceElement.appendChild(doc.createCDATASection(value));
+    }
+
+    // Write out the document.
+    flushDoc();
+  }
+
+  private Node ref(String testCaseName, List<Pair<String, Element>> map) {
+    if (map.isEmpty()) {
+      return null;
+    }
+    // Compute the position that the new element should be if the map were
+    // sorted.
+    int i = 0;
+    final List<String> names = Pair.left(map);
+    for (String s : names) {
+      if (s.compareToIgnoreCase(testCaseName) <= 0) {
+        ++i;
+      }
+    }
+    // Starting at a proportional position in the list,
+    // move forwards through lesser names, then
+    // move backwards through greater names.
+    //
+    // The intended effect is that if the list is already sorted, the new item
+    // will end up in exactly the right position, and if the list is not sorted,
+    // the new item will end up in approximately the right position.
+    while (i < map.size()
+        && names.get(i).compareToIgnoreCase(testCaseName) < 0) {
+      ++i;
+    }
+    if (i >= map.size() - 1) {
+      return null;
+    }
+    while (i >= 0 && names.get(i).compareToIgnoreCase(testCaseName) > 0) {
+      --i;
+    }
+    return map.get(i + 1).right;
+  }
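+
+  // Worked example (illustrative; test names are hypothetical): with existing
+  // <TestCase> elements "testAaa", "testCcc" and "testDdd", ref("testBbb", map)
+  // returns the "testCcc" element, so the caller's insertBefore(...) places the new
+  // case just before it. When the new name sorts after every existing element,
+  // ref(...) returns null and the new case is appended as the last child of <Root>.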
+
+  /**
+   * Flushes the reference document to the file system.
+   */
+  private void flushDoc() {
+    try {
+      boolean b = logFile.getParentFile().mkdirs();
+      Util.discard(b);
+      try (Writer w = Util.printWriter(logFile)) {
+        write(doc, w, indent);
+      }
+    } catch (IOException e) {
+      throw new RuntimeException("error while writing test reference log '"
+          + logFile + "'", e);
+    }
+  }
+
+  /**
+   * Returns a given resource from a given test case.
+   *
+   * @param testCaseElement The enclosing TestCase element, e.g. <code>
+   *                        &lt;TestCase name="testFoo"&gt;</code>.
+   * @param resourceName    Name of resource, e.g. "sql", "plan"
+   * @return The value of the resource, or null if not found
+   */
+  private static Element getResourceElement(
+      Element testCaseElement,
+      String resourceName) {
+    return getResourceElement(testCaseElement, resourceName, false);
+  }
+
+  /**
+   * Returns a given resource from a given test case.
+   *
+   * @param testCaseElement The enclosing TestCase element, e.g. <code>
+   *                        &lt;TestCase name="testFoo"&gt;</code>.
+   * @param resourceName    Name of resource, e.g. "sql", "plan"
+   * @param killYoungerSiblings Whether to remove resources with the same
+   *                        name and the same parent that are eclipsed
+   * @return The value of the resource, or null if not found
+   */
+  private static Element getResourceElement(Element testCaseElement,
+      String resourceName, boolean killYoungerSiblings) {
+    final NodeList childNodes = testCaseElement.getChildNodes();
+    Element found = null;
+    final List<Node> kills = new ArrayList<>();
+    for (int i = 0; i < childNodes.getLength(); i++) {
+      Node child = childNodes.item(i);
+      if (child.getNodeName().equals(RESOURCE_TAG)
+          && resourceName.equals(
+              ((Element) child).getAttribute(RESOURCE_NAME_ATTR))) {
+        if (found == null) {
+          found = (Element) child;
+        } else if (killYoungerSiblings) {
+          kills.add(child);
+        }
+      }
+    }
+    for (Node kill : kills) {
+      testCaseElement.removeChild(kill);
+    }
+    return found;
+  }
+
+  private static void removeAllChildren(Element element) {
+    final NodeList childNodes = element.getChildNodes();
+    while (childNodes.getLength() > 0) {
+      element.removeChild(childNodes.item(0));
+    }
+  }
+
+  /**
+   * Serializes an XML document as text.
+   *
+   * <p>FIXME: I'm sure there's a library call to do this, but I'm danged if I
+   * can find it. -- jhyde, 2006/2/9.
+   */
+  private static void write(Document doc, Writer w, int indent) {
+    final XmlOutput out = new XmlOutput(w);
+    out.setGlob(true);
+    out.setIndentString(Spaces.of(indent));
+    writeNode(doc, out);
+  }
+
+  private static void writeNode(Node node, XmlOutput out) {
+    final NodeList childNodes;
+    switch (node.getNodeType()) {
+    case Node.DOCUMENT_NODE:
+      out.print("<?xml version=\"1.0\" ?>\n");
+      childNodes = node.getChildNodes();
+      for (int i = 0; i < childNodes.getLength(); i++) {
+        Node child = childNodes.item(i);
+        writeNode(child, out);
+      }
+
+      //            writeNode(((Document) node).getDocumentElement(),
+      // out);
+      break;
+
+    case Node.ELEMENT_NODE:
+      Element element = (Element) node;
+      final String tagName = element.getTagName();
+      out.beginBeginTag(tagName);
+
+      // Attributes.
+      final NamedNodeMap attributeMap = element.getAttributes();
+      for (int i = 0; i < attributeMap.getLength(); i++) {
+        final Node att = attributeMap.item(i);
+        out.attribute(
+            att.getNodeName(),
+            att.getNodeValue());
+      }
+      out.endBeginTag(tagName);
+
+      // Write child nodes, ignoring attributes but including text.
+      childNodes = node.getChildNodes();
+      for (int i = 0; i < childNodes.getLength(); i++) {
+        Node child = childNodes.item(i);
+        if (child.getNodeType() == Node.ATTRIBUTE_NODE) {
+          continue;
+        }
+        writeNode(child, out);
+      }
+      out.endTag(tagName);
+      break;
+
+    case Node.ATTRIBUTE_NODE:
+      out.attribute(
+          node.getNodeName(),
+          node.getNodeValue());
+      break;
+
+    case Node.CDATA_SECTION_NODE:
+      CDATASection cdata = (CDATASection) node;
+      out.cdata(
+          cdata.getNodeValue(),
+          true);
+      break;
+
+    case Node.TEXT_NODE:
+      Text text = (Text) node;
+      final String wholeText = text.getNodeValue();
+      if (!isWhitespace(wholeText)) {
+        out.cdata(wholeText, false);
+      }
+      break;
+
+    case Node.COMMENT_NODE:
+      Comment comment = (Comment) node;
+      out.print("<!--" + comment.getNodeValue() + "-->\n");
+      break;
+
+    default:
+      throw new RuntimeException("unexpected node type: " + node.getNodeType()
+          + " (" + node + ")");
+    }
+  }
+
+  private static boolean isWhitespace(String text) {
+    for (int i = 0, count = text.length(); i < count; ++i) {
+      final char c = text.charAt(i);
+      switch (c) {
+      case ' ':
+      case '\t':
+      case '\n':
+        break;
+      default:
+        return false;
+      }
+    }
+    return true;
+  }
+
+  /**
+   * Finds the repository instance for a given class, with no base
+   * repository or filter.
+   *
+   * @param clazz Test case class
+   * @return The diff repository shared between test cases in this class.
+   */
+  public static DiffRepository lookup(Class clazz) {
+    return lookup(clazz, null);
+  }
+
+  /**
+   * Finds the repository instance for a given class and inheriting from
+   * a given repository.
+   *
+   * @param clazz     Test case class
+   * @param baseRepository Base class of test class
+   * @return The diff repository shared between test cases in this class.
+   */
+  public static DiffRepository lookup(
+      Class clazz,
+      DiffRepository baseRepository) {
+    return lookup(clazz, baseRepository, null);
+  }
+
+  /**
+   * Finds the repository instance for a given class.
+   *
+   * <p>It is important that all test cases in a class share the same
+   * repository instance. This ensures that, if two or more test cases fail,
+   * the log file will contain the actual results of both test cases.
+   *
+   * <p>The <code>baseRepository</code> parameter is useful if the test is an
+   * extension to a previous test. If the test class has a base class which
+   * also has a repository, specify the repository here. DiffRepository will
+   * look for resources in the base class if it cannot find them in this
+   * repository. If test resources from test cases in the base class are
+   * missing or incorrect, it will not write them to the log file -- you
+   * probably need to fix the base test.
+   *
+   * <p>Use the <code>filter</code> parameter if you expect the test to
+   * return results slightly different than in the repository. This happens
+   * if the behavior of a derived test is slightly different than a base
+   * test. If you do not specify a filter, no filtering will happen.
+   *
+   * @param clazz     Test case class
+   * @param baseRepository Base repository
+   * @param filter    Filters each string returned by the repository
+   * @return The diff repository shared between test cases in this class.
+   */
+  public static synchronized DiffRepository lookup(
+      Class clazz,
+      DiffRepository baseRepository,
+      Filter filter) {
+    DiffRepository diffRepository = MAP_CLASS_TO_REPOSITORY.get(clazz);
+    if (diffRepository == null) {
+      final URL refFile = findFile(clazz, ".xml");
+      final File logFile =
+          new File(refFile.getFile().replace("test-classes", "surefire"));
+      diffRepository =
+          new DiffRepository(refFile, logFile, baseRepository, filter);
+      MAP_CLASS_TO_REPOSITORY.put(clazz, diffRepository);
+    }
+    return diffRepository;
+  }
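+
+  // Hedged usage sketch (class names below are hypothetical): a derived test can
+  // inherit resources from its base test's repository and normalize volatile output
+  // through a Filter before comparison, e.g.
+  //
+  //   DiffRepository base = DiffRepository.lookup(BaseConverterTest.class);
+  //   DiffRepository repos = DiffRepository.lookup(DerivedConverterTest.class, base,
+  //       (repo, testCaseName, tag, text, expanded) -> expanded.replace("\r\n", "\n"));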
+
+  /**
+   * Callback to filter strings before returning them.
+   */
+  public interface Filter {
+    /**
+     * Filters a string.
+     *
+     * @param diffRepository Repository
+     * @param testCaseName   Test case name
+     * @param tag            Tag being expanded
+     * @param text           Text being expanded
+     * @param expanded       Expanded text
+     * @return Expanded text after filtering
+     */
+    String filter(
+        DiffRepository diffRepository,
+        String testCaseName,
+        String tag,
+        String text,
+        String expanded);
+  }
+}
+
+// End DiffRepository.java
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/SparderCoverTest.java b/src/kylin-it/src/test/java/org/apache/kylin/SparderCoverTest.java
new file mode 100644
index 0000000000..80089382b6
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/SparderCoverTest.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.kylin;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.kylin.common.KapConfig;
+import org.apache.kylin.common.util.HadoopUtil;
+import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
+import org.apache.kylin.engine.spark.NSparkCubingEngine;
+import org.apache.kylin.engine.spark.storage.ParquetStorage;
+import org.apache.kylin.storage.ParquetDataStorage;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ *  This class exists to improve Java coverage for code exercised through Scala; it should be removed once Scala coverage is available.
+ */
+public class SparderCoverTest extends NLocalFileMetadataTestCase {
+    private static final Logger log = LoggerFactory.getLogger(SparderCoverTest.class);
+
+    @Before
+    public void init() {
+        createTestMetadata();
+
+    }
+
+    @After
+    public void clean() {
+        cleanupTestMetadata();
+    }
+
+    @Test
+    public void testParquetDataStorageCubingStorage() {
+        ParquetDataStorage parquetDataStorage = new ParquetDataStorage();
+        NSparkCubingEngine.NSparkCubingStorage nSparkCubingStorage = parquetDataStorage
+                .adaptToBuildEngine(NSparkCubingEngine.NSparkCubingStorage.class);
+        Assert.assertTrue(nSparkCubingStorage instanceof ParquetStorage);
+    }
+    //
+    //    @Test
+    //    public void testParquetDataStorage() {
+    //        ParquetDataStorage parquetDataStorage = new ParquetDataStorage();
+    //        NDataflow nDataflow = new NDataflow();
+    //        IStorageQuery query = parquetDataStorage.createQuery(nDataflow);
+    //        Assert.assertTrue(query instanceof NDataStorageQuery);
+    //    }
+
+    @Test
+    public void testKapConf() {
+        KapConfig kapConfig = KapConfig.getInstanceFromEnv();
+        assert kapConfig.getListenerBusBusyThreshold() == 5000;
+        assert kapConfig.getBlockNumBusyThreshold() == 5000;
+    }
+
+    @Test
+    public void testHadoopUtil() throws IOException {
+        FileSystem readFileSystem = HadoopUtil.getWorkingFileSystem();
+        String scheme = readFileSystem.getScheme();
+        assert scheme.equals("file");
+        readFileSystem = HadoopUtil.getWorkingFileSystem(new Configuration());
+        scheme = readFileSystem.getScheme();
+        assert scheme.equals("file");
+        readFileSystem = HadoopUtil.getWorkingFileSystem(new Configuration());
+        scheme = readFileSystem.getScheme();
+        assert scheme.equals("file");
+        readFileSystem = HadoopUtil.getWorkingFileSystem();
+        scheme = readFileSystem.getScheme();
+        assert scheme.equals("file");
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/event/HAMetadataTest.java b/src/kylin-it/src/test/java/org/apache/kylin/event/HAMetadataTest.java
new file mode 100644
index 0000000000..7eda1a20c8
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/event/HAMetadataTest.java
@@ -0,0 +1,215 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.event;
+
+import static org.apache.kylin.common.persistence.metadata.jdbc.JdbcUtil.datasourceParameters;
+import static org.awaitility.Awaitility.await;
+
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.util.Comparator;
+import java.util.Locale;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Stream;
+
+import org.apache.commons.dbcp2.BasicDataSourceFactory;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.persistence.ImageDesc;
+import org.apache.kylin.common.persistence.ResourceStore;
+import org.apache.kylin.common.persistence.StringEntity;
+import org.apache.kylin.common.persistence.metadata.JdbcAuditLogStore;
+import org.apache.kylin.common.persistence.transaction.UnitOfWork;
+import org.apache.kylin.common.util.HadoopUtil;
+import org.apache.kylin.common.util.JsonUtil;
+import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
+import org.apache.kylin.common.util.RandomUtil;
+import org.apache.kylin.tool.MetadataTool;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.springframework.jdbc.core.JdbcTemplate;
+
+import io.kyligence.kap.guava20.shaded.common.io.ByteSource;
+import lombok.val;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class HAMetadataTest extends NLocalFileMetadataTestCase {
+
+    private KylinConfig queryKylinConfig;
+    private ResourceStore queryResourceStore;
+    private final Charset charset = StandardCharsets.UTF_8;
+
+    @Before
+    public void setUp() throws Exception {
+        overwriteSystemProp("kylin.metadata.audit-log.catchup-interval", "1s");
+        createTestMetadata();
+        getTestConfig().setProperty("kylin.auditlog.replay-groupby-project-reload-enable", "false");
+        getTestConfig().setMetadataUrl("test" + System.currentTimeMillis()
+                + "@jdbc,driverClassName=org.h2.Driver,url=jdbc:h2:mem:db_default;DB_CLOSE_DELAY=-1,username=sa,password=");
+        UnitOfWork.doInTransactionWithRetry(() -> {
+            val resourceStore = ResourceStore.getKylinMetaStore(KylinConfig.getInstanceFromEnv());
+            resourceStore.checkAndPutResource("/UUID", new StringEntity(RandomUtil.randomUUIDStr()),
+                    StringEntity.serializer);
+            return null;
+        }, "");
+        queryKylinConfig = KylinConfig.createKylinConfig(getTestConfig());
+        val auditLogStore = new JdbcAuditLogStore(queryKylinConfig);
+        queryKylinConfig.setMetadataUrl("test@hdfs");
+        queryResourceStore = ResourceStore.getKylinMetaStore(queryKylinConfig);
+        queryResourceStore.getMetadataStore().setAuditLogStore(auditLogStore);
+    }
+
+    @After
+    public void tearDown() throws Exception {
+        val jdbcTemplate = getJdbcTemplate();
+        jdbcTemplate.batchUpdate("DROP ALL OBJECTS");
+        cleanupTestMetadata();
+        queryResourceStore.close();
+        ((JdbcAuditLogStore) queryResourceStore.getAuditLogStore()).forceClose();
+    }
+
+    @Test
+    public void testMetadataCatchup_EmptyBackup() throws InterruptedException {
+        queryResourceStore.catchup();
+        UnitOfWork.doInTransactionWithRetry(() -> {
+            val resourceStore = getStore();
+            resourceStore.checkAndPutResource("/p0/path1", ByteSource.wrap("path1".getBytes(charset)), -1);
+            resourceStore.checkAndPutResource("/p0/path2", ByteSource.wrap("path2".getBytes(charset)), -1);
+            resourceStore.checkAndPutResource("/p0/path3", ByteSource.wrap("path3".getBytes(charset)), -1);
+            resourceStore.checkAndPutResource("/p0/path4", ByteSource.wrap("path4".getBytes(charset)), -1);
+            return 0;
+        }, "p0");
+        await().atMost(3, TimeUnit.SECONDS).until(() -> 5 == queryResourceStore.listResourcesRecursively("/").size());
+    }
+
+    @Test
+    public void testMetadataCatchupWithBackup() throws Exception {
+        UnitOfWork.doInTransactionWithRetry(() -> {
+            val resourceStore = getStore();
+            resourceStore.checkAndPutResource("/p0/path1", ByteSource.wrap("path1".getBytes(charset)), -1);
+            resourceStore.checkAndPutResource("/p0/path2", ByteSource.wrap("path2".getBytes(charset)), -1);
+            resourceStore.checkAndPutResource("/p0/path3", ByteSource.wrap("path3".getBytes(charset)), -1);
+            resourceStore.checkAndPutResource("/p0/path4", ByteSource.wrap("path4".getBytes(charset)), -1);
+            return 0;
+        }, "p0");
+        String[] args = new String[] { "-backup", "-dir", HadoopUtil.getBackupFolder(getTestConfig()) };
+        val metadataTool = new MetadataTool(getTestConfig());
+        metadataTool.execute(args);
+
+        queryResourceStore.catchup();
+        Assert.assertEquals(5, queryResourceStore.listResourcesRecursively("/").size());
+
+        UnitOfWork.doInTransactionWithRetry(() -> {
+            val resourceStore = getStore();
+            resourceStore.checkAndPutResource("/p0/path1", ByteSource.wrap("path1".getBytes(charset)), 0);
+            resourceStore.checkAndPutResource("/p0/path2", ByteSource.wrap("path2".getBytes(charset)), 0);
+            resourceStore.checkAndPutResource("/p0/path3", ByteSource.wrap("path3".getBytes(charset)), 0);
+            resourceStore.deleteResource("/p0/path4");
+            resourceStore.checkAndPutResource("/p0/path5", ByteSource.wrap("path5".getBytes(charset)), -1);
+            resourceStore.checkAndPutResource("/p0/path6", ByteSource.wrap("path6".getBytes(charset)), -1);
+            resourceStore.checkAndPutResource("/p0/path7", ByteSource.wrap("path7".getBytes(charset)), -1);
+            return 0;
+        }, "p0");
+
+        await().atMost(3, TimeUnit.SECONDS).until(() -> 7 == queryResourceStore.listResourcesRecursively("/").size());
+        String table = getTestConfig().getMetadataUrl().getIdentifier() + "_audit_log";
+        val auditCount = getJdbcTemplate().queryForObject(String.format(Locale.ROOT, "select count(*) from %s", table),
+                Long.class);
+        Assert.assertEquals(12L, auditCount.longValue());
+    }
+
+    @Ignore("unstable in daily ut")
+    @Test
+    public void testMetadata_RemoveAuditLog_Restore() throws Exception {
+        UnitOfWork.doInTransactionWithRetry(() -> {
+            val resourceStore = getStore();
+            resourceStore.checkAndPutResource("/_global/project/p0.json", ByteSource
+                    .wrap("{  \"uuid\": \"1eaca32a-a33e-4b69-83dd-0bb8b1f8c91b\"}".getBytes(charset)), -1);
+            resourceStore.checkAndPutResource("/p0/path1.json",
+                    ByteSource.wrap("{ \"mvcc\": 0 }".getBytes(charset)), -1);
+            resourceStore.checkAndPutResource("/p0/path2.json",
+                    ByteSource.wrap("{ \"mvcc\": 0 }".getBytes(charset)), -1);
+            resourceStore.checkAndPutResource("/p0/path3.json",
+                    ByteSource.wrap("{ \"mvcc\": 0 }".getBytes(charset)), -1);
+            resourceStore.checkAndPutResource("/p0/path4.json",
+                    ByteSource.wrap("{ \"mvcc\": 0 }".getBytes(charset)), -1);
+            resourceStore.checkAndPutResource("/p0/path3.json",
+                    ByteSource.wrap("{ \"mvcc\": 1 }".getBytes(charset)), 0);
+            resourceStore.checkAndPutResource("/p0/path4.json",
+                    ByteSource.wrap("{ \"mvcc\": 1 }".getBytes(charset)), 0);
+            resourceStore.checkAndPutResource("/p0/path3.json",
+                    ByteSource.wrap("{ \"mvcc\": 2 }".getBytes(charset)), 1);
+            resourceStore.checkAndPutResource("/p0/path4.json",
+                    ByteSource.wrap("{ \"mvcc\": 2 }".getBytes(charset)), 1);
+            resourceStore.checkAndPutResource("/p0/path3.json",
+                    ByteSource.wrap("{ \"mvcc\": 3 }".getBytes(charset)), 2);
+            return 0;
+        }, "p0");
+        String table = getTestConfig().getMetadataUrl().getIdentifier() + "_audit_log";
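+        // remove one audit log entry to create a gap, so catchup is expected to fail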
+        getJdbcTemplate().update(String.format(Locale.ROOT, "delete from %s where id=7", table));
+        try {
+            queryResourceStore.catchup();
+            Assert.fail();
+        } catch (Exception e) {
+            queryResourceStore.close();
+            ((JdbcAuditLogStore) queryResourceStore.getAuditLogStore()).forceClose();
+        }
+        await().pollDelay(1000, TimeUnit.MILLISECONDS).until(() -> true);
+        String[] args = new String[] { "-backup", "-dir", HadoopUtil.getBackupFolder(getTestConfig()) };
+        MetadataTool metadataTool = new MetadataTool(getTestConfig());
+        metadataTool.execute(args);
+
+        await().pollDelay(1000, TimeUnit.MILLISECONDS).until(() -> true);
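+        // restore from the most recent backup folder, truncating the current metadata first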
+        val path = HadoopUtil.getBackupFolder(getTestConfig());
+        val fs = HadoopUtil.getWorkingFileSystem();
+        val rootPath = Stream.of(fs.listStatus(new Path(path)))
+                .max(Comparator.comparing(FileStatus::getModificationTime)).map(FileStatus::getPath)
+                .orElse(new Path(path + "/backup_1/"));
+        args = new String[] { "-restore", "-dir", rootPath.toString().substring(5), "--after-truncate" };
+        metadataTool = new MetadataTool(getTestConfig());
+        metadataTool.execute(args);
+
+        queryKylinConfig = KylinConfig.createKylinConfig(getTestConfig());
+        val auditLogStore = new JdbcAuditLogStore(queryKylinConfig);
+        queryKylinConfig.setMetadataUrl(getTestConfig().getMetadataUrl().getIdentifier() + "@hdfs");
+        queryResourceStore = ResourceStore.getKylinMetaStore(queryKylinConfig);
+        queryResourceStore.getMetadataStore().setAuditLogStore(auditLogStore);
+        queryResourceStore.catchup();
+
+        Assert.assertEquals(7, queryResourceStore.listResourcesRecursively("/").size());
+        val auditCount = getJdbcTemplate().queryForObject(String.format(Locale.ROOT, "select count(*) from %s", table),
+                Long.class);
+        Assert.assertEquals(15, auditCount.longValue());
+        val imageDesc = JsonUtil.readValue(queryResourceStore.getResource("/_image").getByteSource().read(),
+                ImageDesc.class);
+        Assert.assertEquals(16, imageDesc.getOffset().longValue());
+    }
+
+    JdbcTemplate getJdbcTemplate() throws Exception {
+        val url = getTestConfig().getMetadataUrl();
+        val props = datasourceParameters(url);
+        val dataSource = BasicDataSourceFactory.createDataSource(props);
+        return new JdbcTemplate(dataSource);
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/event/ITStorageCleanerTest.java b/src/kylin-it/src/test/java/org/apache/kylin/event/ITStorageCleanerTest.java
new file mode 100644
index 0000000000..4e8a19c6ae
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/event/ITStorageCleanerTest.java
@@ -0,0 +1,134 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.event;
+
+import static org.awaitility.Awaitility.await;
+
+import java.io.File;
+import java.util.Arrays;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.stream.Collectors;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.kylin.common.annotation.Clarification;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.metadata.cube.model.NDataLayout;
+import org.apache.kylin.metadata.cube.model.NDataSegment;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
+import org.apache.kylin.metadata.cube.model.NIndexPlanManager;
+import org.apache.kylin.metadata.model.NTableMetadataManager;
+import org.apache.kylin.metadata.project.NProjectManager;
+import org.apache.kylin.tool.garbage.StorageCleaner;
+import org.apache.kylin.util.SegmentInitializeUtil;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import com.google.common.collect.Sets;
+
+import lombok.val;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+@Clarification(priority = Clarification.Priority.MAJOR, msg = "Enterprise")
+public class ITStorageCleanerTest extends NLocalWithSparkSessionTest {
+
+    private NDefaultScheduler scheduler;
+
+    @Before
+    public void setUp() throws Exception {
+        overwriteSystemProp("kylin.job.event.poll-interval-second", "1");
+        overwriteSystemProp("kylin.job.scheduler.poll-interval-second", "2");
+        overwriteSystemProp("kylin.engine.spark.build-class-name",
+                "org.apache.kylin.engine.spark.job.MockedDFBuildJob");
+        overwriteSystemProp("kylin.garbage.storage.cuboid-layout-survival-time-threshold", "0s");
+        this.createTestMetadata();
+
+        val projectMgr = NProjectManager.getInstance(getTestConfig());
+        for (String project : Arrays.asList("bad_query_test", "broken_test", "demo", "match", "newten", "smart", "ssb",
+                "top_n")) {
+            projectMgr.forceDropProject(project);
+        }
+        NDefaultScheduler.destroyInstance();
+        scheduler = NDefaultScheduler.getInstance(getProject());
+        scheduler.init(new JobEngineConfig(getTestConfig()));
+
+        val tableMgr = NTableMetadataManager.getInstance(getTestConfig(), getProject());
+        val table = tableMgr.getTableDesc("DEFAULT.TEST_KYLIN_FACT");
+        table.setIncrementLoading(true);
+        tableMgr.updateTableDesc(table);
+    }
+
+    @After
+    public void tearDown() throws Exception {
+        NDefaultScheduler.destroyInstance();
+        this.cleanupTestMetadata();
+    }
+
+    @Test
+    @Ignore("TODO: remove or adapt")
+    public void testStorageCleanWithJob_MultiThread() throws InterruptedException {
+        val dataflowManager = NDataflowManager.getInstance(getTestConfig(), getProject());
+        val indexManager = NIndexPlanManager.getInstance(getTestConfig(), getProject());
+        val df = dataflowManager.getDataflow("741ca86a-1f13-46da-a59f-95fb68615e3a");
+        val MAX_WAIT = 500 * 1000;
+        val start = System.currentTimeMillis() + MAX_WAIT;
+        val finished = new AtomicBoolean(false);
+        new Thread(() -> {
+            while (System.currentTimeMillis() < start && !finished.get()) {
+                try {
+                    val cleaner = new StorageCleaner();
+                    cleaner.execute();
+                    await().pollDelay(1100, TimeUnit.MILLISECONDS).until(() -> true);
+                } catch (Exception e) {
+                    log.warn("gc failed", e);
+                }
+            }
+        }).start();
+        SegmentInitializeUtil.prepareSegment(getTestConfig(), getProject(), df.getUuid(), "2012-01-01", "2012-06-01",
+                true);
+        SegmentInitializeUtil.prepareSegment(getTestConfig(), getProject(), df.getUuid(), "2012-06-01", "2012-09-01",
+                false);
+
+        indexManager.updateIndexPlan(df.getId(), copyForWrite -> {
+            copyForWrite.removeLayouts(Sets.newHashSet(30001L, 20001L), true, true);
+        });
+        val df2 = dataflowManager.getDataflow(df.getUuid());
+
+        await().pollDelay(3000, TimeUnit.MILLISECONDS).until(() -> true);
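+        // substring(7) drops the URI scheme (typically "file://" in local tests) so the path works with java.io.File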
+        val root = getTestConfig().getHdfsWorkingDirectory().substring(7) + "default/parquet/";
+        val layoutFolders = FileUtils.listFiles(new File(root), new String[] { "parquet" }, true).stream()
+                .map(File::getParent).distinct().sorted().collect(Collectors.toList());
+        Set<String> expected = Sets.newTreeSet();
+        for (NDataSegment segment : df2.getSegments()) {
+            for (Map.Entry<Long, NDataLayout> entry : segment.getLayoutsMap().entrySet()) {
+                expected.add(root + df2.getId() + "/" + segment.getId() + "/" + entry.getKey());
+            }
+        }
+        finished.set(true);
+        Assert.assertEquals(String.join(";\n", expected), String.join(";\n", layoutFolders));
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/event/ModelSemanticTest.java b/src/kylin-it/src/test/java/org/apache/kylin/event/ModelSemanticTest.java
new file mode 100644
index 0000000000..6205165e12
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/event/ModelSemanticTest.java
@@ -0,0 +1,308 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.event;
+
+import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_JSON;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.apache.hadoop.util.Shell;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.JsonUtil;
+import org.apache.kylin.common.util.RandomUtil;
+import org.apache.kylin.common.util.TempMetadataBuilder;
+import org.apache.kylin.engine.spark.ExecutableUtils;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.execution.NExecutableManager;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.metadata.cube.cuboid.NAggregationGroup;
+import org.apache.kylin.metadata.cube.model.IndexEntity;
+import org.apache.kylin.metadata.cube.model.NDataLoadingRange;
+import org.apache.kylin.metadata.cube.model.NDataLoadingRangeManager;
+import org.apache.kylin.metadata.cube.model.NDataSegment;
+import org.apache.kylin.metadata.cube.model.NDataflow;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
+import org.apache.kylin.metadata.cube.model.NDataflowUpdate;
+import org.apache.kylin.metadata.cube.model.NIndexPlanManager;
+import org.apache.kylin.metadata.model.ManagementType;
+import org.apache.kylin.metadata.model.NDataModel;
+import org.apache.kylin.metadata.model.NDataModelManager;
+import org.apache.kylin.metadata.model.NTableMetadataManager;
+import org.apache.kylin.metadata.model.SegmentRange;
+import org.apache.kylin.metadata.model.SegmentStatusEnum;
+import org.apache.kylin.metadata.model.Segments;
+import org.apache.kylin.rest.request.ModelRequest;
+import org.apache.kylin.rest.request.UpdateRuleBasedCuboidRequest;
+import org.apache.kylin.rest.response.SimplifiedMeasure;
+import org.apache.kylin.rest.util.SCD2SimplificationConvertUtil;
+import org.apache.kylin.server.AbstractMVCIntegrationTestCase;
+import org.apache.kylin.util.JobFinishHelper;
+import org.apache.spark.SparkConf;
+import org.apache.spark.sql.SparderEnv;
+import org.apache.spark.sql.SparkSession;
+import org.apache.spark.sql.internal.StaticSQLConf;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.springframework.http.MediaType;
+import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
+import org.springframework.test.web.servlet.result.MockMvcResultMatchers;
+
+import com.google.common.collect.Lists;
+
+import lombok.val;
+import lombok.var;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class ModelSemanticTest extends AbstractMVCIntegrationTestCase {
+
+    public static final String MODEL_ID = "89af4ee2-2cdb-4b07-b39e-4c29856309aa";
+    protected NExecutableManager executableManager;
+    NIndexPlanManager indexPlanManager;
+    NDataflowManager dataflowManager;
+
+    protected static SparkConf sparkConf;
+    protected static SparkSession ss;
+
+    @BeforeClass
+    public static void beforeClass() {
+        ExecutableUtils.initJobFactory();
+        if (Shell.MAC)
+            overwriteSystemPropBeforeClass("org.xerial.snappy.lib.name", "libsnappyjava.jnilib"); // for snappy
+
+        sparkConf = new SparkConf().setAppName(RandomUtil.randomUUIDStr()).setMaster("local[4]");
+        sparkConf.set("spark.serializer", "org.apache.spark.serializer.JavaSerializer");
+        sparkConf.set(StaticSQLConf.CATALOG_IMPLEMENTATION().key(), "in-memory");
+        sparkConf.set("spark.sql.shuffle.partitions", "1");
+        sparkConf.set(StaticSQLConf.WAREHOUSE_PATH().key(),
+                TempMetadataBuilder.TEMP_TEST_METADATA + "/spark-warehouse");
+
+        ss = SparkSession.builder().config(sparkConf).getOrCreate();
+        SparderEnv.setSparkSession(ss);
+    }
+
+    @AfterClass
+    public static void afterClass() {
+        ss.close();
+    }
+
+    @Before
+    public void setupHandlers() {
+        overwriteSystemProp("kylin.job.scheduler.poll-interval-second", "3");
+        overwriteSystemProp("kylin.job.event.poll-interval-second", "1");
+        overwriteSystemProp("kylin.engine.spark.build-class-name",
+                "org.apache.kylin.engine.spark.job.MockedDFBuildJob");
+        NDefaultScheduler.destroyInstance();
+        val scheduler = NDefaultScheduler.getInstance(getProject());
+        scheduler.init(new JobEngineConfig(getTestConfig()));
+
+        val dfManager = NDataflowManager.getInstance(getTestConfig(), getProject());
+        var df = dfManager.getDataflow(MODEL_ID);
+
+        String tableName = df.getModel().getRootFactTable().getTableIdentity();
+        NDataLoadingRange dataLoadingRange = new NDataLoadingRange();
+        dataLoadingRange.setUuid(RandomUtil.randomUUIDStr());
+        dataLoadingRange.setTableName(tableName);
+        dataLoadingRange.setColumnName(df.getModel().getPartitionDesc().getPartitionDateColumn());
+        dataLoadingRange.setCoveredRange(new SegmentRange.TimePartitionedSegmentRange(
+                SegmentRange.dateToLong("2012-01-01"), SegmentRange.dateToLong("2012-05-01")));
+        NDataLoadingRangeManager.getInstance(KylinConfig.getInstanceFromEnv(), getProject())
+                .createDataLoadingRange(dataLoadingRange);
+
+        val tableMgr = NTableMetadataManager.getInstance(getTestConfig(), getProject());
+        val table = tableMgr.getTableDesc(tableName);
+        table.setIncrementLoading(true);
+        tableMgr.updateTableDesc(table);
+
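+        // drop all existing segments, then append two fresh segments covering 2012-01-01 ~ 2012-05-01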
+        val update = new NDataflowUpdate(df.getUuid());
+        update.setToRemoveSegs(df.getSegments().toArray(new NDataSegment[0]));
+        dfManager.updateDataflow(update);
+
+        dfManager.appendSegment(df, new SegmentRange.TimePartitionedSegmentRange(SegmentRange.dateToLong("2012-01-01"),
+                SegmentRange.dateToLong("2012-03-01")));
+        df = dfManager.getDataflow(MODEL_ID);
+        dfManager.appendSegment(df, new SegmentRange.TimePartitionedSegmentRange(SegmentRange.dateToLong("2012-03-01"),
+                SegmentRange.dateToLong("2012-05-01")));
+
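+        // remove measure 1011 and switch the model to model-based management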
+        val modelManager = NDataModelManager.getInstance(getTestConfig(), getProject());
+        modelManager.updateDataModel(MODEL_ID, copyForWrite -> {
+            copyForWrite.setAllMeasures(
+                    copyForWrite.getAllMeasures().stream().filter(m -> m.getId() != 1011).collect(Collectors.toList()));
+            copyForWrite.setManagementType(ManagementType.MODEL_BASED);
+        });
+
+        NExecutableManager originExecutableManager = NExecutableManager.getInstance(getTestConfig(), getProject());
+        executableManager = Mockito.spy(originExecutableManager);
+        indexPlanManager = NIndexPlanManager.getInstance(getTestConfig(), getProject());
+        dataflowManager = NDataflowManager.getInstance(getTestConfig(), getProject());
+    }
+
+    @After
+    public void tearDown() throws IOException {
+        NDefaultScheduler.getInstance(getProject()).shutdown();
+        super.tearDown();
+    }
+
+    public String getProject() {
+        return "default";
+    }
+
+    @Test
+    public void testSemanticChangedHappy() throws Exception {
+        NDataflowManager dfManager = NDataflowManager.getInstance(getTestConfig(), getProject());
+        executableManager.getJobs().forEach(jobId -> waitForJobFinish(jobId, 500 * 1000));
+        changeModelRequest();
+
+        List<String> jobs = executableManager.getJobs();
+        Assert.assertEquals(1, jobs.size());
+        waitForJobFinish(jobs.get(0), 500 * 1000);
+
+        NDataflow df = dfManager.getDataflow(MODEL_ID);
+        Assert.assertEquals(2, df.getSegments().size());
+        Assert.assertEquals(df.getIndexPlan().getAllLayouts().size(),
+                df.getSegments().getLatestReadySegment().getLayoutsMap().size());
+    }
+
+    @Test
+    // see issue #8740
+    public void testChange_WithReadySegment() throws Exception {
+        changeModelRequest();
+        executableManager.getJobs().forEach(jobId -> waitForJobFinish(jobId, 600 * 1000));
+
+        indexPlanManager.updateIndexPlan(MODEL_ID, copyForWrite -> {
+            List<IndexEntity> indexes = copyForWrite.getIndexes() //
+                    .stream().filter(x -> x.getId() != 1000000) //
+                    .collect(Collectors.toList());
+            copyForWrite.setIndexes(indexes);
+        });
+
+        // update measure
+        updateMeasureRequest();
+        executableManager.getJobs().forEach(jobId -> waitForJobFinish(jobId, 600 * 1000));
+        Segments<NDataSegment> segments = dataflowManager.getDataflow(MODEL_ID).getSegments();
+        long storageSize = 0;
+        for (NDataSegment seg : segments) {
+            Assert.assertEquals(SegmentStatusEnum.READY, seg.getStatus());
+            storageSize += seg.getLayout(30001).getByteSize();
+        }
+        Assert.assertEquals(246, storageSize);
+    }
+
+    @Test
+    // see issue #8820
+    public void testChange_ModelWithAggGroup() throws Exception {
+        changeModelRequest();
+        executableManager.getJobs().forEach(jobId -> waitForJobFinish(jobId, 600 * 1000));
+
+        // init agg group
+        val group1 = JsonUtil.readValue("{\n" + //
+                "        \"includes\": [1,2,3,4],\n" + //
+                "        \"select_rule\": {\n" + //
+                "          \"hierarchy_dims\": [],\n" + //
+                "          \"mandatory_dims\": [3],\n" + //
+                "          \"joint_dims\": [\n" + //
+                "            [1,2]\n" + //
+                "          ]\n" + //
+                "        }\n" + //
+                "}", NAggregationGroup.class);
+        val request = UpdateRuleBasedCuboidRequest.builder().project(getProject()).modelId(MODEL_ID)
+                .aggregationGroups(Lists.newArrayList(group1)).build();
+        mockMvc.perform(MockMvcRequestBuilders.put("/api/index_plans/rule").contentType(MediaType.APPLICATION_JSON)
+                .content(JsonUtil.writeValueAsString(request))
+                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
+                .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
+        executableManager.getJobs().forEach(jobId -> waitForJobFinish(jobId, 600 * 1000));
+
+        // updating the measure should fail because it is referenced by an aggregation group
+        updateMeasureWithAgg();
+    }
+
+    private void changeModelRequest() throws Exception {
+        val modelManager = NDataModelManager.getInstance(getTestConfig(), getProject());
+        val model = modelManager.getDataModelDesc(MODEL_ID);
+        val request = JsonUtil.readValue(JsonUtil.writeValueAsString(model), ModelRequest.class);
+        request.setProject(getProject());
+        request.setUuid(MODEL_ID);
+        request.setSimplifiedMeasures(model.getAllMeasures().stream().filter(m -> !m.isTomb())
+                .map(SimplifiedMeasure::fromMeasure).collect(Collectors.toList()));
+        request.setComputedColumnDescs(model.getComputedColumnDescs());
+        request.setSimplifiedDimensions(model.getAllNamedColumns().stream()
+                .filter(c -> c.getStatus() == NDataModel.ColumnStatus.DIMENSION).collect(Collectors.toList()));
+        request.setJoinTables(
+                request.getJoinTables().stream().peek(j -> j.getJoin().setType("inner")).collect(Collectors.toList()));
+        request.setSimplifiedJoinTableDescs(
+                SCD2SimplificationConvertUtil.simplifiedJoinTablesConvert(request.getJoinTables()));
+        mockMvc.perform(MockMvcRequestBuilders.put("/api/models/semantic").contentType(MediaType.APPLICATION_JSON)
+                .content(JsonUtil.writeValueAsString(request))
+                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
+                .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
+    }
+
+    private void updateMeasureRequest() throws Exception {
+        mockMvc.perform(MockMvcRequestBuilders.put("/api/models/semantic").contentType(MediaType.APPLICATION_JSON)
+                .content(JsonUtil.writeValueAsString(getModelRequest()))
+                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
+                .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
+    }
+
+    private void updateMeasureWithAgg() throws Exception {
+        val errorMessage = mockMvc
+                .perform(MockMvcRequestBuilders.put("/api/models/semantic").contentType(MediaType.APPLICATION_JSON)
+                        .content(JsonUtil.writeValueAsString(getModelRequest()))
+                        .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
+                .andExpect(MockMvcResultMatchers.status().isInternalServerError()).andReturn().getResponse().getContentAsString();
+
+        Assert.assertTrue(errorMessage.contains("The measure SUM_DEAL_AMOUNT is referenced by indexes or aggregate groups. "
+                + "Please go to the Data Asset - Model - Index page to view, delete referenced aggregate groups and indexes."));
+    }
+
+    private ModelRequest getModelRequest() throws Exception {
+        val modelManager = NDataModelManager.getInstance(getTestConfig(), getProject());
+        val model = modelManager.getDataModelDesc(MODEL_ID);
+        val request = JsonUtil.readValue(JsonUtil.writeValueAsString(model), ModelRequest.class);
+        request.setProject(getProject());
+        request.setUuid(MODEL_ID);
+        request.setSimplifiedMeasures(model.getAllMeasures().stream().filter(m -> !m.isTomb())
+                .map(SimplifiedMeasure::fromMeasure).peek(sm -> {
+                    if (sm.getId() == 100016) {
+                        sm.setExpression("MAX");
+                        sm.setName("MAX_DEAL_AMOUNT");
+                    }
+                }).collect(Collectors.toList()));
+        request.setComputedColumnDescs(model.getComputedColumnDescs());
+        request.setSimplifiedDimensions(model.getAllNamedColumns().stream()
+                .filter(c -> c.getStatus() == NDataModel.ColumnStatus.DIMENSION).collect(Collectors.toList()));
+        request.setJoinTables(request.getJoinTables());
+        request.setSimplifiedJoinTableDescs(
+                SCD2SimplificationConvertUtil.simplifiedJoinTablesConvert(request.getJoinTables()));
+
+        return request;
+    }
+
+    private void waitForJobFinish(String jobId, long maxWaitMilliseconds) {
+        JobFinishHelper.waitJobFinish(getTestConfig(), getProject(), jobId, maxWaitMilliseconds);
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/event/SchemaChangeTest.java b/src/kylin-it/src/test/java/org/apache/kylin/event/SchemaChangeTest.java
new file mode 100644
index 0000000000..831bf2f6dd
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/event/SchemaChangeTest.java
@@ -0,0 +1,360 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.event;
+
+import static org.awaitility.Awaitility.with;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Set;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Consumer;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+import org.apache.hadoop.util.Shell;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.JsonUtil;
+import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.common.util.RandomUtil;
+import org.apache.kylin.common.util.TempMetadataBuilder;
+import org.apache.kylin.engine.spark.ExecutableUtils;
+import org.apache.kylin.engine.spark.job.NSparkCubingJob;
+import org.apache.kylin.engine.spark.merger.AfterBuildResourceMerger;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.execution.ExecutableState;
+import org.apache.kylin.job.execution.NExecutableManager;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.metadata.cube.model.IndexEntity;
+import org.apache.kylin.metadata.cube.model.LayoutEntity;
+import org.apache.kylin.metadata.cube.model.NDataSegment;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
+import org.apache.kylin.metadata.cube.model.NDataflowUpdate;
+import org.apache.kylin.metadata.cube.model.NIndexPlanManager;
+import org.apache.kylin.metadata.model.ColumnDesc;
+import org.apache.kylin.metadata.model.NTableMetadataManager;
+import org.apache.kylin.metadata.model.SegmentRange;
+import org.apache.kylin.metadata.model.TableDesc;
+import org.apache.kylin.metadata.project.NProjectManager;
+import org.apache.kylin.metadata.project.ProjectInstance;
+import org.apache.kylin.metadata.user.ManagedUser;
+import org.apache.kylin.rest.constant.Constant;
+import org.apache.kylin.rest.request.SQLRequest;
+import org.apache.kylin.rest.service.QueryService;
+import org.apache.kylin.rest.service.TableService;
+import org.apache.kylin.rest.service.UserGrantedAuthority;
+import org.apache.kylin.rest.service.UserService;
+import org.apache.kylin.server.AbstractMVCIntegrationTestCase;
+import org.apache.kylin.source.jdbc.H2Database;
+import org.apache.kylin.util.JobFinishHelper;
+import org.apache.spark.SparkConf;
+import org.apache.spark.sql.SparderEnv;
+import org.apache.spark.sql.SparkSession;
+import org.apache.spark.sql.internal.StaticSQLConf;
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.mockito.Mockito;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.security.authentication.TestingAuthenticationToken;
+import org.springframework.security.core.context.SecurityContextHolder;
+
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
+import lombok.val;
+import lombok.var;
+import lombok.extern.slf4j.Slf4j;
+
+@Ignore("disable unstable test")
+@Slf4j
+public class SchemaChangeTest extends AbstractMVCIntegrationTestCase {
+
+    private static final String SQL_LOOKUP = "select cal_dt, week_beg_dt from edw.test_cal_dt";
+    private static final String SQL_DERIVED = "select test_sites.site_name, test_kylin_fact.lstg_format_name, sum(test_kylin_fact.price) as gmv, count(*) as trans_cnt \n"
+            + " from test_kylin_fact left join edw.test_cal_dt as test_cal_dt\n"
+            + " on test_kylin_fact.cal_dt = test_cal_dt.cal_dt  left join test_category_groupings\n"
+            + " on test_kylin_fact.leaf_categ_id = test_category_groupings.leaf_categ_id and test_kylin_fact.lstg_site_id = test_category_groupings.site_id\n"
+            + " left join edw.test_sites as test_sites  on test_kylin_fact.lstg_site_id = test_sites.site_id\n"
+            + " group by   test_sites.site_name, test_kylin_fact.lstg_format_name";
+    private static final String SQL_LOOKUP2 = "select categ_lvl3_name, categ_lvl2_name, site_id, meta_categ_name, leaf_categ_id  from test_category_groupings";
+    private static final String SQL_DERIVED2 = "select upd_user,count(1) as cnt\n"
+            + "from test_kylin_fact as test_kylin_fact\n"
+            + "left join test_category_groupings as test_category_groupings\n"
+            + "on test_kylin_fact.leaf_categ_id = test_category_groupings.leaf_categ_id and test_kylin_fact.lstg_site_id = test_category_groupings.site_id\n"
+            + "where upd_user not in ('user_y') group by upd_user";
+
+    private static final String TABLE_IDENTITY = "DEFAULT.TEST_CATEGORY_GROUPINGS";
+
+    protected static SparkConf sparkConf;
+    protected static SparkSession ss;
+
+    @Autowired
+    TableService tableService;
+
+    @Autowired
+    QueryService queryService;
+
+    @Autowired
+    protected UserService userService;
+
+    @BeforeClass
+    public static void beforeClass() {
+
+        if (Shell.MAC)
+            overwriteSystemPropBeforeClass("org.xerial.snappy.lib.name", "libsnappyjava.jnilib"); // for snappy
+
+        sparkConf = new SparkConf().setAppName(RandomUtil.randomUUIDStr()).setMaster("local[4]");
+        sparkConf.set("spark.serializer", "org.apache.spark.serializer.JavaSerializer");
+        sparkConf.set(StaticSQLConf.CATALOG_IMPLEMENTATION().key(), "in-memory");
+        sparkConf.set("spark.sql.shuffle.partitions", "1");
+        sparkConf.set(StaticSQLConf.WAREHOUSE_PATH().key(),
+                TempMetadataBuilder.TEMP_TEST_METADATA + "/spark-warehouse");
+
+        ss = SparkSession.builder().config(sparkConf).getOrCreate();
+        SparderEnv.setSparkSession(ss);
+    }
+
+    @AfterClass
+    public static void afterClass() {
+        ss.close();
+    }
+
+    @Before
+    public void setup() throws Exception {
+        setupPushdownEnv();
+        SecurityContextHolder.getContext()
+                .setAuthentication(new TestingAuthenticationToken("ADMIN", "ADMIN", Constant.ROLE_ADMIN));
+
+        NProjectManager projectManager = NProjectManager.getInstance(KylinConfig.getInstanceFromEnv());
+        ProjectInstance projectInstance = projectManager.getProject(getProject());
+        val overrideKylinProps = projectInstance.getOverrideKylinProps();
+        overrideKylinProps.put("kylin.query.force-limit", "-1");
+        overrideKylinProps.put("kylin.source.default", "9");
+        ProjectInstance projectInstanceUpdate = ProjectInstance.create(projectInstance.getName(),
+                projectInstance.getOwner(), projectInstance.getDescription(), overrideKylinProps);
+        projectManager.updateProject(projectInstance, projectInstanceUpdate.getName(),
+                projectInstanceUpdate.getDescription(), projectInstanceUpdate.getOverrideKylinProps());
+        projectManager.forceDropProject("broken_test");
+        projectManager.forceDropProject("bad_query_test");
+
+        val scheduler = NDefaultScheduler.getInstance(getProject());
+        scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()));
+
+        NExecutableManager originExecutableManager = NExecutableManager.getInstance(getTestConfig(), getProject());
+        NExecutableManager executableManager = Mockito.spy(originExecutableManager);
+
+        val config = KylinConfig.getInstanceFromEnv();
+        val dsMgr = NDataflowManager.getInstance(config, getProject());
+        // ready dataflow, segment, cuboid layout
+        var df = dsMgr.getDataflowByModelAlias("nmodel_basic");
+        // cleanup all segments first
+        val update = new NDataflowUpdate(df.getUuid());
+        update.setToRemoveSegsWithArray(df.getSegments().toArray(new NDataSegment[0]));
+        dsMgr.updateDataflow(update);
+        df = dsMgr.getDataflowByModelAlias("nmodel_basic");
+        val layouts = df.getIndexPlan().getAllLayouts();
+        val round1 = Lists.newArrayList(layouts);
+        val segmentRange = SegmentRange.TimePartitionedSegmentRange.createInfinite();
+        val toBuildLayouts = Sets.newLinkedHashSet(round1);
+        val execMgr = NExecutableManager.getInstance(config, getProject());
+        // append a full-range segment and build all layouts
+        val oneSeg = dsMgr.appendSegment(df, segmentRange);
+        val job = NSparkCubingJob.create(Sets.newHashSet(oneSeg), toBuildLayouts, "ADMIN", null);
+        // launch the job
+        execMgr.addJob(job);
+        JobFinishHelper.waitJobFinish(config, getProject(), job.getId(), 600 * 1000);
+        Preconditions.checkArgument(executableManager.getJob(job.getId()).getStatus() == ExecutableState.SUCCEED);
+
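+        // merge the build job's output back into the dataflow metadata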
+        val buildStore = ExecutableUtils.getRemoteStore(config, job.getSparkCubingStep());
+        val merger = new AfterBuildResourceMerger(config, getProject());
+        val layoutIds = toBuildLayouts.stream().map(LayoutEntity::getId).collect(Collectors.toSet());
+        merger.mergeAfterIncrement(df.getUuid(), oneSeg.getId(), layoutIds, buildStore);
+
+        val indexManager = NIndexPlanManager.getInstance(getTestConfig(), getProject());
+        indexManager.updateIndexPlan("abe3bf1a-c4bc-458d-8278-7ea8b00f5e96", copyForWrite -> {
+            List<IndexEntity> indexes = copyForWrite.getIndexes().stream().peek(i -> {
+                if (i.getId() == 0) {
+                    i.setLayouts(Lists.newArrayList(i.getLayouts().get(0)));
+                }
+            }).collect(Collectors.toList());
+            copyForWrite.setIndexes(indexes);
+        });
+        userService.createUser(new ManagedUser("ADMIN", "KYLIN", false,
+                Collections.singletonList(new UserGrantedAuthority("ROLE_ADMIN"))));
+    }
+
+    @After
+    public void teardown() throws Exception {
+        cleanPushdownEnv();
+
+        NDefaultScheduler.destroyInstance();
+    }
+
+    @Test
+    public void testSnapshotModifyTimeAfterReloadTable() {
+        val tableManager = NTableMetadataManager.getInstance(getTestConfig(), getProject());
+        val table = tableManager.getTableDesc(TABLE_IDENTITY);
+        long snapshotLastModify = System.currentTimeMillis();
+        table.setLastSnapshotPath("mockpath");
+        table.setSnapshotLastModified(snapshotLastModify);
+        tableManager.saveSourceTable(table);
+        tableService.reloadTable(getProject(), TABLE_IDENTITY, false, -1, true);
+        val newTable = tableManager.getTableDesc(TABLE_IDENTITY);
+        Assert.assertEquals(snapshotLastModify, newTable.getSnapshotLastModified());
+    }
+
+    @Test
+    public void testAddColumn() throws Exception {
+        addColumn(TABLE_IDENTITY, new ColumnDesc("", "tmp1", "bigint", "", "", "", null));
+        tableService.reloadTable(getProject(), TABLE_IDENTITY, false, -1, true);
+        assertSqls();
+    }
+
+    @Test
+    public void testRemoveColumn() throws Exception {
+        removeColumn(TABLE_IDENTITY, "SRC_ID");
+        tableService.reloadTable(getProject(), TABLE_IDENTITY, false, -1, true);
+        assertSqls();
+    }
+
+    @Ignore("TODO: remove or adapt")
+    @Test
+    public void testChangeColumnType() throws Exception {
+        changeColumns(TABLE_IDENTITY, Sets.newHashSet("SRC_ID"), columnDesc -> columnDesc.setDatatype("string"));
+        tableService.reloadTable(getProject(), TABLE_IDENTITY, false, -1, true);
+        assertSqls();
+    }
+
+    @Test
+    public void testChangeColumnOrder() throws Exception {
+        changeColumns(TABLE_IDENTITY, Sets.newHashSet("SRC_ID", "GCS_ID"), columnDesc -> {
+            if ("SRC_ID".equals(columnDesc.getName())) {
+                columnDesc.setId("32");
+            } else {
+                columnDesc.setId("35");
+            }
+        });
+        Pair<String, List<String>> pair = tableService.reloadTable(getProject(), TABLE_IDENTITY, false, -1, true);
+        // reordering columns should not trigger a reload
+        Assert.assertEquals(0, pair.getSecond().size());
+    }
+
+    private void assertSqls() throws Exception {
+        for (Pair<String, Boolean> pair : Arrays.asList(Pair.newPair(SQL_LOOKUP, false),
+                Pair.newPair(SQL_DERIVED, false), Pair.newPair(SQL_LOOKUP2, true), Pair.newPair(SQL_DERIVED2, true))) {
+            val req = new SQLRequest();
+            req.setSql(pair.getFirst());
+            req.setProject(getProject());
+            req.setUsername("ADMIN");
+            val response = queryService.query(req);
+            with().pollInterval(10, TimeUnit.MILLISECONDS) //
+                    .and().with().pollDelay(10, TimeUnit.MILLISECONDS) //
+                    .await().atMost(100000, TimeUnit.MILLISECONDS) //
+                    .untilAsserted(() -> {
+                        String message = pair.getFirst() + " failed";
+                        Assert.assertEquals(message, pair.getSecond(), response.isQueryPushDown());
+                    });
+        }
+    }
+
+    private void changeColumns(String tableIdentity, Set<String> columns, Consumer<ColumnDesc> changer)
+            throws IOException {
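+        // rewrite the table's JSON definition on disk so a later reloadTable can pick up the change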
+        val tableManager = NTableMetadataManager.getInstance(getTestConfig(), getProject());
+        val factTable = tableManager.getTableDesc(tableIdentity);
+        String resPath = KylinConfig.getInstanceFromEnv().getMetadataUrl().getIdentifier();
+        String tablePath = resPath + "/../data/tableDesc/" + tableIdentity + ".json";
+        val tableMeta = JsonUtil.readValue(new File(tablePath), TableDesc.class);
+        val newColumns = Stream.of(tableManager.copyForWrite(factTable).getColumns()).peek(col -> {
+            if (columns.contains(col.getName())) {
+                changer.accept(col);
+            }
+        }).sorted(Comparator.comparing(col -> Integer.parseInt(col.getId()))).toArray(ColumnDesc[]::new);
+        tableMeta.setColumns(newColumns);
+        JsonUtil.writeValueIndent(new FileOutputStream(tablePath), tableMeta);
+    }
+
+    private void addColumn(String tableIdentity, ColumnDesc... columns) throws IOException {
+        val tableManager = NTableMetadataManager.getInstance(getTestConfig(), getProject());
+        val factTable = tableManager.getTableDesc(tableIdentity);
+        String resPath = KylinConfig.getInstanceFromEnv().getMetadataUrl().getIdentifier();
+        String tablePath = resPath + "/../data/tableDesc/" + tableIdentity + ".json";
+        val tableMeta = JsonUtil.readValue(new File(tablePath), TableDesc.class);
+        val newColumns = Lists.newArrayList(factTable.getColumns());
+        long maxId = newColumns.stream().mapToLong(col -> Long.parseLong(col.getId())).max().orElse(0);
+        for (ColumnDesc column : columns) {
+            maxId++;
+            column.setId("" + maxId);
+            newColumns.add(column);
+        }
+        tableMeta.setColumns(newColumns.toArray(new ColumnDesc[0]));
+        JsonUtil.writeValueIndent(new FileOutputStream(tablePath), tableMeta);
+    }
+
+    private void removeColumn(String tableIdentity, String... column) throws IOException {
+        val tableManager = NTableMetadataManager.getInstance(getTestConfig(), getProject());
+        val factTable = tableManager.getTableDesc(tableIdentity);
+        String resPath = KylinConfig.getInstanceFromEnv().getMetadataUrl().getIdentifier();
+        String tablePath = resPath + "/../data/tableDesc/" + tableIdentity + ".json";
+        val tableMeta = JsonUtil.readValue(new File(tablePath), TableDesc.class);
+        val columns = Sets.newHashSet(column);
+        val newColumns = Stream.of(factTable.getColumns()).filter(col -> !columns.contains(col.getName()))
+                .toArray(ColumnDesc[]::new);
+        tableMeta.setColumns(newColumns);
+        JsonUtil.writeValueIndent(new FileOutputStream(tablePath), tableMeta);
+    }
+
+    private void setupPushdownEnv() throws Exception {
+        getTestConfig().setProperty("kylin.query.pushdown.runner-class-name",
+                "io.kyligence.kap.query.pushdown.PushDownRunnerJdbcImpl");
+        getTestConfig().setProperty("kylin.query.pushdown-enabled", "true");
+        // Load H2 Tables (inner join)
+        Connection h2Connection = DriverManager.getConnection("jdbc:h2:mem:db_default;DB_CLOSE_DELAY=-1", "sa", "");
+        H2Database h2DB = new H2Database(h2Connection, getTestConfig(), "default");
+        h2DB.loadAllTables();
+
+        overwriteSystemProp("kylin.query.pushdown.jdbc.url", "jdbc:h2:mem:db_default;SCHEMA=DEFAULT");
+        overwriteSystemProp("kylin.query.pushdown.jdbc.driver", "org.h2.Driver");
+        overwriteSystemProp("kylin.query.pushdown.jdbc.username", "sa");
+        overwriteSystemProp("kylin.query.pushdown.jdbc.password", "");
+    }
+
+    private void cleanPushdownEnv() throws Exception {
+        getTestConfig().setProperty("kylin.query.pushdown-enabled", "false");
+        // Close the in-memory H2 connection used for pushdown
+        Connection h2Connection = DriverManager.getConnection("jdbc:h2:mem:db_default", "sa", "");
+        h2Connection.close();
+    }
+
+    protected String getProject() {
+        return "default";
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/metadata/MetaUtilsTest.java b/src/kylin-it/src/test/java/org/apache/kylin/metadata/MetaUtilsTest.java
new file mode 100644
index 0000000000..1bd4cece4d
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/metadata/MetaUtilsTest.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kylin.metadata;
+
+import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
+import org.apache.kylin.metadata.filter.function.LikeMatchers;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import lombok.val;
+
+
+public class MetaUtilsTest extends NLocalFileMetadataTestCase {
+
+    @Before
+    public void setup() throws Exception {
+        createTestMetadata();
+    }
+
+    @After
+    public void cleanup() {
+        cleanupTestMetadata();
+    }
+
+    @Test
+    public void testLikeMatchers() {
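+        // with escape char "\", "\_" and "\%" match literal "_" and "%"; the trailing "%" is an unescaped wildcard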
+        String likePattern = "abc\\_\\%%";
+        String target = "abc_%abc";
+        val matcher = new LikeMatchers.DefaultLikeMatcher(likePattern, "\\");
+        Assert.assertTrue(matcher.matches(target));
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/metadata/MetadataPerfTest.java b/src/kylin-it/src/test/java/org/apache/kylin/metadata/MetadataPerfTest.java
new file mode 100644
index 0000000000..5ab7b19e68
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/metadata/MetadataPerfTest.java
@@ -0,0 +1,467 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kylin.metadata;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.sql.Types;
+import java.time.LocalDate;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Objects;
+import java.util.concurrent.ConcurrentSkipListSet;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+import java.util.stream.Stream;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.io.IOUtils;
+import org.apache.kylin.common.persistence.metadata.MetadataStore;
+import org.apache.kylin.common.util.JsonUtil;
+import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
+import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.common.util.RandomUtil;
+import org.apache.kylin.common.util.Unsafe;
+import org.apache.kylin.metadata.cube.model.IndexEntity;
+import org.apache.kylin.metadata.cube.model.IndexPlan;
+import org.apache.kylin.metadata.cube.model.LayoutEntity;
+import org.apache.kylin.metadata.cube.model.NDataLayout;
+import org.apache.kylin.metadata.cube.model.NDataSegDetails;
+import org.apache.kylin.metadata.cube.model.NDataSegment;
+import org.apache.kylin.metadata.cube.model.NDataflow;
+import org.apache.kylin.metadata.model.NDataModel;
+import org.apache.kylin.metadata.model.SegmentRange;
+import org.apache.kylin.metadata.model.SegmentStatusEnum;
+import org.apache.kylin.metadata.model.Segments;
+import org.apache.kylin.tool.MetadataTool;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.springframework.jdbc.core.JdbcTemplate;
+import org.springframework.jdbc.datasource.DataSourceTransactionManager;
+
+import com.google.common.base.Joiner;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
+import lombok.val;
+import lombok.var;
+import lombok.extern.slf4j.Slf4j;
+import scala.Tuple3;
+
+@Slf4j
+@RunWith(Parameterized.class)
+@Ignore
+public class MetadataPerfTest extends NLocalFileMetadataTestCase {
+
+    private static final String INSERT_SQL = "insert into %s ( META_TABLE_KEY, META_TABLE_CONTENT, META_TABLE_TS, META_TABLE_MVCC) values (?, ?, ?, ?)";
+
+    private static final String TEMPLATE_FOLDER = "project_0";
+
+    private static final String TEMPLATE_UUID = "dc2efa94-76b5-4a82-b080-5c783ead85f8";
+
+    private static final String TEMPLATE_EXEC_UUID = "d5a549fb-275f-4464-b2b0-a96c7cdadbe2";
+
+    private static final int SEGMENT_SIZE = 100;
+
+    @Parameterized.Parameters
+    public static Collection<Object[]> params() {
+        return Arrays.asList(new Object[][] { //
+                { 1, 100 }, { 2, 100 }, { 5, 100 }, //
+                //                { 10, 100 }, { 20, 100 }, { 50, 100 } //
+        });
+    }
+
+    private final int projectSize;
+
+    private final int modelSize;
+
+    public MetadataPerfTest(int pSize, int mSize) {
+        this.projectSize = pSize;
+        this.modelSize = mSize;
+    }
+
+    @Before
+    public void setup() throws Exception {
+        createTestMetadata();
+        val config = getTestConfig();
+        config.setProperty("kylin.metadata.url", "kylin2_" + projectSize + "_" + modelSize
+                + "@jdbc,url=jdbc:mysql://sandbox:3306/kylin?rewriteBatchedStatements=true");
+    }
+
+    @After
+    public void cleanup() {
+        cleanupTestMetadata();
+    }
+
+    @Test
+    public void backup() throws IOException {
+        long start = System.currentTimeMillis();
+        val file = Paths.get("meta_backups", projectSize + "", modelSize + "").toFile();
+        FileUtils.forceMkdir(file);
+        log.info("start backup for {}", getTestConfig().getMetadataUrl());
+        log.info("backup dir is {}", file.getAbsolutePath());
+        val tool = new MetadataTool(getTestConfig());
+        tool.execute(new String[] { "-backup", "-dir", file.getAbsolutePath() });
+        log.info("backup finished for {}", getTestConfig().getMetadataUrl());
+        long end = System.currentTimeMillis();
+        log.info("usage time: {} seconds", (end - start) / 1000.0);
+    }
+
+    @Test
+    public void restore() {
+        long start = System.currentTimeMillis();
+        val file = Paths.get("meta_backups", projectSize + "", modelSize + "").toFile();
+        log.info("start restore for {}", getTestConfig().getMetadataUrl());
+        log.info("restore dir is {}", file.getAbsolutePath());
+        val tool = new MetadataTool(getTestConfig());
+        tool.execute(new String[] { "-restore", "-dir", file.getAbsolutePath(), "--after-truncate" });
+        log.info("restore finished for {}", getTestConfig().getMetadataUrl());
+        long end = System.currentTimeMillis();
+        log.info("usage time: {} seconds", (end - start) / 1000.0);
+    }
+
+    private static final String COUNT_ALL_SQL = "select count(1) from %s";
+    private static final String SELECT_ALL_KEY_SQL = "select meta_table_key from %s where META_TABLE_KEY > '%s' order by META_TABLE_KEY limit %s";
+
+    @Test
+    public void loadIds() throws Exception {
+        val jdbcTemplate = getJdbcTemplate();
+        val table = getTestConfig().getMetadataUrl().getIdentifier();
+        long count = jdbcTemplate.queryForObject(String.format(Locale.ROOT, COUNT_ALL_SQL, table), Long.class);
+        long offset = 0;
+        long pageSize = 1000;
+        List<String> result = Lists.newArrayList();
+        var prevKey = "/";
+        while (offset < count) {
+            for (String resource : jdbcTemplate.queryForList(
+                    String.format(Locale.ROOT, SELECT_ALL_KEY_SQL, table, prevKey, pageSize), String.class)) {
+                //                result.add(resource);
+                log.debug("just print it {}", resource);
+            }
+            offset += pageSize;
+        }
+        log.info("all path size: {}", result.size());
+    }
+
+    @Test
+    public void prepareData() throws Exception {
+        val skip = Boolean.parseBoolean(System.getProperty("skipPrepare", "false"));
+        if (skip) {
+            return;
+        }
+        val jdbcTemplate = getJdbcTemplate();
+        log.debug("drop table if exists");
+        val table = getTestConfig().getMetadataUrl().getIdentifier();
+        jdbcTemplate.update("drop table if exists " + table);
+        val metaStore = MetadataStore.createMetadataStore(getTestConfig());
+        log.debug("create a new table");
+        val method = metaStore.getClass().getDeclaredMethod("createIfNotExist");
+        Unsafe.changeAccessibleObject(method, true);
+        method.invoke(metaStore);
+
+        val START_ID = 1000;
+        generateProject(START_ID, projectSize, TEMPLATE_FOLDER);
+
+        val allIds = IntStream.range(START_ID, projectSize + START_ID).parallel().boxed()
+                .collect(Collectors.toCollection(ConcurrentSkipListSet::new));
+        val projectParams = IntStream.range(START_ID, projectSize + START_ID).mapToObj(i -> {
+            val projectFile = new File(new File(TEMPLATE_FOLDER).getParentFile(),
+                    "tmp_" + i + "/project_" + i + "/project.json");
+            try {
+                return new Object[] { "/_global/project/project_" + i,
+                        IOUtils.toByteArray(new FileInputStream(projectFile)), projectFile.lastModified(), 0L };
+            } catch (IOException e) {
+                return null;
+            }
+        }).filter(Objects::nonNull).collect(Collectors.toList());
+        jdbcTemplate.batchUpdate(String.format(Locale.ROOT, INSERT_SQL, table), projectParams);
+        Runnable run = () -> IntStream.range(START_ID, projectSize + START_ID).forEach(i -> {
+            try {
+                val dstFolder = new File(new File(TEMPLATE_FOLDER).getParentFile(), "tmp_" + i + "/project_" + i);
+                val root = dstFolder.getParentFile();
+                val files = FileUtils.listFiles(root, null, true);
+                var sorted = Lists.newArrayList(files);
+                sorted.sort(Comparator.comparing(f -> f.getPath().replace(root.getPath(), "")));
+                log.info("start import to DB, all size is {}", sorted.size());
+                var params = Lists.<Object[]> newArrayList();
+                int index = 1;
+                int[] argTypes = new int[] { Types.VARCHAR, Types.BINARY, Types.BIGINT, Types.BIGINT };
+                for (File f : sorted) {
+                    if (f.getName().startsWith(".")) {
+                        continue;
+                    }
+                    try (val fis = new FileInputStream(f)) {
+                        val resPath = f.getPath().replace(root.getPath(), "");
+                        val bs = IOUtils.toByteArray(fis);
+                        params.add(new Object[] { resPath, bs, f.lastModified(), 0L });
+                    } catch (IOException e) {
+                        throw new IllegalArgumentException("cannot read file " + f, e);
+                    }
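+                    // flush to the metadata table every 2000 files to keep each batch insert bounded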
+                    if (index % 2000 == 0) {
+                        log.debug("batch {} {}", index, params.size());
+                        jdbcTemplate.batchUpdate(String.format(Locale.ROOT, INSERT_SQL, table), params, argTypes);
+                        params = Lists.newArrayList();
+                    }
+                    index++;
+                }
+                if (params.size() > 0) {
+                    jdbcTemplate.batchUpdate(String.format(Locale.ROOT, INSERT_SQL, table), params);
+                }
+                allIds.remove(i);
+                FileUtils.deleteQuietly(dstFolder.getParentFile());
+            } catch (Exception e) {
+                log.warn("some error", e);
+            }
+        });
+        run.run();
+        //        new ForkJoinPool(Math.min(projectSize, 30)).submit(run).join();
+        log.debug("finish");
+        if (!allIds.isEmpty()) {
+            log.info("these are failed: {}", allIds);
+            Assert.fail();
+        }
+    }
+
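+    // Builds one fully populated project from the template (cloning the template model until the project
+    // holds modelSize models under fresh UUIDs), then copies that project directory (size - 1) more times,
+    // rewriting the project UUID and job references for each copy.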
+    private void generateProject(int startId, int size, String templateFolder) throws IOException {
+        if (size < 1) {
+            return;
+        }
+        val detailJobMap = Maps.<String, String> newHashMap();
+        for (File file : FileUtils.listFiles(new File(TEMPLATE_FOLDER, "dataflow_details/" + TEMPLATE_UUID), null,
+                false)) {
+            if (file.getName().startsWith(".")) {
+                continue;
+            }
+            val details = JsonUtil.readValue(file, NDataSegDetails.class);
+            detailJobMap.put(details.getId(), details.getLayoutById(1).getBuildJobId());
+        }
+        val projectName = "project_" + startId;
+        log.info("start generate data for {}", projectName);
+        val dstFolder = new File(new File(templateFolder).getParentFile(), "tmp_" + startId + "/" + projectName);
+        FileUtils.copyDirectory(new File(templateFolder), dstFolder,
+                pathname -> !pathname.getPath().contains("execute"));
+        for (int j = 1; j < modelSize; j++) {
+            val newId = RandomUtil.randomUUIDStr();
+            for (String sub : new String[] { "model_desc", "index_plan", "dataflow" }) {
+                val file = new File(dstFolder, sub + "/" + TEMPLATE_UUID + ".json");
+                val newFile = new File(dstFolder, sub + "/" + newId + ".json");
+                FileUtils.copyFile(file, newFile);
+                replaceInFile(newFile,
+                        Arrays.asList(Pair.newPair(TEMPLATE_UUID, newId), Pair.newPair("model_0", "model_" + j)));
+            }
+            FileUtils.copyDirectory(new File(TEMPLATE_FOLDER, "dataflow_details/" + TEMPLATE_UUID),
+                    new File(dstFolder, "dataflow_details/" + newId));
+            val templateJobs = FileUtils.listFiles(new File(TEMPLATE_FOLDER, "execute"), null, false);
+            val newJobsMap = templateJobs.stream().map(f -> Pair.newPair(f.getName(), RandomUtil.randomUUIDStr()))
+                    .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond));
+            newJobsMap.forEach((k, v) -> {
+                try {
+                    val name = "execute/" + k;
+                    val file = new File(TEMPLATE_FOLDER, name);
+                    val newFile = new File(dstFolder, name.replace(k, v));
+                    FileUtils.copyFile(file, newFile);
+                    replaceInFile(newFile, Arrays.asList(Pair.newPair(TEMPLATE_UUID, newId), Pair.newPair(k, v),
+                            Pair.newPair("project_0", projectName)));
+                } catch (IOException ignore) {
+                }
+            });
+            for (File file : FileUtils.listFiles(new File(dstFolder, "dataflow_details/" + newId), null, false)) {
+                if (file.getName().startsWith(".")) {
+                    continue;
+                }
+                val oldId = detailJobMap.get(file.getName().split("\\.")[0]);
+                replaceInFile(file,
+                        Arrays.asList(Pair.newPair(TEMPLATE_UUID, newId), Pair.newPair(oldId, newJobsMap.get(oldId))));
+            }
+        }
+
+        for (int i = 1; i < size; i++) {
+            val projectName2 = "project_" + (startId + i);
+            val dstFolder2 = new File(new File(templateFolder).getParentFile(),
+                    "tmp_" + (startId + i) + "/" + projectName2);
+            if (dstFolder2.exists())
+                continue;
+            FileUtils.copyDirectory(dstFolder, dstFolder2);
+
+            File projectJson = new File(dstFolder2 + File.separator, "project.json");
+            var projectJsonContent = new String(Files.readAllBytes(projectJson.toPath()), StandardCharsets.UTF_8);
+            projectJsonContent = projectJsonContent.replaceAll("958983a5-fad8-4057-9d70-cd6e5a2374af",
+                    RandomUtil.randomUUIDStr());
+            Files.write(projectJson.toPath(), projectJsonContent.getBytes(StandardCharsets.UTF_8));
+
+            val sub = "execute";
+            for (File file : FileUtils.listFiles(new File(dstFolder, sub), null, true)) {
+                var content = new String(Files.readAllBytes(file.toPath()), StandardCharsets.UTF_8);
+                content = content.replaceAll("project_0", projectName2);
+                Files.write(file.toPath(), content.getBytes(StandardCharsets.UTF_8));
+            }
+        }
+    }
+
+    private void replaceInFile(File file, List<Pair<String, String>> pairs) throws IOException {
+        var content = FileUtils.readFileToString(file);
+        for (Pair<String, String> pair : pairs) {
+            content = content.replaceAll(pair.getFirst(), pair.getSecond());
+        }
+        FileUtils.writeStringToFile(file, content, false);
+    }
+
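+    // Test-only accessors: the JDBC metadata store keeps its JdbcTemplate and transaction manager in
+    // private fields, so read them via reflection.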
+    private JdbcTemplate getJdbcTemplate() throws Exception {
+        val metaStore = MetadataStore.createMetadataStore(getTestConfig());
+        val field = metaStore.getClass().getDeclaredField("jdbcTemplate");
+        Unsafe.changeAccessibleObject(field, true);
+        return (JdbcTemplate) field.get(metaStore);
+    }
+
+    private DataSourceTransactionManager getTransactionManager() throws Exception {
+        val metaStore = MetadataStore.createMetadataStore(getTestConfig());
+        val field = metaStore.getClass().getDeclaredField("transactionManager");
+        Unsafe.changeAccessibleObject(field, true);
+        return (DataSourceTransactionManager) field.get(metaStore);
+    }
+
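+    // Regenerates the on-disk template: three indexes (two aggregate indexes and one table index) with one
+    // layout per dimension permutation, SEGMENT_SIZE monthly segments starting 2000-01-01, plus matching
+    // segment details and execute job files.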
+    @Test
+    public void prepareTemplate() throws IOException {
+        val indexFile = new File(TEMPLATE_FOLDER, "index_plan/" + TEMPLATE_UUID + ".json");
+        val indexPlan = JsonUtil.readValue(indexFile, IndexPlan.class);
+        List<Long> layoutIds = Lists.newArrayList();
+        List<IndexEntity> indexes = Stream
+                .of(Tuple3.apply(Lists.newArrayList(1, 2, 3, 4), Lists.newArrayList(NDataModel.MEASURE_ID_BASE), 0L),
+                        Tuple3.apply(Lists.newArrayList(5, 6, 7, 8),
+                                Lists.newArrayList(NDataModel.MEASURE_ID_BASE + 1, NDataModel.MEASURE_ID_BASE + 2),
+                                IndexEntity.INDEX_ID_STEP),
+                        Tuple3.apply(Lists.newArrayList(1, 2), Lists.newArrayList(NDataModel.MEASURE_ID_BASE),
+                                IndexEntity.TABLE_INDEX_START_ID))
+                .map(t -> {
+                    val index1 = new IndexEntity();
+                    index1.setId(t._3());
+                    index1.setDimensions(t._1());
+                    index1.setMeasures(t._2());
+                    val id = new AtomicLong(t._3() + 1);
+                    index1.setLayouts(permutation(index1.getDimensions()).stream().map(ds -> {
+                        val entity = new LayoutEntity();
+                        entity.setId(id.getAndIncrement());
+                        ds.addAll(index1.getMeasures());
+                        entity.setColOrder(ds);
+                        entity.setAuto(true);
+                        layoutIds.add(entity.getId());
+                        return entity;
+                    }).collect(Collectors.toList()));
+                    return index1;
+                }).collect(Collectors.toList());
+        indexPlan.setIndexes(indexes);
+        JsonUtil.writeValueIndent(new FileOutputStream(indexFile), indexPlan);
+
+        val dfFile = new File(TEMPLATE_FOLDER, "dataflow/" + TEMPLATE_UUID + ".json");
+        val df = JsonUtil.readValue(dfFile, NDataflow.class);
+        val segments = new Segments<NDataSegment>();
+        for (int i = 0; i < SEGMENT_SIZE; i++) {
+            val start = LocalDate.parse("2000-01-01").plusMonths(i);
+            val end = start.plusMonths(1);
+            val seg = NDataSegment.empty();
+            val segRange = new SegmentRange.TimePartitionedSegmentRange(start.toString(), end.toString());
+            seg.setId(RandomUtil.randomUUIDStr());
+            seg.setName(Segments.makeSegmentName(segRange));
+            seg.setCreateTimeUTC(System.currentTimeMillis());
+            seg.setSegmentRange(segRange);
+            seg.setStatus(SegmentStatusEnum.READY);
+            segments.add(seg);
+        }
+        df.setSegments(segments);
+        JsonUtil.writeValueIndent(new FileOutputStream(dfFile), df);
+
+        val detailJobMap = Maps.<String, String> newHashMap();
+        for (NDataSegment segment : segments) {
+            val detailFile = new File(TEMPLATE_FOLDER,
+                    "dataflow_details/" + TEMPLATE_UUID + "/" + segment.getId() + ".json");
+            detailJobMap.put(segment.getId(), RandomUtil.randomUUIDStr());
+            val detail = new NDataSegDetails();
+            detail.setUuid(segment.getId());
+            detail.setDataflowId(df.getUuid());
+            detail.setLayouts(layoutIds.stream().map(id -> {
+                val layout = new NDataLayout();
+                layout.setLayoutId(id);
+                layout.setBuildJobId(detailJobMap.get(segment.getId()));
+                layout.setByteSize(1000);
+                layout.setFileCount(1);
+                layout.setRows(1024);
+                layout.setSourceRows(1025);
+                layout.setSourceByteSize(0);
+                return layout;
+            }).collect(Collectors.toList()));
+            JsonUtil.writeValueIndent(new FileOutputStream(detailFile), detail);
+        }
+
+        for (Map.Entry<String, String> entry : detailJobMap.entrySet()) {
+            val newExecId = entry.getValue();
+            val name = "execute/" + TEMPLATE_EXEC_UUID;
+            val file = new File(TEMPLATE_FOLDER, name);
+            val newFile = new File(TEMPLATE_FOLDER, name.replace(TEMPLATE_EXEC_UUID, newExecId));
+            FileUtils.copyFile(file, newFile);
+            replaceInFile(newFile,
+                    Arrays.asList(Pair.newPair("1,20001,10001", Joiner.on(",").join(layoutIds)),
+                            Pair.newPair(TEMPLATE_EXEC_UUID, newExecId),
+                            Pair.newPair("facd8577-8fca-48f5-803c-800ea75c8495", entry.getKey())));
+        }
+
+    }
+
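+    // Returns every permutation of the input: recursively permute all elements but the last, then insert
+    // the last element at each possible position of every partial permutation (see merge below).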
+    public static List<List<Integer>> permutation(List<Integer> s) {
+        List<List<Integer>> res = new ArrayList<>();
+        if (s.size() == 1) {
+            res.add(s);
+        } else if (s.size() > 1) {
+            int lastIndex = s.size() - 1;
+            List<Integer> last = s.subList(lastIndex, lastIndex + 1);
+            List<Integer> rest = s.subList(0, lastIndex);
+            res = merge(permutation(rest), last);
+        }
+        return res;
+    }
+
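+    // For each list s in `list`, inserts c at every position of s, yielding s.size() + 1 variants per list.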
+    public static List<List<Integer>> merge(List<List<Integer>> list, List<Integer> c) {
+        ArrayList<List<Integer>> res = new ArrayList<>();
+        for (List<Integer> s : list) {
+            for (int i = 0; i <= s.size(); ++i) {
+                List<Integer> ps = Lists.newArrayList(s);
+                ps.addAll(i, c);
+                res.add(ps);
+            }
+        }
+        return res;
+    }
+
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/metadata/SQLConfTest.java b/src/kylin-it/src/test/java/org/apache/kylin/metadata/SQLConfTest.java
new file mode 100644
index 0000000000..b443a12e96
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/metadata/SQLConfTest.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kylin.metadata;
+
+import org.apache.kylin.common.util.RandomUtil;
+import org.apache.kylin.common.util.TempMetadataBuilder;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.spark.SparkConf;
+import org.apache.spark.sql.KylinSession$;
+import org.apache.spark.sql.SparkSession;
+import org.apache.spark.sql.internal.SQLConf;
+import org.apache.spark.sql.internal.StaticSQLConf;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class SQLConfTest extends NLocalWithSparkSessionTest {
+
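+    // Verifies that the KylinSession built in cluster mode registers its SQLConf as the thread default,
+    // and that newSession() leaves both the default and the session-level conf untouched.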
+    @Test
+    public void testSQLConf() {
+        String shuffleName = "spark.sql.shuffle.partitions";
+        SparkConf sparkConf = new SparkConf().setAppName(RandomUtil.randomUUIDStr()).setMaster("local[1]");
+        sparkConf.set("spark.serializer", "org.apache.spark.serializer.JavaSerializer");
+        sparkConf.set(StaticSQLConf.CATALOG_IMPLEMENTATION().key(), "in-memory");
+        sparkConf.set(shuffleName, "1");
+        sparkConf.set(StaticSQLConf.WAREHOUSE_PATH().key(),
+                TempMetadataBuilder.TEMP_TEST_METADATA + "/spark-warehouse");
+        sparkConf.set("spark.sql.ansi.enabled", "true");
+
+        SparkSession.Builder sessionBuilder = SparkSession.builder()
+                .enableHiveSupport().config(sparkConf)
+                .config("mapreduce.fileoutputcommitter.marksuccessfuljobs", "false");
+        SparkSession.clearActiveSession();
+        SparkSession.clearDefaultSession();
+        SparkSession kylinSession = KylinSession$.MODULE$.KylinBuilder(sessionBuilder).buildCluster().getOrCreateKylinSession();
+        SQLConf defaultSQLConf = SQLConf.get();
+        SQLConf kylinSessionSQLConf = kylinSession.sessionState().conf();
+        Assert.assertEquals(kylinSessionSQLConf, defaultSQLConf);
+        kylinSession.newSession();
+        SQLConf defaultSQLConfAfter = SQLConf.get();
+        SQLConf kylinSessionSQLConfAfter = kylinSession.sessionState().conf();
+        Assert.assertEquals(defaultSQLConf, defaultSQLConfAfter);
+        Assert.assertEquals(kylinSessionSQLConf, kylinSessionSQLConfAfter);
+    }
+
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/AutoMergeTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/AutoMergeTest.java
new file mode 100644
index 0000000000..5ec27dae8f
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/AutoMergeTest.java
@@ -0,0 +1,957 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kylin.newten;
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.lang.reflect.InvocationTargetException;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.List;
+import java.util.Locale;
+import java.util.TimeZone;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.persistence.RootPersistentEntity;
+import org.apache.kylin.common.util.DateFormat;
+import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
+import org.apache.kylin.common.util.Unsafe;
+import org.apache.kylin.engine.spark.job.NSparkMergingJob;
+import org.apache.kylin.job.execution.AbstractExecutable;
+import org.apache.kylin.job.execution.NExecutableManager;
+import org.apache.kylin.job.manager.SegmentAutoMergeUtil;
+import org.apache.kylin.junit.TimeZoneTestRunner;
+import org.apache.kylin.metadata.cube.model.NDataLoadingRange;
+import org.apache.kylin.metadata.cube.model.NDataLoadingRangeManager;
+import org.apache.kylin.metadata.cube.model.NDataSegment;
+import org.apache.kylin.metadata.cube.model.NDataflow;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
+import org.apache.kylin.metadata.cube.model.NDataflowUpdate;
+import org.apache.kylin.metadata.model.AutoMergeTimeEnum;
+import org.apache.kylin.metadata.model.ManagementType;
+import org.apache.kylin.metadata.model.NDataModel;
+import org.apache.kylin.metadata.model.NDataModelManager;
+import org.apache.kylin.metadata.model.SegmentRange;
+import org.apache.kylin.metadata.model.SegmentStatusEnum;
+import org.apache.kylin.metadata.model.Segments;
+import org.apache.kylin.metadata.model.VolatileRange;
+import org.apache.kylin.metadata.project.NProjectManager;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import lombok.val;
+
+@RunWith(TimeZoneTestRunner.class)
+public class AutoMergeTest extends NLocalFileMetadataTestCase {
+
+    private static final String DEFAULT_PROJECT = "default";
+
+    @BeforeClass
+    public static void beforeAll() {
+        // load NSparkMergingJob class
+        new NSparkMergingJob();
+    }
+
+    @Before
+    public void setUp() throws Exception {
+        this.createTestMetadata();
+    }
+
+    @After
+    public void tearDown() throws Exception {
+        this.cleanupTestMetadata();
+    }
+
+    private void removeAllSegments() {
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataflow df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        // remove the existing segments
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToRemoveSegs(df.getSegments().toArray(new NDataSegment[0]));
+        dataflowManager.updateDataflow(update);
+    }
+
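+    // Enables auto-merge on the project and invokes SegmentAutoMergeUtil directly, standing in for a
+    // successful segment build; the tests then assert on the merge jobs it schedules.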
+    private void mockAddSegmentSuccess()
+            throws InvocationTargetException, IllegalAccessException, NoSuchMethodException {
+        val dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        val df = dataflowManager.getDataflowByModelAlias("nmodel_basic");
+        val prjManager = NProjectManager.getInstance(getTestConfig());
+        val prj = prjManager.getProject(DEFAULT_PROJECT);
+        val copy = prjManager.copyForWrite(prj);
+        copy.getSegmentConfig().setAutoMergeEnabled(true);
+        prjManager.updateProject(copy);
+        SegmentAutoMergeUtil.autoMergeSegments(DEFAULT_PROJECT, df.getUuid(), "ADMIN");
+    }
+
+    private void createDataloadingRange() throws IOException {
+        NDataLoadingRange dataLoadingRange = new NDataLoadingRange();
+        dataLoadingRange.setTableName("DEFAULT.TEST_KYLIN_FACT");
+        dataLoadingRange.setColumnName("TEST_KYLIN_FACT.CAL_DT");
+        NDataLoadingRangeManager.getInstance(getTestConfig(), DEFAULT_PROJECT).createDataLoadingRange(dataLoadingRange);
+    }
+
+    @Test
+    public void testRetention_2Week() throws Exception {
+        removeAllSegments();
+        createDataloadingRange();
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataflow df = dataflowManager.getDataflowByModelAlias("nmodel_basic");
+        List<NDataSegment> segments = new ArrayList<>();
+        long start;
+        long end;
+        // two days, not enough for a week, do not merge
+        for (int i = 0; i <= 1; i++) {
+            //01-01 friday
+            start = addDay("2010-01-01", i);
+            end = addDay("2010-01-02", i);
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            df = dataflowManager.getDataflowByModelAlias("nmodel_basic");
+            NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+            dataSegment.setStatus(SegmentStatusEnum.READY);
+            segments.add(dataSegment);
+        }
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToUpdateSegs(segments.toArray(new NDataSegment[segments.size()]));
+        dataflowManager.updateDataflow(update);
+        deleteAllJobs(DEFAULT_PROJECT);
+        mockAddSegmentSuccess();
+        val executables = getRunningExecutables(DEFAULT_PROJECT, df.getModel().getId());
+        Assert.assertEquals(0, executables.size());
+    }
+
+    @Test
+    public void testAutoMergeSegmentsByWeek_FridayAndSaturday_NotMerge() throws Exception {
+        removeAllSegments();
+        createDataloadingRange();
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataflow df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        List<NDataSegment> segments = new ArrayList<>();
+        long start;
+        long end;
+        // two days, not enough for a week, do not merge
+        for (int i = 0; i <= 1; i++) {
+            //01-01 friday
+            start = addDay("2010-01-01", i);
+            end = addDay("2010-01-02", i);
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+            NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+            dataSegment.setStatus(SegmentStatusEnum.READY);
+            segments.add(dataSegment);
+        }
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToUpdateSegs(segments.toArray(new NDataSegment[segments.size()]));
+        dataflowManager.updateDataflow(update);
+        //clear all events
+        deleteAllJobs(DEFAULT_PROJECT);
+        mockAddSegmentSuccess();
+        val executables = getRunningExecutables(DEFAULT_PROJECT, df.getModel().getId());
+        Assert.assertEquals(0, executables.size());
+    }
+
+    @Test
+    public void testAutoMergeSegmentsByWeek_WithoutVolatileRange_Merge() throws Exception {
+        removeAllSegments();
+        createDataloadingRange();
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataflow df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        List<NDataSegment> segments = new ArrayList<>();
+        long start;
+        long end;
+        // test 4 days, 2010/01/01 8:00 - 2010/01/04 8:00, Friday to Monday, merge
+        for (int i = 0; i <= 3; i++) {
+            //01-01 friday
+            start = addDay("2010-01-01", i);
+            end = addDay("2010-01-02", i);
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+            NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+            dataSegment.setStatus(SegmentStatusEnum.READY);
+            segments.add(dataSegment);
+        }
+
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToUpdateSegs(segments.toArray(new NDataSegment[segments.size()]));
+        dataflowManager.updateDataflow(update);
+        //clear all events
+        deleteAllJobs(DEFAULT_PROJECT);
+        mockAddSegmentSuccess();
+        val executables = getRunningExecutables(DEFAULT_PROJECT, df.getModel().getId());
+        Assert.assertEquals(1, executables.size());
+
+        for (val executable : executables) {
+            if (executable instanceof NSparkMergingJob) {
+                val segId = executable.getTargetSegments().get(0);
+                //merge 2010/01/01 00:00 - 2010/01/04 00:00
+                Assert.assertEquals(DateFormat.stringToMillis("2010-01-01 00:00:00"),
+                        dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa").getSegment(segId)
+                                .getSegRange().getStart());
+                Assert.assertEquals(DateFormat.stringToMillis("2010-01-04 00:00:00"), dataflowManager
+                        .getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa").getSegment(segId).getSegRange().getEnd());
+            }
+        }
+    }
+
+    @Test
+    public void testAutoMergeSegmentsByWeek_WithThreeDaysVolatileRange_MergeFirstWeek() throws Exception {
+        removeAllSegments();
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataModelManager dataModelManager = NDataModelManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataflow df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        NDataModel model = dataModelManager.getDataModelDesc("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        List<NDataSegment> segments = new ArrayList<>();
+        long start;
+        long end;
+        // test 9 days, 2010/01/01 - 2010/01/10, volatileRange 3 days
+        for (int i = 0; i <= 9; i++) {
+            //01-01 friday
+            start = addDay("2010-01-01", i);
+            end = addDay("2010-01-02", i);
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+            NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+            dataSegment.setStatus(SegmentStatusEnum.READY);
+            segments.add(dataSegment);
+        }
+
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToUpdateSegs(segments.toArray(new NDataSegment[segments.size()]));
+        dataflowManager.updateDataflow(update);
+
+        // set a 3-day volatile range, and just merge the first week
+        NDataModel modelUpdate = dataModelManager.copyForWrite(model);
+        VolatileRange volatileRange = new VolatileRange();
+        volatileRange.setVolatileRangeNumber(3);
+        volatileRange.setVolatileRangeEnabled(true);
+        volatileRange.setVolatileRangeType(AutoMergeTimeEnum.DAY);
+        modelUpdate.getSegmentConfig().setVolatileRange(volatileRange);
+        modelUpdate.setManagementType(ManagementType.MODEL_BASED);
+        dataModelManager.updateDataModelDesc(modelUpdate);
+
+        deleteAllJobs(DEFAULT_PROJECT);
+        mockAddSegmentSuccess();
+        val executables = getRunningExecutables(DEFAULT_PROJECT, df.getModel().getId());
+        Assert.assertEquals(1, executables.size());
+        for (val executable : executables) {
+            if (executable instanceof NSparkMergingJob) {
+                val segId = executable.getTargetSegments().get(0);
+                Assert.assertEquals(DateFormat.stringToMillis("2010-01-01 00:00:00"),
+                        dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa").getSegment(segId)
+                                .getSegRange().getStart());
+                Assert.assertEquals(DateFormat.stringToMillis("2010-01-04 00:00:00"), dataflowManager
+                        .getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa").getSegment(segId).getSegRange().getEnd());
+            }
+        }
+    }
+
+    @Test
+    public void testAutoMergeSegmentsByWeek_WithOneDaysVolatileRange_CornerCase() throws Exception {
+        removeAllSegments();
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataModelManager dataModelManager = NDataModelManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataflow df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        NDataModel model = dataModelManager.getDataModelDesc("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        List<NDataSegment> segments = new ArrayList<>();
+        long start;
+        long end;
+        // test 8 days, 2010/01/04 - 2010/01/11, volatile range of 1 day
+        for (int i = 0; i <= 7; i++) {
+            // 01-04 Monday
+            start = addDay("2010-01-04", i);
+            end = addDay("2010-01-05", i);
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+            NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+            dataSegment.setStatus(SegmentStatusEnum.READY);
+            segments.add(dataSegment);
+        }
+
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToUpdateSegs(segments.toArray(new NDataSegment[segments.size()]));
+        dataflowManager.updateDataflow(update);
+
+        // set a 1-day volatile range, and just merge the first week
+        NDataModel modelUpdate = dataModelManager.copyForWrite(model);
+        VolatileRange volatileRange = new VolatileRange();
+        volatileRange.setVolatileRangeNumber(1);
+        volatileRange.setVolatileRangeEnabled(true);
+        volatileRange.setVolatileRangeType(AutoMergeTimeEnum.DAY);
+        modelUpdate.getSegmentConfig().setVolatileRange(volatileRange);
+        modelUpdate.setManagementType(ManagementType.MODEL_BASED);
+        dataModelManager.updateDataModelDesc(modelUpdate);
+
+        deleteAllJobs(DEFAULT_PROJECT);
+        mockAddSegmentSuccess();
+        val executables = getRunningExecutables(DEFAULT_PROJECT, df.getModel().getId());
+        // merge 2010-01-04 - 2010-01-11
+        Assert.assertEquals(1, executables.size());
+        for (val executable : executables) {
+            if (executable instanceof NSparkMergingJob) {
+                val segId = executable.getTargetSegments().get(0);
+                Assert.assertEquals(DateFormat.stringToMillis("2010-01-04 00:00:00"), dataflowManager
+                        .getDataflowByModelAlias("nmodel_basic").getSegment(segId).getSegRange().getStart());
+                Assert.assertEquals(DateFormat.stringToMillis("2010-01-11 00:00:00"), dataflowManager
+                        .getDataflowByModelAlias("nmodel_basic").getSegment(segId).getSegRange().getEnd());
+            }
+        }
+    }
+
+    @Test
+    public void testAutoMergeSegmentsByWeek_SegmentsHasOneDayGap_MergeSecondWeek() throws Exception {
+
+        removeAllSegments();
+        createDataloadingRange();
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataflow df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        List<NDataSegment> segments = new ArrayList<>();
+        long start;
+        long end;
+        // test 2 weeks, and the first week has a gap segment
+        // test 9 days, 2010/01/01 00:00 - 2010/01/10 00:00, remove 2010/01/02, merge the second week
+        for (int i = 0; i <= 9; i++) {
+            //01-01 friday
+            start = addDay("2010-01-01", i);
+            end = addDay("2010-01-02", i);
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+            NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+            dataSegment.setStatus(SegmentStatusEnum.READY);
+            segments.add(dataSegment);
+        }
+
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToUpdateSegs(segments.toArray(new NDataSegment[segments.size()]));
+        dataflowManager.updateDataflow(update);
+
+        df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        update = new NDataflowUpdate(df.getUuid());
+        //remove 2010-01-02
+        update.setToRemoveSegs(new NDataSegment[] { df.getSegments().get(1) });
+        dataflowManager.updateDataflow(update);
+
+        deleteAllJobs(DEFAULT_PROJECT);
+        mockAddSegmentSuccess();
+        val executables = getRunningExecutables(DEFAULT_PROJECT, df.getModel().getId());
+        Assert.assertEquals(1, executables.size());
+        for (val executable : executables) {
+            if (executable instanceof NSparkMergingJob) {
+                val segId = executable.getTargetSegments().get(0);
+                start = Long.parseLong(dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa")
+                        .getSegment(segId).getSegRange().getStart().toString());
+                end = Long.parseLong(dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa")
+                        .getSegment(segId).getSegRange().getEnd().toString());
+                Assert.assertEquals(DateFormat.stringToMillis("2010-01-04 00:00:00"), start);
+                Assert.assertEquals(DateFormat.stringToMillis("2010-01-11 00:00:00"), end);
+            }
+        }
+
+    }
+
+    @Test
+    public void testAutoMergeSegmentsByWeek_WhenSegmentsContainBuildingSegment() throws Exception {
+        removeAllSegments();
+        createDataloadingRange();
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataflow df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        List<NDataSegment> segments = new ArrayList<>();
+        long start;
+        long end;
+        // test 2 weeks, and the first week has a building segment
+        // test 9 days, 2010/01/01 00:00 - 2010/01/10 00:00, 2010/01/02 is building, merge the second week
+        for (int i = 0; i <= 9; i++) {
+            //01-01 friday
+            start = addDay("2010-01-01", i);
+            end = addDay("2010-01-02", i);
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+            NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+            if (i != 1) {
+                dataSegment.setStatus(SegmentStatusEnum.READY);
+            }
+            segments.add(dataSegment);
+        }
+
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToUpdateSegs(segments.toArray(new NDataSegment[segments.size()]));
+        dataflowManager.updateDataflow(update);
+
+        deleteAllJobs(DEFAULT_PROJECT);
+        mockAddSegmentSuccess();
+        val executables = getRunningExecutables(DEFAULT_PROJECT, df.getModel().getId());
+        Assert.assertEquals(1, executables.size());
+        for (val executable : executables) {
+            if (executable instanceof NSparkMergingJob) {
+                val segId = executable.getTargetSegments().get(0);
+                start = Long.parseLong(dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa")
+                        .getSegment(segId).getSegRange().getStart().toString());
+                end = Long.parseLong(dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa")
+                        .getSegment(segId).getSegRange().getEnd().toString());
+                Assert.assertEquals(DateFormat.stringToMillis("2010-01-04 00:00:00"), start);
+                Assert.assertEquals(DateFormat.stringToMillis("2010-01-11 00:00:00"), end);
+            }
+        }
+    }
+
+    @Test
+    public void testAutoMergeSegmentsByWeek_HasBigSegment_Merge() throws Exception {
+        removeAllSegments();
+        createDataloadingRange();
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        List<NDataSegment> segments = new ArrayList<>();
+        long start;
+        long end;
+        // test 2 days and a big segment, merge the first week
+        for (int i = 0; i <= 1; i++) {
+            //01-01 friday
+            start = addDay("2010-01-01", i);
+            end = addDay("2010-01-02", i);
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            NDataflow df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+            NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+            dataSegment.setStatus(SegmentStatusEnum.READY);
+            segments.add(dataSegment);
+        }
+
+        //a big segment
+        start = DateFormat.stringToMillis("2010-01-03 00:00:00");
+        end = DateFormat.stringToMillis("2010-01-11 00:00:00");
+        SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+        NDataflow df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+        dataSegment.setStatus(SegmentStatusEnum.READY);
+        segments.add(dataSegment);
+
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToUpdateSegs(segments.toArray(new NDataSegment[segments.size()]));
+        dataflowManager.updateDataflow(update);
+
+        deleteAllJobs(DEFAULT_PROJECT);
+        mockAddSegmentSuccess();
+        val executables = getRunningExecutables(DEFAULT_PROJECT, df.getModel().getId());
+        Assert.assertEquals(1, executables.size());
+        for (val executable : executables) {
+            if (executable instanceof NSparkMergingJob) {
+                val segId = executable.getTargetSegments().get(0);
+                start = Long.parseLong(dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa")
+                        .getSegment(segId).getSegRange().getStart().toString());
+                end = Long.parseLong(dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa")
+                        .getSegment(segId).getSegRange().getEnd().toString());
+                Assert.assertEquals(DateFormat.stringToMillis("2010-01-01 00:00:00"), start);
+                Assert.assertEquals(DateFormat.stringToMillis("2010-01-03 00:00:00"), end);
+            }
+        }
+    }
+
+    @Test
+    public void testAutoMergeSegmentsByWeek_FirstDayOfWeekWSunday_Merge() throws Exception {
+        getTestConfig().setProperty("kylin.metadata.first-day-of-week", "sunday");
+        removeAllSegments();
+        createDataloadingRange();
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataflow df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        List<NDataSegment> segments = new ArrayList<>();
+        long start;
+        long end;
+        // test 10 daily segments starting 2010/01/03, with Sunday as the first day of week, merge the first week
+        for (int i = 0; i <= 9; i++) {
+            // 01-03 Sunday
+            start = addDay("2010-01-03", i);
+            end = addDay("2010-01-04", i);
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+            NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+            dataSegment.setStatus(SegmentStatusEnum.READY);
+            segments.add(dataSegment);
+        }
+
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToUpdateSegs(segments.toArray(new NDataSegment[segments.size()]));
+        dataflowManager.updateDataflow(update);
+
+        deleteAllJobs(DEFAULT_PROJECT);
+        mockAddSegmentSuccess();
+        val executables = getRunningExecutables(DEFAULT_PROJECT, df.getModel().getId());
+        Assert.assertEquals(1, executables.size());
+        for (val executable : executables) {
+            if (executable instanceof NSparkMergingJob) {
+                val segId = executable.getTargetSegments().get(0);
+                start = Long.parseLong(dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa")
+                        .getSegment(segId).getSegRange().getStart().toString());
+                end = Long.parseLong(dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa")
+                        .getSegment(segId).getSegRange().getEnd().toString());
+                //2010/1/3 sunday - 2010/1/10 sunday
+                Assert.assertEquals(DateFormat.stringToMillis("2010-01-03 00:00:00"), start);
+                Assert.assertEquals(DateFormat.stringToMillis("2010-01-10 00:00:00"), end);
+            }
+        }
+    }
+
+    @Test
+    public void testAutoMergeSegmentsByHour_PASS() throws Exception {
+        removeAllSegments();
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataModelManager dataModelManager = NDataModelManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataflow df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        NDataModel model = dataModelManager.getDataModelDesc("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+
+        List<NDataSegment> segments = new ArrayList<>();
+        long start;
+        long end;
+        // 13 minutes per segment
+        for (int i = 0; i <= 5; i++) {
+            // 2010-01-01 00:00 - 01:18, merge the segments inside the first hour (00:00 - 00:52)
+            start = SegmentRange.dateToLong("2010-01-01") + i * 1000 * 60 * 13;
+            end = SegmentRange.dateToLong("2010-01-01") + (i + 1) * 1000 * 60 * 13;
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+            NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+            dataSegment.setStatus(SegmentStatusEnum.READY);
+            segments.add(dataSegment);
+        }
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToUpdateSegs(segments.toArray(new NDataSegment[segments.size()]));
+        dataflowManager.updateDataflow(update);
+
+        NDataModel modelUpdate = dataModelManager.copyForWrite(model);
+        List<AutoMergeTimeEnum> ranges = new ArrayList<>();
+        ranges.add(AutoMergeTimeEnum.HOUR);
+        modelUpdate.getSegmentConfig().setAutoMergeTimeRanges(ranges);
+        modelUpdate.setManagementType(ManagementType.MODEL_BASED);
+        dataModelManager.updateDataModelDesc(modelUpdate);
+
+        deleteAllJobs(DEFAULT_PROJECT);
+        mockAddSegmentSuccess();
+        val executables = getRunningExecutables(DEFAULT_PROJECT, df.getModel().getId());
+        Assert.assertEquals(1, executables.size());
+        for (val executable : executables) {
+            if (executable instanceof NSparkMergingJob) {
+                val segId = executable.getTargetSegments().get(0);
+                Assert.assertEquals(DateFormat.stringToMillis("2010-01-01 00:00:00"),
+                        Long.parseLong(dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa")
+                                .getSegment(segId).getSegRange().getStart().toString()));
+                Assert.assertEquals(DateFormat.stringToMillis("2010-01-01 00:52:00"),
+                        Long.parseLong(dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa")
+                                .getSegment(segId).getSegRange().getEnd().toString()));
+            }
+        }
+    }
+
+    @Test
+    public void testAutoMergeSegmentsByMonth() throws Exception {
+        removeAllSegments();
+        createDataloadingRange();
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataModelManager dataModelManager = NDataModelManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataflow df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        NDataModel model = dataModelManager.getDataModelDesc("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+
+        List<NDataSegment> segments = new ArrayList<>();
+        long start;
+        long end;
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToRemoveSegs(df.getSegments().toArray(new NDataSegment[0]));
+        dataflowManager.updateDataflow(update);
+        NDataModel modelUpdate = dataModelManager.copyForWrite(model);
+        List<AutoMergeTimeEnum> ranges = new ArrayList<>();
+        ranges.add(AutoMergeTimeEnum.MONTH);
+        modelUpdate.getSegmentConfig().setAutoMergeTimeRanges(ranges);
+        dataModelManager.updateDataModelDesc(modelUpdate);
+
+        // four one-week segments and four one-day segments, 2010/12/01 - 2011/01/02
+        for (int i = 0; i <= 3; i++) {
+            start = addDay("2010-12-01", 7 * i);
+            end = addDay("2010-12-08", 7 * i);
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+            NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+            dataSegment.setStatus(SegmentStatusEnum.READY);
+            segments.add(dataSegment);
+        }
+
+        for (int i = 0; i <= 3; i++) {
+            start = addDay("2010-12-29", i);
+            end = addDay("2010-12-30", i);
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+            NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+            dataSegment.setStatus(SegmentStatusEnum.READY);
+            segments.add(dataSegment);
+        }
+        update = new NDataflowUpdate(df.getUuid());
+        update.setToUpdateSegs(segments.toArray(new NDataSegment[segments.size()]));
+        dataflowManager.updateDataflow(update);
+        //clear all events
+        deleteAllJobs(DEFAULT_PROJECT);
+        mockAddSegmentSuccess();
+        val executables = getRunningExecutables(DEFAULT_PROJECT, df.getModel().getId());
+        Assert.assertEquals(1, executables.size());
+        for (val executable : executables) {
+            if (executable instanceof NSparkMergingJob) {
+                val segId = executable.getTargetSegments().get(0);
+                Assert.assertEquals(DateFormat.stringToMillis("2010-12-01 00:00:00"),
+                        Long.parseLong(dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa")
+                                .getSegment(segId).getSegRange().getStart().toString()));
+                Assert.assertEquals(DateFormat.stringToMillis("2011-01-01 00:00:00"),
+                        Long.parseLong(dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa")
+                                .getSegment(segId).getSegRange().getEnd().toString()));
+            }
+        }
+    }
+
+    @Test
+    public void testAutoMergeSegmentsByYear() throws Exception {
+        removeAllSegments();
+        createDataloadingRange();
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataModelManager dataModelManager = NDataModelManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataflow df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        NDataModel model = dataModelManager.getDataModelDesc("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        List<NDataSegment> segments = new ArrayList<>();
+        long start;
+        long end;
+        // 2010/10 - 2011/02, merge 2010/10 - 2010/12
+        // use calendar to avoid Daylight Saving Time problems
+        for (int i = 0; i <= 4; i++) {
+            start = addDay("2010-10-01", 30 * i);
+            end = addDay("2010-10-31", 30 * i);
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+            NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+            dataSegment.setStatus(SegmentStatusEnum.READY);
+            segments.add(dataSegment);
+        }
+
+        NDataModel modelUpdate = dataModelManager.copyForWrite(model);
+        List<AutoMergeTimeEnum> timeRanges = new ArrayList<>();
+        timeRanges.add(AutoMergeTimeEnum.YEAR);
+        modelUpdate.getSegmentConfig().setAutoMergeTimeRanges(timeRanges);
+        modelUpdate.setManagementType(ManagementType.MODEL_BASED);
+        dataModelManager.updateDataModelDesc(modelUpdate);
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToUpdateSegs(segments.toArray(new NDataSegment[segments.size()]));
+        dataflowManager.updateDataflow(update);
+
+        deleteAllJobs(DEFAULT_PROJECT);
+        mockAddSegmentSuccess();
+        val executables = getRunningExecutables(DEFAULT_PROJECT, df.getModel().getId());
+        Assert.assertEquals(1, executables.size());
+        for (val executable : executables) {
+            if (executable instanceof NSparkMergingJob) {
+                val segId = executable.getTargetSegments().get(0);
+                Assert.assertEquals(DateFormat.stringToMillis("2010-10-01 00:00:00"),
+                        Long.parseLong(dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa")
+                                .getSegment(segId).getSegRange().getStart().toString()));
+                Assert.assertEquals(DateFormat.stringToMillis("2010-12-30 00:00:00"),
+                        Long.parseLong(dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa")
+                                .getSegment(segId).getSegRange().getEnd().toString()));
+            }
+        }
+    }
+
+    @Test
+    public void testAutoMergeSegmentsByDay() throws Exception {
+        removeAllSegments();
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataModelManager dataModelManager = NDataModelManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataflow df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        NDataModel model = dataModelManager.getDataModelDesc("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+
+        List<NDataSegment> segments = new ArrayList<>();
+        long start;
+        long end;
+        // 2010/10/01 08:00 - 2010/10/02 06:30
+        for (int i = 0; i <= 9; i++) {
+            start = SegmentRange.dateToLong("2010-10-01 08:00:00") + i * 8100000L;
+            end = start + 8100000L;
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+            NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+            dataSegment.setStatus(SegmentStatusEnum.READY);
+            segments.add(dataSegment);
+        }
+
+        NDataModel modelUpdate = dataModelManager.copyForWrite(model);
+        List<AutoMergeTimeEnum> timeRanges = new ArrayList<>();
+        timeRanges.add(AutoMergeTimeEnum.DAY);
+        modelUpdate.getSegmentConfig().setAutoMergeTimeRanges(timeRanges);
+        modelUpdate.setManagementType(ManagementType.MODEL_BASED);
+        dataModelManager.updateDataModelDesc(modelUpdate);
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToUpdateSegs(segments.toArray(new NDataSegment[segments.size()]));
+        dataflowManager.updateDataflow(update);
+
+        deleteAllJobs(DEFAULT_PROJECT);
+
+        mockAddSegmentSuccess();
+        val executables = getRunningExecutables(DEFAULT_PROJECT, df.getModel().getId());
+        Assert.assertEquals(1, executables.size());
+        for (val executable : executables) {
+            if (executable instanceof NSparkMergingJob) {
+                val segId = executable.getTargetSegments().get(0);
+                Assert.assertEquals(DateFormat.stringToMillis("2010-10-01 08:00:00"),
+                        Long.parseLong(dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa")
+                                .getSegment(segId).getSegRange().getStart().toString()));
+                Assert.assertEquals(DateFormat.stringToMillis("2010-10-01 23:45:00"),
+                        Long.parseLong(dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa")
+                                .getSegment(segId).getSegRange().getEnd().toString()));
+            }
+        }
+    }
+
+    @Test
+    public void testAutoMergeSegmentsByWeek_BigGapOverlapTwoSection_NotMerge() throws Exception {
+        removeAllSegments();
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataModelManager dataModelManager = NDataModelManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataflow df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        NDataModel model = dataModelManager.getDataModelDesc("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+
+        List<NDataSegment> segments = new ArrayList<>();
+        long start;
+        long end;
+        // 2010/01/04 - 2010/01/06
+        for (int i = 0; i <= 1; i++) {
+            // 01-04 Monday
+            start = addDay("2010-01-04", i);
+            end = addDay("2010-01-05", i);
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+            NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+            dataSegment.setStatus(SegmentStatusEnum.READY);
+            segments.add(dataSegment);
+        }
+
+        // 2010/01/11 - 2010/01/13
+        for (int i = 2; i <= 3; i++) {
+            // 01-11 Monday
+            start = addDay("2010-01-09", i);
+            end = addDay("2010-01-10", i);
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+            NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+            dataSegment.setStatus(SegmentStatusEnum.READY);
+            segments.add(dataSegment);
+        }
+
+        NDataModel modelUpdate = dataModelManager.copyForWrite(model);
+        List<AutoMergeTimeEnum> timeRanges = new ArrayList<>();
+        timeRanges.add(AutoMergeTimeEnum.WEEK);
+        modelUpdate.getSegmentConfig().setAutoMergeTimeRanges(timeRanges);
+        modelUpdate.setManagementType(ManagementType.MODEL_BASED);
+        dataModelManager.updateDataModelDesc(modelUpdate);
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToUpdateSegs(segments.toArray(new NDataSegment[segments.size()]));
+        dataflowManager.updateDataflow(update);
+
+        deleteAllJobs(DEFAULT_PROJECT);
+
+        mockAddSegmentSuccess();
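+        // the two segment groups sit in different weeks with a gap in between, so no merge job should be created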
+        val executables = getRunningExecutables(DEFAULT_PROJECT, df.getModel().getId());
+        Assert.assertEquals(0, executables.size());
+
+    }
+
+    @Test
+    public void testAutoMergeSegmentsByWeek_FirstWeekNoSegment_NotMerge() throws Exception {
+        removeAllSegments();
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataModelManager dataModelManager = NDataModelManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataflow df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        NDataModel model = dataModelManager.getDataModelDesc("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+
+        List<NDataSegment> segments = new ArrayList<>();
+        long start;
+        long end;
+        // segments covering 2010-01-03 to 2010-01-07
+        for (int i = 0; i <= 1; i++) {
+            // 2010-01-01 is a Friday
+            start = addDay("2010-01-03", 2 * i);
+            end = addDay("2010-01-05", 2 * i);
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+            NDataSegment dataSegment = dataflowManager.appendSegment(df, segmentRange);
+            dataSegment.setStatus(SegmentStatusEnum.READY);
+            segments.add(dataSegment);
+        }
+
+        NDataModel modelUpdate = dataModelManager.copyForWrite(model);
+        modelUpdate.setManagementType(ManagementType.MODEL_BASED);
+        dataModelManager.updateDataModelDesc(modelUpdate);
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToUpdateSegs(segments.toArray(new NDataSegment[segments.size()]));
+        dataflowManager.updateDataflow(update);
+
+        deleteAllJobs(DEFAULT_PROJECT);
+
+        mockAddSegmentSuccess();
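+        // the first week is not covered by any segment, so auto-merge should not produce a job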
+        val executables = getRunningExecutables(DEFAULT_PROJECT, df.getModel().getId());
+        Assert.assertEquals(0, executables.size());
+
+    }
+
+    @Ignore("TODO: remove or adapt")
+    @Test
+    public void testAutoMergeSegments_2YearsRange_OneHourPerSegment() throws Exception {
+        removeAllSegments();
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataModelManager dataModelManager = NDataModelManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataflow df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        Class<RootPersistentEntity> clazz = RootPersistentEntity.class;
+        Field field = clazz.getDeclaredField("isCachedAndShared");
+        Unsafe.changeAccessibleObject(field, true);
+        field.set(df, false);
+        NDataModel model = dataModelManager.getDataModelDesc("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        NDataModel modelUpdate = dataModelManager.copyForWrite(model);
+        List<AutoMergeTimeEnum> autoMergeTimeEnumList = new ArrayList<>();
+        autoMergeTimeEnumList.add(AutoMergeTimeEnum.YEAR);
+        autoMergeTimeEnumList.add(AutoMergeTimeEnum.MONTH);
+        autoMergeTimeEnumList.add(AutoMergeTimeEnum.WEEK);
+        autoMergeTimeEnumList.add(AutoMergeTimeEnum.DAY);
+        modelUpdate.getSegmentConfig().setAutoMergeTimeRanges(autoMergeTimeEnumList);
+        modelUpdate.setManagementType(ManagementType.MODEL_BASED);
+        dataModelManager.updateDataModelDesc(modelUpdate);
+        int eventsMergeYear = 0;
+        int eventsMergeMonth = 0;
+        int eventsMergeWeek = 0;
+        int eventsMergeDay = 0;
+        int allEvent = 0;
+        Segments<NDataSegment> segments = df.getSegments();
+        // 2010-01-10 00:00:00 - 2012-02-10 01:00:00, one hour per segment
+        long start = 1263081600000L;
+        long end = 1263081600000L + 3600000L;
+        int i = 0;
+        long mergeStart;
+        long mergeEnd;
+        while (end <= 1328835600000L) {
+            SegmentRange segmentRange = new SegmentRange.TimePartitionedSegmentRange(start, end);
+            NDataSegment newSegment = NDataSegment.empty();
+            newSegment.setSegmentRange(segmentRange);
+            newSegment.setStatus(SegmentStatusEnum.READY);
+            segments.add(newSegment);
+            df = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+            field.set(df, false);
+            df.setSegments(segments);
+            deleteAllJobs(DEFAULT_PROJECT);
+            mockAddSegmentSuccess();
+            val executables = getRunningExecutables(DEFAULT_PROJECT, df.getModel().getId());
+            if (executables.size() > 0) {
+                allEvent++;
+                for (val executable : executables) {
+                    if (executable instanceof NSparkMergingJob) {
+                        val segId = executable.getTargetSegments().get(0);
+                        mergeStart = Long.parseLong(df.getSegment(segId).getSegRange().getStart().toString());
+                        mergeEnd = Long.parseLong(df.getSegment(segId).getSegRange().getEnd().toString());
+
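+                        // classify the merge job by its time span: > 31 days => year, > 7 days => month,
+                        // > 1 day (or exactly a full week) => week, otherwise day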
+                        if (mergeEnd - mergeStart > 2678400000L) {
+                            eventsMergeYear++;
+                        } else if (mergeEnd - mergeStart > 604800000L) {
+                            eventsMergeMonth++;
+                        } else if (mergeEnd - mergeStart > 86400000L
+                                || segments.getMergeEnd(mergeStart, AutoMergeTimeEnum.WEEK) == mergeEnd) {
+                            eventsMergeWeek++;
+                        } else {
+                            eventsMergeDay++;
+                        }
+
+                        mockMergeSegments(i, dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa")
+                                .getSegment((executables.get(0)).getTargetSegments().get(0)).getSegRange());
+                    }
+                }
+                i += 2;
+            } else {
+                i++;
+            }
+            start += 3600000L;
+            end += 3600000L;
+        }
+        Assert.assertEquals(2, eventsMergeYear);
+        Assert.assertEquals(23, eventsMergeMonth);
+        Assert.assertEquals(105, eventsMergeWeek);
+        Assert.assertEquals(631, eventsMergeDay);
+        Assert.assertEquals(761, allEvent);
+
+        //check final segments
+        Segments<NDataSegment> finalSegments = df.getSegments();
+        Assert.assertEquals(9, finalSegments.size());
+        //2010/01/10 - 2011/01/01 00:00:00
+        Assert.assertEquals(1263081600000L, Long.parseLong(finalSegments.get(0).getSegRange().getStart().toString()));
+        Assert.assertEquals(1293840000000L, Long.parseLong(finalSegments.get(0).getSegRange().getEnd().toString()));
+        //2011/01/01 - 2012/01/01 00:00:00
+        Assert.assertEquals(1293840000000L, Long.parseLong(finalSegments.get(1).getSegRange().getStart().toString()));
+        Assert.assertEquals(1325376000000L, Long.parseLong(finalSegments.get(1).getSegRange().getEnd().toString()));
+        //2012/01/01 - 2012/02/01 00:00:00
+        Assert.assertEquals(1325376000000L, Long.parseLong(finalSegments.get(2).getSegRange().getStart().toString()));
+        Assert.assertEquals(1328054400000L, Long.parseLong(finalSegments.get(2).getSegRange().getEnd().toString()));
+        //2012/02/01 - 2012/02/06 00:00:00 week
+        Assert.assertEquals(1328054400000L, Long.parseLong(finalSegments.get(3).getSegRange().getStart().toString()));
+        Assert.assertEquals(1328486400000L, Long.parseLong(finalSegments.get(3).getSegRange().getEnd().toString()));
+        //2012/02/06 - 2012/02/07 00:00:00 day
+        Assert.assertEquals(1328486400000L, Long.parseLong(finalSegments.get(4).getSegRange().getStart().toString()));
+        Assert.assertEquals(1328572800000L, Long.parseLong(finalSegments.get(4).getSegRange().getEnd().toString()));
+        //2012/02/07 - 2012/02/08 00:00:00 day
+        Assert.assertEquals(1328572800000L, Long.parseLong(finalSegments.get(5).getSegRange().getStart().toString()));
+        Assert.assertEquals(1328659200000L, Long.parseLong(finalSegments.get(5).getSegRange().getEnd().toString()));
+        //2012/02/08 - 2012/02/09 00:00:00 day
+        Assert.assertEquals(1328659200000L, Long.parseLong(finalSegments.get(6).getSegRange().getStart().toString()));
+        Assert.assertEquals(1328745600000L, Long.parseLong(finalSegments.get(6).getSegRange().getEnd().toString()));
+        //2012/02/09 - 2012/02/10 00:00:00 day
+        Assert.assertEquals(1328745600000L, Long.parseLong(finalSegments.get(7).getSegRange().getStart().toString()));
+        Assert.assertEquals(1328832000000L, Long.parseLong(finalSegments.get(7).getSegRange().getEnd().toString()));
+        //2012/02/10 00:00 - 2012/02/10 01:00:00 hour
+        Assert.assertEquals(1328832000000L, Long.parseLong(finalSegments.get(8).getSegRange().getStart().toString()));
+        Assert.assertEquals(1328835600000L, Long.parseLong(finalSegments.get(8).getSegRange().getEnd().toString()));
+    }
+
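+    // replaces the segments covered by segmentRange with a single READY segment to simulate a completed merge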
+    private void mockMergeSegments(int i, SegmentRange segmentRange) {
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(KylinConfig.getInstanceFromEnv(), "default");
+        NDataflow dataflow = dataflowManager.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        Segments<NDataSegment> segments = new Segments<>();
+        for (NDataSegment segment : dataflow.getSegments()) {
+            if (segmentRange.contains(segment.getSegRange())) {
+                segments.add(segment);
+            }
+        }
+        NDataSegment mergedSegment = NDataSegment.empty();
+        mergedSegment.setStatus(SegmentStatusEnum.READY);
+        mergedSegment.setId(i + "");
+        mergedSegment.setSegmentRange(segmentRange);
+        dataflow.getSegments().removeAll(segments);
+        dataflow.getSegments().add(mergedSegment);
+
+    }
+
+    public long addDay(String base, int inc) {
+        Calendar calendar = Calendar.getInstance(TimeZone.getDefault(), Locale.getDefault(Locale.Category.FORMAT));
+        calendar.setTimeInMillis(SegmentRange.dateToLong(base));
+        calendar.add(Calendar.DAY_OF_MONTH, inc);
+        return calendar.getTimeInMillis();
+    }
+
+    private List<AbstractExecutable> getRunningExecutables(String project, String model) {
+        return NExecutableManager.getInstance(getTestConfig(), project).getRunningExecutables(project, model);
+    }
+
+    private void deleteAllJobs(String project) {
+        NExecutableManager.getInstance(getTestConfig(), project).deleteAllJob();
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/BuildAndQueryEmptySegmentsTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/BuildAndQueryEmptySegmentsTest.java
new file mode 100644
index 0000000000..6c8337e6ba
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/BuildAndQueryEmptySegmentsTest.java
@@ -0,0 +1,183 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kylin.newten;
+
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.RandomUtil;
+import org.apache.kylin.engine.spark.IndexDataConstructor;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.engine.spark.job.NSparkMergingJob;
+import org.apache.kylin.engine.spark.merger.AfterMergeOrRefreshResourceMerger;
+import org.apache.kylin.job.execution.ExecutableState;
+import org.apache.kylin.job.execution.NExecutableManager;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.metadata.cube.model.IndexPlan;
+import org.apache.kylin.metadata.cube.model.LayoutEntity;
+import org.apache.kylin.metadata.cube.model.NDataSegment;
+import org.apache.kylin.metadata.cube.model.NDataflow;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
+import org.apache.kylin.metadata.cube.model.NDataflowUpdate;
+import org.apache.kylin.metadata.cube.model.NIndexPlanManager;
+import org.apache.kylin.metadata.model.SegmentRange;
+import org.apache.kylin.metadata.realization.RealizationStatusEnum;
+import org.apache.kylin.util.ExecAndComp;
+import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.SparderEnv;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.common.collect.Sets;
+
+public class BuildAndQueryEmptySegmentsTest extends NLocalWithSparkSessionTest {
+
+    private static final String DF_NAME1 = "89af4ee2-2cdb-4b07-b39e-4c29856309aa";
+    private static final String DF_NAME2 = "abe3bf1a-c4bc-458d-8278-7ea8b00f5e96";
+
+    private static final String SQL = "select\n" + " count(1) as TRANS_CNT \n" + " from test_kylin_fact \n"
+            + " group by trans_id";
+
+    private static final String SQL_DERIVED = "SELECT \n" + "test_cal_dt.season_beg_dt\n"
+            + "FROM test_kylin_fact LEFT JOIN edw.test_cal_dt as test_cal_dt \n"
+            + "ON test_kylin_fact.cal_dt=test_cal_dt.cal_dt \n"
+            + "WHERE test_kylin_fact.cal_dt>'2009-06-01' and test_kylin_fact.cal_dt<'2013-01-01' \n"
+            + "GROUP BY test_cal_dt.season_beg_dt";
+
+    private static final String SQL_DERIVED_AGG = "select count(*) from (SELECT \n" + "test_cal_dt.season_beg_dt\n"
+            + "FROM test_kylin_fact LEFT JOIN edw.test_cal_dt as test_cal_dt \n"
+            + "ON test_kylin_fact.cal_dt=test_cal_dt.cal_dt \n"
+            + "WHERE test_kylin_fact.cal_dt>'2009-06-01' and test_kylin_fact.cal_dt<'2013-01-01' \n"
+            + "GROUP BY test_cal_dt.season_beg_dt)";
+
+    private KylinConfig config;
+    private NDataflowManager dsMgr;
+    private NExecutableManager execMgr;
+
+    @Before
+    public void init() throws Exception {
+        super.init();
+        config = KylinConfig.getInstanceFromEnv();
+        dsMgr = NDataflowManager.getInstance(config, getProject());
+        execMgr = NExecutableManager.getInstance(config, getProject());
+        NIndexPlanManager ipMgr = NIndexPlanManager.getInstance(config, getProject());
+        String cubeId = dsMgr.getDataflow(DF_NAME1).getIndexPlan().getUuid();
+        IndexPlan cube = ipMgr.getIndexPlan(cubeId);
+        Set<Long> tobeRemovedLayouts = cube.getAllLayouts().stream().filter(layout -> layout.getId() != 10001L)
+                .map(LayoutEntity::getId).collect(Collectors.toSet());
+
+        cube = ipMgr.updateIndexPlan(dsMgr.getDataflow(DF_NAME1).getIndexPlan().getUuid(), copyForWrite -> {
+            copyForWrite.removeLayouts(tobeRemovedLayouts, true, true);
+        });
+        System.out.println(cube.getAllLayouts());
+    }
+
+    @After
+    public void cleanup() {
+        NDefaultScheduler.destroyInstance();
+        super.cleanupTestMetadata();
+    }
+
+    @Test
+    public void testEmptySegments() throws Exception {
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(config, getProject());
+        dataflowManager.updateDataflowStatus(DF_NAME2, RealizationStatusEnum.OFFLINE);
+
+        cleanupSegments(DF_NAME1);
+
+        populateSSWithCSVData(config, getProject(), SparderEnv.getSparkSession());
+
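+        // the first three segments cover date ranges with no rows in the test data, so they are built empty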
+        buildCube(DF_NAME1, SegmentRange.dateToLong("2009-01-01"), SegmentRange.dateToLong("2009-06-01"));
+        Assert.assertEquals(0, dsMgr.getDataflow(DF_NAME1).getSegments().get(0).getSegDetails().getTotalRowCount());
+
+        testQueryUnequal(SQL);
+        testQueryUnequal(SQL_DERIVED);
+        testQuery(SQL_DERIVED_AGG);
+
+        buildCube(DF_NAME1, SegmentRange.dateToLong("2009-06-01"), SegmentRange.dateToLong("2010-01-01"));
+        Assert.assertEquals(0, dsMgr.getDataflow(DF_NAME1).getSegments().get(1).getSegDetails().getTotalRowCount());
+        buildCube(DF_NAME1, SegmentRange.dateToLong("2010-01-01"), SegmentRange.dateToLong("2012-01-01"));
+        Assert.assertEquals(0, dsMgr.getDataflow(DF_NAME1).getSegments().get(2).getSegDetails().getTotalRowCount());
+        buildCube(DF_NAME1, SegmentRange.dateToLong("2012-01-01"), SegmentRange.dateToLong("2015-01-01"));
+        Assert.assertNotEquals(0, dsMgr.getDataflow(DF_NAME1).getSegments().get(3).getSegDetails().getTotalRowCount());
+
+        mergeSegments("2009-01-01", "2010-01-01", true);
+        mergeSegments("2010-01-01", "2015-01-01", true);
+
+        testQuery(SQL);
+        testQuery(SQL_DERIVED);
+        testQuery(SQL_DERIVED_AGG);
+
+        dataflowManager.updateDataflowStatus(DF_NAME2, RealizationStatusEnum.ONLINE);
+    }
+
+    private void cleanupSegments(String dfName) {
+        NDataflow df = dsMgr.getDataflow(dfName);
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToRemoveSegs(df.getSegments().toArray(new NDataSegment[0]));
+        dsMgr.updateDataflow(update);
+    }
+
+    private void buildCube(String dfName, long start, long end) throws Exception {
+        NDataflow df = dsMgr.getDataflow(dfName);
+        List<LayoutEntity> layouts = df.getIndexPlan().getAllLayouts();
+        indexDataConstructor.buildIndex(dfName, new SegmentRange.TimePartitionedSegmentRange(start, end),
+                Sets.<LayoutEntity> newLinkedHashSet(layouts), true);
+    }
+
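+    // submit a merge job for the given range, wait for it to succeed, then merge the job output back into the dataflow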
+    private void mergeSegments(String start, String end, boolean force) throws Exception {
+        NDataflow df = dsMgr.getDataflow(DF_NAME1);
+        List<LayoutEntity> layouts = df.getIndexPlan().getAllLayouts();
+        NDataSegment emptyMergeSeg = dsMgr.mergeSegments(df, new SegmentRange.TimePartitionedSegmentRange(
+                SegmentRange.dateToLong(start), SegmentRange.dateToLong(end)), force);
+        NSparkMergingJob emptyMergeJob = NSparkMergingJob.merge(emptyMergeSeg, Sets.newLinkedHashSet(layouts), "ADMIN",
+                RandomUtil.randomUUIDStr());
+        execMgr.addJob(emptyMergeJob);
+        Assert.assertEquals(ExecutableState.SUCCEED, IndexDataConstructor.wait(emptyMergeJob));
+        AfterMergeOrRefreshResourceMerger merger = new AfterMergeOrRefreshResourceMerger(config, getProject());
+        merger.merge(emptyMergeJob.getSparkMergingStep());
+    }
+
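+    // the cube answer and the Spark SQL answer over the source tables should return the same number of rows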
+    private void testQuery(String sqlStr) {
+        Dataset dsFromCube = ExecAndComp.queryModelWithoutCompute(getProject(), sqlStr);
+        Assert.assertNotEquals(0L, dsFromCube.count());
+        String sql = convertToSparkSQL(sqlStr);
+        Dataset dsFromSpark = ExecAndComp.querySparkSql(sql);
+        Assert.assertEquals(dsFromCube.count(), dsFromSpark.count());
+    }
+
+    private void testQueryUnequal(String sqlStr) {
+
+        Dataset dsFromCube = ExecAndComp.queryModelWithoutCompute(getProject(), sqlStr);
+        if (dsFromCube != null) {
+            Assert.assertEquals(0L, dsFromCube.count());
+            String sql = convertToSparkSQL(sqlStr);
+            Dataset dsFromSpark = ExecAndComp.querySparkSql(sql);
+            Assert.assertNotEquals(dsFromCube.count(), dsFromSpark.count());
+        }
+    }
+
+    private String convertToSparkSQL(String sqlStr) {
+        return sqlStr.replaceAll("edw\\.", "");
+    }
+
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/CalciteDynamicTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/CalciteDynamicTest.java
new file mode 100644
index 0000000000..ceacaea174
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/CalciteDynamicTest.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.newten;
+
+import static org.junit.Assert.fail;
+
+import java.sql.Timestamp;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.apache.commons.collections.ListUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.util.ExecAndComp;
+import org.apache.parquet.Strings;
+import org.apache.spark.sql.Row;
+import org.apache.spark.sql.SparderEnv;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import scala.collection.JavaConversions;
+
+public class CalciteDynamicTest extends NLocalWithSparkSessionTest {
+
+    @Before
+    public void setup() throws Exception {
+        super.init();
+    }
+
+    @After
+    public void after() {
+        NDefaultScheduler.destroyInstance();
+    }
+
+    @Test
+    public void testCalciteGroupByDynamicParam() throws Exception {
+        fullBuild("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        populateSSWithCSVData(KylinConfig.getInstanceFromEnv(), getProject(), SparderEnv.getSparkSession());
+        String sqlOrigin = "SELECT (case when 1=1 then SELLER_ID else TRANS_ID end) as id,  SUM(price) as PRICE\n"
+                + "FROM TEST_KYLIN_FACT\n" + "GROUP BY (case when 1=1 then SELLER_ID else TRANS_ID end) limit 5";
+        String parameter = "1";
+        // benchmark
+        List<List<String>> benchmark = ExecAndComp.queryCubeWithJDBC(getProject(), sqlOrigin);
+        // bind the dynamic parameter through a placeholder
+        String sqlWithPlaceholder = sqlOrigin.replace("case when 1=1", "case when ?=1");
+        List<Row> rows = ExecAndComp
+                .queryModel(getProject(), sqlWithPlaceholder, Arrays.asList(new String[] { parameter, parameter }))
+                .collectAsList();
+        List<List<String>> results = transformToString(rows);
+        for (int i = 0; i < benchmark.size(); i++) {
+            if (!ListUtils.isEqualList(benchmark.get(i), results.get(i))) {
+                String expected = Strings.join(benchmark.get(i), ",");
+                String actual1 = Strings.join(results.get(i), ",");
+                fail("expected: " + expected + ", results: " + actual1);
+            }
+        }
+    }
+
+    private List<List<String>> transformToString(List<Row> rows) {
+        return rows.stream().map(row -> JavaConversions.seqAsJavaList(row.toSeq()).stream().map(r -> {
+            if (r == null) {
+                return null;
+            } else {
+                String s = r.toString();
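+                // Timestamp.toString() ends with ".0"; strip it before comparing with the JDBC benchmark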
+                if (r instanceof Timestamp) {
+                    return s.substring(0, s.length() - 2);
+                } else {
+                    return s;
+                }
+            }
+        }).collect(Collectors.toList())).collect(Collectors.toList());
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/CharNColumnTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/CharNColumnTest.java
new file mode 100644
index 0000000000..b41a87eafd
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/CharNColumnTest.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.newten;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.util.ExecAndComp;
+import org.apache.spark.sql.SparderEnv;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+public class CharNColumnTest extends NLocalWithSparkSessionTest {
+
+    @Before
+    public void setup() throws Exception {
+        overwriteSystemProp("kylin.job.scheduler.poll-interval-second", "1");
+        overwriteSystemProp("kylin.engine.persist-flattable-enabled", "false");
+        this.createTestMetadata("src/test/resources/ut_meta/test_char_n_column");
+        NDefaultScheduler scheduler = NDefaultScheduler.getInstance(getProject());
+        scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()));
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
+        }
+    }
+
+    @After
+    public void after() throws Exception {
+        NDefaultScheduler.destroyInstance();
+        cleanupTestMetadata();
+    }
+
+    @Override
+    public String getProject() {
+        return "char_n_column";
+    }
+
+    @Test
+    public void testCharNColumn() throws Exception {
+        fullBuild("c9ddd37e-c870-4ccf-a131-5eef8fe6cb7e");
+
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
+        populateSSWithCSVData(config, getProject(), SparderEnv.getSparkSession());
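+        // CITY is a char(n) column, so the filter literal and the expected result both keep the trailing-space padding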
+        String query1 = "select AGE, CITY, " + "intersect_count(USER_ID, TAG, array['rich','tall','handsome']) "
+                + "from TEST_CHAR_N where city=\'Beijing  \' group by AGE, CITY ";
+        List<String> r1 = ExecAndComp.queryModel(getProject(), query1).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+
+        Assert.assertEquals("19,Beijing   ,0", r1.get(0));
+
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/ExtractLimitInfoTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/ExtractLimitInfoTest.java
new file mode 100644
index 0000000000..e7f9ff4ea6
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/ExtractLimitInfoTest.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.newten;
+
+import java.sql.SQLException;
+import java.util.UUID;
+import java.util.concurrent.atomic.AtomicLong;
+
+import org.apache.hadoop.util.Shell;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.query.runtime.plan.ResultPlan;
+import org.apache.kylin.util.ExecAndComp;
+import org.apache.spark.SparkConf;
+import org.apache.spark.sql.SparderEnv;
+import org.apache.spark.sql.SparkSession;
+import org.apache.spark.sql.execution.SparkPlan;
+import org.apache.spark.sql.internal.StaticSQLConf;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import lombok.val;
+
+public class ExtractLimitInfoTest extends NLocalWithSparkSessionTest {
+
+    @BeforeClass
+    public static void initSpark() {
+        if (Shell.MAC)
+            overwriteSystemPropBeforeClass("org.xerial.snappy.lib.name", "libsnappyjava.jnilib");//for snappy
+        if (ss != null && !ss.sparkContext().isStopped()) {
+            ss.stop();
+        }
+        sparkConf = new SparkConf().setAppName(UUID.randomUUID().toString()).setMaster("local[4]");
+        sparkConf.set("spark.serializer", "org.apache.spark.serializer.JavaSerializer");
+        sparkConf.set(StaticSQLConf.CATALOG_IMPLEMENTATION().key(), "in-memory");
+        sparkConf.set("spark.sql.shuffle.partitions", "1");
+        sparkConf.set("spark.memory.fraction", "0.1");
+        // opt memory
+        sparkConf.set("spark.shuffle.detectCorrupt", "false");
+        // For sinai_poc/query03, enable implicit cross join conversion
+        sparkConf.set("spark.sql.crossJoin.enabled", "true");
+        sparkConf.set("spark.sql.adaptive.enabled", "false");
+        sparkConf.set("spark.sql.autoBroadcastJoinThreshold", "1");
+        ss = SparkSession.builder().config(sparkConf).getOrCreate();
+        SparderEnv.setSparkSession(ss);
+    }
+
+    @Before
+    public void setup() throws Exception {
+        overwriteSystemProp("kylin.job.scheduler.poll-interval-second", "1");
+        this.createTestMetadata("src/test/resources/ut_meta/join_opt");
+        NDefaultScheduler scheduler = NDefaultScheduler.getInstance(getProject());
+        scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()));
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
+        }
+    }
+
+    @After
+    public void after() throws Exception {
+        NDefaultScheduler.destroyInstance();
+        cleanupTestMetadata();
+    }
+
+    @Test
+    public void testExtractLimitInfo() throws Exception {
+        overwriteSystemProp("kylin.storage.columnar.shard-rowcount", "100");
+        fullBuild("8c670664-8d05-466a-802f-83c023b56c77");
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+
+        SparkPlan sparkPlan;
+        AtomicLong accumRowsCounter;
+
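+        // accumRowsCounter accumulates the limit rows extracted from each stage of the executed Spark plan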
+        val sql1 = "select * from "
+                + "(select TRANS_ID,LSTG_FORMAT_NAME from TEST_KYLIN_FACT group by TRANS_ID,LSTG_FORMAT_NAME) lside left join "
+                + "(select TRANS_ID,LSTG_FORMAT_NAME from TEST_KYLIN_FACT group by TRANS_ID,LSTG_FORMAT_NAME) rside "
+                + "on lside.TRANS_ID = rside.TRANS_ID limit 10";
+        sparkPlan = getSparkExecutedPlan(sql1);
+        accumRowsCounter = new AtomicLong();
+        ResultPlan.extractEachStageLimitRows(sparkPlan, -1, accumRowsCounter);
+        Assert.assertEquals(10010, accumRowsCounter.get());
+
+        val sql2 = "select TRANS_ID,LSTG_FORMAT_NAME from TEST_KYLIN_FACT group by TRANS_ID,LSTG_FORMAT_NAME limit 10";
+        sparkPlan = getSparkExecutedPlan(sql2);
+        accumRowsCounter = new AtomicLong();
+        ResultPlan.extractEachStageLimitRows(sparkPlan, -1, accumRowsCounter);
+        Assert.assertEquals(10, accumRowsCounter.get());
+
+        val sql3 = "select count(*) from (select TRANS_ID,LSTG_FORMAT_NAME from TEST_KYLIN_FACT group by TRANS_ID,LSTG_FORMAT_NAME limit 10)";
+        sparkPlan = getSparkExecutedPlan(sql3);
+        accumRowsCounter = new AtomicLong();
+        ResultPlan.extractEachStageLimitRows(sparkPlan, -1, accumRowsCounter);
+        Assert.assertEquals(10, accumRowsCounter.get());
+    }
+
+    private SparkPlan getSparkExecutedPlan(String sql) throws SQLException {
+        return ExecAndComp.queryModel(getProject(), sql).queryExecution().executedPlan();
+    }
+
+    @Override
+    public String getProject() {
+        return "join_opt";
+    }
+
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/IndexDependencyParserTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/IndexDependencyParserTest.java
new file mode 100644
index 0000000000..72ab707049
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/IndexDependencyParserTest.java
@@ -0,0 +1,172 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.newten;
+
+import java.util.Collection;
+import java.util.Set;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
+import org.apache.kylin.engine.spark.smarter.IndexDependencyParser;
+import org.apache.kylin.metadata.model.NDataModelManager;
+import org.apache.kylin.metadata.model.TableRef;
+import org.apache.kylin.metadata.model.TblColRef;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import lombok.val;
+import lombok.var;
+
+public class IndexDependencyParserTest extends NLocalFileMetadataTestCase {
+    @Rule
+    public ExpectedException thrown = ExpectedException.none();
+
+    @Before
+    public void setUp() {
+        this.createTestMetadata("src/test/resources/ut_meta/heterogeneous_segment_2");
+        overwriteSystemProp("kylin.query.engine.sparder-additional-files", "../../../kylin/build/conf/spark-executor-log4j.xml");
+    }
+
+    @After
+    public void after() {
+        this.cleanupTestMetadata();
+    }
+
+    @Test
+    public void unwrapComputeColumn() {
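+        // unwrapComputeColumn should resolve a computed column expression to the source columns it references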
+        val dataModelManager = NDataModelManager.getInstance(KylinConfig.getInstanceFromEnv(), "cc_test");
+        var dataModelDesc = dataModelManager.getDataModelDesc("4a45dc4d-937e-43cc-8faa-34d59d4e11d3");
+        var computedColumn = dataModelDesc.getEffectiveCols().values().stream()
+                .filter(tblColRef -> tblColRef.getColumnDesc().getName().equals("CC_NUM")).findAny().get();
+
+        IndexDependencyParser parser = new IndexDependencyParser(dataModelDesc);
+
+        Assert.assertNotNull(computedColumn);
+        Assert.assertTrue(computedColumn.getColumnDesc().isComputedColumn());
+        // 1+2
+        Set<TblColRef> tblColRefList = parser.unwrapComputeColumn(computedColumn.getExpressionInSourceDB());
+        Assert.assertEquals(0, tblColRefList.size());
+
+        computedColumn = dataModelDesc.getEffectiveCols().values().stream()
+                .filter(tblColRef -> tblColRef.getColumnDesc().getName().equals("CC_LTAX")).findAny().get();
+
+        // `LINEORDER`.`LO_TAX` +1
+        tblColRefList = parser.unwrapComputeColumn(computedColumn.getExpressionInSourceDB());
+        Assert.assertEquals(1, tblColRefList.size());
+        Assert.assertTrue(
+                tblColRefList.stream().anyMatch(tblColRef -> "LINEORDER.LO_TAX".equals(tblColRef.getIdentity())));
+
+        dataModelDesc = dataModelManager.getDataModelDesc("0d146f1a-bdd3-4548-87ac-21c2c6f9a0da");
+
+        computedColumn = dataModelDesc.getEffectiveCols().values().stream()
+                .filter(tblColRef -> tblColRef.getColumnDesc().getName().equals("CC_TOTAL_TAX")).findAny().get();
+
+        Assert.assertTrue(computedColumn.getColumnDesc().isComputedColumn());
+        // LINEORDER.LO_QUANTITY*LINEORDER.LO_TAX
+        tblColRefList = parser.unwrapComputeColumn(computedColumn.getExpressionInSourceDB());
+        Assert.assertEquals(2, tblColRefList.size());
+
+        Assert.assertTrue(
+                tblColRefList.stream().anyMatch(tblColRef -> tblColRef.getIdentity().equals("LINEORDER.LO_QUANTITY")));
+        Assert.assertTrue(
+                tblColRefList.stream().anyMatch(tblColRef -> tblColRef.getIdentity().equals("LINEORDER.LO_TAX")));
+
+        computedColumn = dataModelDesc.getAllTableRefs().stream()
+                .filter(tableRef -> tableRef.getTableIdentity().equals("SSB.LINEORDER")).map(TableRef::getColumns)
+                .flatMap(Collection::stream)
+                .filter(tblColRef -> tblColRef.getColumnDesc().getName().equals("CC_EXTRACT")).findAny().get();
+
+        Assert.assertTrue(computedColumn.getColumnDesc().isComputedColumn());
+        // MINUTE(`LINEORDER`.`LO_ORDERDATE`)
+        tblColRefList = parser.unwrapComputeColumn(computedColumn.getExpressionInSourceDB());
+        Assert.assertEquals(1, tblColRefList.size());
+
+        Assert.assertTrue(
+                tblColRefList.stream().anyMatch(tblColRef -> tblColRef.getIdentity().equals("LINEORDER.LO_ORDERDATE")));
+
+        computedColumn = dataModelDesc.getAllTableRefs().stream()
+                .filter(tableRef -> tableRef.getTableIdentity().equals("SSB.LINEORDER")).map(TableRef::getColumns)
+                .flatMap(Collection::stream)
+                .filter(tblColRef -> tblColRef.getColumnDesc().getName().equals("CC_CAST_LO_ORDERKEY")).findAny().get();
+
+        Assert.assertTrue(computedColumn.getColumnDesc().isComputedColumn());
+        // cast(`lineorder`.`lo_orderkey` as double)
+        tblColRefList = parser.unwrapComputeColumn(computedColumn.getExpressionInSourceDB());
+        Assert.assertEquals(1, tblColRefList.size());
+
+        Assert.assertTrue(
+                tblColRefList.stream().anyMatch(tblColRef -> tblColRef.getIdentity().equals("LINEORDER.LO_ORDERKEY")));
+
+    }
+
+    @Test
+    public void unwrapNestComputeColumn() {
+        val dataModelManager = NDataModelManager.getInstance(KylinConfig.getInstanceFromEnv(), "cc_test");
+        var dataModelDesc = dataModelManager.getDataModelDesc("4802b471-fb69-4b08-a45e-ab3e314e2f6c");
+        var computedColumn = dataModelDesc.getEffectiveCols().values().stream()
+                .filter(tblColRef -> tblColRef.getColumnDesc().getName().equals("CC_LTAX_NEST")).findAny().get();
+
+        IndexDependencyParser parser = new IndexDependencyParser(dataModelDesc);
+
+        Assert.assertNotNull(computedColumn);
+        Assert.assertTrue(computedColumn.getColumnDesc().isComputedColumn());
+        Set<TblColRef> tblColRefList = parser.unwrapComputeColumn(computedColumn.getExpressionInSourceDB());
+        Assert.assertEquals(1, tblColRefList.size());
+        Assert.assertTrue(
+                tblColRefList.stream().anyMatch(tblColRef -> "LINEORDER.LO_TAX".equals(tblColRef.getIdentity())));
+    }
+
+    @Test
+    public void unwrapDateComputeColumn() {
+        val dataModelManager = NDataModelManager.getInstance(KylinConfig.getInstanceFromEnv(),
+                "heterogeneous_segment_2");
+        var dataModelDesc = dataModelManager.getDataModelDesc("3f2860d5-0a4c-4f52-b27b-2627caafe769");
+        var computedColumn = dataModelDesc.getEffectiveCols().values().stream()
+                .filter(tblColRef -> tblColRef.getColumnDesc().getName().equals("CC2")).findAny().get();
+
+        IndexDependencyParser parser = new IndexDependencyParser(dataModelDesc);
+
+        Assert.assertNotNull(computedColumn);
+        Assert.assertTrue(computedColumn.getColumnDesc().isComputedColumn());
+        Set<TblColRef> tblColRefList = parser.unwrapComputeColumn(computedColumn.getExpressionInSourceDB());
+        Assert.assertEquals(1, tblColRefList.size());
+        Assert.assertTrue(tblColRefList.stream()
+                .anyMatch(tblColRef -> tblColRef.getExpressionInSourceDB().equals("KYLIN_SALES.PART_DT")));
+    }
+
+    @Test
+    public void unwrapComputeColumnWithChineseAndSpecialChar() {
+        val dataModelManager = NDataModelManager.getInstance(KylinConfig.getInstanceFromEnv(),
+                "special_character_in_column");
+        var dataModelDesc = dataModelManager.getDataModelDesc("8c08822f-296a-b097-c910-e38d8934b6f9");
+        var computedColumn = dataModelDesc.getEffectiveCols().values().stream()
+                .filter(tblColRef -> tblColRef.getColumnDesc().getName().equals("CC_CHINESE_AND_SPECIAL_CHAR")).findAny().get();
+
+        IndexDependencyParser parser = new IndexDependencyParser(dataModelDesc);
+
+        Assert.assertNotNull(computedColumn);
+        Assert.assertTrue(computedColumn.getColumnDesc().isComputedColumn());
+        Set<TblColRef> tblColRefList = parser.unwrapComputeColumn(computedColumn.getExpressionInSourceDB());
+        Assert.assertEquals(3, tblColRefList.size());
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/MultiPartitionPruningTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/MultiPartitionPruningTest.java
new file mode 100644
index 0000000000..51f9c46d62
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/MultiPartitionPruningTest.java
@@ -0,0 +1,545 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.kylin.newten;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.hadoop.util.Shell;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.DateFormat;
+import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.common.util.RandomUtil;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.junit.TimeZoneTestRunner;
+import org.apache.kylin.metadata.cube.model.NDataSegment;
+import org.apache.kylin.metadata.model.NDataModelManager;
+import org.apache.kylin.metadata.realization.NoRealizationFoundException;
+import org.apache.kylin.query.relnode.ContextUtil;
+import org.apache.kylin.util.ExecAndComp;
+import org.apache.spark.SparkConf;
+import org.apache.spark.sql.SparderEnv;
+import org.apache.spark.sql.SparkSession;
+import org.apache.spark.sql.execution.KylinFileSourceScanExec;
+import org.apache.spark.sql.execution.SparkPlan;
+import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper;
+import org.apache.spark.sql.internal.StaticSQLConf;
+import org.assertj.core.util.Lists;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import lombok.val;
+import scala.runtime.AbstractFunction1;
+
+@RunWith(TimeZoneTestRunner.class)
+public class MultiPartitionPruningTest extends NLocalWithSparkSessionTest implements AdaptiveSparkPlanHelper {
+    private final String sql = "select count(*) from test_kylin_fact left join test_order on test_kylin_fact.order_id = test_order.order_id ";
+
+    @BeforeClass
+    public static void initSpark() {
+        if (Shell.MAC)
+            overwriteSystemPropBeforeClass("org.xerial.snappy.lib.name", "libsnappyjava.jnilib");//for snappy
+        if (ss != null && !ss.sparkContext().isStopped()) {
+            ss.stop();
+        }
+        sparkConf = new SparkConf().setAppName(RandomUtil.randomUUIDStr()).setMaster("local[4]");
+        sparkConf.set("spark.serializer", "org.apache.spark.serializer.JavaSerializer");
+        sparkConf.set(StaticSQLConf.CATALOG_IMPLEMENTATION().key(), "in-memory");
+        sparkConf.set("spark.sql.shuffle.partitions", "1");
+        sparkConf.set("spark.memory.fraction", "0.1");
+        // opt memory
+        sparkConf.set("spark.shuffle.detectCorrupt", "false");
+        // For sinai_poc/query03, enable implicit cross join conversion
+        sparkConf.set("spark.sql.crossJoin.enabled", "true");
+        sparkConf.set("spark.sql.adaptive.enabled", "true");
+        ss = SparkSession.builder().config(sparkConf).getOrCreate();
+        SparderEnv.setSparkSession(ss);
+    }
+
+    @Before
+    public void setup() throws Exception {
+        overwriteSystemProp("kylin.job.scheduler.poll-interval-second", "1");
+        this.createTestMetadata("src/test/resources/ut_meta/multi_partition_pruning");
+        NDefaultScheduler scheduler = NDefaultScheduler.getInstance(getProject());
+        scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()));
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
+        }
+        overwriteSystemProp("kylin.model.multi-partition-enabled", "true");
+    }
+
+    @After
+    public void after() throws Exception {
+        NDefaultScheduler.destroyInstance();
+        cleanupTestMetadata();
+    }
+
+    @Override
+    public String getProject() {
+        return "multi_partition_pruning";
+    }
+
+    @Test
+    public void testPartitionPruningVarchar() throws Exception {
+        val dfName = "8c670664-8d05-466a-802f-83c023b56c78";
+
+        // segment1 [2009-01-01, 2011-01-01] partition value Others, ABIN, FP-non GTC
+        // segment2 [2011-01-01, 2013-01-01] partition value Others, ABIN
+        // segment3 [2013-01-01, 2015-01-01] partition value Others, ABIN, FP-GTC
+        indexDataConstructor.buildMultiSegmentPartitions(dfName, "2009-01-01 00:00:00", "2011-01-01 00:00:00",
+                Lists.newArrayList(10001L), Lists.newArrayList(0L, 1L, 2L));
+        indexDataConstructor.buildMultiSegmentPartitions(dfName, "2011-01-01 00:00:00", "2013-01-01 00:00:00",
+                Lists.newArrayList(10001L), Lists.newArrayList(0L, 1L));
+        indexDataConstructor.buildMultiSegmentPartitions(dfName, "2013-01-01 00:00:00", "2015-01-01 00:00:00",
+                Lists.newArrayList(10001L), Lists.newArrayList(0L, 1L, 3L));
+
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+
+        val expectedRanges = Lists.<Pair<String, String>> newArrayList();
+        val segmentRange1 = Pair.newPair("2009-01-01", "2011-01-01");
+        val segmentRange2 = Pair.newPair("2011-01-01", "2013-01-01");
+        val segmentRange3 = Pair.newPair("2013-01-01", "2015-01-01");
+        val expectedPartitions = Lists.<List<Long>> newArrayList();
+
+        val noPartitionFilterSql = sql + "where cal_dt between '2008-01-01' and '2012-01-01' ";
+        val andSql = sql + "where cal_dt between '2009-01-01' and '2012-01-01' and lstg_format_name = 'ABIN' ";
+        val notInSql = sql
+                + "where cal_dt > '2009-01-01' and cal_dt < '2012-01-01' and lstg_format_name not in ('ABIN', 'FP-non GTC', 'FP-GTC', 'Auction')";
+        val emptyResultSql = sql
+                + "where cal_dt > '2012-01-01' and cal_dt < '2014-01-01' and lstg_format_name = 'NOT-EXIST-VALUE' ";
+        val pushdownSql = sql
+                + "where cal_dt > '2012-01-01' and cal_dt < '2014-01-01' and lstg_format_name = 'FP-GTC' ";
+
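+        // without a partition filter, all three segments and all of their partitions survive pruning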
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange2);
+        expectedRanges.add(segmentRange3);
+        expectedPartitions.add(Lists.newArrayList(0L, 1L, 2L));
+        expectedPartitions.add(Lists.newArrayList(0L, 1L));
+        expectedPartitions.add(Lists.newArrayList(0L, 1L, 3L));
+        assertResultsAndScanFiles(dfName, sql, 5, false, expectedRanges, expectedPartitions);
+
+        expectedRanges.clear();
+        expectedPartitions.clear();
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange2);
+        expectedPartitions.add(Lists.newArrayList(0L, 1L, 2L));
+        expectedPartitions.add(Lists.newArrayList(0L, 1L));
+        assertResultsAndScanFiles(dfName, noPartitionFilterSql, 2, false, expectedRanges, expectedPartitions);
+
+        expectedPartitions.clear();
+        expectedPartitions.add(Lists.newArrayList(1L));
+        expectedPartitions.add(Lists.newArrayList(1L));
+        assertResultsAndScanFiles(dfName, andSql, 1, false, expectedRanges, expectedPartitions);
+
+        expectedPartitions.clear();
+        expectedPartitions.add(Lists.newArrayList(0L));
+        expectedPartitions.add(Lists.newArrayList(0L));
+        assertResultsAndScanFiles(dfName, notInSql, 1, false, expectedRanges, expectedPartitions);
+
+        assertResultsAndScanFiles(dfName, emptyResultSql, 0, true, null, null);
+
+        try {
+            assertResultsAndScanFiles(dfName, pushdownSql, 0, false, null, null);
+        } catch (Exception ex) {
+            Assert.assertTrue(ex.getCause() instanceof NoRealizationFoundException);
+        }
+
+        List<Pair<String, String>> query = new ArrayList<>();
+        query.add(Pair.newPair("", andSql));
+        query.add(Pair.newPair("", notInSql));
+        ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
+    }
+
+    @Test
+    public void testPartitionPruningInteger() throws Exception {
+        val dfName = "8c670664-8d05-466a-802f-83c023b56c76";
+
+        // segment1 [2009-01-01, 2011-01-01] partition value 0, 2, 3
+        // segment2 [2011-01-01, 2013-01-01] partition value 0, 2
+        // segment3 [2013-01-01, 2015-01-01] partition value 0, 2, 15
+        indexDataConstructor.buildMultiSegmentPartitions(dfName, "2009-01-01 00:00:00", "2011-01-01 00:00:00",
+                Lists.newArrayList(10001L), Lists.newArrayList(0L, 1L, 2L));
+        indexDataConstructor.buildMultiSegmentPartitions(dfName, "2011-01-01 00:00:00", "2013-01-01 00:00:00",
+                Lists.newArrayList(10001L), Lists.newArrayList(0L, 1L));
+        indexDataConstructor.buildMultiSegmentPartitions(dfName, "2013-01-01 00:00:00", "2015-01-01 00:00:00",
+                Lists.newArrayList(10001L), Lists.newArrayList(0L, 1L, 3L));
+
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+
+        val expectedRanges = Lists.<Pair<String, String>> newArrayList();
+        val segmentRange1 = Pair.newPair("2009-01-01", "2011-01-01");
+        val segmentRange2 = Pair.newPair("2011-01-01", "2013-01-01");
+        val segmentRange3 = Pair.newPair("2013-01-01", "2015-01-01");
+        val expectedPartitions = Lists.<List<Long>> newArrayList();
+
+        val noPartitionFilterSql = sql + "where cal_dt between '2008-01-01' and '2012-01-01' ";
+        val andSql = sql + "where cal_dt between '2009-01-01' and '2012-01-01' and lstg_site_id = 2 ";
+        val notInSql = sql
+                + "where cal_dt > '2009-01-01' and cal_dt < '2012-01-01' and lstg_site_id not in (2, 3, 15, 23)";
+        val emptyResultSql = sql + "where cal_dt > '2012-01-01' and cal_dt < '2014-01-01' and lstg_site_id = 10000 ";
+        val pushdownSql = sql + "where cal_dt > '2012-01-01' and cal_dt < '2014-01-01' and lstg_site_id = 15 ";
+
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange2);
+        expectedRanges.add(segmentRange3);
+        expectedPartitions.add(Lists.newArrayList(0L, 1L, 2L));
+        expectedPartitions.add(Lists.newArrayList(0L, 1L));
+        expectedPartitions.add(Lists.newArrayList(0L, 1L, 3L));
+        assertResultsAndScanFiles(dfName, sql, 5, false, expectedRanges, expectedPartitions);
+
+        expectedRanges.clear();
+        expectedPartitions.clear();
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange2);
+        expectedPartitions.add(Lists.newArrayList(0L, 1L, 2L));
+        expectedPartitions.add(Lists.newArrayList(0L, 1L));
+        assertResultsAndScanFiles(dfName, noPartitionFilterSql, 2, false, expectedRanges, expectedPartitions);
+
+        expectedPartitions.clear();
+        expectedPartitions.add(Lists.newArrayList(1L));
+        expectedPartitions.add(Lists.newArrayList(1L));
+        assertResultsAndScanFiles(dfName, andSql, 1, false, expectedRanges, expectedPartitions);
+
+        expectedPartitions.clear();
+        expectedPartitions.add(Lists.newArrayList(0L));
+        expectedPartitions.add(Lists.newArrayList(0L));
+        assertResultsAndScanFiles(dfName, notInSql, 1, false, expectedRanges, expectedPartitions);
+
+        assertResultsAndScanFiles(dfName, emptyResultSql, 0, true, null, null);
+
+        try {
+            assertResultsAndScanFiles(dfName, pushdownSql, 0, false, null, null);
+        } catch (Exception ex) {
+            Assert.assertTrue(ex.getCause() instanceof NoRealizationFoundException);
+        }
+
+        List<Pair<String, String>> query = new ArrayList<>();
+        query.add(Pair.newPair("", andSql));
+        query.add(Pair.newPair("", notInSql));
+        ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
+    }
+
+    @Test
+    public void testPartitionPruningDate() throws Exception {
+        val dfName = "8c670664-8d05-466a-802f-83c023b56c77";
+
+        // segment1 [2009-01-01, 2011-01-01] partition value 2010-01-01, 2011-01-01
+        // segment2 [2011-01-01, 2013-01-01] partition value 2011-01-01, 2012-01-01, 2013-01-01
+        // segment3 [2013-01-01, 2015-01-01] partition value 2012-01-01, 2013-01-01, 2014-01-01
+        indexDataConstructor.buildMultiSegmentPartitions(dfName, "2009-01-01 00:00:00", "2011-01-01 00:00:00",
+                Lists.newArrayList(10001L), Lists.newArrayList(0L, 1L));
+        indexDataConstructor.buildMultiSegmentPartitions(dfName, "2011-01-01 00:00:00", "2013-01-01 00:00:00",
+                Lists.newArrayList(10001L), Lists.newArrayList(1L, 2L, 3L));
+        indexDataConstructor.buildMultiSegmentPartitions(dfName, "2013-01-01 00:00:00", "2015-01-01 00:00:00",
+                Lists.newArrayList(10001L), Lists.newArrayList(2L, 3L, 4L));
+
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+
+        val expectedRanges = Lists.<Pair<String, String>> newArrayList();
+        val segmentRange1 = Pair.newPair("2009-01-01 00:00:00", "2011-01-01 00:00:00");
+        val segmentRange2 = Pair.newPair("2011-01-01 00:00:00", "2013-01-01 00:00:00");
+        val segmentRange3 = Pair.newPair("2013-01-01 00:00:00", "2015-01-01 00:00:00");
+        val expectedPartitions = Lists.<List<Long>> newArrayList();
+
+        val baseSql = "select count(*) from test_order left join test_kylin_fact on test_order.order_id = test_kylin_fact.order_id ";
+        val noPartitionFilterSql = baseSql
+                + "where test_time_enc between '2010-01-01 00:00:00' and '2012-01-01 00:00:00' ";
+        val andSql = baseSql
+                + "where test_time_enc > '2012-01-01 00:00:00' and test_time_enc < '2014-01-01 00:00:00' and test_date_enc = '2013-01-01' ";
+        val inSql = baseSql
+                + "where test_time_enc > '2012-01-01 00:00:00' and test_time_enc < '2014-01-01 00:00:00' and test_date_enc in ('2012-01-01', '2013-01-01') ";
+        val notInSql = baseSql
+                + "where test_time_enc between '2009-01-01 00:00:00' and '2011-01-01 00:00:00' and test_date_enc not in ('2010-01-01', '2012-01-01', '2013-01-01', '2014-01-01') ";
+        val emptyResultSql = baseSql
+                + "where test_time_enc between '2009-01-01 00:00:00' and '2011-01-01 00:00:00' and test_date_enc = '2020-01-01' ";
+        val pushdownSql = baseSql
+                + "where test_time_enc between '2011-01-01 00:00:00' and '2015-01-01 00:00:00' and test_date_enc = '2011-01-01' ";
+
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange2);
+        expectedRanges.add(segmentRange3);
+        expectedPartitions.add(Lists.newArrayList(0L, 1L));
+        expectedPartitions.add(Lists.newArrayList(1L, 2L, 3L));
+        expectedPartitions.add(Lists.newArrayList(2L, 3L, 4L));
+        assertResultsAndScanFiles(dfName, baseSql, 8, false, expectedRanges, expectedPartitions);
+
+        expectedRanges.clear();
+        expectedPartitions.clear();
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange2);
+        expectedPartitions.add(Lists.newArrayList(0L, 1L));
+        expectedPartitions.add(Lists.newArrayList(1L, 2L, 3L));
+        assertResultsAndScanFiles(dfName, noPartitionFilterSql, 5, false, expectedRanges, expectedPartitions);
+
+        expectedRanges.clear();
+        expectedPartitions.clear();
+        expectedRanges.add(segmentRange2);
+        expectedRanges.add(segmentRange3);
+        expectedPartitions.add(Lists.newArrayList(3L));
+        expectedPartitions.add(Lists.newArrayList(3L));
+        assertResultsAndScanFiles(dfName, andSql, 2, false, expectedRanges, expectedPartitions);
+
+        expectedPartitions.clear();
+        expectedPartitions.add(Lists.newArrayList(2L, 3L));
+        expectedPartitions.add(Lists.newArrayList(2L, 3L));
+        assertResultsAndScanFiles(dfName, inSql, 4, false, expectedRanges, expectedPartitions);
+
+        expectedRanges.clear();
+        expectedPartitions.clear();
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange2);
+        expectedPartitions.add(Lists.newArrayList(1L));
+        expectedPartitions.add(Lists.newArrayList(1L));
+        assertResultsAndScanFiles(dfName, notInSql, 2, false, expectedRanges, expectedPartitions);
+
+        assertResultsAndScanFiles(dfName, emptyResultSql, 0, true, null, null);
+
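+        // test_date_enc = '2011-01-01' falls into segments 2 and 3 by time range, but that partition
+        // value was only built for segments 1 and 2 (see the comments above), so no index can answer
+        // the query and a wrapped NoRealizationFoundException is expected.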
+        try {
+            assertResultsAndScanFiles(dfName, pushdownSql, 0, false, null, null);
+        } catch (Exception ex) {
+            Assert.assertTrue(ex.getCause() instanceof NoRealizationFoundException);
+        }
+
+        List<Pair<String, String>> query = new ArrayList<>();
+        query.add(Pair.newPair("", andSql));
+        query.add(Pair.newPair("", inSql));
+        ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
+    }
+
+    @Test
+    public void testPartitionPruningTimestamp() throws Exception {
+        val dfName = "8c670664-8d05-466a-802f-83c023b56c79";
+
+        // segment1 [2009-01-01, 2011-01-01] partition value 2010-01-01 00:56:38, 2010-01-01 04:03:59
+        // segment2 [2011-01-01, 2013-01-01] partition value 2010-01-01 04:03:59, 2010-01-01 08:16:36, 2010-01-02 14:24:50
+        // segment3 [2013-01-01, 2015-01-01] partition value 2010-01-01 08:16:36, 2010-01-02 14:24:50, 2010-01-03 05:15:09
+        indexDataConstructor.buildMultiSegmentPartitions(dfName, "2009-01-01 00:00:00", "2011-01-01 00:00:00",
+                Lists.newArrayList(10001L), Lists.newArrayList(0L, 1L));
+        indexDataConstructor.buildMultiSegmentPartitions(dfName, "2011-01-01 00:00:00", "2013-01-01 00:00:00",
+                Lists.newArrayList(10001L), Lists.newArrayList(1L, 2L, 3L));
+        indexDataConstructor.buildMultiSegmentPartitions(dfName, "2013-01-01 00:00:00", "2015-01-01 00:00:00",
+                Lists.newArrayList(10001L), Lists.newArrayList(2L, 3L, 4L));
+
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+
+        val expectedRanges = Lists.<Pair<String, String>> newArrayList();
+        val segmentRange1 = Pair.newPair("2009-01-01", "2011-01-01");
+        val segmentRange2 = Pair.newPair("2011-01-01", "2013-01-01");
+        val segmentRange3 = Pair.newPair("2013-01-01", "2015-01-01");
+        val expectedPartitions = Lists.<List<Long>> newArrayList();
+
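+        // Here the roles are swapped: test_date_enc drives segment pruning and test_time_enc is the
+        // multi-level partition column.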
+        val baseSql = "select count(*) from test_order left join test_kylin_fact on test_order.order_id = test_kylin_fact.order_id ";
+        val noPartitionFilterSql = baseSql + "where test_date_enc between '2010-01-01' and '2012-01-01' ";
+        val andSql = baseSql
+                + "where test_date_enc > '2012-01-01' and test_date_enc < '2014-01-01' and test_time_enc = '2010-01-02 14:24:50' ";
+        val inSql = baseSql
+                + "where test_date_enc > '2012-01-01' and test_date_enc < '2014-01-01' and test_time_enc in ('2010-01-01 08:16:36', '2010-01-02 14:24:50') ";
+        val notInSql = baseSql
+                + "where test_date_enc between '2009-01-01' and '2011-01-01' and test_time_enc not in ('2010-01-01 00:56:38', '2010-01-01 08:16:36', '2010-01-02 14:24:50', '2010-01-03 05:15:09') ";
+        val emptyResultSql = baseSql
+                + "where test_date_enc between '2009-01-01' and '2011-01-01' and test_time_enc = '2020-01-01 00:00:00' ";
+        val pushdownSql = baseSql
+                + "where test_date_enc between '2011-01-01' and '2015-01-01' and test_time_enc = '2010-01-01 04:03:59' ";
+
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange2);
+        expectedRanges.add(segmentRange3);
+        expectedPartitions.add(Lists.newArrayList(0L, 1L));
+        expectedPartitions.add(Lists.newArrayList(1L, 2L, 3L));
+        expectedPartitions.add(Lists.newArrayList(2L, 3L, 4L));
+        assertResultsAndScanFiles(dfName, baseSql, 8, false, expectedRanges, expectedPartitions);
+
+        expectedRanges.clear();
+        expectedPartitions.clear();
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange2);
+        expectedPartitions.add(Lists.newArrayList(0L, 1L));
+        expectedPartitions.add(Lists.newArrayList(1L, 2L, 3L));
+        assertResultsAndScanFiles(dfName, noPartitionFilterSql, 5, false, expectedRanges, expectedPartitions);
+
+        expectedRanges.clear();
+        expectedPartitions.clear();
+        expectedRanges.add(segmentRange2);
+        expectedRanges.add(segmentRange3);
+        expectedPartitions.add(Lists.newArrayList(3L));
+        expectedPartitions.add(Lists.newArrayList(3L));
+        assertResultsAndScanFiles(dfName, andSql, 2, false, expectedRanges, expectedPartitions);
+
+        expectedPartitions.clear();
+        expectedPartitions.add(Lists.newArrayList(2L, 3L));
+        expectedPartitions.add(Lists.newArrayList(2L, 3L));
+        assertResultsAndScanFiles(dfName, inSql, 4, false, expectedRanges, expectedPartitions);
+
+        expectedRanges.clear();
+        expectedPartitions.clear();
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange2);
+        expectedPartitions.add(Lists.newArrayList(1L));
+        expectedPartitions.add(Lists.newArrayList(1L));
+        assertResultsAndScanFiles(dfName, notInSql, 2, false, expectedRanges, expectedPartitions);
+
+        assertResultsAndScanFiles(dfName, emptyResultSql, 0, true, null, null);
+
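+        // As in the date test, '2010-01-01 04:03:59' is not built in segment3 although the time range
+        // covers it, so the query is expected to fail with a wrapped NoRealizationFoundException.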
+        try {
+            assertResultsAndScanFiles(dfName, pushdownSql, 0, false, null, null);
+        } catch (Exception ex) {
+            Assert.assertTrue(ex.getCause() instanceof NoRealizationFoundException);
+        }
+
+        List<Pair<String, String>> query = new ArrayList<>();
+        query.add(Pair.newPair("", andSql));
+        query.add(Pair.newPair("", inSql));
+        ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
+    }
+
+    @Test
+    public void testPartitionPruningChinese() throws Exception {
+        val dfName = "9cde9d25-9334-4b92-b229-a00f49453757";
+
+        // segment1 [2012-01-01, 2013-01-01] partition value FT, 中国
+        // segment2 [2013-01-01, 2014-01-01] partition value 中国
+        indexDataConstructor.buildMultiSegmentPartitions(dfName, "2012-01-01 00:00:00", "2013-01-01 00:00:00",
+                Lists.newArrayList(100001L), Lists.newArrayList(0L, 1L));
+        indexDataConstructor.buildMultiSegmentPartitions(dfName, "2013-01-01 00:00:00", "2014-01-01 00:00:00",
+                Lists.newArrayList(100001L), Lists.newArrayList(0L, 1L));
+
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+
+        val expectedRanges = Lists.<Pair<String, String>> newArrayList();
+        val segmentRange1 = Pair.newPair("2012-01-01", "2013-01-01");
+        val segmentRange2 = Pair.newPair("2013-01-01", "2014-01-01");
+        val expectedPartitions = Lists.<List<Long>> newArrayList();
+
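+        // The time filter keeps only segment1, and name1 = '中国' is expected to prune the scan down
+        // to partition 1 of that segment.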
+        val chineseSql = "select count(*), time1 from test_measure where time1 > '2012-01-01' and time1 < '2013-01-01' and name1 = '中国' group by time1";
+
+        expectedRanges.add(segmentRange1);
+        expectedPartitions.add(Lists.newArrayList(1L));
+        assertResultsAndScanFiles(dfName, chineseSql, 0, false, expectedRanges, expectedPartitions);
+
+        val queries = Lists.<Pair<String, String>> newArrayList();
+        queries.add(Pair.newPair("", chineseSql));
+        ExecAndComp.execAndCompare(queries, getProject(), ExecAndComp.CompareLevel.SAME, "left");
+    }
+
+    @Test
+    public void testExactlyMatch() throws Exception {
+        val dfName = "8c670664-8d05-466a-802f-83c023b56c80";
+
+        // segment1 [2009-01-01, 2011-01-01] build all partitions
+        // segment2 [2011-01-01, 2013-01-01] build all partitions
+        indexDataConstructor.buildMultiSegmentPartitions(dfName, "2009-01-01 00:00:00", "2011-01-01 00:00:00",
+                Lists.newArrayList(10001L, 11001L), Lists.newArrayList(0L, 1L, 2L, 3L, 4L));
+        indexDataConstructor.buildMultiSegmentPartitions(dfName, "2011-01-01 00:00:00", "2013-01-01 00:00:00",
+                Lists.newArrayList(10001L, 11001L), Lists.newArrayList(0L, 1L, 2L, 3L, 4L));
+
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+
+        val expectedRanges = Lists.<Pair<String, String>> newArrayList();
+        val segmentRange1 = Pair.newPair("2009-01-01", "2011-01-01");
+        val segmentRange2 = Pair.newPair("2011-01-01", "2013-01-01");
+        val expectedPartitions = Lists.<List<Long>> newArrayList();
+
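+        // sql1 groups by cal_dt only and should hit every built partition; sql2 adds the
+        // lstg_format_name IN filter, which is expected to prune each segment down to partitions 1 and 2.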
+        val sql1 = "select\n" + "  count(*), cal_dt\n" + "from\n" + "  test_kylin_fact\n"
+                + "  left join test_order on test_kylin_fact.order_id = test_order.order_id\n" + "where\n"
+                + "  cal_dt between '2009-01-01'\n" + "  and '2012-01-01'\n" + "group by\n" + "  cal_dt\n"
+                + "order by\n" + "  cal_dt\n";
+        val sql2 = "select\n" + "  count(*), cal_dt\n" + "from\n" + "  test_kylin_fact\n"
+                + "  left join test_order on test_kylin_fact.order_id = test_order.order_id\n" + "where\n"
+                + "  cal_dt between '2009-01-01'\n"
+                + "  and '2012-01-01' and lstg_format_name in ('ABIN', 'FP-non GTC') \n" + "group by\n"
+                + "  cal_dt, lstg_format_name\n" + "order by\n" + "  cal_dt\n";
+
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange2);
+        expectedPartitions.add(Lists.newArrayList(0L, 1L, 2L, 3L, 4L));
+        expectedPartitions.add(Lists.newArrayList(0L, 1L, 2L, 3L, 4L));
+        assertResultsAndScanFiles(dfName, sql1, 5, false, expectedRanges, expectedPartitions);
+
+        expectedPartitions.clear();
+        expectedPartitions.add(Lists.newArrayList(1L, 2L));
+        expectedPartitions.add(Lists.newArrayList(1L, 2L));
+        assertResultsAndScanFiles(dfName, sql2, 2, false, expectedRanges, expectedPartitions);
+
+        val queries = Lists.<Pair<String, String>> newArrayList();
+        queries.add(Pair.newPair("", sql1));
+        queries.add(Pair.newPair("", sql2));
+        ExecAndComp.execAndCompare(queries, getProject(), ExecAndComp.CompareLevel.SAME, "left");
+    }
+
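+    // Runs the query against the model, then checks the "numFiles" metric of the Spark scan and,
+    // via assertPrunedSegmentRange, the segment ranges and partition ids recorded in the storage
+    // context. When emptyLayout is true the query is expected to hit no layout at all.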
+    private long assertResultsAndScanFiles(String modelId, String sql, long numScanFiles, boolean emptyLayout,
+            List<Pair<String, String>> expectedRanges, List<List<Long>> expectedPartitions) throws Exception {
+        val df = ExecAndComp.queryModelWithoutCompute(getProject(), sql);
+        val context = ContextUtil.listContexts().get(0);
+        if (emptyLayout) {
+            Assert.assertTrue(context.storageContext.isEmptyLayout());
+            Assert.assertEquals(Long.valueOf(-1), context.storageContext.getLayoutId());
+            return numScanFiles;
+        }
+        df.collect();
+        val actualNum = findFileSourceScanExec(df.queryExecution().executedPlan()).metrics().get("numFiles").get()
+                .value();
+        Assert.assertEquals(numScanFiles, actualNum);
+        val segmentIds = context.storageContext.getPrunedSegments();
+        val partitions = context.storageContext.getPrunedPartitions();
+        assertPrunedSegmentRange(modelId, segmentIds, partitions, expectedRanges, expectedPartitions);
+        return actualNum;
+    }
+
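+    // Locates the KylinFileSourceScanExec node in the executed Spark plan so its "numFiles" metric
+    // can be read.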
+    private KylinFileSourceScanExec findFileSourceScanExec(SparkPlan plan) {
+        return (KylinFileSourceScanExec) find(plan, new AbstractFunction1<SparkPlan, Object>() {
+            @Override
+            public Object apply(SparkPlan v1) {
+                return v1 instanceof KylinFileSourceScanExec;
+            }
+        }).get();
+    }
+
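+    // Compares the pruned segments and partitions captured in the storage context with the expected
+    // time ranges (formatted using the model's partition date format) and partition ids.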
+    private void assertPrunedSegmentRange(String dfId, List<NDataSegment> prunedSegments,
+            Map<String, List<Long>> prunedPartitions, List<Pair<String, String>> expectedRanges,
+            List<List<Long>> expectedPartitions) {
+        val model = NDataModelManager.getInstance(getTestConfig(), getProject()).getDataModelDesc(dfId);
+        val partitionColDateFormat = model.getPartitionDesc().getPartitionDateFormat();
+
+        if (CollectionUtils.isEmpty(expectedRanges)) {
+            return;
+        }
+        Assert.assertEquals(expectedRanges.size(), prunedSegments.size());
+        Assert.assertEquals(expectedPartitions.size(), prunedSegments.size());
+        for (int i = 0; i < prunedSegments.size(); i++) {
+            val segment = prunedSegments.get(i);
+            val start = DateFormat.formatToDateStr(segment.getTSRange().getStart(), partitionColDateFormat);
+            val end = DateFormat.formatToDateStr(segment.getTSRange().getEnd(), partitionColDateFormat);
+            val expectedRange = expectedRanges.get(i);
+            Assert.assertEquals(expectedRange.getFirst(), start);
+            Assert.assertEquals(expectedRange.getSecond(), end);
+
+            val actualPartitions = prunedPartitions.get(segment.getId());
+            Assert.assertEquals(expectedPartitions.get(i), actualPartitions);
+        }
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/NAggPushDownTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/NAggPushDownTest.java
new file mode 100644
index 0000000000..cb6a6c428c
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/NAggPushDownTest.java
@@ -0,0 +1,103 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.newten;
+
+import java.io.File;
+import java.util.List;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.util.ExecAndComp;
+import org.apache.spark.sql.SparderEnv;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.sun.tools.javac.util.Assert;
+
+public class NAggPushDownTest extends NLocalWithSparkSessionTest {
+    private static final Logger logger = LoggerFactory.getLogger(NAggPushDownTest.class);
+    private String sqlFolder = "sql_select_subquery";
+    private String joinType = "inner"; // only the inner join type is supported here
+
+    @Before
+    public void setup() throws Exception {
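+        // Shorten the scheduler poll interval and enable Calcite aggregate pushdown as well as
+        // partial inner-join model matching for the queries in this suite.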
+        overwriteSystemProp("kylin.job.scheduler.poll-interval-second", "1");
+        overwriteSystemProp("kylin.query.match-partial-inner-join-model", "true");
+        overwriteSystemProp("kylin.query.calcite.aggregate-pushdown-enabled", "true");
+        this.createTestMetadata("src/test/resources/ut_meta/agg_push_down");
+        NDefaultScheduler scheduler = NDefaultScheduler.getInstance(getProject());
+        scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()));
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
+        }
+    }
+
+    @After
+    public void after() throws Exception {
+        NDefaultScheduler.destroyInstance();
+        cleanupTestMetadata();
+    }
+
+    @Override
+    public String getProject() {
+        return "subquery";
+    }
+
+    @Test
+    public void testBasic() throws Exception {
+        fullBuild("a749e414-c40e-45b7-92e4-bbfe63af705d");
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
+        ExecAndComp.CompareLevel compareLevel = ExecAndComp.CompareLevel.SAME;
+        populateSSWithCSVData(config, getProject(), SparderEnv.getSparkSession());
+        String identity = "sqlFolder:" + sqlFolder + ", joinType:" + joinType + ", compareLevel:" + compareLevel;
+        try {
+            List<Pair<String, String>> queries = ExecAndComp
+                    .fetchQueries(KYLIN_SQL_BASE_DIR + File.separator + sqlFolder);
+            ExecAndComp.execAndCompare(queries, getProject(), compareLevel, joinType);
+        } catch (Throwable th) {
+            logger.error("Query fail on: {}", identity);
+            Assert.error();
+        }
+        logger.info("Query succeed on: {}", identity);
+    }
+
+    @Test
+    public void testAggPushDown() throws Exception {
+        fullBuild("ce2057da-54c8-4e05-b0bf-d225a6bbb62c");
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
+        ExecAndComp.CompareLevel compareLevel = ExecAndComp.CompareLevel.SAME;
+        populateSSWithCSVData(config, getProject(), SparderEnv.getSparkSession());
+        String identity = "sqlFolder:" + "sql_agg_pushdown" + ", joinType:" + joinType + ", compareLevel:" + compareLevel;
+        try {
+            List<Pair<String, String>> queries = ExecAndComp
+                    .fetchQueries(KYLIN_SQL_BASE_DIR + File.separator + "sql_agg_pushdown");
+            ExecAndComp.execAndCompare(queries, getProject(), compareLevel, joinType);
+        } catch (Throwable th) {
+            logger.error("Query fail on: {}", identity);
+            Assert.error();
+        }
+        logger.info("Query succeed on: {}", identity);
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/NBadQueryAndPushDownTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/NBadQueryAndPushDownTest.java
new file mode 100644
index 0000000000..fc56ec0377
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/NBadQueryAndPushDownTest.java
@@ -0,0 +1,269 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.newten;
+
+import java.io.File;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.sql.SQLException;
+import java.util.List;
+
+import org.apache.calcite.sql.validate.SqlValidatorException;
+import org.apache.commons.lang3.exception.ExceptionUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.debug.BackdoorToggles;
+import org.apache.kylin.common.exception.KylinException;
+import org.apache.kylin.common.exception.QueryErrorCode;
+import org.apache.kylin.common.msg.MsgPicker;
+import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
+import org.apache.kylin.metadata.realization.NoRealizationFoundException;
+import org.apache.kylin.query.util.KapQueryUtil;
+import org.apache.kylin.query.util.PushDownUtil;
+import org.apache.kylin.query.util.QueryParams;
+import org.apache.kylin.util.ExecAndComp;
+import org.apache.spark.sql.AnalysisException;
+import org.apache.spark.sql.SparderEnv;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Test;
+
+import com.google.common.base.Throwables;
+
+import lombok.val;
+
+public class NBadQueryAndPushDownTest extends NLocalWithSparkSessionTest {
+    private static final String PUSHDOWN_RUNNER_KEY = "kylin.query.pushdown.runner-class-name";
+    private static final String PUSHDOWN_ENABLED = "kylin.query.pushdown-enabled";
+    private final static String PROJECT_NAME = "bad_query_test";
+    private final static String DEFAULT_PROJECT_NAME = "default";
+
+    @Override
+    public String getProject() {
+        return PROJECT_NAME;
+    }
+
+    @After
+    public void teardown() {
+        NDefaultScheduler.destroyInstance();
+        super.cleanupTestMetadata();
+    }
+
+    @Test
+    public void testTableNotFoundInDatabase() throws Exception {
+        //from tpch database
+        final String sql = "select * from lineitem where l_orderkey = o.o_orderkey and l_commitdate < l_receiptdate";
+        KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_RUNNER_KEY,
+                "org.apache.kylin.query.pushdown.PushDownRunnerSparkImpl");
+        KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_ENABLED, "true");
+        try {
+            ExecAndComp.queryModelWithoutCompute(getProject(), sql);
+        } catch (Exception sqlException) {
+            Assert.assertTrue(sqlException instanceof SQLException);
+            Assert.assertTrue(ExceptionUtils.getRootCause(sqlException) instanceof SqlValidatorException);
+        }
+    }
+
+    @Test
+    public void testPushdownCCWithFn() throws Exception {
+        final String sql = "select LSTG_FORMAT_NAME,sum(NEST4) from TEST_KYLIN_FACT where ( not { fn convert( \"LSTG_FORMAT_NAME\", SQL_WVARCHAR ) } = 'ABIN' or \"LSTG_FORMAT_NAME\" is null) group by LSTG_FORMAT_NAME limit 1";
+        KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_RUNNER_KEY,
+                "org.apache.kylin.query.pushdown.PushDownRunnerSparkImpl");
+        KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_ENABLED, "true");
+
+        // success
+        pushDownSql(DEFAULT_PROJECT_NAME, sql, 10, 0, null, true);
+
+        // fails because the pushdown converters are in the wrong order: RestoreFromComputedColumn
+        // runs too late, so the computed column NEST4 cannot be resolved
+        overwriteSystemProp("kylin.query.pushdown.converter-class-names",
+                "org.apache.kylin.query.util.SparkSQLFunctionConverter,org.apache.kylin.query.util.PowerBIConverter,org.apache.kylin.query.util.RestoreFromComputedColumn,org.apache.kylin.query.security.RowFilter,org.apache.kylin.query.security.HackSelectStarWithColumnACL");
+        try {
+            pushDownSql(DEFAULT_PROJECT_NAME, sql, 10, 0, null, true);
+            Assert.fail();
+        } catch (Exception e) {
+            Assert.assertTrue(e instanceof AnalysisException);
+            Assert.assertTrue(e.getMessage().contains("cannot resolve 'NEST4' given input columns"));
+        }
+    }
+
+    @Test
+    public void testPushDownToNonExistentDB() throws Exception {
+        //from tpch database
+        try {
+            final String sql = "select * from lineitem where l_orderkey = o.o_orderkey and l_commitdate < l_receiptdate";
+            KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_RUNNER_KEY,
+                    "org.apache.kylin.query.pushdown.PushDownRunnerSparkImpl");
+            KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_ENABLED, "true");
+            pushDownSql(getProject(), sql, 0, 0,
+                    new SQLException(new NoRealizationFoundException("testPushDownToNonExistentDB")), true);
+        } catch (Exception e) {
+            Assert.assertTrue(ExceptionUtils.getRootCause(e) instanceof AnalysisException);
+            Assert.assertTrue(ExceptionUtils.getRootCauseMessage(e).contains("Table or view not found: LINEITEM"));
+        }
+    }
+
+    @Test
+    public void testPushDownForFileNotExist() throws Exception {
+        final String sql = "select max(price) from test_kylin_fact";
+        KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_RUNNER_KEY,
+                "org.apache.kylin.query.pushdown.PushDownRunnerSparkImpl");
+        try {
+            ExecAndComp.queryModelWithoutCompute(getProject(), sql);
+        } catch (Exception sqlException) {
+            if (sqlException instanceof SQLException) {
+                Assert.assertTrue(ExceptionUtils.getRootCauseMessage(sqlException).contains("Path does not exist"));
+                pushDownSql(getProject(), sql, 0, 0, (SQLException) sqlException);
+            }
+        }
+    }
+
+    @Test
+    public void testPushDownWithSemicolonQuery() throws Exception {
+        final String sql = "select 1 from test_kylin_fact;";
+        KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_RUNNER_KEY,
+                "org.apache.kylin.query.pushdown.PushDownRunnerSparkImpl");
+        KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_ENABLED, "true");
+        pushDownSql(getProject(), sql, 10, 0,
+                new SQLException(new NoRealizationFoundException("test for semicolon query push down")), true);
+        try {
+            pushDownSql(getProject(), sql, 10, 1,
+                    new SQLException(new NoRealizationFoundException("test for semicolon query push down")), true);
+        } catch (Exception sqlException) {
+            Assert.assertTrue(ExceptionUtils.getRootCauseMessage(sqlException).contains("input 'OFFSET'"));
+        }
+    }
+
+    @Test
+    public void testPushDownNonEquiSql() throws Exception {
+        KylinConfig.getInstanceFromEnv().setProperty("kylin.query.pushdown.converter-class-names",
+                "org.apache.kylin.query.util.RestoreFromComputedColumn,org.apache.kylin.query.util.SparkSQLFunctionConverter");
+        File sqlFile = new File("src/test/resources/query/sql_pushdown/query11.sql");
+        String sql = new String(Files.readAllBytes(sqlFile.toPath()), StandardCharsets.UTF_8);
+        KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_ENABLED, "false");
+        try {
+            ExecAndComp.queryModelWithoutCompute(DEFAULT_PROJECT_NAME, sql);
+        } catch (Exception e) {
+            KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_RUNNER_KEY,
+                    "org.apache.kylin.query.pushdown.PushDownRunnerSparkImpl");
+            KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_ENABLED, "true");
+            pushDownSql(DEFAULT_PROJECT_NAME, sql, 0, 0, (SQLException) e);
+        }
+    }
+
+    @Test
+    public void testPushDownUdf() throws Exception {
+        KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_RUNNER_KEY,
+                "org.apache.kylin.query.pushdown.PushDownRunnerSparkImpl");
+        KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_ENABLED, "true");
+        KylinConfig.getInstanceFromEnv().setProperty("kylin.query.pushdown.converter-class-names",
+                "org.apache.kylin.query.util.RestoreFromComputedColumn,org.apache.kylin.query.util.SparkSQLFunctionConverter");
+
+        String prjName = "tdvt";
+        // timestampDiff
+        String sql = "SELECT {fn TIMESTAMPDIFF(SQL_TSI_DAY,{d '1900-01-01'},\"CALCS\".\"DATE0\")} AS \"TEMP_Test__2048215813__0_\"\n"
+                + "FROM \"CALCS\" \"CALCS\"\n"
+                + "GROUP BY {fn TIMESTAMPDIFF(SQL_TSI_DAY,{d '1900-01-01'},\"CALCS\".\"DATE0\")}";
+        val result = pushDownSql(prjName, sql, 0, 0,
+                new SQLException(new NoRealizationFoundException("test for  query push down")), true);
+        Assert.assertNotNull(result);
+
+        //timestampAdd
+        sql = "  SELECT {fn TIMESTAMPADD(SQL_TSI_DAY,1,\"CALCS\".\"DATE2\")} AS \"TEMP_Test__3825428522__0_\"\n"
+                + "FROM \"CALCS\" \"CALCS\"\n" + "GROUP BY {fn TIMESTAMPADD(SQL_TSI_DAY,1,\"CALCS\".\"DATE2\")}";
+        val result1 = pushDownSql(prjName, sql, 0, 0,
+                new SQLException(new NoRealizationFoundException("test for  query push down")), true);
+        Assert.assertNotNull(result1);
+
+        // TRUNCATE
+        sql = "SELECT {fn CONVERT({fn TRUNCATE(\"CALCS\".\"NUM4\",0)}, SQL_BIGINT)} AS \"TEMP_Test__4269159351__0_\"\n"
+                + "FROM \"CALCS\" \"CALCS\"\n"
+                + "GROUP BY {fn CONVERT({fn TRUNCATE(\"CALCS\".\"NUM4\",0)}, SQL_BIGINT)}";
+        val result2 = pushDownSql(prjName, sql, 0, 0,
+                new SQLException(new NoRealizationFoundException("test for  query push down")), true);
+        Assert.assertNotNull(result2);
+    }
+
+    @Test
+    public void testPushDownForced() throws Exception {
+        //test for KE-14218
+
+        KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_RUNNER_KEY,
+                "org.apache.kylin.query.pushdown.PushDownRunnerSparkImpl");
+        KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_ENABLED, "true");
+        KylinConfig.getInstanceFromEnv().setProperty("kylin.query.pushdown.converter-class-names",
+                "org.apache.kylin.query.util.RestoreFromComputedColumn,org.apache.kylin.query.util.SparkSQLFunctionConverter");
+
+        String prjName = "tdvt";
+        // timestampDiff
+        String sql = "SELECT {fn TIMESTAMPDIFF(SQL_TSI_DAY,{d '1900-01-01'},\"CALCS\".\"DATE0\")} AS \"TEMP_Test__2048215813__0_\"\n"
+                + "FROM \"CALCS\" \"CALCS\"\n"
+                + "GROUP BY {fn TIMESTAMPDIFF(SQL_TSI_DAY,{d '1900-01-01'},\"CALCS\".\"DATE0\")}";
+        val resultForced = pushDownSql(prjName, sql, 0, 0, null, true);
+        Assert.assertNotNull(resultForced);
+
+        // test for error when the exception is null and pushdown is not forced
+        try {
+            pushDownSql(prjName, sql, 0, 0, null, false);
+            Assert.fail();
+        } catch (Exception e) {
+            Throwable rootCause = Throwables.getRootCause(e);
+            Assert.assertTrue(rootCause instanceof IllegalArgumentException);
+        }
+
+        // test for error when pushdown is turned off but the query is forced to push down
+        try {
+            KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_ENABLED, "false");
+            Assert.assertFalse(KylinConfig.getInstanceFromEnv().isPushDownEnabled());
+            pushDownSql(prjName, sql, 0, 0, null, true);
+            Assert.fail();
+        } catch (Exception e) {
+            Assert.assertTrue(e instanceof KylinException);
+            Assert.assertEquals(((KylinException) e).getErrorCode(),
+                    QueryErrorCode.INVALID_PARAMETER_PUSH_DOWN.toErrorCode());
+            Assert.assertEquals(MsgPicker.getMsg().getDisablePushDownPrompt(), Throwables.getRootCause(e).getMessage());
+        } finally {
+            KylinConfig.getInstanceFromEnv().setProperty(PUSHDOWN_ENABLED, "true");
+        }
+    }
+
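+    // Strips database names from the SQL, applies limit/offset massaging, and tries
+    // PushDownUtil.tryPushDownQuery with the triggering SQLException; a null result means pushdown
+    // did not happen, so the original exception is rethrown.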
+    private Pair<List<List<String>>, List<SelectedColumnMeta>> pushDownSql(String prjName, String sql, int limit,
+            int offset, SQLException sqlException) throws Exception {
+        return pushDownSql(prjName, sql, limit, offset, sqlException, false);
+    }
+
+    private Pair<List<List<String>>, List<SelectedColumnMeta>> pushDownSql(String prjName, String sql, int limit,
+            int offset, SQLException sqlException, boolean isForced) throws Exception {
+        populateSSWithCSVData(KylinConfig.getInstanceFromEnv(), prjName, SparderEnv.getSparkSession());
+        String pushdownSql = ExecAndComp.removeDataBaseInSql(sql);
+        String massagedSql = KapQueryUtil.normalMassageSql(KylinConfig.getInstanceFromEnv(), pushdownSql, limit,
+                offset);
+        QueryParams queryParams = new QueryParams(prjName, massagedSql, "DEFAULT", BackdoorToggles.getPrepareOnly(),
+                sqlException, isForced);
+        queryParams.setSelect(true);
+        queryParams.setLimit(limit);
+        queryParams.setOffset(offset);
+        Pair<List<List<String>>, List<SelectedColumnMeta>> result = PushDownUtil.tryPushDownQuery(queryParams);
+        if (result == null) {
+            throw sqlException;
+        }
+        return result;
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/NBitmapFunctionForCalciteExecTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/NBitmapFunctionForCalciteExecTest.java
new file mode 100644
index 0000000000..ea2e21146c
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/NBitmapFunctionForCalciteExecTest.java
@@ -0,0 +1,94 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.kylin.newten;
+
+import java.io.File;
+import java.util.List;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.Unsafe;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.query.engine.QueryRoutingEngine;
+import org.apache.kylin.query.engine.data.QueryResult;
+import org.apache.kylin.query.util.QueryParams;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class NBitmapFunctionForCalciteExecTest extends NLocalWithSparkSessionTest {
+
+    private Logger logger = LoggerFactory.getLogger(NBitmapFunctionForCalciteExecTest.class);
+
+    @Mock
+    private QueryRoutingEngine queryRoutingEngine = Mockito.spy(QueryRoutingEngine.class);
+
+    @Before
+    public void setup() {
+        overwriteSystemProp("kylin.job.scheduler.poll-interval-second", "1");
+        NDefaultScheduler scheduler = NDefaultScheduler.getInstance(getProject());
+        scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()));
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
+        }
+        populateSSWithCSVData(getTestConfig(), getProject(), ss);
+        Unsafe.setProperty("kylin.query.engine.run-constant-query-locally", "true");
+    }
+
+    @After
+    public void after() throws Exception {
+        NDefaultScheduler.destroyInstance();
+        cleanupTestMetadata();
+        FileUtils.deleteQuietly(new File("../kap-it/metastore_db"));
+        Unsafe.clearProperty("kylin.query.engine.run-constant-query-locally");
+    }
+
+    @Override
+    public String getProject() {
+        return "intersect_count";
+    }
+
+    @Test
+    public void testIntersectCountForFalseFilter() throws Exception {
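+        // With kylin.query.engine.run-constant-query-locally enabled, the always-false filter (1=2)
+        // turns this into a constant query that Calcite can evaluate locally, and both
+        // intersect_count_v2 measures are expected to come back as the string "null".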
+        logger.info("comming....");
+        String query = "select "
+                + "intersect_count_v2(TEST_COUNT_DISTINCT_BITMAP, LSTG_FORMAT_NAME, array['FP-.*GTC', 'Others'], 'REGEXP') as b, "
+                + "intersect_count_v2(TEST_COUNT_DISTINCT_BITMAP, LSTG_FORMAT_NAME, array['FP-GTC|FP-non GTC', 'Others'], 'RAWSTRING') as c "
+                + "from test_kylin_fact where 1=2";
+        QueryParams queryParams = new QueryParams();
+        queryParams.setProject(getProject());
+        queryParams.setSql(query);
+        queryParams.setKylinConfig(getTestConfig());
+        queryParams.setSelect(true);
+        logger.info("comming....2222 queryRoutingEngine:" + queryRoutingEngine);
+        QueryResult result = queryRoutingEngine.queryWithSqlMassage(queryParams);
+        List<String> rows = result.getRows().get(0);
+        Assert.assertEquals("null", rows.get(0));
+        Assert.assertEquals("null", rows.get(1));
+    }
+
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/NBitmapFunctionTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/NBitmapFunctionTest.java
new file mode 100644
index 0000000000..cbc886a288
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/NBitmapFunctionTest.java
@@ -0,0 +1,369 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.kylin.newten;
+
+import java.io.File;
+import java.sql.SQLException;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.util.ExecAndComp;
+import org.apache.spark.sql.Column;
+import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Row;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+public class NBitmapFunctionTest extends NLocalWithSparkSessionTest {
+
+    @Before
+    public void setup() {
+        overwriteSystemProp("kylin.job.scheduler.poll-interval-second", "1");
+        NDefaultScheduler scheduler = NDefaultScheduler.getInstance(getProject());
+        scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()));
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
+        }
+        populateSSWithCSVData(getTestConfig(), getProject(), ss);
+    }
+
+    @After
+    public void after() throws Exception {
+        NDefaultScheduler.destroyInstance();
+        cleanupTestMetadata();
+        FileUtils.deleteQuietly(new File("../kylin-it/metastore_db"));
+    }
+
+    @Override
+    public String getProject() {
+        return "intersect_count";
+    }
+
+    @Test
+    public void testBitmapFunction() throws Exception {
+        fullBuild("741ca86a-1f13-46da-a59f-95fb68615e3b");
+        fullBuild("741ca86a-1f13-46da-a59f-95fb68615e3z");
+
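+        // The cases below all reuse the two models built above, so they are chained inside a single
+        // test method rather than rebuilding the segments for every case.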
+        testDateType();
+
+        testMultiMeasures();
+
+        testCommonCase1();
+
+        testCommonCase2();
+
+        testWithUnion();
+
+        testWithLimit();
+
+        testIntersectCountByCol();
+
+        testIntersectCountByColMultiRows();
+
+        testIntersectCount();
+
+        testIntersectValue();
+
+        testExplodeIntersectValue();
+
+        testHllcCanNotAnswerBitmapUUID();
+
+        testSubtractBitmapValue();
+
+        testSubtractBitmapUUID();
+
+        testBitmapBuild();
+    }
+
+    private void testBitmapBuild() throws SQLException {
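+        // bitmap_build returns the serialized bitmap itself, so the expectations below compare the
+        // encoded string rather than a distinct count.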
+        List<String> result;
+
+        //================= constant case
+        String query1 = "select bitmap_build(1)";
+        result = ExecAndComp.queryModel(getProject(), query1).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals("AAAAAAEAAAAAOjAAAAEAAAAAAAAAEAAAAAEA", result.get(0));
+
+        //================= normal case
+        String query2 = "select CAL_DT, "
+                + "bitmap_build(TEST_COUNT_DISTINCT_BITMAP) as first_day "
+                + "from test_kylin_fact " + "where CAL_DT in (date'2012-01-01',date'2012-01-02',date'2012-01-03') "
+                + "group by CAL_DT " + "order by CAL_DT ";
+        result = ExecAndComp.queryModel(getProject(), query2).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals("2012-01-01,AAAAAAEAAAAAOzAAAAEAAA0AAQABAA0A", result.get(0));
+        Assert.assertEquals("2012-01-02,AAAAAAEAAAAAOzAAAAEAAAkAAgAFAAAADwAIAA==", result.get(1));
+        Assert.assertEquals("2012-01-03,AAAAAAEAAAAAOjAAAAEAAAAAAAQAEAAAABMAGAAZABoAGwA=", result.get(2));
+
+        //================= pushdown case
+        String query3 = "select CAL_DT, "
+                + "bitmap_build(LEAF_CATEG_ID)"
+                + "from test_kylin_fact " + "where CAL_DT in (date'2012-01-01',date'2012-01-02',date'2012-01-03') "
+                + "group by CAL_DT " + "order by CAL_DT";
+        result = ExecAndComp.querySparkSql(query3).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals("2012-01-01,AAAAAAEAAAAAOjAAAAMAAAAAAAUAAQABAAIAAwAgAAAALAAAADAAAADDA0UFIi0FUPKK4/LFc7h1yiVkQ05shq4=", result.get(0));
+        Assert.assertEquals("2012-01-02,AAAAAAEAAAAAOjAAAAIAAAAAAAYAAQACABgAAAAmAAAATQVKJ31ABVDdX3uckfmRJ7h1CpM=", result.get(1));
+        Assert.assertEquals("2012-01-03,AAAAAAEAAAAAOjAAAAMAAAAAAAEAAQAAAAIAAQAgAAAAJAAAACYAAADSJIFRkSdaXuWn", result.get(2));
+    }
+
+    private void testDateType() throws SQLException {
+        String query = "select CAL_DT, "
+                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-01']) as first_day, "
+                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-02']) as second_day, "
+                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-03']) as third_day, "
+                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-01',date'2012-01-02']) as retention_oneday, "
+                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-01',date'2012-01-02',date'2012-01-03']) as retention_twoday "
+                + "from test_kylin_fact " + "where CAL_DT in (date'2012-01-01',date'2012-01-02',date'2012-01-03') "
+                + "group by CAL_DT " + "order by CAL_DT ";
+        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals("2012-01-01,14,0,0,0,0", result.get(0));
+        Assert.assertEquals("2012-01-02,0,10,0,0,0", result.get(1));
+        Assert.assertEquals("2012-01-03,0,0,5,0,0", result.get(2));
+    }
+
+    private void testMultiMeasures() throws SQLException {
+        String query = "select week_beg_dt as week, "
+                + "intersect_count( TEST_COUNT_DISTINCT_BITMAP, lstg_format_name, array['FP-GTC']) as a, "
+                + "intersect_count( TEST_COUNT_DISTINCT_BITMAP, lstg_format_name, array['Auction']) as b, "
+                + "intersect_count( TEST_COUNT_DISTINCT_BITMAP, lstg_format_name, array['Others']) as c, "
+                + "intersect_count( TEST_COUNT_DISTINCT_BITMAP, lstg_format_name, array['FP-GTC', 'Auction']) as ab, "
+                + "intersect_count( TEST_COUNT_DISTINCT_BITMAP, lstg_format_name, array['FP-GTC', 'Others']) as ac, "
+                + "intersect_count( TEST_COUNT_DISTINCT_BITMAP, lstg_format_name, array['FP-GTC', 'Auction', 'Others']) as abc, "
+                + "count(distinct TEST_COUNT_DISTINCT_BITMAP) as sellers, count(*) as cnt "
+                + "from test_kylin_fact left join edw.test_cal_dt on test_kylin_fact.cal_dt = edw.test_cal_dt.CAL_DT "
+                + "where week_beg_dt in (DATE '2013-12-22', DATE '2012-06-23') group by week_beg_dt order by week_beg_dt";
+        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals("2012-06-23,21,17,13,0,0,0,90,94", result.get(0));
+        Assert.assertEquals("2013-12-22,18,22,13,0,0,0,98,99", result.get(1));
+    }
+
+    private void testCommonCase1() throws SQLException {
+        String query = "select LSTG_FORMAT_NAME, "
+                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-01']) as first_day, "
+                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-02']) as second_day, "
+                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-03']) as third_day, "
+                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-01',date'2012-01-02']) as retention_oneday, "
+                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-01',date'2012-01-02',date'2012-01-03']) as retention_twoday "
+                + "from test_kylin_fact where CAL_DT in (date'2012-01-01',date'2012-01-02',date'2012-01-03') "
+                + "group by LSTG_FORMAT_NAME order by LSTG_FORMAT_NAME";
+        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals("ABIN,6,4,2,0,0", result.get(0));
+        Assert.assertEquals("Auction,4,3,1,0,0", result.get(1));
+        Assert.assertEquals("FP-GTC,2,2,0,0,0", result.get(2));
+        Assert.assertEquals("FP-non GTC,2,1,0,0,0", result.get(3));
+        Assert.assertEquals("Others,0,0,2,0,0", result.get(4));
+    }
+
+    private void testCommonCase2() throws SQLException {
+        String query4 = "select LEAF_CATEG_ID, "
+                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-01']) as first_day "
+                + "from test_kylin_fact where CAL_DT in (date'2012-01-01',date'2012-01-02',date'2012-01-03') "
+                + "group by LEAF_CATEG_ID " + "order by LEAF_CATEG_ID";
+        List<String> result = ExecAndComp.queryModel(getProject(), query4).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals("963,1", result.get(0));
+        Assert.assertEquals("1349,1", result.get(1));
+        Assert.assertEquals("11554,1", result.get(2));
+        Assert.assertEquals("20485,1", result.get(3));
+        Assert.assertEquals("35570,1", result.get(4));
+        Assert.assertEquals("62179,2", result.get(5));
+        Assert.assertEquals("95173,1", result.get(6));
+        Assert.assertEquals("95672,2", result.get(7));
+        Assert.assertEquals("140746,1", result.get(8));
+        Assert.assertEquals("148324,1", result.get(9));
+        Assert.assertEquals("158798,1", result.get(10));
+        Assert.assertEquals("175750,1", result.get(11));
+    }
+
+    private void testWithUnion() throws SQLException {
+        String query = "SELECT (SELECT '2012-01-01') AS sdate, "
+                + "       intersect_count(TEST_COUNT_DISTINCT_BITMAP, cal_dt, array[date'2012-01-01',date'2012-01-01']),"
+                + "       intersect_count(TEST_COUNT_DISTINCT_BITMAP, cal_dt, array[date'2012-01-01',date'2012-01-02']),"
+                + "       intersect_count(TEST_COUNT_DISTINCT_BITMAP, cal_dt, array[date'2012-01-01',date'2012-01-03'])"
+                + "FROM   test_kylin_fact WHERE cal_dt >= date '2012-01-01' AND cal_dt <  date'2012-01-07' "
+                + "UNION ALL " + "SELECT (SELECT '2012-01-02') AS sdate, "
+                + "       intersect_count(TEST_COUNT_DISTINCT_BITMAP, cal_dt, array[date'2012-01-02',date'2012-01-02']),"
+                + "       intersect_count(TEST_COUNT_DISTINCT_BITMAP, cal_dt, array[date'2012-01-02',date'2012-01-03']),"
+                + "       intersect_count(TEST_COUNT_DISTINCT_BITMAP, cal_dt, array[date'2012-01-02',date'2012-01-04'])"
+                + "FROM   test_kylin_fact WHERE  cal_dt >= date '2012-01-02' AND cal_dt < date'2012-01-07'"
+                + "order by sdate";
+        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals("2012-01-01,14,1,0", result.get(0));
+        Assert.assertEquals("2012-01-02,10,1,0", result.get(1));
+    }
+
+    private void testWithLimit() throws SQLException {
+        String query = "select intersect_count(TEST_COUNT_DISTINCT_BITMAP, CAL_DT, array[date'2012-01-01']) as first_day "
+                + "from test_kylin_fact " + "limit 1";
+        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals("14", result.get(0));
+    }
+
+    private void testIntersectCountByColMultiRows() throws SQLException {
+        String query1 = "select intersect_count_by_col(Array[t1.a1]), LSTG_FORMAT_NAME from "
+                + "    (select bitmap_uuid(SELLER_ID) as a1, LSTG_FORMAT_NAME "
+                + "        from TEST_KYLIN_FACT group by LSTG_FORMAT_NAME) t1 order by LSTG_FORMAT_NAME";
+
+        List<String> result1 = ExecAndComp.queryModel(getProject(), query1).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals("855,ABIN", result1.get(0));
+        Assert.assertEquals("896,Auction", result1.get(1));
+        Assert.assertEquals("858,FP-GTC", result1.get(2));
+        Assert.assertEquals("870,FP-non GTC", result1.get(3));
+        Assert.assertEquals("855,Others", result1.get(4));
+    }
+
+    private void testIntersectCountByCol() throws Exception {
+        String query1 = "select intersect_count_by_col(Array[t1.a1,t2.a2]) from "
+                + "    (select bitmap_uuid(SELLER_ID) as a1 " + "        from TEST_KYLIN_FACT) t1, "
+                + "    (select intersect_bitmap_uuid( " + "        SELLER_ID, LSTG_FORMAT_NAME, "
+                + "        array['FP-GTC|FP-non GTC', 'Others']) as a2 " + "from TEST_KYLIN_FACT) t2 " + "union all "
+                + "select intersect_count_by_col(Array[t1.a1,t2.a2]) from "
+                + "    (select bitmap_uuid(SELLER_ID) as a1 " + "        from TEST_KYLIN_FACT) t1, "
+                + "    (select intersect_bitmap_uuid_v2( " + "        SELLER_ID, LSTG_FORMAT_NAME, "
+                + "        array['FP-.*GTC', 'Others'], 'REGEXP') as a2 " + "from TEST_KYLIN_FACT) t2 " + "union all "
+                + "select intersect_count_by_col(Array[t1.a1,t2.a2]) from "
+                + "    (select bitmap_uuid(SELLER_ID) as a1 " + "        from TEST_KYLIN_FACT) t1, "
+                + "    (select intersect_bitmap_uuid_v2( " + "        SELLER_ID, LSTG_FORMAT_NAME, "
+                + "        array['FP-GTC|FP-non GTC', 'Others'], 'RAWSTRING') as a2 " + "from TEST_KYLIN_FACT) t2";
+
+        List<String> result1 = ExecAndComp.queryModel(getProject(), query1).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals("841", result1.get(0));
+        Assert.assertEquals("841", result1.get(1));
+        Assert.assertEquals("841", result1.get(2));
+
+        String query2 = "select intersect_count_by_col(Array[t1.a1,t2.a2]) from "
+                + "    (select bitmap_uuid(TEST_COUNT_DISTINCT_BITMAP) as a1 " + "        from TEST_KYLIN_FACT) t1, "
+                + "    (select intersect_bitmap_uuid( " + "        TEST_COUNT_DISTINCT_BITMAP, LSTG_FORMAT_NAME, "
+                + "        array['FP-GTC|FP-non GTC', 'Others']) as a2 " + "from TEST_KYLIN_FACT) t2 " + "union all "
+                + "select intersect_count_by_col(Array[t1.a1,t2.a2]) from "
+                + "    (select bitmap_uuid(TEST_COUNT_DISTINCT_BITMAP) as a1 " + "        from TEST_KYLIN_FACT) t1, "
+                + "    (select intersect_bitmap_uuid_v2( " + "        TEST_COUNT_DISTINCT_BITMAP, LSTG_FORMAT_NAME, "
+                + "        array['FP-.*GTC', 'Others'], 'REGEXP') as a2 " + "from TEST_KYLIN_FACT) t2 " + "union all "
+                + "select intersect_count_by_col(Array[t1.a1,t2.a2]) from "
+                + "    (select bitmap_uuid(TEST_COUNT_DISTINCT_BITMAP) as a1 " + "        from TEST_KYLIN_FACT) t1, "
+                + "    (select intersect_bitmap_uuid_v2( " + "        TEST_COUNT_DISTINCT_BITMAP, LSTG_FORMAT_NAME, "
+                + "        array['FP-GTC|FP-non GTC', 'Others'], 'RAWSTRING') as a2 " + "from TEST_KYLIN_FACT) t2";
+        List<String> result2 = ExecAndComp.queryModel(getProject(), query2).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals("862", result2.get(0));
+        Assert.assertEquals("862", result2.get(1));
+        Assert.assertEquals("862", result2.get(2));
+    }
+
+    private void testIntersectCount() throws SQLException {
+        String query = "select "
+                + "intersect_count(TEST_COUNT_DISTINCT_BITMAP, lstg_format_name, array['FP-GTC|FP-non GTC', 'Others']) as a, "
+                + "intersect_count_v2(TEST_COUNT_DISTINCT_BITMAP, LSTG_FORMAT_NAME, array['FP-.*GTC', 'Others'], 'REGEXP') as b, "
+                + "intersect_count_v2(TEST_COUNT_DISTINCT_BITMAP, LSTG_FORMAT_NAME, array['FP-GTC|FP-non GTC', 'Others'], 'RAWSTRING') as c "
+                + "from test_kylin_fact";
+        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals("862,862,862", result.get(0));
+    }
+
+    private void testIntersectValue() throws SQLException {
+        String query = "select "
+                + "intersect_value(LSTG_SITE_ID, lstg_format_name, array['FP-GTC|FP-non GTC', 'Others']) as a, "
+                + "intersect_value_v2(LSTG_SITE_ID, LSTG_FORMAT_NAME, array['FP-.*GTC', 'Others'], 'REGEXP') as b, "
+                + "intersect_value_v2(LSTG_SITE_ID, LSTG_FORMAT_NAME, array['FP-GTC|FP-non GTC', 'Others'], 'RAWSTRING') as c "
+                + "from test_kylin_fact ";
+        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals("WrappedArray(0, 2, 3, 15, 23, 100, 101, 211),"
+                + "WrappedArray(0, 2, 3, 15, 23, 100, 101, 211)," + "WrappedArray(0, 2, 3, 15, 23, 100, 101, 211)",
+                result.get(0));
+    }
+
+    private void testExplodeIntersectValue() throws SQLException {
+        String query = "select "
+                + "explode(intersect_value(LSTG_SITE_ID, lstg_format_name, array['FP-GTC|FP-non GTC', 'Others'])) as a "
+                + "from test_kylin_fact ";
+        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals("0", result.get(0));
+        Assert.assertEquals("2", result.get(1));
+        Assert.assertEquals("3", result.get(2));
+        Assert.assertEquals("15", result.get(3));
+        Assert.assertEquals("23", result.get(4));
+        Assert.assertEquals("100", result.get(5));
+        Assert.assertEquals("101", result.get(6));
+        Assert.assertEquals("211", result.get(7));
+    }
+
+    private void testHllcCanNotAnswerBitmapUUID() throws SQLException {
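+        // bitmap_uuid needs the exact bitmap of SELLER_ID values, so this query should be
+        // answered by a precise (bitmap) count-distinct measure rather than an HLLC one;
+        // the per-group counts asserted below are exact values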
+        String query = "select intersect_count_by_col(Array[t1.a1]), LSTG_FORMAT_NAME from"
+                + " (select bitmap_uuid(SELLER_ID) as a1, LSTG_FORMAT_NAME from TEST_KYLIN_FACT group by LSTG_FORMAT_NAME) t1"
+                + " order by LSTG_FORMAT_NAME";
+        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals("855,ABIN", result.get(0));
+        Assert.assertEquals("896,Auction", result.get(1));
+        Assert.assertEquals("858,FP-GTC", result.get(2));
+        Assert.assertEquals("870,FP-non GTC", result.get(3));
+        Assert.assertEquals("855,Others", result.get(4));
+    }
+
+    private void testSubtractBitmapValue() throws SQLException {
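+        // subtract_bitmap_value removes every value of the second bitmap from the first:
+        // sellers in ((FP-GTC or FP-non GTC) intersect Others) minus those in
+        // (ABIN intersect Auction); the expected list is rebuilt below with plain Spark set operations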
+        String query = "select subtract_bitmap_value("
+                + "intersect_bitmap_uuid_v2(SELLER_ID, LSTG_FORMAT_NAME, array['FP-GTC|FP-non GTC', 'Others'], 'RAWSTRING'),"
+                + "intersect_bitmap_uuid_v2(SELLER_ID, LSTG_FORMAT_NAME, array['ABIN', 'Auction'], 'RAWSTRING'))"
+                + "from TEST_KYLIN_FACT";
+        List<Integer> actual = ExecAndComp.queryModel(getProject(), query).collectAsList().get(0).getList(0).stream()
+                .map(row -> Integer.parseInt(row.toString())).collect(Collectors.toList());
+
+        Dataset<Row> fg = ss.sql("select distinct SELLER_ID from TEST_KYLIN_FACT where LSTG_FORMAT_NAME = 'FP-GTC'");
+        Dataset<Row> fng = ss
+                .sql("select distinct SELLER_ID from TEST_KYLIN_FACT where LSTG_FORMAT_NAME = 'FP-non GTC'");
+        Dataset<Row> ot = ss.sql("select distinct SELLER_ID from TEST_KYLIN_FACT where LSTG_FORMAT_NAME = 'Others'");
+        Dataset<Row> ab = ss.sql("select distinct SELLER_ID from TEST_KYLIN_FACT where LSTG_FORMAT_NAME = 'ABIN'");
+        Dataset<Row> au = ss.sql("select distinct SELLER_ID from TEST_KYLIN_FACT where LSTG_FORMAT_NAME = 'Auction'");
+        List<Integer> expect = fg.union(fng).intersect(ot).except(ab.intersect(au)).sort(new Column("SELLER_ID"))
+                .collectAsList().stream().map(row -> row.getInt(0)).collect(Collectors.toList());
+        Assert.assertEquals(expect.size(), actual.size());
+        for (int i = 0; i < actual.size(); i++) {
+            Assert.assertEquals(expect.get(i), actual.get(i));
+        }
+    }
+
+    private void testSubtractBitmapUUID() throws SQLException {
+        String query = "select intersect_count_by_col(Array[t1.a1, t2.a2]) from " + "(select subtract_bitmap_uuid("
+                + "intersect_bitmap_uuid_v2(SELLER_ID, LSTG_FORMAT_NAME, array['FP-GTC|FP-non GTC', 'Others'], 'RAWSTRING'),"
+                + "intersect_bitmap_uuid_v2(SELLER_ID, LSTG_FORMAT_NAME, array['ABIN', 'Auction'], 'RAWSTRING')) as a1 "
+                + "from TEST_KYLIN_FACT) t1, " + "(select bitmap_uuid(SELLER_ID) as a2 from TEST_KYLIN_FACT) t2";
+        List<String> result = ExecAndComp.queryModel(getProject(), query).collectAsList().stream()
+                .map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals("210", result.get(0));
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/NBuildAndQuerySnapshotTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/NBuildAndQuerySnapshotTest.java
new file mode 100644
index 0000000000..43867cc5f5
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/NBuildAndQuerySnapshotTest.java
@@ -0,0 +1,147 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.newten;
+
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.RandomUtil;
+import org.apache.kylin.engine.spark.IndexDataConstructor;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.engine.spark.job.NSparkSnapshotJob;
+import org.apache.kylin.job.execution.ExecutableState;
+import org.apache.kylin.job.execution.JobTypeEnum;
+import org.apache.kylin.job.execution.NExecutableManager;
+import org.apache.kylin.metadata.cube.model.IndexPlan;
+import org.apache.kylin.metadata.cube.model.LayoutEntity;
+import org.apache.kylin.metadata.cube.model.NDataSegment;
+import org.apache.kylin.metadata.cube.model.NDataflow;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
+import org.apache.kylin.metadata.cube.model.NDataflowUpdate;
+import org.apache.kylin.metadata.cube.model.NIndexPlanManager;
+import org.apache.kylin.metadata.model.NTableMetadataManager;
+import org.apache.kylin.metadata.model.SegmentRange;
+import org.apache.kylin.metadata.model.TableDesc;
+import org.apache.kylin.query.engine.QueryExec;
+import org.apache.kylin.util.ExecAndComp;
+import org.apache.parquet.Strings;
+import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.SparderEnv;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Sets;
+
+import lombok.val;
+
+public class NBuildAndQuerySnapshotTest extends NLocalWithSparkSessionTest {
+
+    private KylinConfig config;
+    private NDataflowManager dsMgr;
+
+    @Before
+    public void setUp() throws Exception {
+        super.init();
+        config = KylinConfig.getInstanceFromEnv();
+        dsMgr = NDataflowManager.getInstance(config, getProject());
+        indexDataConstructor = new IndexDataConstructor(getProject());
+    }
+
+    @Test
+    public void testBasic() throws Exception {
+        String dataflowName = "89af4ee2-2cdb-4b07-b39e-4c29856309aa";
+        cleanUpSegmentsAndIndexPlan(dataflowName);
+
+        // before building the snapshot
+        NTableMetadataManager tableMetadataManager = NTableMetadataManager.getInstance(config, getProject());
+        Assert.assertNull(tableMetadataManager.getTableDesc("DEFAULT.TEST_COUNTRY").getLastSnapshotPath());
+
+        // build
+        populateSSWithCSVData(config, getProject(), SparderEnv.getSparkSession());
+        buildCube(dataflowName, SegmentRange.dateToLong("2012-01-01"), SegmentRange.dateToLong("2012-02-01"));
+
+        // after build
+        String lastSnapshotPath = tableMetadataManager.getTableDesc("DEFAULT.TEST_COUNTRY").getLastSnapshotPath();
+        Assert.assertNotNull(lastSnapshotPath);
+        Dataset dataset = ExecAndComp.queryModelWithoutCompute(getProject(), "select NAME from TEST_COUNTRY");
+        Assert.assertEquals(244, dataset.collectAsList().size());
+    }
+
+    private void cleanUpSegmentsAndIndexPlan(String dfName) {
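+        // keep only layout 10001 in the index plan and drop all existing segments so the
+        // subsequent build starts from a clean dataflow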
+        NIndexPlanManager ipMgr = NIndexPlanManager.getInstance(config, getProject());
+        String cubeId = dsMgr.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa").getIndexPlan().getUuid();
+        IndexPlan cube = ipMgr.getIndexPlan(cubeId);
+        Set<Long> tobeRemovedLayouts = cube.getAllLayouts().stream().filter(layout -> layout.getId() != 10001L)
+                .map(LayoutEntity::getId).collect(Collectors.toSet());
+
+        ipMgr.updateIndexPlan(dsMgr.getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa").getIndexPlan().getUuid(),
+                copyForWrite -> {
+                    copyForWrite.removeLayouts(tobeRemovedLayouts, true, true);
+                });
+
+        NDataflow df = dsMgr.getDataflow(dfName);
+        NDataflowUpdate update = new NDataflowUpdate(df.getUuid());
+        update.setToRemoveSegs(df.getSegments().toArray(new NDataSegment[0]));
+        dsMgr.updateDataflow(update);
+    }
+
+    private void buildCube(String dfName, long start, long end) throws Exception {
+        NDataflow df = dsMgr.getDataflow(dfName);
+        List<LayoutEntity> layouts = df.getIndexPlan().getAllLayouts();
+        indexDataConstructor.buildIndex(dfName, new SegmentRange.TimePartitionedSegmentRange(start, end),
+                Sets.newLinkedHashSet(layouts), true);
+    }
+
+    @Test
+    public void testQueryPartitionSnapshot() throws Exception {
+        String tableName = "EDW.TEST_SELLER_TYPE_DIM";
+        String partitionCol = "SELLER_TYPE_CD";
+        Set<String> partitions = ImmutableSet.of("5", "16");
+        NTableMetadataManager tableManager = NTableMetadataManager.getInstance(config, getProject());
+        TableDesc table = tableManager.getTableDesc(tableName);
+        table.setSelectedSnapshotPartitionCol(partitionCol);
+        table.setPartitionColumn(partitionCol);
+        tableManager.updateTableDesc(table);
+
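+        // build a partitioned snapshot containing only the two selected partition values;
+        // the query at the end of this test should therefore return exactly two rows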
+        NExecutableManager execMgr = NExecutableManager.getInstance(config, getProject());
+        NSparkSnapshotJob job = NSparkSnapshotJob.create(tableManager.getTableDesc(tableName), "ADMIN",
+                JobTypeEnum.SNAPSHOT_BUILD, RandomUtil.randomUUIDStr(), partitionCol, false, null, null, null);
+        setPartitions(job, partitions);
+        execMgr.addJob(job);
+
+        // wait until the job is done
+        ExecutableState status = IndexDataConstructor.wait(job);
+        Assert.assertEquals(ExecutableState.SUCCEED, status);
+
+        String sql = "select * from EDW.TEST_SELLER_TYPE_DIM";
+        QueryExec queryExec = new QueryExec(getProject(), KylinConfig.getInstanceFromEnv());
+        val resultSet = queryExec.executeQuery(sql);
+        Assert.assertEquals(2, resultSet.getRows().size());
+    }
+
+    private void setPartitions(NSparkSnapshotJob job, Set<String> partitions) {
+        job.setParam("partitions", Strings.join(partitions, ","));
+        job.getSnapshotBuildingStep().setParam("partitions", Strings.join(partitions, ","));
+    }
+
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/NComputedColumnTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/NComputedColumnTest.java
new file mode 100644
index 0000000000..d5e17ebdeb
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/NComputedColumnTest.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.newten;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.metadata.cube.model.NDataflow;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
+import org.apache.kylin.metadata.model.ComputedColumnDesc;
+import org.apache.kylin.metadata.model.NDataModel;
+import org.apache.kylin.metadata.model.NDataModelManager;
+import org.apache.kylin.metadata.model.SegmentRange;
+import org.apache.kylin.util.ExecAndComp;
+import org.apache.spark.sql.Row;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.sparkproject.guava.collect.Sets;
+
+import lombok.val;
+
+public class NComputedColumnTest extends NLocalWithSparkSessionTest {
+    @Before
+    public void setup() throws Exception {
+        overwriteSystemProp("kylin.job.scheduler.poll-interval-second", "1");
+        this.createTestMetadata("src/test/resources/ut_meta/comput_column");
+        NDefaultScheduler scheduler = NDefaultScheduler.getInstance(getProject());
+        scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()));
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
+        }
+    }
+
+    @After
+    public void after() {
+        NDefaultScheduler.destroyInstance();
+        cleanupTestMetadata();
+    }
+
+    @Override
+    public String getProject() {
+        return "comput_column";
+    }
+
+    @Test
+    public void testConstantComputeColumn() throws Exception {
+        String dfID = "4a45dc4d-937e-43cc-8faa-34d59d4e11d3";
+        NDataflowManager dsMgr = NDataflowManager.getInstance(getTestConfig(), getProject());
+        NDataflow df = dsMgr.getDataflow(dfID);
+        val layouts = df.getIndexPlan().getAllLayouts();
+        indexDataConstructor.buildIndex(dfID, SegmentRange.TimePartitionedSegmentRange.createInfinite(), Sets.newLinkedHashSet(layouts),
+                true);
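+        // the query groups by a constant and two computed-column expressions and is expected
+        // to return one row per distinct group (9 with this test data)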
+        String sqlHitCube = "select (1+2) as c1,(LINEORDER.LO_TAX +1) as c2,(CUSTOMER.C_NAME +'USA') as c3 "
+                + "from SSB.P_LINEORDER as LINEORDER join SSB.CUSTOMER as CUSTOMER on LINEORDER.LO_CUSTKEY = CUSTOMER.C_CUSTKEY "
+                + "group by (1+2),(LINEORDER.LO_TAX +1),(CUSTOMER.C_NAME +'USA') ";
+        List<String> hitCubeResult = ExecAndComp.queryModelWithoutCompute(getProject(), sqlHitCube).collectAsList().stream()
+                .map(Row::toString).collect(Collectors.toList());
+        Assert.assertEquals(9, hitCubeResult.size());
+    }
+
+    @Test
+    public void testCCNamedEqualsDimensionName() throws Exception {
+        String modelId = "4a45dc4d-937e-43cc-8faa-34d59d4e11d3";
+        String FACT_TABLE = "SSB.P_LINEORDER";
+        val modelManager = NDataModelManager.getInstance(KylinConfig.getInstanceFromEnv(), getProject());
+        modelManager.updateDataModel(modelId, copyForWrite -> {
+            ComputedColumnDesc cc1 = new ComputedColumnDesc();
+            cc1.setTableAlias("P_LINEORDER");
+            cc1.setTableIdentity(FACT_TABLE);
+            cc1.setComment("");
+            cc1.setColumnName("C_NAME");
+            cc1.setDatatype("varchar");
+            cc1.setExpression("CUSTOMER.c_NAME");
+            cc1.setInnerExpression("CUSTOMER.c_NAME");
+            copyForWrite.getComputedColumnDescs().add(cc1);
+
+            NDataModel.NamedColumn column1 = new NDataModel.NamedColumn();
+            column1.setName("c_NAME");
+            column1.setId(copyForWrite.getAllNamedColumns().size());
+            column1.setAliasDotColumn("P_LINEORDER.c_NAME");
+            column1.setStatus(NDataModel.ColumnStatus.DIMENSION);
+            copyForWrite.getAllNamedColumns().add(column1);
+        });
+
+        getTestConfig().setProperty("kylin.query.security.acl-tcr-enabled", "true");
+
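+        // with ACL-TCR enabled, querying C_NAME, whose name collides with the computed column
+        // added above, must not recurse endlessly while expanding the CC; a StackOverflowError
+        // here would indicate exactly that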
+        try {
+            ExecAndComp.queryModelWithoutCompute(getProject(),
+                    "select C_NAME from SSB.P_LINEORDER as LINEORDER join SSB.CUSTOMER as CUSTOMER on LINEORDER.LO_CUSTKEY = CUSTOMER.C_CUSTKEY limit 500");
+        } catch (Exception | StackOverflowError e) {
+            Assert.assertFalse(e instanceof StackOverflowError);
+        }
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/NCountDistinctWithoutEncodeTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/NCountDistinctWithoutEncodeTest.java
new file mode 100644
index 0000000000..08660edb9a
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/NCountDistinctWithoutEncodeTest.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.kylin.newten;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.HadoopUtil;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.metadata.cube.model.NIndexPlanManager;
+import org.apache.kylin.util.ExecAndComp;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+
+public class NCountDistinctWithoutEncodeTest extends NLocalWithSparkSessionTest {
+    @Before
+    public void setup() throws Exception {
+        overwriteSystemProp("kylin.job.scheduler.poll-interval-second", "1");
+        this.createTestMetadata("src/test/resources/ut_meta/count_distinct_no_encode");
+        NDefaultScheduler scheduler = NDefaultScheduler.getInstance(getProject());
+        scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()));
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
+        }
+    }
+
+    @After
+    public void after() throws Exception {
+        NDefaultScheduler.destroyInstance();
+        cleanupTestMetadata();
+    }
+
+    @Override
+    public String getProject() {
+        return "count_distinct_no_encode";
+    }
+
+    @Test
+    public void testWithoutEncode() throws Exception {
+        NIndexPlanManager indexPlanManager = NIndexPlanManager.getInstance(KylinConfig.getInstanceFromEnv(),
+                getProject());
+        indexPlanManager.updateIndexPlan("b06eee9f-3e6d-41de-ac96-89dbf170b99b",
+                copyForWrite -> copyForWrite.getOverrideProps().put("kylin.query.skip-encode-integer-enabled", "true"));
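+        // with integer encoding skipped, only the STRING_ID column needs a global dictionary;
+        // the directory check at the end of this test verifies exactly one dictionary was built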
+        fullBuild("b06eee9f-3e6d-41de-ac96-89dbf170b99b");
+        List<String> results1 = ExecAndComp
+                .queryModel(getProject(),
+                        "select city, " + "count(distinct string_id), " + "count(distinct tinyint_id), "
+                                + "count(distinct smallint_id), " + "count(distinct int_id), "
+                                + "count(distinct bigint_id) from test_count_distinct group by city order by city")
+                .collectAsList().stream().map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals(3, results1.size());
+        Assert.assertEquals("上海,4,4,4,4,4", results1.get(0));
+        Assert.assertEquals("北京,3,3,3,3,3", results1.get(1));
+        Assert.assertEquals("广州,5,5,5,5,5", results1.get(2));
+
+        List<String> results2 = ExecAndComp
+                .queryModel(getProject(),
+                        "select " + "count(distinct string_id), " + "count(distinct tinyint_id), "
+                                + "count(distinct smallint_id), " + "count(distinct int_id), "
+                                + "count(distinct bigint_id) from test_count_distinct")
+                .collectAsList().stream().map(row -> row.toSeq().mkString(",")).collect(Collectors.toList());
+        Assert.assertEquals(1, results2.size());
+        Assert.assertEquals("5,5,5,5,5", results2.get(0));
+
+        String dictPath = KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory() + "/" + getProject()
+                + HadoopUtil.GLOBAL_DICT_STORAGE_ROOT + "/DEFAULT.TEST_COUNT_DISTINCT";
+        FileStatus[] fileStatuses = new Path(dictPath).getFileSystem(new Configuration())
+                .listStatus(new Path(dictPath));
+        Assert.assertEquals(1, fileStatuses.length);
+        Assert.assertEquals("STRING_ID", fileStatuses[0].getPath().getName());
+    }
+
+    @Test
+    public void testWithEncode() throws Exception {
+        fullBuild("b06eee9f-3e6d-41de-ac96-89dbf170b99b");
+        String dictPath = KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory() + "/" + getProject()
+                + HadoopUtil.GLOBAL_DICT_STORAGE_ROOT + "/DEFAULT.TEST_COUNT_DISTINCT";
+        FileStatus[] fileStatuses = new Path(dictPath).getFileSystem(new Configuration())
+                .listStatus(new Path(dictPath));
+        Assert.assertEquals(5, fileStatuses.length);
+
+        String[] expected = { "BIGINT_ID", "INT_ID", "SMALLINT_ID", "STRING_ID", "TINYINT_ID" };
+        Assert.assertArrayEquals(expected,
+                Arrays.stream(fileStatuses).map(fileStatus -> fileStatus.getPath().getName()).sorted().toArray());
+    }
+
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/NFilePruningTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/NFilePruningTest.java
new file mode 100644
index 0000000000..a82a28dc2f
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/NFilePruningTest.java
@@ -0,0 +1,679 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.kylin.newten;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.hadoop.util.Shell;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.DateFormat;
+import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.common.util.RandomUtil;
+import org.apache.kylin.common.util.TempMetadataBuilder;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.junit.TimeZoneTestRunner;
+import org.apache.kylin.metadata.cube.model.IndexPlan;
+import org.apache.kylin.metadata.cube.model.LayoutEntity;
+import org.apache.kylin.metadata.cube.model.NDataSegment;
+import org.apache.kylin.metadata.cube.model.NDataflow;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
+import org.apache.kylin.metadata.model.NDataModelManager;
+import org.apache.kylin.metadata.model.SegmentRange;
+import org.apache.kylin.metadata.model.Segments;
+import org.apache.kylin.metadata.project.NProjectManager;
+import org.apache.kylin.query.relnode.ContextUtil;
+import org.apache.kylin.util.ExecAndComp;
+import org.apache.spark.SparkConf;
+import org.apache.spark.sql.SparderEnv;
+import org.apache.spark.sql.SparkSession;
+import org.apache.spark.sql.execution.KylinFileSourceScanExec;
+import org.apache.spark.sql.execution.SparkPlan;
+import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper;
+import org.apache.spark.sql.internal.StaticSQLConf;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.sparkproject.guava.collect.Sets;
+
+import com.google.common.collect.Lists;
+
+import lombok.val;
+import scala.runtime.AbstractFunction1;
+
+@RunWith(TimeZoneTestRunner.class)
+public class NFilePruningTest extends NLocalWithSparkSessionTest implements AdaptiveSparkPlanHelper {
+
+    private final String base = "select count(*)  FROM TEST_ORDER LEFT JOIN TEST_KYLIN_FACT ON TEST_KYLIN_FACT.ORDER_ID = TEST_ORDER.ORDER_ID ";
+
+    @BeforeClass
+    public static void initSpark() {
+        if (Shell.MAC)
+            overwriteSystemPropBeforeClass("org.xerial.snappy.lib.name", "libsnappyjava.jnilib"); // for snappy
+        if (ss != null && !ss.sparkContext().isStopped()) {
+            ss.stop();
+        }
+        sparkConf = new SparkConf().setAppName(RandomUtil.randomUUIDStr()).setMaster("local[4]");
+        sparkConf.set("spark.serializer", "org.apache.spark.serializer.JavaSerializer");
+        sparkConf.set(StaticSQLConf.CATALOG_IMPLEMENTATION().key(), "in-memory");
+        sparkConf.set("spark.sql.shuffle.partitions", "1");
+        sparkConf.set("spark.memory.fraction", "0.1");
+        // opt memory
+        sparkConf.set("spark.shuffle.detectCorrupt", "false");
+        // For sinai_poc/query03, enable implicit cross join conversion
+        sparkConf.set("spark.sql.crossJoin.enabled", "true");
+        sparkConf.set("spark.sql.adaptive.enabled", "true");
+        sparkConf.set(StaticSQLConf.WAREHOUSE_PATH().key(),
+                TempMetadataBuilder.TEMP_TEST_METADATA + "/spark-warehouse");
+        ss = SparkSession.builder().config(sparkConf).getOrCreate();
+        SparderEnv.setSparkSession(ss);
+
+    }
+
+    @Before
+    public void setup() throws Exception {
+        overwriteSystemProp("kylin.job.scheduler.poll-interval-second", "1");
+        this.createTestMetadata("src/test/resources/ut_meta/file_pruning");
+        NDefaultScheduler scheduler = NDefaultScheduler.getInstance(getProject());
+        scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()));
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
+        }
+    }
+
+    @After
+    public void after() throws Exception {
+        NDefaultScheduler.destroyInstance();
+        cleanupTestMetadata();
+    }
+
+    @Test
+    public void testNonExistTimeRangeExcludeEmpty() throws Exception {
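+        // the segment built below covers 2023-2025 where there is no data; empty segments are
+        // skipped here (the *IncludeEmpty variant overrides kylin.query.skip-empty-segments),
+        // so the query is expected to scan zero files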
+        val start = SegmentRange.dateToLong("2023-01-01 00:00:00");
+        val end = SegmentRange.dateToLong("2025-01-01 00:00:00");
+        val dfName = "8c670664-8d05-466a-802f-83c023b56c77";
+        NDataflowManager dsMgr = NDataflowManager.getInstance(getTestConfig(), getProject());
+        NDataflow df = dsMgr.getDataflow(dfName);
+        val layouts = df.getIndexPlan().getAllLayouts();
+        indexDataConstructor.buildIndex(dfName, new SegmentRange.TimePartitionedSegmentRange(start, end),
+                Sets.newLinkedHashSet(layouts), true);
+        assertResultsAndScanFiles(dfName, base, 0, false, Lists.newArrayList());
+    }
+
+    @Test
+    public void testNonExistTimeRangeIncludeEmpty() throws Exception {
+        overwriteSystemProp("kylin.query.skip-empty-segments", "false");
+        val start = SegmentRange.dateToLong("2023-01-01 00:00:00");
+        val end = SegmentRange.dateToLong("2025-01-01 00:00:00");
+        val dfName = "8c670664-8d05-466a-802f-83c023b56c77";
+        NDataflowManager dsMgr = NDataflowManager.getInstance(getTestConfig(), getProject());
+        NDataflow df = dsMgr.getDataflow(dfName);
+        val layouts = df.getIndexPlan().getAllLayouts();
+        indexDataConstructor.buildIndex(dfName, new SegmentRange.TimePartitionedSegmentRange(start, end),
+                Sets.newLinkedHashSet(layouts), true);
+        assertResultsAndScanFiles(dfName, base, 1, false, Lists.newArrayList());
+    }
+
+    @Test
+    public void testExistTimeRangeExcludeEmpty() throws Exception {
+        val start = SegmentRange.dateToLong("2013-01-01 00:00:00");
+        val end = SegmentRange.dateToLong("2025-01-01 00:00:00");
+        val dfName = "8c670664-8d05-466a-802f-83c023b56c77";
+        NDataflowManager dsMgr = NDataflowManager.getInstance(getTestConfig(), getProject());
+        NDataflow df = dsMgr.getDataflow(dfName);
+        val layouts = df.getIndexPlan().getAllLayouts();
+        indexDataConstructor.buildIndex(dfName, new SegmentRange.TimePartitionedSegmentRange(start, end),
+                Sets.newLinkedHashSet(layouts), true);
+        assertResultsAndScanFiles(dfName, base, 1, false, Lists.newArrayList());
+    }
+
+    @Test
+    public void testSegPruningWithTimeStamp() throws Exception {
+        // build three segs
+        // [2009-01-01 00:00:00, 2011-01-01 00:00:00)
+        // [2011-01-01 00:00:00, 2013-01-01 00:00:00)
+        // [2013-01-01 00:00:00, 2015-01-01 00:00:00)
+        val dfId = "8c670664-8d05-466a-802f-83c023b56c77";
+        buildMultiSegs(dfId, 10001);
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+
+        String and_pruning0 = base
+                + "where TEST_TIME_ENC > TIMESTAMP '2011-01-01 00:00:00' and TEST_TIME_ENC < TIMESTAMP '2013-01-01 00:00:00'";
+        String and_pruning1 = base
+                + "where TEST_TIME_ENC > TIMESTAMP '2011-01-01 00:00:00' and TEST_TIME_ENC = TIMESTAMP '2016-01-01 00:00:00'";
+
+        String or_pruning0 = base
+                + "where TEST_TIME_ENC > TIMESTAMP '2011-01-01 00:00:00' or TEST_TIME_ENC = TIMESTAMP '2016-01-01 00:00:00'";
+        String or_pruning1 = base
+                + "where TEST_TIME_ENC < TIMESTAMP '2009-01-01 00:00:00' or TEST_TIME_ENC > TIMESTAMP '2015-01-01 00:00:00'";
+
+        String pruning0 = base + "where TEST_TIME_ENC < TIMESTAMP '2009-01-01 00:00:00'";
+        String pruning1 = base + "where TEST_TIME_ENC <= TIMESTAMP '2009-01-01 00:00:00'";
+        String pruning2 = base + "where TEST_TIME_ENC >= TIMESTAMP '2015-01-01 00:00:00'";
+
+        String not_equal0 = base + "where TEST_TIME_ENC <> TIMESTAMP '2012-01-01 00:00:00'";
+
+        String not0 = base
+                + "where not (TEST_TIME_ENC < TIMESTAMP '2011-01-01 00:00:00' or TEST_TIME_ENC >= TIMESTAMP '2013-01-01 00:00:00')";
+
+        String in_pruning0 = base
+                + "where TEST_TIME_ENC in (TIMESTAMP '2009-01-01 00:00:00',TIMESTAMP '2008-01-01 00:00:00',TIMESTAMP '2016-01-01 00:00:00')";
+        String in_pruning1 = base
+                + "where TEST_TIME_ENC in (TIMESTAMP '2008-01-01 00:00:00',TIMESTAMP '2016-01-01 00:00:00')";
+
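+        // expectedRanges lists the segment ranges expected to survive pruning for the next
+        // assertion; it is cleared and refilled before each check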
+        val expectedRanges = Lists.<Pair<String, String>> newArrayList();
+        val segmentRange1 = Pair.newPair("2009-01-01 00:00:00", "2011-01-01 00:00:00");
+        val segmentRange2 = Pair.newPair("2011-01-01 00:00:00", "2013-01-01 00:00:00");
+        val segmentRange3 = Pair.newPair("2013-01-01 00:00:00", "2015-01-01 00:00:00");
+
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange2);
+        expectedRanges.add(segmentRange3);
+        assertResultsAndScanFiles(dfId, base, 3, false, expectedRanges);
+
+        expectedRanges.clear();
+        expectedRanges.add(segmentRange2);
+        assertResultsAndScanFiles(dfId, and_pruning0, 1, false, expectedRanges);
+        expectedRanges.clear();
+        assertResultsAndScanFiles(dfId, and_pruning1, 0, true, expectedRanges);
+
+        expectedRanges.add(segmentRange2);
+        expectedRanges.add(segmentRange3);
+        assertResultsAndScanFiles(dfId, or_pruning0, 2, false, expectedRanges);
+        expectedRanges.clear();
+        assertResultsAndScanFiles(dfId, or_pruning1, 0, true, expectedRanges);
+
+        assertResultsAndScanFiles(dfId, pruning0, 0, true, expectedRanges);
+        expectedRanges.add(segmentRange1);
+        assertResultsAndScanFiles(dfId, pruning1, 1, false, expectedRanges);
+        expectedRanges.clear();
+        assertResultsAndScanFiles(dfId, pruning2, 0, true, expectedRanges);
+
+        // pruning with "not equal" is not supported
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange2);
+        expectedRanges.add(segmentRange3);
+        assertResultsAndScanFiles(dfId, not_equal0, 3, false, expectedRanges);
+
+        expectedRanges.clear();
+        expectedRanges.add(segmentRange2);
+        assertResultsAndScanFiles(dfId, not0, 1, false, expectedRanges);
+
+        expectedRanges.clear();
+        expectedRanges.add(segmentRange1);
+        assertResultsAndScanFiles(dfId, in_pruning0, 1, false, expectedRanges);
+        assertResultsAndScanFiles(dfId, in_pruning1, 0, true, expectedRanges);
+
+        List<Pair<String, String>> query = new ArrayList<>();
+        query.add(Pair.newPair("base", base));
+        query.add(Pair.newPair("and_pruning0", and_pruning0));
+        query.add(Pair.newPair("or_pruning0", or_pruning0));
+        query.add(Pair.newPair("pruning1", pruning1));
+        query.add(Pair.newPair("not_equal0", not_equal0));
+        query.add(Pair.newPair("not0", not0));
+        query.add(Pair.newPair("in_pruning0", in_pruning0));
+        ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "default");
+    }
+
+    @Test
+    public void testShardPruning() throws Exception {
+        overwriteSystemProp("kylin.storage.columnar.shard-rowcount", "100");
+
+        val dfId = "8c670664-8d05-466a-802f-83c023b56c77";
+        buildMultiSegs(dfId);
+
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+
+        basicPruningScenario(dfId);
+        pruningWithVariousTypesScenario(dfId);
+    }
+
+    @Test
+    public void testPruningWithChineseCharacter() throws Exception {
+        overwriteSystemProp("kylin.storage.columnar.shard-rowcount", "1");
+        val dfId = "9cde9d25-9334-4b92-b229-a00f49453757";
+        fullBuild(dfId);
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+
+        val chinese0 = "select count(*) from TEST_MEASURE where name1 = '中国'";
+        val chinese1 = "select count(*) from TEST_MEASURE where name1 <> '中国'";
+
+        assertResultsAndScanFiles(dfId, chinese0, 1, false, Lists.newArrayList());
+        assertResultsAndScanFiles(dfId, chinese1, 4, false, Lists.newArrayList());
+
+        List<Pair<String, String>> query = new ArrayList<>();
+        query.add(Pair.newPair("", chinese0));
+        query.add(Pair.newPair("", chinese1));
+        ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
+    }
+
+    private void pruningWithVariousTypesScenario(String dfId) throws Exception {
+        // the int type is already covered in #basicPruningScenario
+
+        // queries suffixed with 0 can be pruned, while those suffixed with 1 cannot
+        val bool0 = base + "where IS_EFFECTUAL = true";
+        val bool1 = base + "where IS_EFFECTUAL <> true";
+
+        val decimal0 = base + "where PRICE = 290.48";
+        val decimal1 = base + "where PRICE > 290.48";
+
+        val short0 = base + "where SLR_SEGMENT_CD = 16";
+        val short1 = base + "where SLR_SEGMENT_CD > 16";
+
+        val string0 = base + "where LSTG_FORMAT_NAME = 'Auction'";
+        val string1 = base + "where LSTG_FORMAT_NAME <> 'Auction'";
+
+        val long0 = base + "where TEST_ORDER.ORDER_ID = 2662";
+        val long1 = base + "where TEST_ORDER.ORDER_ID <> 2662";
+
+        val date0 = base + "where TEST_DATE_ENC = DATE '2011-07-10'";
+        val date1 = base + "where TEST_DATE_ENC <> DATE '2011-07-10'";
+
+        val ts0 = base + "where TEST_TIME_ENC = TIMESTAMP '2013-06-18 07:07:10'";
+
+        val ts1 = base + "where TEST_TIME_ENC > TIMESTAMP '2013-01-01 00:00:00' "
+                + "and TEST_TIME_ENC < TIMESTAMP '2015-01-01 00:00:00' "
+                + "and TEST_TIME_ENC <> TIMESTAMP '2013-06-18 07:07:10'";
+
+        assertResultsAndScanFiles(dfId, bool0, 3, false, Lists.newArrayList());
+        assertResultsAndScanFiles(dfId, bool1, 11, false, Lists.newArrayList());
+
+        assertResultsAndScanFiles(dfId, decimal0, 3, false, Lists.newArrayList());
+        assertResultsAndScanFiles(dfId, decimal1, 52, false, Lists.newArrayList());
+
+        // Calcite treats short as int, so pruning does not work here.
+        assertResultsAndScanFiles(dfId, short0, 3, false, Lists.newArrayList());
+        assertResultsAndScanFiles(dfId, short1, 25, false, Lists.newArrayList());
+
+        assertResultsAndScanFiles(dfId, string0, 3, false, Lists.newArrayList());
+        assertResultsAndScanFiles(dfId, string1, 12, false, Lists.newArrayList());
+
+        assertResultsAndScanFiles(dfId, long0, 3, false, Lists.newArrayList());
+        assertResultsAndScanFiles(dfId, long1, 28, false, Lists.newArrayList());
+
+        assertResultsAndScanFiles(dfId, date0, 3, false, Lists.newArrayList());
+        assertResultsAndScanFiles(dfId, date1, 19, false, Lists.newArrayList());
+
+        // segment pruning runs first, then shard pruning,
+        // so the number of scanned files is 1, not 3 (one shard per segment)
+        assertResultsAndScanFiles(dfId, ts0, 1, false, Lists.newArrayList());
+        assertResultsAndScanFiles(dfId, ts1, 11, false, Lists.newArrayList());
+
+        List<Pair<String, String>> query = new ArrayList<>();
+        query.add(Pair.newPair("", bool0));
+        query.add(Pair.newPair("", bool1));
+        query.add(Pair.newPair("", decimal0));
+        query.add(Pair.newPair("", decimal1));
+        query.add(Pair.newPair("", short0));
+        query.add(Pair.newPair("", short1));
+        query.add(Pair.newPair("", string0));
+        query.add(Pair.newPair("", string1));
+        query.add(Pair.newPair("", long0));
+        query.add(Pair.newPair("", long1));
+        query.add(Pair.newPair("", date0));
+        query.add(Pair.newPair("", date1));
+
+        // see #11598
+        query.add(Pair.newPair("", ts0));
+        query.add(Pair.newPair("", ts1));
+        ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
+    }
+
+    @Test
+    @Ignore("TODO: remove or adapt")
+    public void testSegmentPruningDate() throws Exception {
+        val modelId = "8c670664-8d05-466a-802f-83c023b56c80";
+        buildMultiSegs(modelId, 10005);
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+        val sql = "select test_date_enc, count(*) FROM TEST_ORDER LEFT JOIN TEST_KYLIN_FACT ON TEST_KYLIN_FACT.ORDER_ID = TEST_ORDER.ORDER_ID ";
+
+        val and_pruning0 = sql
+                + "where test_date_enc > (Date '2011-01-01') and test_date_enc < (Date '2012-01-01') group by test_date_enc";
+        val and_pruning1 = sql
+                + "where test_date_enc > '2011-01-01' and test_date_enc < '2012-01-01' group by test_date_enc";
+
+        val or_pruning0 = sql
+                + "where test_date_enc > '2012-01-01' or test_date_enc = '2008-01-01' group by test_date_enc";
+        val or_pruning1 = sql
+                + "where test_date_enc < '2011-01-01' or test_date_enc > '2013-01-01' group by test_date_enc";
+
+        val pruning0 = sql + "where test_date_enc > '2020-01-01' group by test_date_enc";
+        val pruning1 = sql + "where test_date_enc < '2008-01-01' group by test_date_enc";
+        val pruning2 = sql + "where test_date_enc = '2012-01-01' group by test_date_enc";
+
+        val not_pruning0 = sql
+                + "where not (test_date_enc < '2011-01-01' or test_date_enc >= '2013-01-01') group by test_date_enc";
+        val not_pruning1 = sql + "where not test_date_enc = '2012-01-01' group by test_date_enc";
+
+        val nested_query0 = "with test_order as (select * from \"default\".test_order where test_date_enc > '2012-01-01' and test_date_enc < '2013-01-01')"
+                + sql + "group by test_date_enc";
+        val nested_query1 = "select * from (select * from (" + sql
+                + "where test_date_enc > '2011-01-01' group by test_date_enc) where test_date_enc < '2012-01-01')";
+
+        // date functions are not supported yet
+        val date_function_query0 = "select * from (select year(test_date_enc) as test_date_enc_year from (" + sql
+                + "where test_date_enc > '2011-01-01' and test_date_enc < '2013-01-01' group by test_date_enc)) where test_date_enc_year = '2014'";
+
+        val between_query0 = sql + "where test_date_enc between '2011-01-01' and '2012-12-31' group by test_date_enc";
+
+        val in_query0 = sql
+                + "where test_date_enc in (Date '2011-06-01', Date '2012-06-01', Date '2012-12-31') group by test_date_enc";
+        val in_query1 = sql
+                + "where test_date_enc in ('2011-06-01', '2012-06-01', '2012-12-31') group by test_date_enc";
+        val not_in_query0 = sql
+                + "where test_date_enc not in (Date '2011-06-01', Date '2012-06-01', Date '2013-06-01') group by test_date_enc";
+        val not_in_query1 = sql
+                + "where test_date_enc not in ('2011-06-01', '2012-06-01', '2013-06-01') group by test_date_enc";
+
+        val complex_query0 = sql
+                + "where test_date_enc in ('2011-01-01', '2012-01-01', '2013-01-01', '2014-01-01') and test_date_enc > '2013-01-01' group by test_date_enc";
+        val complex_query1 = sql
+                + "where test_date_enc in (Date '2011-01-01', Date '2012-01-01', Date '2013-01-01', Date '2014-01-01') and test_date_enc > Date '2013-01-01' group by test_date_enc";
+
+        val expectedRanges = Lists.<Pair<String, String>> newArrayList();
+        val segmentRange1 = Pair.newPair("2009-01-01", "2011-01-01");
+        val segmentRange2 = Pair.newPair("2011-01-01", "2013-01-01");
+        val segmentRange3 = Pair.newPair("2013-01-01", "2015-01-01");
+
+        expectedRanges.add(segmentRange2);
+        assertResultsAndScanFiles(modelId, and_pruning0, 1, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, and_pruning1, 1, false, expectedRanges);
+
+        expectedRanges.clear();
+        expectedRanges.add(segmentRange2);
+        expectedRanges.add(segmentRange3);
+        assertResultsAndScanFiles(modelId, or_pruning0, 2, false, expectedRanges);
+        expectedRanges.clear();
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange3);
+        assertResultsAndScanFiles(modelId, or_pruning1, 2, false, expectedRanges);
+
+        expectedRanges.clear();
+        assertResultsAndScanFiles(modelId, pruning0, 0, true, expectedRanges);
+        assertResultsAndScanFiles(modelId, pruning1, 0, true, expectedRanges);
+        expectedRanges.add(segmentRange2);
+        assertResultsAndScanFiles(modelId, pruning2, 1, false, expectedRanges);
+
+        expectedRanges.clear();
+        expectedRanges.add(segmentRange2);
+        assertResultsAndScanFiles(modelId, not_pruning0, 1, false, expectedRanges);
+        expectedRanges.clear();
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange2);
+        expectedRanges.add(segmentRange3);
+        assertResultsAndScanFiles(modelId, not_pruning1, 3, false, expectedRanges);
+
+        expectedRanges.clear();
+        expectedRanges.add(segmentRange2);
+        assertResultsAndScanFiles(modelId, nested_query0, 1, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, nested_query1, 1, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, between_query0, 1, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, in_query0, 1, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, in_query1, 1, false, expectedRanges);
+        expectedRanges.clear();
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange2);
+        expectedRanges.add(segmentRange3);
+        assertResultsAndScanFiles(modelId, not_in_query0, 3, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, not_in_query1, 3, false, expectedRanges);
+
+        expectedRanges.clear();
+        expectedRanges.add(segmentRange2);
+        assertResultsAndScanFiles(modelId, date_function_query0, 1, false, expectedRanges);
+
+        expectedRanges.clear();
+        expectedRanges.add(segmentRange3);
+        assertResultsAndScanFiles(modelId, complex_query0, 1, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, complex_query1, 1, false, expectedRanges);
+
+        List<Pair<String, String>> query = Lists.newArrayList(//
+                Pair.newPair("", and_pruning0), Pair.newPair("", and_pruning1), //
+                Pair.newPair("", or_pruning0), Pair.newPair("", or_pruning1), //
+                Pair.newPair("", pruning2), //
+                Pair.newPair("", not_pruning0), Pair.newPair("", not_pruning1), //
+                Pair.newPair("", nested_query0), Pair.newPair("", nested_query1), //
+                Pair.newPair("", in_query0), Pair.newPair("", in_query1), //
+                Pair.newPair("", date_function_query0), //
+                Pair.newPair("", complex_query0), Pair.newPair("", complex_query1));
+        ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
+
+        // turn off kylin.query.heterogeneous-segment-enabled and re-check the scan counts
+        val projectManager = NProjectManager.getInstance(getTestConfig());
+        projectManager.updateProject(getProject(), copyForWrite -> {
+            copyForWrite.getOverrideKylinProps().put("kylin.query.heterogeneous-segment-enabled", "false");
+        });
+
+        expectedRanges.clear();
+        val sqls = Lists.<String> newArrayList();
+        Collections.addAll(sqls, and_pruning0, and_pruning1, or_pruning0, or_pruning1, pruning0, pruning1, pruning2,
+                not_pruning0, not_pruning1, nested_query0, nested_query1, between_query0, in_query0, in_query1,
+                date_function_query0, complex_query0, complex_query1);
+        assertResultAndScanFilesForPruningDate(modelId, sqls, expectedRanges);
+    }
+
+    private void assertResultAndScanFilesForPruningDate(String modelId, List<String> sqls,
+            List<Pair<String, String>> expectedRanges) throws Exception {
+        assertResultsAndScanFiles(modelId, sqls.get(0), 1, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, sqls.get(1), 1, false, expectedRanges);
+
+        assertResultsAndScanFiles(modelId, sqls.get(2), 2, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, sqls.get(3), 2, false, expectedRanges);
+
+        assertResultsAndScanFiles(modelId, sqls.get(4), 0, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, sqls.get(5), 0, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, sqls.get(6), 1, false, expectedRanges);
+
+        assertResultsAndScanFiles(modelId, sqls.get(7), 1, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, sqls.get(8), 3, false, expectedRanges);
+
+        assertResultsAndScanFiles(modelId, sqls.get(9), 1, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, sqls.get(10), 1, false, expectedRanges);
+
+        assertResultsAndScanFiles(modelId, sqls.get(11), 1, false, expectedRanges);
+
+        assertResultsAndScanFiles(modelId, sqls.get(12), 1, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, sqls.get(13), 3, false, expectedRanges);
+
+        assertResultsAndScanFiles(modelId, sqls.get(14), 1, false, expectedRanges);
+
+        assertResultsAndScanFiles(modelId, sqls.get(15), 1, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, sqls.get(16), 1, false, expectedRanges);
+    }
+
+    @Test
+    public void testDimRangePruningAfterMerge() throws Exception {
+        String modelId = "3f152495-44de-406c-9abf-b11d4132aaed";
+        overwriteSystemProp("kylin.engine.persist-flattable-enabled", "true");
+        buildMultiSegAndMerge("3f152495-44de-406c-9abf-b11d4132aaed");
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+
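+        // the merged segment keeps dimension-range metadata, so filters on ORDER_ID and PRICE
+        // can still be used for pruning after the merge; expectedRanges holds the two remaining
+        // segment ranges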
+        val lessThanEquality = base + "where TEST_KYLIN_FACT.ORDER_ID <= 10";
+        val in = base + "where TEST_KYLIN_FACT.ORDER_ID in (4998, 4999)";
+        val lessThan = base + "where TEST_KYLIN_FACT.ORDER_ID < 10";
+        val and = base + "where PRICE < -99 AND TEST_KYLIN_FACT.ORDER_ID = 1";
+        val or = base + "where TEST_KYLIN_FACT.ORDER_ID = 1 or TEST_KYLIN_FACT.ORDER_ID = 10";
+        val notSupported0 = base + "where SELLER_ID <> 10000233";
+        val notSupported1 = base + "where SELLER_ID > 10000233";
+
+        val expectedRanges = Lists.<Pair<String, String>> newArrayList();
+        val segmentRange1 = Pair.newPair("2009-01-01 00:00:00", "2011-01-01 00:00:00");
+        val segmentRange2 = Pair.newPair("2011-01-01 00:00:00", "2015-01-01 00:00:00");
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange2);
+
+        assertResultsAndScanFiles(modelId, lessThanEquality, 2, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, in, 1, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, lessThan, 1, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, and, 1, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, or, 2, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, notSupported0, 2, false, expectedRanges);
+        assertResultsAndScanFiles(modelId, notSupported1, 2, false, expectedRanges);
+        List<Pair<String, String>> query = new ArrayList<>();
+        query.add(Pair.newPair("", lessThanEquality));
+        query.add(Pair.newPair("", in));
+        query.add(Pair.newPair("", lessThan));
+        query.add(Pair.newPair("", and));
+        query.add(Pair.newPair("", or));
+        query.add(Pair.newPair("", notSupported0));
+        query.add(Pair.newPair("", notSupported1));
+        ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
+    }
+
+    @Test
+    public void testMergeDimRange() throws Exception {
+        String dataflowId = "3f152495-44de-406c-9abf-b11d4132aaed";
+        String modelId = dataflowId;
+        overwriteSystemProp("kylin.engine.persist-flattable-enabled", "false");
+        buildMultiSegAndMerge(dataflowId);
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), getProject());
+        NDataflow dataflow = dataflowManager.getDataflow(dataflowId);
+        Segments<NDataSegment> segments = dataflow.getSegments();
+        Assert.assertEquals(2, segments.size());
+        NDataSegment mergedSegment = segments.get(1);
+        Assert.assertEquals(14, mergedSegment.getDimensionRangeInfoMap().size());
+
+        val priceTest = base + "where PRICE <= -99.7900";
+
+        val expectedRanges = Lists.<Pair<String, String>> newArrayList();
+        val segmentRange1 = Pair.newPair("2009-01-01 00:00:00", "2011-01-01 00:00:00");
+        val segmentRange2 = Pair.newPair("2011-01-01 00:00:00", "2015-01-01 00:00:00");
+        expectedRanges.add(segmentRange1);
+        expectedRanges.add(segmentRange2);
+
+        assertResultsAndScanFiles(modelId, priceTest, 1, false, expectedRanges);
+        List<Pair<String, String>> query = new ArrayList<>();
+        query.add(Pair.newPair("", priceTest));
+        ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
+    }
+
+    @Test
+    public void testMergeDimRangeFalse() throws Exception {
+        String dataflowId = "3f152495-44de-406c-9abf-b11d4132aaed";
+        overwriteSystemProp("kylin.engine.persist-flattable-enabled", "false");
+        buildMultiSegs(dataflowId);
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(getTestConfig(), getProject());
+        NDataflow dataflow = dataflowManager.getDataflow(dataflowId);
+        Segments<NDataSegment> segments = dataflow.getSegments();
+        Assert.assertEquals(3, segments.size());
+        segments.get(1).getDimensionRangeInfoMap().clear();
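+        // clearing one segment's dimension ranges simulates a segment built without them;
+        // the segment merged from it below must not report dimension ranges either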
+        NDataflowManager dsMgr = NDataflowManager.getInstance(getTestConfig(), getProject());
+        NDataflow df = dsMgr.getDataflow(dataflowId);
+        IndexPlan indexPlan = df.getIndexPlan();
+        List<LayoutEntity> layouts = indexPlan.getAllLayouts();
+        mergeSegments(dataflowId, Sets.newLinkedHashSet(layouts));
+        segments = dataflowManager.getDataflow(dataflowId).getSegments();
+        Assert.assertEquals(2, segments.size());
+        NDataSegment segment = segments.get(1);
+        Assert.assertTrue(segment.getDimensionRangeInfoMap().isEmpty());
+    }
+
+    private void basicPruningScenario(String dfId) throws Exception {
+        // shard pruning supports: Equality/In/IsNull/And/Or
+        // other expressions (gt/lt/like/cast/substr, etc.) will select all files
+
+        val equality = base + "where SELLER_ID = 10000233";
+        val in = base + "where SELLER_ID in (10000233,10000234,10000235)";
+        val isNull = base + "where SELLER_ID is NULL";
+        val and = base + "where SELLER_ID in (10000233,10000234,10000235) and SELLER_ID = 10000233 ";
+        val or = base + "where SELLER_ID = 10000233 or SELLER_ID = 1 ";
+        val notSupported0 = base + "where SELLER_ID <> 10000233";
+        val notSupported1 = base + "where SELLER_ID > 10000233";
+
+        assertResultsAndScanFiles(dfId, equality, 3, false, Lists.newArrayList());
+        assertResultsAndScanFiles(dfId, in, 9, false, Lists.newArrayList());
+        assertResultsAndScanFiles(dfId, isNull, 3, false, Lists.newArrayList());
+        assertResultsAndScanFiles(dfId, and, 3, false, Lists.newArrayList());
+        assertResultsAndScanFiles(dfId, or, 4, false, Lists.newArrayList());
+        assertResultsAndScanFiles(dfId, notSupported0, 17, false, Lists.newArrayList());
+        assertResultsAndScanFiles(dfId, notSupported1, 17, false, Lists.newArrayList());
+
+        List<Pair<String, String>> query = new ArrayList<>();
+        query.add(Pair.newPair("", equality));
+        query.add(Pair.newPair("", in));
+        query.add(Pair.newPair("", isNull));
+        query.add(Pair.newPair("", and));
+        query.add(Pair.newPair("", or));
+        query.add(Pair.newPair("", notSupported0));
+        query.add(Pair.newPair("", notSupported1));
+        ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
+    }
+
+    @Override
+    public String getProject() {
+        return "file_pruning";
+    }
+
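+    // Runs the query against the model and, unless the layout is expected to be empty,
+    // checks the "numFiles" metric of the KylinFileSourceScanExec node against the expected
+    // scan count and verifies the pruned segments match the expected time ranges.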
+    private long assertResultsAndScanFiles(String modelId, String sql, long numScanFiles, boolean emptyLayout,
+            List<Pair<String, String>> expectedRanges) throws Exception {
+        val df = ExecAndComp.queryModelWithoutCompute(getProject(), sql);
+        val context = ContextUtil.listContexts().get(0);
+        if (emptyLayout) {
+            Assert.assertTrue(context.storageContext.isEmptyLayout());
+            Assert.assertEquals(Long.valueOf(-1), context.storageContext.getLayoutId());
+            return numScanFiles;
+        }
+        df.collect();
+
+        val actualNum = findFileSourceScanExec(df.queryExecution().executedPlan()).metrics().get("numFiles").get()
+                .value();
+        Assert.assertEquals(numScanFiles, actualNum);
+        val segmentIds = context.storageContext.getPrunedSegments();
+        assertPrunedSegmentRange(modelId, segmentIds, expectedRanges);
+        return actualNum;
+    }
+
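+    // Locates the first KylinFileSourceScanExec node in the executed Spark plan.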
+    private KylinFileSourceScanExec findFileSourceScanExec(SparkPlan plan) {
+        return (KylinFileSourceScanExec) find(plan, new AbstractFunction1<SparkPlan, Object>() {
+            @Override
+            public Object apply(SparkPlan v1) {
+                return v1 instanceof KylinFileSourceScanExec;
+            }
+        }).get();
+    }
+
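+    // Formats each pruned segment's time range with the model's partition date format
+    // and compares it, in order, with the expected [start, end) pairs.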
+    private void assertPrunedSegmentRange(String dfId, List<NDataSegment> prunedSegments,
+            List<Pair<String, String>> expectedRanges) {
+        val model = NDataModelManager.getInstance(getTestConfig(), getProject()).getDataModelDesc(dfId);
+        val partitionColDateFormat = model.getPartitionDesc().getPartitionDateFormat();
+
+        if (CollectionUtils.isEmpty(expectedRanges)) {
+            return;
+        }
+        Assert.assertEquals(expectedRanges.size(), prunedSegments.size());
+        for (int i = 0; i < prunedSegments.size(); i++) {
+            val segment = prunedSegments.get(i);
+            val start = DateFormat.formatToDateStr(segment.getTSRange().getStart(), partitionColDateFormat);
+            val end = DateFormat.formatToDateStr(segment.getTSRange().getEnd(), partitionColDateFormat);
+            val expectedRange = expectedRanges.get(i);
+            Assert.assertEquals(expectedRange.getFirst(), start);
+            Assert.assertEquals(expectedRange.getSecond(), end);
+        }
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/NFilePruningV2Test.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/NFilePruningV2Test.java
new file mode 100644
index 0000000000..7f8d3c340d
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/NFilePruningV2Test.java
@@ -0,0 +1,456 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.kylin.newten;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.util.Shell;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.common.util.RandomUtil;
+import org.apache.kylin.common.util.TempMetadataBuilder;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.junit.TimeZoneTestRunner;
+import org.apache.kylin.metadata.cube.model.NDataflow;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
+import org.apache.kylin.metadata.model.NDataModelManager;
+import org.apache.kylin.metadata.model.SegmentRange;
+import org.apache.kylin.util.ExecAndComp;
+import org.apache.spark.SparkConf;
+import org.apache.spark.sql.SparderEnv;
+import org.apache.spark.sql.SparkSession;
+import org.apache.spark.sql.execution.LayoutFileSourceScanExec;
+import org.apache.spark.sql.execution.SparkPlan;
+import org.apache.spark.sql.internal.StaticSQLConf;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.sparkproject.guava.collect.Sets;
+
+import lombok.val;
+import scala.runtime.AbstractFunction1;
+
+@RunWith(TimeZoneTestRunner.class)
+@Ignore
+public class NFilePruningV2Test extends NLocalWithSparkSessionTest {
+
+    private final String base = "select count(*)  FROM TEST_ORDER LEFT JOIN TEST_KYLIN_FACT ON TEST_KYLIN_FACT.ORDER_ID = TEST_ORDER.ORDER_ID ";
+
+    @BeforeClass
+    public static void initSpark() {
+        if (Shell.MAC)
+            overwriteSystemPropBeforeClass("org.xerial.snappy.lib.name", "libsnappyjava.jnilib"); // for snappy
+        if (ss != null && !ss.sparkContext().isStopped()) {
+            ss.stop();
+        }
+        sparkConf = new SparkConf().setAppName(RandomUtil.randomUUIDStr()).setMaster("local[4]");
+        sparkConf.set("spark.serializer", "org.apache.spark.serializer.JavaSerializer");
+        sparkConf.set(StaticSQLConf.CATALOG_IMPLEMENTATION().key(), "in-memory");
+        sparkConf.set("spark.sql.shuffle.partitions", "1");
+        sparkConf.set("spark.memory.fraction", "0.1");
+        // opt memory
+        sparkConf.set("spark.shuffle.detectCorrupt", "false");
+        // For sinai_poc/query03, enable implicit cross join conversion
+        sparkConf.set("spark.sql.crossJoin.enabled", "true");
+        sparkConf.set("spark.sql.adaptive.enabled", "true");
+        sparkConf.set("spark.sql.sources.bucketing.enabled", "false");
+        sparkConf.set("spark.sql.adaptive.shuffle.maxTargetPostShuffleInputSize", "1");
+        sparkConf.set(StaticSQLConf.WAREHOUSE_PATH().key(),
+                TempMetadataBuilder.TEMP_TEST_METADATA + "/spark-warehouse");
+        ss = SparkSession.builder().config(sparkConf).getOrCreate();
+        SparderEnv.setSparkSession(ss);
+    }
+
+    @Before
+    public void setup() throws Exception {
+        overwriteSystemProp("kylin.job.scheduler.poll-interval-second", "1");
+        this.createTestMetadata("src/test/resources/ut_meta/file_pruning");
+        NDefaultScheduler scheduler = NDefaultScheduler.getInstance(getProject());
+        scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()));
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
+        }
+        NDataModelManager instance = NDataModelManager.getInstance(getTestConfig(), getProject());
+        instance.updateDataModel("8c670664-8d05-466a-802f-83c023b56c77", write -> write.setStorageType(2));
+        instance.updateDataModel("8c670664-8d05-466a-802f-83c023b56c78", write -> write.setStorageType(2));
+        instance.updateDataModel("8c670664-8d05-466a-802f-83c023b56c79", write -> write.setStorageType(2));
+        instance.updateDataModel("9cde9d25-9334-4b92-b229-a00f49453757", write -> write.setStorageType(2));
+    }
+
+    @After
+    public void after() throws Exception {
+        NDefaultScheduler.destroyInstance();
+        cleanupTestMetadata();
+    }
+
+    @Test
+    public void testNonExistTimeRange() throws Exception {
+        val start = SegmentRange.dateToLong("2023-01-01 00:00:00");
+        val end = SegmentRange.dateToLong("2025-01-01 00:00:00");
+        val dfName = "8c670664-8d05-466a-802f-83c023b56c77";
+        NDataflowManager dsMgr = NDataflowManager.getInstance(getTestConfig(), getProject());
+        NDataflow df = dsMgr.getDataflow(dfName);
+        val layouts = df.getIndexPlan().getAllLayouts();
+        indexDataConstructor.buildIndex(dfName, new SegmentRange.TimePartitionedSegmentRange(start, end), Sets.newLinkedHashSet(layouts),
+                true);
+        assertResultsAndScanFiles(base, 0);
+    }
+
+    @Test
+    public void testSegPruningWithStringDate() throws Exception {
+        // build three segs
+        // [2009-01-01 00:00:00, 2011-01-01 00:00:00)
+        // [2011-01-01 00:00:00, 2013-01-01 00:00:00)
+        // [2013-01-01 00:00:00, 2015-01-01 00:00:00)
+        buildMultiSegs("8c670664-8d05-466a-802f-83c023b56c78", 10001);
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+        val no_pruning1 = "select count(*) from TEST_KYLIN_FACT";
+        val no_pruning2 = "select count(*) from TEST_KYLIN_FACT where CAL_DT > '2010-01-01' and CAL_DT < '2015-01-01'";
+        val seg_pruning1 = "select count(*) from TEST_KYLIN_FACT where CAL_DT < '2013-01-01'";
+        val seg_pruning2 = "select count(*) from TEST_KYLIN_FACT where CAL_DT > '2013-01-01'";
+        val seg_pruning3 = "select count(*) from TEST_KYLIN_FACT where CAL_DT = '2013-05-16'";
+        val seg_pruning4 = "select count(*) from TEST_KYLIN_FACT where CAL_DT in ('2013-05-16', '2013-03-22')";
+        val seg_pruning5 = "select count(*) from TEST_KYLIN_FACT where CAL_DT not in ('2013-05-16', '2013-03-22')";
+        val seg_pruning6 = "select count(*) from TEST_KYLIN_FACT where CAL_DT <> '2013-05-16'";
+        assertResultsAndScanFiles(no_pruning1, 731);
+        assertResultsAndScanFiles(no_pruning2, 731);
+        assertResultsAndScanFiles(seg_pruning1, 365);
+        assertResultsAndScanFiles(seg_pruning2, 365);
+        assertResultsAndScanFiles(seg_pruning3, 1);
+        assertResultsAndScanFiles(seg_pruning4, 2);
+        assertResultsAndScanFiles(seg_pruning5, 729);
+        assertResultsAndScanFiles(seg_pruning6, 730);
+    }
+
+    @Ignore("Unsupport timestamp")
+    public void testSegPruningWithStringTimeStamp() throws Exception {
+        // build three segs
+        // [2009-01-01 00:00:00, 2011-01-01 00:00:00)
+        // [2011-01-01 00:00:00, 2013-01-01 00:00:00)
+        // [2013-01-01 00:00:00, 2015-01-01 00:00:00)
+        buildMultiSegs("8c670664-8d05-466a-802f-83c023b56c79", 10001);
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+        String base = "select count(*)  FROM TEST_ORDER_STRING_TS LEFT JOIN TEST_KYLIN_FACT ON TEST_KYLIN_FACT.ORDER_ID = TEST_ORDER_STRING_TS.ORDER_ID ";
+
+        String and_pruning0 = base
+                + "where TEST_TIME_ENC > '2011-01-01 00:00:00' and TEST_TIME_ENC < '2013-01-01 00:00:00'";
+        String and_pruning1 = base
+                + "where TEST_TIME_ENC > '2011-01-01 00:00:00' and TEST_TIME_ENC = '2016-01-01 00:00:00'";
+
+        String or_pruning0 = base
+                + "where TEST_TIME_ENC > '2011-01-01 00:00:00' or TEST_TIME_ENC = '2016-01-01 00:00:00'";
+        String or_pruning1 = base
+                + "where TEST_TIME_ENC < '2009-01-01 00:00:00' or TEST_TIME_ENC > '2015-01-01 00:00:00'";
+
+        String pruning0 = base + "where TEST_TIME_ENC < '2009-01-01 00:00:00'";
+        String pruning1 = base + "where TEST_TIME_ENC <= '2009-01-01 00:00:00'";
+        String pruning2 = base + "where TEST_TIME_ENC >= '2015-01-01 00:00:00'";
+
+        String not0 = base + "where TEST_TIME_ENC <> '2012-01-01 00:00:00'";
+
+        String in_pruning0 = base
+                + "where TEST_TIME_ENC in ('2009-01-01 00:00:00', '2008-01-01 00:00:00', '2016-01-01 00:00:00')";
+        String in_pruning1 = base + "where TEST_TIME_ENC in ('2008-01-01 00:00:00', '2016-01-01 00:00:00')";
+
+        assertResultsAndScanFiles(base, 3);
+
+        assertResultsAndScanFiles(and_pruning0, 1);
+        assertResultsAndScanFiles(and_pruning1, 0);
+
+        assertResultsAndScanFiles(or_pruning0, 2);
+        assertResultsAndScanFiles(or_pruning1, 0);
+
+        assertResultsAndScanFiles(pruning0, 0);
+        assertResultsAndScanFiles(pruning1, 1);
+        assertResultsAndScanFiles(pruning2, 0);
+
+        // pruning with "not" is not supported
+        assertResultsAndScanFiles(not0, 3);
+
+        assertResultsAndScanFiles(in_pruning0, 1);
+        assertResultsAndScanFiles(in_pruning1, 0);
+
+        List<Pair<String, String>> query = new ArrayList<>();
+        query.add(Pair.newPair("base", base));
+        query.add(Pair.newPair("and_pruning0", and_pruning0));
+        query.add(Pair.newPair("and_pruning1", and_pruning1));
+        query.add(Pair.newPair("or_pruning0", or_pruning0));
+        query.add(Pair.newPair("or_pruning1", or_pruning1));
+        query.add(Pair.newPair("pruning0", pruning0));
+        query.add(Pair.newPair("pruning1", pruning1));
+        query.add(Pair.newPair("pruning2", pruning2));
+        ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "default");
+    }
+
+    @Ignore("Unsupport timestamp")
+    public void testSegPruningWithTimeStamp() throws Exception {
+        // build three segs
+        // [2009-01-01 00:00:00, 2011-01-01 00:00:00)
+        // [2011-01-01 00:00:00, 2013-01-01 00:00:00)
+        // [2013-01-01 00:00:00, 2015-01-01 00:00:00)
+        buildMultiSegs("8c670664-8d05-466a-802f-83c023b56c77", 10001);
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+
+        String and_pruning0 = base
+                + "where TEST_TIME_ENC > TIMESTAMP '2011-01-01 00:00:00' and TEST_TIME_ENC < TIMESTAMP '2013-01-01 00:00:00'";
+        String and_pruning1 = base
+                + "where TEST_TIME_ENC > TIMESTAMP '2011-01-01 00:00:00' and TEST_TIME_ENC = TIMESTAMP '2016-01-01 00:00:00'";
+
+        String or_pruning0 = base
+                + "where TEST_TIME_ENC > TIMESTAMP '2011-01-01 00:00:00' or TEST_TIME_ENC = TIMESTAMP '2016-01-01 00:00:00'";
+        String or_pruning1 = base
+                + "where TEST_TIME_ENC < TIMESTAMP '2009-01-01 00:00:00' or TEST_TIME_ENC > TIMESTAMP '2015-01-01 00:00:00'";
+
+        String pruning0 = base + "where TEST_TIME_ENC < TIMESTAMP '2009-01-01 00:00:00'";
+        String pruning1 = base + "where TEST_TIME_ENC <= TIMESTAMP '2009-01-01 00:00:00'";
+        String pruning2 = base + "where TEST_TIME_ENC >= TIMESTAMP '2015-01-01 00:00:00'";
+
+        String not0 = base + "where TEST_TIME_ENC <> TIMESTAMP '2012-01-01 00:00:00'";
+
+        String in_pruning0 = base
+                + "where TEST_TIME_ENC in (TIMESTAMP '2009-01-01 00:00:00',TIMESTAMP '2008-01-01 00:00:00',TIMESTAMP '2016-01-01 00:00:00')";
+        String in_pruning1 = base
+                + "where TEST_TIME_ENC in (TIMESTAMP '2008-01-01 00:00:00',TIMESTAMP '2016-01-01 00:00:00')";
+
+        assertResultsAndScanFiles(base, 3);
+
+        assertResultsAndScanFiles(and_pruning0, 1);
+        assertResultsAndScanFiles(and_pruning1, 0);
+
+        assertResultsAndScanFiles(or_pruning0, 2);
+        assertResultsAndScanFiles(or_pruning1, 0);
+
+        assertResultsAndScanFiles(pruning0, 0);
+        assertResultsAndScanFiles(pruning1, 1);
+        assertResultsAndScanFiles(pruning2, 0);
+
+        // pruning with "not" is not supported
+        assertResultsAndScanFiles(not0, 3);
+
+        assertResultsAndScanFiles(in_pruning0, 1);
+        assertResultsAndScanFiles(in_pruning1, 0);
+
+        List<Pair<String, String>> query = new ArrayList<>();
+        query.add(Pair.newPair("base", base));
+        query.add(Pair.newPair("and_pruning0", and_pruning0));
+        query.add(Pair.newPair("and_pruning1", and_pruning1));
+        query.add(Pair.newPair("or_pruning0", or_pruning0));
+        query.add(Pair.newPair("or_pruning1", or_pruning1));
+        query.add(Pair.newPair("pruning0", pruning0));
+        query.add(Pair.newPair("pruning1", pruning1));
+        query.add(Pair.newPair("pruning2", pruning2));
+        ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "default");
+    }
+
+    @Test
+    public void testShardPruning() throws Exception {
+        overwriteSystemProp("kylin.storage.columnar.shard-rowcount", "100");
+        overwriteSystemProp("kylin.storage.columnar.bucket-num", "10");
+        buildMultiSegs("8c670664-8d05-466a-802f-83c023b56c77");
+
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+
+        basicPruningScenario();
+        pruningWithVariousTypesScenario();
+    }
+
+    @Test
+    public void testDimRangePruning() throws Exception {
+        buildMultiSegs("8c670664-8d05-466a-802f-83c023b56c77");
+
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+
+        val lessThanEquality = base + "where TEST_ORDER.ORDER_ID <= 2";
+        val in = base + "where TEST_ORDER.ORDER_ID in (4998, 4999)";
+        val lessThan = base + "where TEST_ORDER.ORDER_ID < 2";
+        val and = base + "where PRICE < -99 AND TEST_ORDER.ORDER_ID = 1";
+        val or = base + "where TEST_ORDER.ORDER_ID = 1 or TEST_ORDER.ORDER_ID = 2 ";
+        val notSupported0 = base + "where SELLER_ID <> 10000233";
+        val notSupported1 = base + "where SELLER_ID > 10000233";
+
+        assertResultsAndScanFiles(lessThanEquality, 3);
+        assertResultsAndScanFiles(in, 3);
+        assertResultsAndScanFiles(lessThan, 3);
+        assertResultsAndScanFiles(and, 3);
+        assertResultsAndScanFiles(or, 3);
+        assertResultsAndScanFiles(notSupported0, 3);
+        assertResultsAndScanFiles(notSupported1, 3);
+
+        List<Pair<String, String>> query = new ArrayList<>();
+        query.add(Pair.newPair("", lessThanEquality));
+        query.add(Pair.newPair("", in));
+        query.add(Pair.newPair("", lessThan));
+        query.add(Pair.newPair("", and));
+        query.add(Pair.newPair("", or));
+        query.add(Pair.newPair("", notSupported0));
+        query.add(Pair.newPair("", notSupported1));
+        ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
+    }
+
+    @Test
+    public void testPruningWithChineseCharacter() throws Exception {
+        overwriteSystemProp("kylin.storage.columnar.shard-rowcount", "1");
+        fullBuild("9cde9d25-9334-4b92-b229-a00f49453757");
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+
+        val chinese0 = "select count(*) from TEST_MEASURE where name1 = '中国'";
+        val chinese1 = "select count(*) from TEST_MEASURE where name1 <> '中国'";
+
+        assertResultsAndScanFiles(chinese0, 1);
+        assertResultsAndScanFiles(chinese1, 4);
+
+        List<Pair<String, String>> query = new ArrayList<>();
+        query.add(Pair.newPair("", chinese0));
+        query.add(Pair.newPair("", chinese1));
+        ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
+    }
+
+    private void pruningWithVariousTypesScenario() throws Exception {
+        // the int type is covered in #basicPruningScenario
+
+        // xx0 queries can be pruned, while xx1 queries cannot.
+        val bool0 = base + "where IS_EFFECTUAL = true";
+        val bool1 = base + "where IS_EFFECTUAL <> true";
+
+        val decimal0 = base + "where PRICE = 290.48";
+        val decimal1 = base + "where PRICE > 290.48";
+
+        val short0 = base + "where SLR_SEGMENT_CD = 16";
+        val short1 = base + "where SLR_SEGMENT_CD > 16";
+
+        val string0 = base + "where LSTG_FORMAT_NAME = 'Auction'";
+        val string1 = base + "where LSTG_FORMAT_NAME <> 'Auction'";
+
+        val long0 = base + "where TEST_ORDER.ORDER_ID = 2662";
+        val long1 = base + "where TEST_ORDER.ORDER_ID <> 2662";
+
+        val date0 = base + "where TEST_DATE_ENC = DATE '2011-07-10'";
+        val date1 = base + "where TEST_DATE_ENC <> DATE '2011-07-10'";
+
+        val ts0 = base + "where TEST_TIME_ENC = TIMESTAMP '2013-06-18 07:07:10'";
+
+        val ts1 = base + "where TEST_TIME_ENC > TIMESTAMP '2013-01-01 00:00:00' "
+                + "and TEST_TIME_ENC < TIMESTAMP '2015-01-01 00:00:00' "
+                + "and TEST_TIME_ENC <> TIMESTAMP '2013-06-18 07:07:10'";
+
+        assertResultsAndScanFiles(bool0, 3);
+        assertResultsAndScanFiles(bool1, 9);
+
+        assertResultsAndScanFiles(decimal0, 3);
+        assertResultsAndScanFiles(decimal1, 33);
+
+        // Calcite treats short as int, so pruning will not work.
+        assertResultsAndScanFiles(short0, 24);
+        assertResultsAndScanFiles(short1, 24);
+
+        assertResultsAndScanFiles(string0, 3);
+        assertResultsAndScanFiles(string1, 15);
+
+        assertResultsAndScanFiles(long0, 3);
+        assertResultsAndScanFiles(long1, 30);
+
+        assertResultsAndScanFiles(date0, 3);
+        assertResultsAndScanFiles(date1, 30);
+
+        // segment pruning is applied first, then shard pruning,
+        // so the number of scanned files is 1, not 3 (one shard per segment)
+        assertResultsAndScanFiles(ts0, 3);
+        assertResultsAndScanFiles(ts1, 30);
+
+        List<Pair<String, String>> query = new ArrayList<>();
+        query.add(Pair.newPair("", bool0));
+        query.add(Pair.newPair("", bool1));
+        query.add(Pair.newPair("", decimal0));
+        query.add(Pair.newPair("", decimal1));
+        query.add(Pair.newPair("", short0));
+        query.add(Pair.newPair("", short1));
+        query.add(Pair.newPair("", string0));
+        query.add(Pair.newPair("", string1));
+        query.add(Pair.newPair("", long0));
+        query.add(Pair.newPair("", long1));
+        query.add(Pair.newPair("", date0));
+        query.add(Pair.newPair("", date1));
+
+        // see #11598
+        query.add(Pair.newPair("", ts0));
+        query.add(Pair.newPair("", ts1));
+        ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
+    }
+
+    private void basicPruningScenario() throws Exception {
+        // shard pruning supports: Equality/In/IsNull/And/Or
+        // other expressions (gt/lt/like/cast/substr, etc.) will select all files.
+
+        val equality = base + "where SELLER_ID = 10000233";
+        val in = base + "where SELLER_ID in (10000233,10000234,10000235)";
+        val isNull = base + "where SELLER_ID is NULL";
+        val and = base + "where SELLER_ID in (10000233,10000234,10000235) and SELLER_ID = 10000233 ";
+        val or = base + "where SELLER_ID = 10000233 or SELLER_ID = 1 ";
+        val notSupported0 = base + "where SELLER_ID <> 10000233";
+        val notSupported1 = base + "where SELLER_ID > 10000233";
+
+        assertResultsAndScanFiles(equality, 3);
+        assertResultsAndScanFiles(in, 9);
+        assertResultsAndScanFiles(isNull, 3);
+        assertResultsAndScanFiles(and, 3);
+        assertResultsAndScanFiles(or, 6);
+        assertResultsAndScanFiles(notSupported0, 30);
+        assertResultsAndScanFiles(notSupported1, 30);
+
+        List<Pair<String, String>> query = new ArrayList<>();
+        query.add(Pair.newPair("", equality));
+        query.add(Pair.newPair("", in));
+        query.add(Pair.newPair("", isNull));
+        query.add(Pair.newPair("", and));
+        query.add(Pair.newPair("", or));
+        query.add(Pair.newPair("", notSupported0));
+        query.add(Pair.newPair("", notSupported1));
+        ExecAndComp.execAndCompare(query, getProject(), ExecAndComp.CompareLevel.SAME, "left");
+    }
+
+    @Override
+    public String getProject() {
+        return "file_pruning";
+    }
+
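+    // Runs the query against the model and checks the "numFiles" metric of the
+    // LayoutFileSourceScanExec node against the expected number of scanned files.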
+    private long assertResultsAndScanFiles(String sql, long numScanFiles) throws Exception {
+        val df = ExecAndComp.queryModelWithoutCompute(getProject(), sql);
+        df.collect();
+        val actualNum = findFileSourceScanExec(df.queryExecution().sparkPlan()).metrics().get("numFiles").get().value();
+        Assert.assertEquals(numScanFiles, actualNum);
+        return actualNum;
+    }
+
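+    // Locates the first LayoutFileSourceScanExec node in the Spark plan.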
+    private LayoutFileSourceScanExec findFileSourceScanExec(SparkPlan plan) {
+        return (LayoutFileSourceScanExec) plan.find(new AbstractFunction1<SparkPlan, Object>() {
+            @Override
+            public Object apply(SparkPlan p) {
+                return p instanceof LayoutFileSourceScanExec;
+            }
+        }).get();
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/NFlattableJoinWithoutLookupTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/NFlattableJoinWithoutLookupTest.java
new file mode 100644
index 0000000000..746fda8c99
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/NFlattableJoinWithoutLookupTest.java
@@ -0,0 +1,115 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.newten;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.metadata.cube.model.IndexPlan;
+import org.apache.kylin.metadata.cube.model.LayoutEntity;
+import org.apache.kylin.metadata.cube.model.NDataflow;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
+import org.apache.kylin.metadata.model.SegmentRange;
+import org.apache.kylin.util.ExecAndComp;
+import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Row;
+import org.apache.spark.sql.SparderEnv;
+import org.apache.spark.sql.common.SparderQueryTest;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.sparkproject.guava.collect.Sets;
+
+public class NFlattableJoinWithoutLookupTest extends NLocalWithSparkSessionTest {
+
+    private NDataflowManager dfMgr = null;
+
+    @Before
+    public void setup() throws Exception {
+        overwriteSystemProp("kylin.job.scheduler.poll-interval-second", "1");
+        overwriteSystemProp("kylin.job.flat-table-join-without-lookup", "true");
+        overwriteSystemProp("kylin.engine.persist-flattable-enabled", "false");
+        this.createTestMetadata("src/test/resources/ut_meta/flattable_without_join_lookup");
+        dfMgr = NDataflowManager.getInstance(getTestConfig(), getProject());
+        NDefaultScheduler scheduler = NDefaultScheduler.getInstance(getProject());
+        scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()));
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
+        }
+    }
+
+    @After
+    public void after() throws Exception {
+        NDefaultScheduler.destroyInstance();
+        cleanupTestMetadata();
+        FileUtils.deleteQuietly(new File("../kap-it/metastore_db"));
+    }
+
+    @Override
+    public String getProject() {
+        return "flattable_without_join_lookup";
+    }
+
+    private String sql = "select  CAL_DT as dt1, cast (TEST_ORDER_STRING.TEST_TIME_ENC as timestamp) as ts2, cast(TEST_ORDER_STRING.TEST_DATE_ENC  as date) as dt2,TEST_ORDER.ORDER_ID, count(*) FROM TEST_ORDER LEFT JOIN TEST_KYLIN_FACT ON TEST_KYLIN_FACT.ORDER_ID = TEST_ORDER.ORDER_ID LEFT JOIN TEST_ORDER_STRING on TEST_ORDER.ORDER_ID = TEST_ORDER_STRING.ORDER_ID group by TEST_ORDER.ORDER_ID ,TEST_ORDER_STRING.TEST_TIME_ENC , TEST_ORDER_STRING.TEST_DATE_ENC ,CAL_DT order by TEST_ORDER.ORD [...]
+
+    @Test
+    public void testFlattableWithoutLookup() throws Exception {
+        buildSegs("8c670664-8d05-466a-802f-83c023b56c77", 10001L);
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+        Dataset<Row> cube = ExecAndComp.queryModel(getProject(), sql);
+        Dataset<Row> pushDown = ExecAndComp.querySparkSql(sql);
+        String msg = SparderQueryTest.checkAnswer(cube, pushDown, true);
+        Assert.assertNull(msg);
+    }
+
+    @Test
+    public void testFlattableJoinLookup() throws Exception {
+        buildSegs("9cde9d25-9334-4b92-b229-a00f49453757", 10001L);
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+        Dataset<Row> cube = ExecAndComp.queryModel(getProject(), sql);
+        Dataset<Row> pushDown = ExecAndComp.querySparkSql(sql);
+        String msg = SparderQueryTest.checkAnswer(cube, pushDown, true);
+        Assert.assertNull(msg);
+    }
+
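+    // Builds the given layouts (all layouts when none are specified) for the
+    // segment [2009-01-01 00:00:00, 2015-01-01 00:00:00).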
+    private void buildSegs(String dfName, long... layoutID) throws Exception {
+        NDataflowManager dsMgr = NDataflowManager.getInstance(getTestConfig(), getProject());
+        NDataflow df = dsMgr.getDataflow(dfName);
+        List<LayoutEntity> layouts = new ArrayList<>();
+        IndexPlan indexPlan = df.getIndexPlan();
+        if (layoutID.length == 0) {
+            layouts = indexPlan.getAllLayouts();
+        } else {
+            for (long id : layoutID) {
+                layouts.add(indexPlan.getLayoutEntity(id));
+            }
+        }
+        long start = SegmentRange.dateToLong("2009-01-01 00:00:00");
+        long end = SegmentRange.dateToLong("2015-01-01 00:00:00");
+        indexDataConstructor.buildIndex(dfName, new SegmentRange.TimePartitionedSegmentRange(start, end), Sets.newLinkedHashSet(layouts),
+                true);
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/NJoinOptTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/NJoinOptTest.java
new file mode 100644
index 0000000000..478208c2ec
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/NJoinOptTest.java
@@ -0,0 +1,210 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.kylin.newten;
+
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+import org.apache.hadoop.util.Shell;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.metadata.project.NProjectManager;
+import org.apache.kylin.util.ExecAndComp;
+import org.apache.spark.SparkConf;
+import org.apache.spark.sql.SparderEnv;
+import org.apache.spark.sql.SparkSession;
+import org.apache.spark.sql.execution.SortExec;
+import org.apache.spark.sql.execution.SparkPlan;
+import org.apache.spark.sql.execution.exchange.Exchange;
+import org.apache.spark.sql.execution.joins.SortMergeJoinExec;
+import org.apache.spark.sql.internal.StaticSQLConf;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import lombok.val;
+import scala.Option;
+import scala.runtime.AbstractFunction1;
+
+public class NJoinOptTest extends NLocalWithSparkSessionTest {
+
+    @BeforeClass
+    public static void initSpark() {
+        if (Shell.MAC)
+            overwriteSystemPropBeforeClass("org.xerial.snappy.lib.name", "libsnappyjava.jnilib"); // for snappy
+        if (ss != null && !ss.sparkContext().isStopped()) {
+            ss.stop();
+        }
+        sparkConf = new SparkConf().setAppName(UUID.randomUUID().toString()).setMaster("local[4]");
+        sparkConf.set("spark.serializer", "org.apache.spark.serializer.JavaSerializer");
+        sparkConf.set(StaticSQLConf.CATALOG_IMPLEMENTATION().key(), "in-memory");
+        sparkConf.set("spark.sql.shuffle.partitions", "1");
+        sparkConf.set("spark.memory.fraction", "0.1");
+        // opt memory
+        sparkConf.set("spark.shuffle.detectCorrupt", "false");
+        // For sinai_poc/query03, enable implicit cross join conversion
+        sparkConf.set("spark.sql.crossJoin.enabled", "true");
+        sparkConf.set("spark.sql.adaptive.enabled", "false");
+        sparkConf.set("spark.sql.autoBroadcastJoinThreshold", "1");
+        ss = SparkSession.builder().config(sparkConf).getOrCreate();
+        SparderEnv.setSparkSession(ss);
+    }
+
+    @Before
+    public void setup() throws Exception {
+        overwriteSystemProp("kylin.job.scheduler.poll-interval-second", "1");
+        this.createTestMetadata("src/test/resources/ut_meta/join_opt");
+        NDefaultScheduler scheduler = NDefaultScheduler.getInstance(getProject());
+        scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()));
+        if (!scheduler.hasStarted()) {
+            throw new RuntimeException("scheduler has not been started");
+        }
+    }
+
+    @After
+    public void after() throws Exception {
+        NDefaultScheduler.destroyInstance();
+        cleanupTestMetadata();
+    }
+
+    @Ignore("KE-30387")
+    @Test
+    public void testShardJoinInOneSeg() throws Exception {
+        overwriteSystemProp("kylin.storage.columnar.shard-rowcount", "100");
+        fullBuild("8c670664-8d05-466a-802f-83c023b56c77");
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+        // Calcite will transform this "in" into a join
+        val sql1 = "select count(*) from TEST_KYLIN_FACT where SELLER_ID in (select SELLER_ID from TEST_KYLIN_FACT group by SELLER_ID)";
+        val sql2 = "select count(*) from TEST_KYLIN_FACT where LSTG_FORMAT_NAME in (select LSTG_FORMAT_NAME from TEST_KYLIN_FACT group by LSTG_FORMAT_NAME)";
+        val sql3 = "select count(*) from TEST_KYLIN_FACT t1 join "
+                + "(select TRANS_ID,LSTG_FORMAT_NAME from TEST_KYLIN_FACT group by TRANS_ID,LSTG_FORMAT_NAME) t2 "
+                + "on t1.TRANS_ID = t2.TRANS_ID and t1.LSTG_FORMAT_NAME = t2.LSTG_FORMAT_NAME";
+        List<String> query = new ArrayList<>();
+        query.add(sql1);
+        query.add(sql2);
+        query.add(sql3);
+        ExecAndComp.execAndCompareQueryList(query, getProject(), ExecAndComp.CompareLevel.SAME, "default");
+
+        basicScenario(sql1);
+        testExchangePruningAfterAgg(sql2);
+        testMultiShards(sql3);
+    }
+
+    private void testMultiShards(String sql) throws SQLException {
+        // assert no exchange
+        // assert no sort
+        assertPlan(sql, false, false);
+    }
+
+    private void testExchangePruningAfterAgg(String sql) throws SQLException {
+        // assert no exchange
+        // data after agg will lose its sorting characteristics
+        assertPlan(sql, false, true);
+    }
+
+    private void basicScenario(String sql) throws SQLException {
+        // assert no exchange
+        // assert no sort
+        assertPlan(sql, false, false);
+    }
+
+    @Test
+    public void testShardJoinInMultiSeg() throws Exception {
+        overwriteSystemProp("kylin.storage.columnar.shard-rowcount", "100");
+        buildMultiSegs("8c670664-8d05-466a-802f-83c023b56c77");
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+        // Calcite will transform this "in" into a join
+        val sql = "select count(*) from TEST_KYLIN_FACT where SELLER_ID in (select SELLER_ID from TEST_KYLIN_FACT group by SELLER_ID)";
+        List<String> query = new ArrayList<>();
+        query.add(sql);
+        ExecAndComp.execAndCompareQueryList(query, getProject(), ExecAndComp.CompareLevel.SAME, "default");
+
+        // assert exchange exists
+        // assert sort exists
+        assertPlan(sql, true, true);
+    }
+
+    @Ignore("KE-30387")
+    @Test
+    public void testShardJoinInMultiSegWithFixedShardNum() throws Exception {
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
+        val projectManager = NProjectManager.getInstance(config);
+
+        Map<String, String> overrideKylinProps = new HashMap<>();
+        overrideKylinProps.put("kylin.engine.shard-num-json",
+                "{\"DEFAULT.TEST_KYLIN_FACT.SELLER_ID\":\"10\",\"DEFAULT.TEST_KYLIN_FACT.LSTG_FORMAT_NAME,DEFAULT.TEST_KYLIN_FACT.TRANS_ID\":\"15\",\"e\":\"300\"}");
+        projectManager.updateProject(getProject(), copyForWrite -> {
+            copyForWrite.getOverrideKylinProps().putAll(overrideKylinProps);
+        });
+
+        buildMultiSegs("8c670664-8d05-466a-802f-83c023b56c77");
+        populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+        // Calcite will transform this "in" into a join
+        val sql1 = "select count(*) from TEST_KYLIN_FACT where SELLER_ID in (select SELLER_ID from TEST_KYLIN_FACT group by SELLER_ID)";
+        val sql2 = "select count(*) from TEST_KYLIN_FACT t1 join "
+                + "(select TRANS_ID,LSTG_FORMAT_NAME from TEST_KYLIN_FACT group by TRANS_ID,LSTG_FORMAT_NAME) t2 "
+                + "on t1.TRANS_ID = t2.TRANS_ID and t1.LSTG_FORMAT_NAME = t2.LSTG_FORMAT_NAME";
+
+        List<String> query = new ArrayList<>();
+        query.add(sql1);
+        query.add(sql2);
+        ExecAndComp.execAndCompareQueryList(query, getProject(), ExecAndComp.CompareLevel.SAME, "default");
+
+        // assert no exchange, because we unified the number of shards across segments
+        // assert exists sort
+        assertPlan(sql1, false, true);
+        assertPlan(sql2, false, true);
+    }
+
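+    // Asserts whether an Exchange and a Sort node exist under the SortMergeJoin of the executed plan.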
+    private void assertPlan(String sql, boolean existsExchange, boolean existsSort) throws SQLException {
+        SortMergeJoinExec joinExec = getSortMergeJoinExec(sql);
+        Assert.assertEquals(existsExchange, findSpecPlan(joinExec, Exchange.class).isDefined());
+
+        Assert.assertEquals(existsSort, findSpecPlan(joinExec, SortExec.class).isDefined());
+    }
+
+    private SortMergeJoinExec getSortMergeJoinExec(String sql) throws SQLException {
+        val plan = ExecAndComp.queryModel(getProject(), sql).queryExecution().executedPlan();
+        return (SortMergeJoinExec) findSpecPlan(plan, SortMergeJoinExec.class).get();
+    }
+
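+    // Returns the first node of the given class found in the plan subtree, if any.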
+    private Option<SparkPlan> findSpecPlan(SparkPlan plan, Class<?> cls) {
+        return plan.find(new AbstractFunction1<SparkPlan, Object>() {
+            @Override
+            public Object apply(SparkPlan v1) {
+                return cls.isInstance(v1);
+            }
+        });
+    }
+
+    @Override
+    public String getProject() {
+        return "join_opt";
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/NManualBuildAndQueryCuboidTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/NManualBuildAndQueryCuboidTest.java
new file mode 100644
index 0000000000..90807701d1
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/NManualBuildAndQueryCuboidTest.java
@@ -0,0 +1,233 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.newten;
+
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.engine.spark.NSparkCubingEngine;
+import org.apache.kylin.engine.spark.builder.CreateFlatTable;
+import org.apache.kylin.engine.spark.job.CuboidAggregator;
+import org.apache.kylin.engine.spark.job.NSparkCubingUtil;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.measure.bitmap.BitmapCounter;
+import org.apache.kylin.measure.bitmap.BitmapSerializer;
+import org.apache.kylin.metadata.cube.model.IndexEntity;
+import org.apache.kylin.metadata.cube.model.LayoutEntity;
+import org.apache.kylin.metadata.cube.model.NCubeJoinedFlatTableDesc;
+import org.apache.kylin.metadata.cube.model.NDataLayout;
+import org.apache.kylin.metadata.cube.model.NDataSegment;
+import org.apache.kylin.metadata.cube.model.NDataflow;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
+import org.apache.kylin.metadata.datatype.DataType;
+import org.apache.kylin.metadata.model.MeasureDesc;
+import org.apache.kylin.metadata.model.NDataModel;
+import org.apache.kylin.metadata.model.SegmentRange;
+import org.apache.kylin.storage.StorageFactory;
+import org.apache.spark.api.java.function.MapFunction;
+import org.apache.spark.sql.Dataset;
+import org.apache.spark.sql.Row;
+import org.apache.spark.sql.RowFactory;
+import org.apache.spark.sql.catalyst.encoders.RowEncoder;
+import org.apache.spark.sql.common.SparderQueryTest;
+import org.apache.spark.sql.types.DataTypes;
+import org.apache.spark.sql.types.StructField;
+import org.apache.spark.sql.types.StructType;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.sparkproject.guava.collect.Sets;
+
+import com.google.common.collect.ImmutableBiMap;
+import com.google.common.collect.Lists;
+
+public class NManualBuildAndQueryCuboidTest extends NManualBuildAndQueryTest {
+
+    private static final Logger logger = LoggerFactory.getLogger(NManualBuildAndQueryCuboidTest.class);
+
+    private static final String DEFAULT_PROJECT = "default";
+
+    private static StructType OUT_SCHEMA = null;
+
+    @Before
+    public void setup() throws Exception {
+        super.init();
+        overwriteSystemProp("spark.local", "true");
+        overwriteSystemProp("noBuild", "false");
+        overwriteSystemProp("isDeveloperMode", "false");
+    }
+
+    @After
+    public void after() {
+        NDefaultScheduler.destroyInstance();
+        super.cleanupTestMetadata();
+    }
+
+    @Override
+    public String getProject() {
+        return DEFAULT_PROJECT;
+    }
+
+    @Test
+    public void testBasics() throws Exception {
+        final KylinConfig config = KylinConfig.getInstanceFromEnv();
+        buildCubes();
+        compareCuboidParquetWithSparkSql("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+        compareCuboidParquetWithSparkSql("741ca86a-1f13-46da-a59f-95fb68615e3a");
+    }
+
+    private void compareCuboidParquetWithSparkSql(String dfName) {
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
+
+        NDataflowManager dsMgr = NDataflowManager.getInstance(config, DEFAULT_PROJECT);
+        Assert.assertTrue(config.getHdfsWorkingDirectory().startsWith("file:"));
+        List<NDataLayout> dataLayouts = Lists.newArrayList();
+        NDataflow df = dsMgr.getDataflow(dfName);
+        for (NDataSegment segment : df.getSegments()) {
+            dataLayouts.addAll(segment.getSegDetails().getLayouts());
+        }
+        for (NDataLayout cuboid : dataLayouts) {
+            Set<Integer> rowKeys = cuboid.getLayout().getOrderedDimensions().keySet();
+
+            Dataset<Row> layoutDataset = StorageFactory
+                    .createEngineAdapter(cuboid.getLayout(), NSparkCubingEngine.NSparkCubingStorage.class)
+                    .getFrom(NSparkCubingUtil.getStoragePath(cuboid.getSegDetails().getDataSegment(),
+                            cuboid.getLayoutId()), ss);
+            layoutDataset = layoutDataset.select(NSparkCubingUtil.getColumns(rowKeys, chooseMeas(cuboid)))
+                    .sort(NSparkCubingUtil.getColumns(rowKeys));
+            logger.debug("Query cuboid ------------ " + cuboid.getLayoutId());
+            layoutDataset = dsConvertToOriginal(layoutDataset, cuboid.getLayout());
+            logger.debug(layoutDataset.showString(10, 20, false));
+
+            NDataSegment segment = cuboid.getSegDetails().getDataSegment();
+            Dataset<Row> ds = initFlatTable(dfName, new SegmentRange.TimePartitionedSegmentRange(
+                    segment.getTSRange().getStart(), segment.getTSRange().getEnd()));
+
+            if (cuboid.getLayout().getIndex().getId() < IndexEntity.TABLE_INDEX_START_ID) {
+                ds = queryCuboidLayout(cuboid.getLayout(), ds);
+            }
+
+            Dataset<Row> exceptDs = ds.select(NSparkCubingUtil.getColumns(rowKeys, chooseMeas(cuboid)))
+                    .sort(NSparkCubingUtil.getColumns(rowKeys));
+
+            logger.debug("Spark sql ------------ ");
+            logger.debug(exceptDs.showString(10, 20, false));
+
+            Assert.assertEquals(layoutDataset.count(), exceptDs.count());
+            String msg = SparderQueryTest.checkAnswer(layoutDataset, exceptDs, false);
+            Assert.assertNull(msg);
+        }
+    }
+
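+    // Collects the measure ids of the layout, skipping hllc/topn/percentile measures
+    // that this comparison does not cover.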
+    private Set<Integer> chooseMeas(NDataLayout cuboid) {
+        Set<Integer> meaSet = Sets.newHashSet();
+        for (Map.Entry<Integer, NDataModel.Measure> entry : cuboid.getLayout().getOrderedMeasures().entrySet()) {
+            String funName = entry.getValue().getFunction().getReturnDataType().getName();
+            if (funName.equals("hllc") || funName.equals("topn") || funName.equals("percentile")) {
+                continue;
+            }
+            meaSet.add(entry.getKey());
+        }
+        return meaSet;
+    }
+
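+    // Re-aggregates the flat table with the layout's dimensions and effective measures
+    // via CuboidAggregator to reproduce the expected cuboid content.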
+    private Dataset<Row> queryCuboidLayout(LayoutEntity layout, Dataset<Row> ds) {
+        NCubeJoinedFlatTableDesc tableDesc = new NCubeJoinedFlatTableDesc(layout.getIndex().getIndexPlan());
+        return CuboidAggregator.aggregateJava(ds, layout.getIndex().getEffectiveDimCols().keySet(), //
+                layout.getIndex().getIndexPlan().getEffectiveMeasures(), //
+                tableDesc, true);
+    }
+
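+    // For bitmap measures, deserializes the stored bytes back into a BitmapCounter and
+    // replaces the column value with its distinct count so the datasets can be compared.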
+    private Dataset<Row> dsConvertToOriginal(Dataset<Row> layoutDs, LayoutEntity layout) {
+        ImmutableBiMap<Integer, NDataModel.Measure> orderedMeasures = layout.getOrderedMeasures();
+
+        for (final Map.Entry<Integer, NDataModel.Measure> entry : orderedMeasures.entrySet()) {
+            MeasureDesc measureDesc = entry.getValue();
+            if (measureDesc != null) {
+                final String[] columns = layoutDs.columns();
+                String function = measureDesc.getFunction().getReturnDataType().getName();
+
+                if ("bitmap".equals(function)) {
+                    final int finalIndex = convertOutSchema(layoutDs, entry.getKey().toString(), DataTypes.LongType);
+                    layoutDs = layoutDs.map((MapFunction<Row, Row>) value -> {
+                        Object[] ret = new Object[value.size()];
+                        for (int i = 0; i < columns.length; i++) {
+                            if (i == finalIndex) {
+                                BitmapSerializer serializer = new BitmapSerializer(DataType.ANY);
+                                byte[] bytes = (byte[]) value.get(i);
+                                ByteBuffer buf = ByteBuffer.wrap(bytes);
+                                BitmapCounter bitmapCounter = serializer.deserialize(buf);
+                                ret[i] = bitmapCounter.getCount();
+                            } else {
+                                ret[i] = value.get(i);
+                            }
+                        }
+                        return RowFactory.create(ret);
+                    }, RowEncoder.apply(OUT_SCHEMA));
+                }
+            }
+        }
+        return layoutDs;
+    }
+
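+    // Returns the index of the given field and records an output schema (OUT_SCHEMA)
+    // in which that field is replaced with the given data type.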
+    private Integer convertOutSchema(Dataset<Row> layoutDs, String fieldName,
+            org.apache.spark.sql.types.DataType dataType) {
+        StructField[] structFieldList = layoutDs.schema().fields();
+        String[] columns = layoutDs.columns();
+
+        int index = 0;
+        StructField[] outStructFieldList = new StructField[structFieldList.length];
+        for (int i = 0; i < structFieldList.length; i++) {
+            if (columns[i].equalsIgnoreCase(fieldName)) {
+                index = i;
+                StructField structField = structFieldList[i];
+                outStructFieldList[i] = new StructField(structField.name(), dataType, false, structField.metadata());
+            } else {
+                outStructFieldList[i] = structFieldList[i];
+            }
+        }
+
+        OUT_SCHEMA = new StructType(outStructFieldList);
+
+        return index;
+    }
+
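+    // Generates the joined flat table dataset for the segment range and verifies that every
+    // column id can be mapped back to a column of the model.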
+    private Dataset<Row> initFlatTable(String dfName, SegmentRange segmentRange) {
+        System.out.println(getTestConfig().getMetadataUrl());
+        NDataflowManager dsMgr = NDataflowManager.getInstance(getTestConfig(), DEFAULT_PROJECT);
+        NDataflow df = dsMgr.getDataflow(dfName);
+        NDataModel model = df.getModel();
+
+        NCubeJoinedFlatTableDesc flatTableDesc = new NCubeJoinedFlatTableDesc(df.getIndexPlan(), segmentRange, true);
+        CreateFlatTable flatTable = new CreateFlatTable(flatTableDesc, null, null, ss, null);
+        Dataset<Row> ds = flatTable.generateDataset(false, true);
+
+        StructType schema = ds.schema();
+        for (StructField field : schema.fields()) {
+            Assert.assertNotNull(model.findColumn(model.getColumnNameByColumnId(Integer.parseInt(field.name()))));
+        }
+        return ds;
+    }
+}
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/NManualBuildAndQueryTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/NManualBuildAndQueryTest.java
new file mode 100644
index 0000000000..4879a2f22a
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/NManualBuildAndQueryTest.java
@@ -0,0 +1,285 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.newten;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.RandomUtil;
+import org.apache.kylin.engine.spark.IndexDataConstructor;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.engine.spark.job.NSparkMergingJob;
+import org.apache.kylin.engine.spark.merger.AfterMergeOrRefreshResourceMerger;
+import org.apache.kylin.job.execution.ExecutableState;
+import org.apache.kylin.job.execution.NExecutableManager;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
... 939417 lines suppressed ...