You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kylin.apache.org by xx...@apache.org on 2023/02/27 08:00:40 UTC

[kylin] branch kylin5 updated (b2b3a6a8d2 -> 7fe52b8741)

This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a change to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git


    from b2b3a6a8d2 KYLIN-5387 cost based index planner phase1 (#2089)
     new ca4f1d6000 KYLIN-5445  set epoch_target as primary key of epoch table
     new 4742188bde KYLIN-5445 minor fix isPrimaryKeyExists throw exception when using low version mysql driver
     new 665400c2bc KYLIN-5445 minor fix sonar
     new b5c3fb16d4 KYLIN-5445 fix connection
     new 306af607b2  KYLIN-5445 minor fix log
     new 8be8d54aa1 KYLIN-5446 remove systools module
     new 90e908b69e KYLIN-5447 Support Logical View
     new 22e0a9d8d2 KYLIN-5447 delete ddl pom
     new 7e9495c17c KYLIN-5448 [FOLLOWUP] Update spring-boot version & independent tomcat config
     new c9769b0ca2 KYLIN-5449 refactor common-service kylin-tool
     new 80d5df4df2 KYLIN-5448 fix snyk vulnerabilities, upgrade protobuf-java from 3.16.1 to 3.16.3
     new 5b41bca2ae KYLIN-5450 check if shard by columns included in col orders
     new 857a79edef KYLIN-5449 fix startup issue
     new f25edf0fca KYLIN-5448 fix snyk vulnerabilities, upgrade jettison from 1.1 to 1.5.2
     new f044cdd593 KYLIN-5448 fix snyk vulnerabilities, upgrade netty-codec and netty-codec-haproxy from 4.1.85.Final to 4.1.86.Final
     new 7501e54df7 KYLIN-5448 update spring-boot-admin to 2.6.10
     new b0c2589044 KYLIN-5451 Avoid multiple local ip acquisitions
     new 89b96f4743 KYLIN-5452 Optimise constant queries, add cache for getBindable
     new db624c3e89 KYLIN-5454 Downloading the async query result may cause OOM
     new cf0b7a83d0 KYLIN-5455 Added parameter check items for /api/models/semantic API.  If PartitionDesc is null then set MultiPartitionDesc to null
     new eeaa487d31 KYLIN-5456 fix duplicate key when exporting tds
     new 85efb638f2 KYLIN-5457 Optimize NKylinUserManager and NUserGroupManager
     new 3aff4aa461 KYLIN-5447 wrap logical view response
     new 299bb67077 KYLIN-5458 max dimension combination doesn't work
     new 858d0ae6da KYLIN-5459 Partial Log Governance
     new 823300b39c KYLIN-5449 fix diagnose tool issue
     new 0074b3d29a [DIRTY] fix unstable UT
     new beabe662fd KYLIN-5457 fix user group delete
     new f27ee33528 KYLIN-5460 fix upgrade in resource group
     new 8f7f5eb5ff [DIRTY] fix unstable test
     new 19be14f9e6 KYLIN-5449 fix storage quota issue
     new 1cebc04393 KYLIN-5457 fix ldap authorize user group project permissions
     new a81cecac2c KYLIN-5461 Improve logical view descriptions
     new 7fe52b8741 KYLIN-5450 fix NPE while col_order is not required

The 34 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 build/conf/kylin-server-log4j.xml                  |  48 +-
 pom.xml                                            | 135 +++--
 src/common-server/pom.xml                          |  37 --
 .../kylin/rest/advice/BaseExceptionHandler.java    |  60 +++
 .../rest/config/initialize/BroadcastListener.java  |   5 +
 .../kylin/rest/controller/NAdminController.java    |   4 +-
 .../kylin/rest/controller/NBasicController.java    |  85 +--
 .../kylin/rest/controller/NSystemController.java   |  10 +-
 .../rest/controller/fixture/FixtureController.java |   9 +-
 .../rest/controller/NBasicControllerTest.java      |  11 +-
 src/common-service/pom.xml                         |  15 +-
 .../org/apache/kylin/helper/HelperConstants.java}  |  15 +-
 .../apache/kylin/helper/MetadataToolHelper.java}   | 569 +++++++++------------
 .../org/apache/kylin/helper/RoutineToolHelper.java | 116 +++++
 .../kylin/helper/UpdateUserAclToolHelper.java      |  80 +++
 .../apache/kylin/rest/KylinPrepareEnvListener.java |   3 +
 .../kylin/rest/aspect/SchedulerEnhancer.java       |   2 +-
 .../apache/kylin/rest/broadcaster/Broadcaster.java |   8 +-
 .../apache/kylin/rest/monitor/MonitorReporter.java |  20 +-
 .../org/apache/kylin/rest/request/SQLRequest.java  |   2 +
 .../rest/response/ExecutorMemoryResponse.java      |   0
 .../rest/response/ExecutorThreadInfoResponse.java  |   0
 .../kylin/rest/security/AdminUserAspect.java       |  16 +-
 .../security/FillEmptyAuthorizationFilter.java     |   0
 .../apache/kylin/rest/service/AccessService.java   |  19 +-
 .../kylin/rest/service/AsyncTaskService.java       |   0
 .../apache/kylin/rest/service/AuditLogService.java |  10 +-
 .../service/CaseInsensitiveKylinUserService.java   |  62 +--
 .../kylin/rest/service/KylinUserService.java       |  80 ++-
 .../kylin/rest/service/LdapUserGroupService.java   |  13 +
 .../kylin/rest/service/MetadataBackupService.java  |  49 +-
 .../kylin/rest/service/NUserGroupService.java      |  86 ++--
 .../kylin/rest/service/OpenUserGroupService.java   |  15 +-
 .../apache/kylin/rest/service/ProjectService.java  |   7 +-
 .../apache/kylin/rest/service/SystemService.java   |  36 +-
 .../apache/kylin/rest/service/UserAclService.java  |  11 +-
 .../service/task/QueryHistoryTaskScheduler.java    |   8 +-
 .../org/apache/kylin/tool/HDFSMetadataTool.java    |   4 +
 .../apache/kylin/tool/constant/DiagTypeEnum.java   |   0
 .../org/apache/kylin/tool/constant/StageEnum.java  |   0
 .../org/apache/kylin/tool/daemon/CheckResult.java  |   0
 .../apache/kylin/tool/daemon/CheckStateEnum.java   |   0
 .../apache/kylin/tool/daemon/HealthChecker.java    |   0
 .../kylin/tool/daemon/KapGuardianHATask.java       |   0
 .../kylin/tool/daemon/ServiceOpLevelEnum.java      |   0
 .../java/org/apache/kylin/tool/daemon/Worker.java  |   4 +-
 .../tool/daemon/checker/AbstractHealthChecker.java |   0
 .../tool/daemon/checker/FullGCDurationChecker.java |   0
 .../tool/daemon/checker/KEProcessChecker.java      |   0
 .../kylin/tool/daemon/checker/KEStatusChecker.java |  10 +-
 .../kylin/tool/garbage/ExecutableCleaner.java      |   5 +-
 .../apache/kylin/tool/garbage/GarbageCleaner.java  |   4 -
 .../apache/kylin/tool/garbage/IndexCleaner.java    |   0
 .../apache/kylin/tool/garbage/MetadataCleaner.java |   4 +-
 .../apache/kylin/tool/garbage/SnapshotCleaner.java |   5 +-
 .../kylin/tool/garbage/SourceUsageCleaner.java     |   0
 .../apache/kylin/tool/garbage/StorageCleaner.java  |  88 ++--
 .../tool/kerberos/DelegationTokenManager.java      |   0
 .../kylin/tool/kerberos/KerberosLoginUtil.java     | 164 ++----
 .../java/org/apache/kylin/tool/util/LdapUtils.java |   0
 .../util/ProjectTemporaryTableCleanerHelper.java   |   0
 .../java/org/apache/kylin/tool/util/ToolUtil.java  |  25 +-
 .../CaseInsensitiveKylinUserServiceTest.java       |  12 +-
 .../kylin/rest/service/KylinUserServiceTest.java   |  58 ++-
 .../kylin/rest/service/LdapUserServiceTest.java    |  90 ++--
 .../kylin/rest/service/NUserGroupServiceTest.java  |  40 +-
 .../kylin/rest/service/OpenUserServiceTest.java    |  32 +-
 .../kylin/rest/service/UserAclServiceTest.java     |   5 +-
 .../ProjectTemporaryTableCleanerHelperTest.java    |   0
 .../org/apache/kylin/tool/util/ToolUtilTest.java   |   0
 .../org/apache/kylin/common/KylinConfigBase.java   |  33 +-
 .../java/org/apache/kylin/common/QueryContext.java |   1 +
 .../apache/kylin/common/constant/LogConstant.java} |  20 +-
 .../exception/OutOfMaxCombinationException.java    |   9 +-
 .../kylin/common/exception/ServerErrorCode.java    |   1 +
 .../common/exception/code/ErrorCodeServer.java     |   8 +
 .../org/apache/kylin/common/msg/CnMessage.java     |  39 +-
 .../java/org/apache/kylin/common/msg/Message.java  |  44 +-
 .../kylin/common/persistence/ResourceStore.java    |   1 +
 .../persistence/metadata/JdbcEpochStore.java       |  32 +-
 .../common/persistence/metadata/jdbc/JdbcUtil.java |  26 +
 .../transaction/AbstractAuditLogReplayWorker.java  |  10 +-
 .../transaction/AuditLogReplayWorker.java          |  18 +-
 .../transaction/LogicalViewBroadcastNotifier.java} |  17 +-
 .../common/persistence/transaction/UnitOfWork.java |  52 +-
 .../kylin/common/scheduler/EventBusFactory.java    |  16 +-
 .../org/apache/kylin/common/util/AddressUtil.java  |  20 +-
 .../java/org/apache/kylin/common/util/Pair.java    |   2 +-
 .../kylin/query/exception/BusyQueryException.java  |   0
 .../org/apache/kylin/query/util/QueryLimiter.java  |   2 +-
 .../kylin/rest/exception/ForbiddenException.java   |   4 -
 .../rest/exception/InternalErrorException.java     |   4 -
 .../kylin/rest/exception/NotFoundException.java    |   4 -
 .../rest/exception/UnauthorizedException.java      |   3 -
 .../org/apache/kylin/tool/util/HashFunction.java   |   0
 .../resources/kylin_error_msg_conf_cn.properties   |   8 +-
 .../resources/kylin_error_msg_conf_en.properties   |  10 +
 .../main/resources/kylin_errorcode_conf.properties |   8 +
 .../resources/kylin_errorcode_conf_en.properties   |   1 +
 .../resources/kylin_errorcode_conf_zh.properties   |   1 +
 .../resources/metadata-jdbc-default.properties     |   4 +-
 .../src/main/resources/metadata-jdbc-h2.properties |   4 +-
 .../main/resources/metadata-jdbc-mysql.properties  |   4 +-
 .../resources/metadata-jdbc-postgresql.properties  |   4 +-
 .../apache/kylin/common/KylinConfigBaseTest.java   |  18 +
 .../metadata/epochstore/JdbcEpochStoreTest.java    |  15 +
 .../common/util/NLocalFileMetadataTestCase.java    |  11 +
 .../apache/kylin/tool/util/HashFunctionTest.java   |   0
 .../kylin/job/execution/DefaultExecutable.java     |   2 +
 .../kylin/job/execution/NExecutableManager.java    |   2 -
 .../kylin/job/runners/QuotaStorageCheckRunner.java |   4 +-
 src/core-metadata/pom.xml                          |  11 +-
 .../org/apache/kylin/constants/AclConstants.java}  |  20 +-
 .../metadata/cube/cuboid/CuboidScheduler.java      |   6 +-
 .../metadata/cube/cuboid/KECuboidSchedulerV1.java  |  24 +-
 .../metadata/cube/cuboid/KECuboidSchedulerV2.java  |  21 +-
 .../cube/storage/GarbageStorageCollector.java      |   5 +
 .../cube/storage/ProjectStorageInfoCollector.java  |  16 +-
 .../cube/storage/StorageInfoCollector.java         |   2 +
 .../cube/storage/StorageQuotaCollector.java        |   5 +
 .../cube/storage/TotalStorageCollector.java        |  13 +-
 .../apache/kylin/metadata/epoch/EpochManager.java  | 116 +++--
 .../apache/kylin/metadata/model/ISourceAware.java  |  16 +-
 .../apache/kylin/metadata/model/NDataModel.java    |   4 +-
 .../apache/kylin/metadata/model/PartitionDesc.java |   9 -
 .../org/apache/kylin/metadata/model/TableDesc.java |   4 +
 .../metadata/recommendation/ref/OptRecV2.java      |   4 +-
 .../kylin/metadata/user/NKylinUserManager.java     |  32 +-
 .../metadata/usergroup/NUserGroupManager.java      |  26 +-
 .../apache/kylin/metadata/view/LogicalView.java    |  71 +++
 .../kylin/metadata/view/LogicalViewManager.java    | 137 +++++
 .../apache/kylin/metrics/HdfsCapacityMetrics.java  | 118 +++--
 .../org/apache/kylin/rest/security/AceImpl.java    |  13 +-
 .../kylin/rest/security/AclEntityFactory.java      |  24 +-
 .../apache/kylin/rest/security/AclEntityType.java  |   9 +-
 .../org/apache/kylin/rest/security/AclManager.java |  22 +-
 .../apache/kylin/rest/security/AclPermission.java  |   0
 .../kylin/rest/security/AclPermissionEnum.java     |  28 +-
 .../kylin/rest/security/AclPermissionFactory.java  |  11 +-
 .../org/apache/kylin/rest/security/AclRecord.java  |   6 +-
 .../rest/security/CompositeAclPermission.java      |   0
 .../kylin/rest/security/ExternalAclProvider.java   |  45 +-
 .../kylin/rest/security/KerberosLoginManager.java  |  29 +-
 .../rest/security/KylinAclPermissionEvaluator.java |   0
 .../security/KylinPermissionGrantingStrategy.java  |   0
 .../apache/kylin/rest/security/LegacyAceInfo.java  |   0
 .../kylin/rest/security/MutableAclRecord.java      |   2 +-
 .../rest/security/MutableHttpServletRequest.java   |   0
 .../rest/security/NoneBCryptPasswordEncoder.java   |   0
 .../kylin/rest/security/ObjectIdentityImpl.java    |   9 +-
 .../security/PasswordPlaceholderConfigurer.java    |   7 +-
 .../org/apache/kylin/rest/security/SidInfo.java    |   5 +-
 .../org/apache/kylin/rest/security/UserAcl.java    |   0
 .../apache/kylin/rest/security/UserAclManager.java |   0
 .../kylin/rest/security/UserLockRuleUtil.java      |   5 +-
 .../org/apache/kylin/rest/service/AclService.java  |   0
 .../apache/kylin/rest/util/AclPermissionUtil.java  |   9 +-
 .../java/org/apache/kylin/rest/util/AclUtil.java   |   0
 .../kylin/rest/util/CreateTableFromJson.java       |   2 +-
 .../java/org/apache/kylin/util/DataRangeUtils.java |  96 ++++
 .../cube/model/RuleBasedCuboidDescTest.java        |  56 +-
 .../storage/ProjectStorageInfoCollectorTest.java   |  23 +-
 .../kylin/metadata/epoch/EpochManagerTest.java     | 110 ++++
 .../streaming/StreamingJobRecordManagerTest.java   |   1 -
 .../kylin/metadata/user/NKylinUserManagerTest.java |  71 +++
 .../metadata/usergroup/NUserGroupManagerTest.java  |  44 +-
 .../kylin/metrics/HdfsCapacityMetricsTest.java     |  74 ++-
 .../rest/security/ExternalAclProviderTest.java     |   0
 .../KylinPermissionGrantingStrategyTest.java       |   0
 .../kylin/rest/security/UserAclManagerTest.java    |   0
 .../kylin/rest/util/AclPermissionUtilTest.java     |   0
 .../common/metrics/MetricsInfluxdbReporter.java    |   4 +-
 src/data-loading-server/pom.xml                    |   2 +-
 .../kylin/rest/controller/BaseController.java      |  57 ---
 .../kylin/rest/controller/SampleController.java    |   3 +-
 .../kylin/rest/controller/SegmentController.java   |  11 +-
 .../controller/open/OpenSegmentController.java     |   3 +-
 .../rest/controller/v2/SegmentControllerV2.java    |   3 +-
 .../kylin/rest/controller/BaseControllerTest.java  |  23 +-
 src/data-loading-service/pom.xml                   |  11 +
 .../rest/scheduler/AutoRefreshSnapshotRunner.java  |   8 +-
 .../org/apache/kylin/rest/service/JobService.java  |  30 +-
 .../apache/kylin/rest/service/JobErrorTest.java    |   0
 .../apache/kylin/rest/service/JobServiceTest.java  |  12 +-
 .../kylin/rest/service/MockClusterManager.java     |   0
 .../org/apache/kylin/rest/service/StageTest.java   |   0
 src/datasource-service/pom.xml                     |   9 -
 .../apache/kylin/rest}/ddl/SourceTableCheck.java   |  61 ++-
 .../{ViewDDLRequest.java => ViewRequest.java}      |  19 +-
 .../kylin/rest/response/LogicalViewResponse.java   |  60 +++
 .../apache/kylin/rest/service/SparkDDLService.java | 121 ++++-
 .../apache/kylin/rest/service/TableExtService.java |  43 +-
 .../apache/kylin/rest/service/TableService.java    |   7 +-
 .../org/apache/kylin/rest/ddl/ViewCheck.scala      | 254 +++++++++
 .../apache/kylin/rest/service/SparkDDLTest.java    | 261 ++++++++--
 .../spark/sql/common/SparkDDLTestUtils.scala       |   3 +-
 src/job-service/pom.xml                            |  20 -
 .../apache/kylin/rest/service/ScheduleService.java |  20 +-
 .../config/initialize/SchedulerEventBusTest.java   | 147 +++---
 src/kylin-it/pom.xml                               |   4 +
 .../org/apache/kylin/newten/LogicalViewTest.java   |  91 ++++
 .../kylin/rest/broadcaster/BroadcasterTest.java    |  35 ++
 .../_global/logical_view/LOGICAL_VIEW_TABLE        |   9 +
 .../metadata/_global/project/logical_view.json     |   6 +
 .../451e127a-b684-1474-744b-c9afc14378af.json      |  18 +
 .../451e127a-b684-1474-744b-c9afc14378af.json      |  63 +++
 .../451e127a-b684-1474-744b-c9afc14378af.json      | 149 ++++++
 .../KYLIN_LOGICAL_VIEW.LOGICAL_VIEW_TABLE.json     |  68 +++
 .../metadata/logical_view/table/SSB.CUSTOMER.json  |  68 +++
 src/metadata-server/pom.xml                        |  33 +-
 .../rest/controller/open/OpenModelController.java  |   5 +-
 .../rest/controller/NMetaStoreController.java      |   3 +-
 .../kylin/rest/controller/NModelController.java    |   5 +-
 .../kylin/rest/controller/SparkDDLController.java  |  41 +-
 .../controller/v2/NProjectControllerKylin.java     |   3 +-
 .../rest/controller/IndexPlanControllerTest.java   |  21 +-
 .../rest/controller/SparkDDLControllerTest.java    |  37 +-
 src/modeling-service/pom.xml                       |   8 +-
 .../rest/request/UpdateRuleBasedCuboidRequest.java |   2 +-
 .../kylin/rest/service/IndexPlanService.java       |   4 +
 .../kylin/rest/service/MetaStoreService.java       |  31 +-
 .../apache/kylin/rest/service/ModelTdsService.java |  38 +-
 .../org/apache/kylin/tool/bisync/BISyncModel.java  |   0
 .../kylin/tool/bisync/BISyncModelConverter.java    |   0
 .../org/apache/kylin/tool/bisync/BISyncTool.java   |   0
 .../org/apache/kylin/tool/bisync/SyncContext.java  |   0
 .../apache/kylin/tool/bisync/SyncModelBuilder.java |  12 +-
 .../apache/kylin/tool/bisync/model/ColumnDef.java  |   6 +-
 .../kylin/tool/bisync/model/JoinTreeNode.java      |   3 +-
 .../apache/kylin/tool/bisync/model/MeasureDef.java |   0
 .../apache/kylin/tool/bisync/model/SyncModel.java  |   0
 .../bisync/tableau/TableauDataSourceConverter.java |   2 +-
 .../bisync/tableau/TableauDatasourceModel.java     |   4 +-
 .../tool/bisync/tableau/datasource/Aliases.java    |   0
 .../tool/bisync/tableau/datasource/DrillPath.java  |  13 -
 .../tool/bisync/tableau/datasource/DrillPaths.java |   0
 .../tool/bisync/tableau/datasource/Layout.java     |   0
 .../bisync/tableau/datasource/SemanticValue.java   |   0
 .../tableau/datasource/SemanticValueList.java      |   0
 .../tableau/datasource/TableauConnection.java      |   0
 .../tableau/datasource/TableauDatasource.java      |   0
 .../tableau/datasource/column/Calculation.java     |   0
 .../bisync/tableau/datasource/column/Column.java   |   0
 .../bisync/tableau/datasource/connection/Col.java  |   0
 .../bisync/tableau/datasource/connection/Cols.java |   0
 .../tableau/datasource/connection/Connection.java  |   0
 .../connection/ConnectionCustomization.java        |   0
 .../datasource/connection/NamedConnection.java     |   0
 .../datasource/connection/NamedConnectionList.java |   0
 .../connection/customization/Customization.java    |   0
 .../customization/CustomizationList.java           |   0
 .../connection/customization/Driver.java           |   0
 .../connection/customization/Vendor.java           |   0
 .../datasource/connection/metadata/Attribute.java  |   0
 .../connection/metadata/AttributeList.java         |   0
 .../datasource/connection/metadata/Collation.java  |   0
 .../connection/metadata/MetadataRecord.java        |   0
 .../connection/metadata/MetadataRecordList.java    |   0
 .../datasource/connection/relation/Clause.java     |   0
 .../datasource/connection/relation/Expression.java |   0
 .../datasource/connection/relation/Relation.java   |   0
 .../bisync/tableau/mapping/FunctionMapping.java    |   0
 .../tool/bisync/tableau/mapping/Mappings.java      |   0
 .../tool/bisync/tableau/mapping/TypeMapping.java   |   0
 .../bisync/tds/tableau.connector.template.xml      |   0
 .../main/resources/bisync/tds/tableau.mappings.xml |   0
 .../main/resources/bisync/tds/tableau.template.xml |   0
 .../kylin/rest/service/IndexPlanServiceTest.java   | 148 +++++-
 .../kylin/rest/service/ModelServiceTest.java       |   2 +-
 .../service/ModelTdsServiceColumnNameTest.java     | 128 +++++
 .../kylin/rest/service/ModelTdsServiceTest.java    |  12 +-
 .../kylin/tool/bisync/SyncModelBuilderTest.java    |   0
 .../kylin/tool/bisync/SyncModelTestUtil.java       |   0
 .../tool/bisync/tableau/TableauDatasourceTest.java |   0
 .../bisync_tableau/nmodel_basic_all_cols.tds       |   0
 .../bisync_tableau/nmodel_basic_inner_all_cols.tds |   0
 .../nmodel_full_measure_test.connector.tds         |   2 +-
 .../nmodel_full_measure_test.connector_cc.tds      |   0
 ...nmodel_full_measure_test.connector_cc_admin.tds |   0
 ...del_full_measure_test.connector_hierarchies.tds |   0
 ..._full_measure_test.connector_no_hierarchies.tds |   0
 ...odel_full_measure_test.connector_permission.tds |   0
 ...ure_test.connector_permission_agg_index_col.tds |   0
 ...l_measure_test.connector_permission_all_col.tds |   0
 ...easure_test.connector_permission_no_measure.tds |   0
 ...del_full_measure_test.table_index_connector.tds |   0
 .../bisync_tableau/nmodel_full_measure_test.tds    |   0
 .../metadata/_global/project/test_tds_export.json  |  35 ++
 .../8b6fa01d-1607-9459-81aa-115b9419b830.json      |  93 ++++
 .../8b6fa01d-1607-9459-81aa-115b9419b830.json      |  63 +++
 .../8b6fa01d-1607-9459-81aa-115b9419b830.json      | 262 ++++++++++
 .../test_tds_export/table/SSB.LINEORDER.json       | 113 ++++
 .../test_tds_export/table/SSB.P_LINEORDER.json     | 118 +++++
 .../apache/kylin/query/util/AsyncQueryUtil.java    |   9 +-
 src/query-server/pom.xml                           |  10 +-
 .../rest/controller/NAsyncQueryController.java     |  39 +-
 .../rest/controller/NAsyncQueryControllerV2.java   |  24 +-
 .../kylin/rest/controller/NQueryController.java    |   3 +-
 .../rest/controller/SparkSourceController.java     |   0
 .../rest/controller/NAsyncQueryControllerTest.java |  98 ++--
 .../controller/NAsyncQueryControllerV2Test.java    |  40 +-
 .../rest/controller/SparkSourceControllerTest.java |   0
 src/query-service/pom.xml                          |  51 +-
 .../kylin/rest/request/AsyncQuerySQLRequestV2.java |   3 +
 .../kylin/rest/service/AsyncQueryService.java      |  83 +--
 .../org/apache/kylin/rest/service/CSVWriter.java   | 120 -----
 .../apache/kylin/rest/service/MonitorService.java  |  39 +-
 .../kylin/rest/service/QueryCacheManager.java      |   8 +-
 .../apache/kylin/rest/service/QueryService.java    |   8 +-
 .../apache/kylin/rest/service/XLSXExcelWriter.java | 155 ------
 .../kylin/rest/service/AysncQueryServiceTest.java  | 496 ++++++++++--------
 src/second-storage/core-ui/pom.xml                 |   2 +-
 src/server/pom.xml                                 |   4 +
 .../org/apache/kylin/rest/HAConfigurationTest.java |   6 +-
 .../engine/spark/application/SparkApplication.java |  81 ++-
 .../engine/spark/job/NResourceDetectStep.java      |   4 +-
 .../kylin/engine/spark/job/NSparkCubingStep.java   |   6 +-
 .../kylin/engine/spark/job/NSparkExecutable.java   |  26 +
 .../kylin/engine/spark/job/NSparkMergingStep.java  |   6 +-
 .../spark/job/NSparkSnapshotBuildingStep.java      |   1 +
 .../kylin/engine/spark/job/NTableSamplingJob.java  |   2 +-
 .../kylin/engine/spark/mockup/CsvSource.java       |  19 +-
 .../spark/source/NSparkMetadataExplorer.java       |  30 +-
 .../engine/spark/builder/SegmentFlatTable.scala    |  22 +-
 .../engine/spark/job/RDSegmentBuildExec.scala      |   5 +-
 .../kylin/engine/spark/job/SegmentBuildJob.java    |   2 +
 .../apache/kylin/engine/spark/job/SegmentJob.java  |   4 +-
 .../kylin/engine/spark/job/exec/BuildExec.scala    |   8 +-
 .../kylin/engine/spark/job/exec/MergeExec.scala    |   2 +
 .../kylin/engine/spark/job/exec/SnapshotExec.scala |   2 +
 .../engine/spark/job/exec/TableAnalyzerExec.scala  |   2 +
 .../kylin/engine/spark/job/stage/StageExec.scala   |   2 +
 .../engine/spark/job/stage/WaiteForResource.scala  |   2 +
 .../engine/spark/job/stage/build/BuildDict.scala   |   2 +
 .../engine/spark/job/stage/build/BuildLayer.scala  |   2 +
 .../spark/job/stage/build/CostBasedPlanner.scala   |   2 +
 .../job/stage/build/FlatTableAndDictBase.scala     |   5 +
 .../job/stage/build/GatherFlatTableStats.scala     |   2 +
 .../spark/job/stage/build/GenerateFlatTable.scala  |   2 +
 .../stage/build/MaterializedFactTableView.scala    |   2 +
 .../spark/job/stage/build/RefreshColumnBytes.scala |   2 +
 .../spark/job/stage/build/RefreshSnapshots.scala   |   2 +
 .../stage/build/partition/PartitionBuildDict.scala |   2 +
 .../build/partition/PartitionBuildLayer.scala      |   2 +
 .../partition/PartitionCostBasedPlanner.scala      |   2 +
 .../partition/PartitionGatherFlatTableStats.scala  |   1 +
 .../partition/PartitionGenerateFlatTable.scala     |   2 +
 .../PartitionMaterializedFactTableView.scala       |   2 +
 .../partition/PartitionRefreshColumnBytes.scala    |   2 +
 .../spark/job/stage/merge/MergeColumnBytes.scala   |   2 +
 .../spark/job/stage/merge/MergeFlatTable.scala     |   2 +
 .../spark/job/stage/merge/MergeIndices.scala       |   2 +
 .../partition/PartitionMergeColumnBytes.scala      |   2 +
 .../merge/partition/PartitionMergeFlatTable.scala  |   2 +
 .../merge/partition/PartitionMergeIndices.scala    |   2 +
 .../spark/job/stage/snapshots/SnapshotsBuild.scala |   2 +
 .../job/stage/tablesampling/AnalyzerTable.scala    |   2 +
 .../engine/spark/NLocalWithSparkSessionTest.java   |   5 +-
 .../rest/security/KerberosLoginManagerTest.java    |   0
 .../spark/job/stage/WaiteForResourceTest.scala}    |  20 +-
 .../job/stage/build/RefreshColumnBytesTest.scala}  |  37 +-
 .../job/stage/build/RefreshSnapshotsTest.scala}    |  18 +-
 .../PartitionRefreshColumnBytesTest.scala}         |  30 +-
 .../job/stage/merge/MergeColumnBytesTest.scala}    |  18 +-
 .../spark/job/stage/merge/MergeStageTest.scala     |   2 +
 .../partition/PartitionMergeColumnBytesTest.scala} |  18 +-
 .../org/apache/spark/utils/TestResourceUtils.scala |   6 +
 .../main/java/org/apache}/spark/ddl/DDLCheck.java  |  15 +-
 .../org/apache}/spark/ddl/DDLCheckContext.java     |  31 +-
 .../java/org/apache/spark/ddl/DDLConstant.java}    |  29 +-
 .../org/apache/spark/sql/LogicalViewLoader.java    | 195 +++++++
 .../kylin/query/pushdown/SparkSqlClient.scala      |  51 +-
 .../kylin/query/runtime/plan/ResultPlan.scala      | 214 ++++++--
 .../scala/org/apache/spark/sql/KylinSession.scala  |  22 +-
 .../scala/org/apache/spark/sql/SparderEnv.scala    |  20 +-
 .../common/asyncprofiler/AsyncProfilerUtils.java   |   2 +-
 .../org/apache/kylin/common/CustomUtils.scala}     |  15 +-
 src/spark-project/spark-ddl-plugin/pom.xml         |  77 ---
 .../services/org.apache.kylin.spark.ddl.DDLCheck   |   2 -
 .../org/apache/kylin/spark/ddl/ViewCheck.scala     | 123 -----
 src/systools/pom.xml                               | 115 -----
 src/tool/pom.xml                                   |  14 +-
 .../kylin/tool/AbstractInfoExtractorTool.java      |   2 +-
 .../java/org/apache/kylin/tool/KylinLogTool.java   |   2 +-
 .../java/org/apache/kylin/tool/MetadataTool.java   | 454 ++--------------
 .../java/org/apache/kylin/tool/RollbackTool.java   |  28 +-
 .../daemon/handler/AbstractCheckStateHandler.java  |   4 +-
 .../apache/kylin/tool/routine/FastRoutineTool.java |  15 +-
 .../org/apache/kylin/tool/routine/RoutineTool.java | 118 +----
 .../kylin/tool/upgrade/UpdateUserAclTool.java      |  55 +-
 .../org/apache/kylin/tool/util/MetadataUtil.java   |   9 +-
 .../org/apache/kylin/tool/MetadataToolTest.java    |  53 +-
 .../tool/security/KylinPasswordResetCLITest.java   |   9 +-
 .../kylin/tool/upgrade/UpdateUserAclToolTest.java  |   5 +-
 .../nmodel_full_measure_test.connector.tds         | 125 -----
 395 files changed, 6367 insertions(+), 4034 deletions(-)
 create mode 100644 src/common-server/src/main/java/org/apache/kylin/rest/advice/BaseExceptionHandler.java
 rename src/{common-service => common-server}/src/main/java/org/apache/kylin/rest/controller/NBasicController.java (89%)
 rename src/{common-service/src/test => common-server/src/main}/java/org/apache/kylin/rest/controller/fixture/FixtureController.java (82%)
 rename src/{common-service => common-server}/src/test/java/org/apache/kylin/rest/controller/NBasicControllerTest.java (96%)
 copy src/{systools/src/main/java/org/apache/kylin/rest/security/AclEntityType.java => common-service/src/main/java/org/apache/kylin/helper/HelperConstants.java} (65%)
 copy src/{tool/src/main/java/org/apache/kylin/tool/MetadataTool.java => common-service/src/main/java/org/apache/kylin/helper/MetadataToolHelper.java} (59%)
 create mode 100644 src/common-service/src/main/java/org/apache/kylin/helper/RoutineToolHelper.java
 create mode 100644 src/common-service/src/main/java/org/apache/kylin/helper/UpdateUserAclToolHelper.java
 rename src/{data-loading-service => common-service}/src/main/java/org/apache/kylin/rest/response/ExecutorMemoryResponse.java (100%)
 rename src/{data-loading-service => common-service}/src/main/java/org/apache/kylin/rest/response/ExecutorThreadInfoResponse.java (100%)
 rename src/{systools => common-service}/src/main/java/org/apache/kylin/rest/security/FillEmptyAuthorizationFilter.java (100%)
 rename src/{job-service => common-service}/src/main/java/org/apache/kylin/rest/service/AsyncTaskService.java (100%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/HDFSMetadataTool.java (98%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/constant/DiagTypeEnum.java (100%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/constant/StageEnum.java (100%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/daemon/CheckResult.java (100%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/daemon/CheckStateEnum.java (100%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/daemon/HealthChecker.java (100%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/daemon/KapGuardianHATask.java (100%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/daemon/ServiceOpLevelEnum.java (100%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/daemon/Worker.java (97%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/daemon/checker/AbstractHealthChecker.java (100%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/daemon/checker/FullGCDurationChecker.java (100%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/daemon/checker/KEProcessChecker.java (100%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/daemon/checker/KEStatusChecker.java (96%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/garbage/ExecutableCleaner.java (95%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/garbage/GarbageCleaner.java (94%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/garbage/IndexCleaner.java (100%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/garbage/MetadataCleaner.java (92%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/garbage/SnapshotCleaner.java (96%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/garbage/SourceUsageCleaner.java (100%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/garbage/StorageCleaner.java (92%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/kerberos/DelegationTokenManager.java (100%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/kerberos/KerberosLoginUtil.java (71%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/util/LdapUtils.java (100%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/util/ProjectTemporaryTableCleanerHelper.java (100%)
 rename src/{tool => common-service}/src/main/java/org/apache/kylin/tool/util/ToolUtil.java (90%)
 rename src/{tool => common-service}/src/test/java/org/apache/kylin/tool/util/ProjectTemporaryTableCleanerHelperTest.java (100%)
 rename src/{tool => common-service}/src/test/java/org/apache/kylin/tool/util/ToolUtilTest.java (100%)
 copy src/{query-common/src/main/java/org/apache/kylin/tool/bisync/model/MeasureDef.java => core-common/src/main/java/org/apache/kylin/common/constant/LogConstant.java} (70%)
 copy src/{datasource-service/src/main/java/org/apache/kylin/rest/request/ViewDDLRequest.java => core-common/src/main/java/org/apache/kylin/common/persistence/transaction/LogicalViewBroadcastNotifier.java} (75%)
 rename src/{query => core-common}/src/main/java/org/apache/kylin/query/exception/BusyQueryException.java (100%)
 rename src/{query => core-common}/src/main/java/org/apache/kylin/query/util/QueryLimiter.java (98%)
 rename src/{systools => core-common}/src/main/java/org/apache/kylin/rest/exception/ForbiddenException.java (92%)
 rename src/{systools => core-common}/src/main/java/org/apache/kylin/rest/exception/InternalErrorException.java (93%)
 rename src/{systools => core-common}/src/main/java/org/apache/kylin/rest/exception/NotFoundException.java (92%)
 rename src/{systools => core-common}/src/main/java/org/apache/kylin/rest/exception/UnauthorizedException.java (88%)
 rename src/{tool => core-common}/src/main/java/org/apache/kylin/tool/util/HashFunction.java (100%)
 rename src/{tool => core-common}/src/test/java/org/apache/kylin/tool/util/HashFunctionTest.java (100%)
 copy src/{query-common/src/main/java/org/apache/kylin/tool/bisync/model/MeasureDef.java => core-metadata/src/main/java/org/apache/kylin/constants/AclConstants.java} (65%)
 create mode 100644 src/core-metadata/src/main/java/org/apache/kylin/metadata/view/LogicalView.java
 create mode 100644 src/core-metadata/src/main/java/org/apache/kylin/metadata/view/LogicalViewManager.java
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/AceImpl.java (94%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/AclEntityFactory.java (78%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/AclEntityType.java (81%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/AclManager.java (93%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/AclPermission.java (100%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/AclPermissionEnum.java (70%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/AclPermissionFactory.java (93%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/AclRecord.java (98%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/CompositeAclPermission.java (100%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/ExternalAclProvider.java (78%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/KerberosLoginManager.java (85%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/KylinAclPermissionEvaluator.java (100%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/KylinPermissionGrantingStrategy.java (100%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/LegacyAceInfo.java (100%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/MutableAclRecord.java (98%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/MutableHttpServletRequest.java (100%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/NoneBCryptPasswordEncoder.java (100%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/ObjectIdentityImpl.java (96%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java (96%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/SidInfo.java (96%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/UserAcl.java (100%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/UserAclManager.java (100%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/security/UserLockRuleUtil.java (95%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/service/AclService.java (100%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/util/AclPermissionUtil.java (97%)
 rename src/{systools => core-metadata}/src/main/java/org/apache/kylin/rest/util/AclUtil.java (100%)
 create mode 100644 src/core-metadata/src/main/java/org/apache/kylin/util/DataRangeUtils.java
 create mode 100644 src/core-metadata/src/test/java/org/apache/kylin/metadata/user/NKylinUserManagerTest.java
 rename src/{systools => core-metadata}/src/test/java/org/apache/kylin/rest/security/ExternalAclProviderTest.java (100%)
 rename src/{systools => core-metadata}/src/test/java/org/apache/kylin/rest/security/KylinPermissionGrantingStrategyTest.java (100%)
 rename src/{systools => core-metadata}/src/test/java/org/apache/kylin/rest/security/UserAclManagerTest.java (100%)
 rename src/{systools => core-metadata}/src/test/java/org/apache/kylin/rest/util/AclPermissionUtilTest.java (100%)
 rename src/{job-service => data-loading-service}/src/test/java/org/apache/kylin/rest/service/JobErrorTest.java (100%)
 rename src/{job-service => data-loading-service}/src/test/java/org/apache/kylin/rest/service/JobServiceTest.java (99%)
 rename src/{job-service => data-loading-service}/src/test/java/org/apache/kylin/rest/service/MockClusterManager.java (100%)
 rename src/{job-service => data-loading-service}/src/test/java/org/apache/kylin/rest/service/StageTest.java (100%)
 rename src/{spark-project/spark-ddl-plugin/src/main/java/org/apache/kylin/spark => datasource-service/src/main/java/org/apache/kylin/rest}/ddl/SourceTableCheck.java (54%)
 rename src/datasource-service/src/main/java/org/apache/kylin/rest/request/{ViewDDLRequest.java => ViewRequest.java} (72%)
 create mode 100644 src/datasource-service/src/main/java/org/apache/kylin/rest/response/LogicalViewResponse.java
 create mode 100644 src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala
 create mode 100644 src/kylin-it/src/test/java/org/apache/kylin/newten/LogicalViewTest.java
 create mode 100644 src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/_global/logical_view/LOGICAL_VIEW_TABLE
 create mode 100644 src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/_global/project/logical_view.json
 create mode 100644 src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/dataflow/451e127a-b684-1474-744b-c9afc14378af.json
 create mode 100644 src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/index_plan/451e127a-b684-1474-744b-c9afc14378af.json
 create mode 100644 src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/model_desc/451e127a-b684-1474-744b-c9afc14378af.json
 create mode 100644 src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/table/KYLIN_LOGICAL_VIEW.LOGICAL_VIEW_TABLE.json
 create mode 100644 src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/table/SSB.CUSTOMER.json
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/BISyncModel.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/BISyncModelConverter.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/BISyncTool.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/SyncContext.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/SyncModelBuilder.java (97%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/model/ColumnDef.java (90%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/model/JoinTreeNode.java (96%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/model/MeasureDef.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/model/SyncModel.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDataSourceConverter.java (99%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDatasourceModel.java (95%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/Aliases.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/DrillPath.java (80%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/DrillPaths.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/Layout.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/SemanticValue.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/SemanticValueList.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/TableauConnection.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/TableauDatasource.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/column/Calculation.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/column/Column.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Col.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Cols.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Connection.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/ConnectionCustomization.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/NamedConnection.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/NamedConnectionList.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/Customization.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/CustomizationList.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/Driver.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/Vendor.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/Attribute.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/AttributeList.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/Collation.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/MetadataRecord.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/MetadataRecordList.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/relation/Clause.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/relation/Expression.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/relation/Relation.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/mapping/FunctionMapping.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/mapping/Mappings.java (100%)
 rename src/{query-common => modeling-service}/src/main/java/org/apache/kylin/tool/bisync/tableau/mapping/TypeMapping.java (100%)
 rename src/{tool => modeling-service}/src/main/resources/bisync/tds/tableau.connector.template.xml (100%)
 rename src/{tool => modeling-service}/src/main/resources/bisync/tds/tableau.mappings.xml (100%)
 rename src/{tool => modeling-service}/src/main/resources/bisync/tds/tableau.template.xml (100%)
 create mode 100644 src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelTdsServiceColumnNameTest.java
 rename src/{tool => modeling-service}/src/test/java/org/apache/kylin/tool/bisync/SyncModelBuilderTest.java (100%)
 rename src/{tool => modeling-service}/src/test/java/org/apache/kylin/tool/bisync/SyncModelTestUtil.java (100%)
 rename src/{tool => modeling-service}/src/test/java/org/apache/kylin/tool/bisync/tableau/TableauDatasourceTest.java (100%)
 rename src/{tool => modeling-service}/src/test/resources/bisync_tableau/nmodel_basic_all_cols.tds (100%)
 rename src/{tool => modeling-service}/src/test/resources/bisync_tableau/nmodel_basic_inner_all_cols.tds (100%)
 rename src/{tool => modeling-service}/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_cc.tds (100%)
 rename src/{tool => modeling-service}/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_cc_admin.tds (100%)
 rename src/{tool => modeling-service}/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_hierarchies.tds (100%)
 rename src/{tool => modeling-service}/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_no_hierarchies.tds (100%)
 rename src/{tool => modeling-service}/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission.tds (100%)
 rename src/{tool => modeling-service}/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission_agg_index_col.tds (100%)
 rename src/{tool => modeling-service}/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission_all_col.tds (100%)
 rename src/{tool => modeling-service}/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission_no_measure.tds (100%)
 rename src/{tool => modeling-service}/src/test/resources/bisync_tableau/nmodel_full_measure_test.table_index_connector.tds (100%)
 rename src/{tool => modeling-service}/src/test/resources/bisync_tableau/nmodel_full_measure_test.tds (100%)
 create mode 100644 src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/_global/project/test_tds_export.json
 create mode 100644 src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/dataflow/8b6fa01d-1607-9459-81aa-115b9419b830.json
 create mode 100644 src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/index_plan/8b6fa01d-1607-9459-81aa-115b9419b830.json
 create mode 100644 src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/model_desc/8b6fa01d-1607-9459-81aa-115b9419b830.json
 create mode 100644 src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/table/SSB.LINEORDER.json
 create mode 100644 src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/table/SSB.P_LINEORDER.json
 rename src/{common-server => query-server}/src/main/java/org/apache/kylin/rest/controller/SparkSourceController.java (100%)
 rename src/{common-server => query-server}/src/test/java/org/apache/kylin/rest/controller/SparkSourceControllerTest.java (100%)
 delete mode 100644 src/query-service/src/main/java/org/apache/kylin/rest/service/CSVWriter.java
 delete mode 100644 src/query-service/src/main/java/org/apache/kylin/rest/service/XLSXExcelWriter.java
 rename src/{systools => spark-project/engine-spark}/src/test/java/org/apache/kylin/rest/security/KerberosLoginManagerTest.java (100%)
 copy src/spark-project/engine-spark/src/{main/scala/org/apache/kylin/engine/spark/job/stage/snapshots/SnapshotsBuild.scala => test/scala/org/apache/kylin/engine/spark/job/stage/WaiteForResourceTest.scala} (62%)
 copy src/spark-project/engine-spark/src/{main/scala/org/apache/kylin/engine/spark/job/stage/build/GatherFlatTableStats.scala => test/scala/org/apache/kylin/engine/spark/job/stage/build/RefreshColumnBytesTest.scala} (52%)
 copy src/spark-project/engine-spark/src/{main/scala/org/apache/kylin/engine/spark/job/stage/build/BuildLayer.scala => test/scala/org/apache/kylin/engine/spark/job/stage/build/RefreshSnapshotsTest.scala} (69%)
 copy src/spark-project/engine-spark/src/{main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionGenerateFlatTable.scala => test/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionRefreshColumnBytesTest.scala} (51%)
 copy src/spark-project/engine-spark/src/{main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytes.scala => test/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytesTest.scala} (61%)
 copy src/spark-project/engine-spark/src/{main/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeColumnBytes.scala => test/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeColumnBytesTest.scala} (59%)
 rename src/spark-project/{spark-ddl-plugin/src/main/java/org/apache/kylin => sparder/src/main/java/org/apache}/spark/ddl/DDLCheck.java (77%)
 rename src/spark-project/{spark-ddl-plugin/src/main/java/org/apache/kylin => sparder/src/main/java/org/apache}/spark/ddl/DDLCheckContext.java (54%)
 copy src/spark-project/{engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/exec/TableAnalyzerExec.scala => sparder/src/main/java/org/apache/spark/ddl/DDLConstant.java} (58%)
 create mode 100644 src/spark-project/sparder/src/main/java/org/apache/spark/sql/LogicalViewLoader.java
 copy src/{core-common/src/main/java/org/apache/kylin/common/exception/OutOfMaxCombinationException.java => spark-project/spark-common/src/main/scala/org/apache/kylin/common/CustomUtils.scala} (74%)
 delete mode 100644 src/spark-project/spark-ddl-plugin/pom.xml
 delete mode 100644 src/spark-project/spark-ddl-plugin/src/main/resources/META-INF/services/org.apache.kylin.spark.ddl.DDLCheck
 delete mode 100644 src/spark-project/spark-ddl-plugin/src/main/scala/org/apache/kylin/spark/ddl/ViewCheck.scala
 delete mode 100644 src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector.tds


[kylin] 04/34: KYLIN-5445 fix connection

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit b5c3fb16d441008c71dc56a56dff01419640deda
Author: jiawei.li <10...@qq.com>
AuthorDate: Sun Jan 8 10:41:54 2023 +0800

    KYLIN-5445 fix connection
---
 .../common/persistence/metadata/jdbc/JdbcUtil.java | 25 ++++++++++++----------
 1 file changed, 14 insertions(+), 11 deletions(-)

diff --git a/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java b/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java
index 5c24313fb2..1bd3883af8 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java
@@ -123,20 +123,23 @@ public class JdbcUtil {
     }
 
     private static boolean isPrimaryKeyExists(Connection conn, String... tables) throws SQLException {
-
-        for (String table : tables) {
-            try {
-                val resultSet = conn.getMetaData().getPrimaryKeys(conn.getCatalog(), conn.getSchema(), table);
-                if (resultSet.next()) {
-                    return true;
+        try {
+            for (String table : tables) {
+                try {
+                    val resultSet = conn.getMetaData().getPrimaryKeys(conn.getCatalog(), conn.getSchema(), table);
+                    if (resultSet.next()) {
+                        return true;
+                    }
+                } catch (Exception e) {
+                    log.warn("get primary key from table {} failed", table, e);
                 }
-            } catch (Exception e) {
-                log.warn("get primary key from table {} failed", table, e);
+            }
+        } finally {
+            if (!conn.isClosed()) {
+                conn.close();
             }
         }
-        if (!conn.isClosed()) {
-            conn.close();
-        }
+
         return false;
 
     }


[kylin] 30/34: [DIRTY] fix unstable test

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 8f7f5eb5ff8b93f50a13ae28ce27673b21808a5b
Author: qianhao.zhou <z....@gmail.com>
AuthorDate: Thu Jan 5 18:48:29 2023 +0800

    [DIRTY] fix unstable test
    
    Co-authored-by: qhzhou <qi...@kyligence.io>
---
 .../src/main/java/org/apache/kylin/helper/MetadataToolHelper.java      | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/common-service/src/main/java/org/apache/kylin/helper/MetadataToolHelper.java b/src/common-service/src/main/java/org/apache/kylin/helper/MetadataToolHelper.java
index 2804bdc4c3..7ca9d6fd86 100644
--- a/src/common-service/src/main/java/org/apache/kylin/helper/MetadataToolHelper.java
+++ b/src/common-service/src/main/java/org/apache/kylin/helper/MetadataToolHelper.java
@@ -135,7 +135,8 @@ public class MetadataToolHelper {
                     + "_backup";
         }
         String backupPath = StringUtils.appendIfMissing(path, "/") + folder;
-        logger.info("The metadata backup path is {}}", backupPath);
+        //FIXME should replace printf with Logger while Logger MUST print this message to console, because test depends on it
+        System.out.printf(Locale.ROOT, "The metadata backup path is %s.%n", backupPath);
         val backupMetadataUrl = getMetadataUrl(backupPath, compress, kylinConfig);
         val backupConfig = KylinConfig.createKylinConfig(kylinConfig);
         backupConfig.setMetadataUrl(backupMetadataUrl);


[kylin] 02/34: KYLIN-5445 minor fix isPrimaryKeyExists throw exception when using low version mysql driver

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 4742188bdee5c4300fb810b4cbdb2622123a6305
Author: Jiawei Li <10...@qq.com>
AuthorDate: Thu Jan 5 09:40:57 2023 +0800

    KYLIN-5445 minor fix isPrimaryKeyExists throw exception when using low version mysql driver
---
 .../kylin/common/persistence/metadata/jdbc/JdbcUtil.java   | 14 +++++++++-----
 1 file changed, 9 insertions(+), 5 deletions(-)

diff --git a/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java b/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java
index a4bc8e4221..0fa58ed773 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java
@@ -125,12 +125,16 @@ public class JdbcUtil {
     private static boolean isPrimaryKeyExists(Connection conn, String... tables) throws SQLException {
         try {
             for (String table : tables) {
-                val resultSet = conn.getMetaData().getPrimaryKeys(conn.getCatalog(), conn.getSchema(), table);
-                if (resultSet.next()) {
-                    return true;
+                try {
+                    val resultSet = conn.getMetaData().getPrimaryKeys(conn.getCatalog(), conn.getSchema(), table);
+                    if (resultSet.next()) {
+                        return true;
+                    }
+                } catch (Exception e) {
+                    log.warn("get primary key from table {} failed", table, e);
                 }
             }
-            
+
             return false;
         } catch (Exception e) {
             logger.error("Fail to know if table {} primary key exists", tables, e);
@@ -157,7 +161,7 @@ public class JdbcUtil {
                     }
                 }
             }
-        } catch (Exception e) {
+        } catch (SQLException e) {
             logger.error("Fail to know if table {} index {} exists", tables, index, e);
         } finally {
             if (!conn.isClosed())


[kylin] 33/34: KYLIN-5461 Improve logical view descriptions

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit a81cecac2c0fd775777df64dfe64ed7546ef6088
Author: ChenLiang.Lu <31...@users.noreply.github.com>
AuthorDate: Fri Jan 6 16:54:42 2023 +0800

    KYLIN-5461 Improve logical view descriptions
---
 .../apache/kylin/rest/ddl/SourceTableCheck.java    | 20 ++++++++++-----
 .../kylin/rest/response/LogicalViewResponse.java   |  8 +++++-
 .../apache/kylin/rest/service/SparkDDLService.java | 27 +++++++++++++-------
 .../apache/kylin/rest/service/TableExtService.java |  3 ++-
 .../org/apache/kylin/rest/ddl/ViewCheck.scala      | 29 ++++++++++++++--------
 .../apache/kylin/rest/service/SparkDDLTest.java    |  2 +-
 .../spark/source/NSparkMetadataExplorer.java       | 14 ++++++++---
 .../tool/security/KylinPasswordResetCLITest.java   |  8 +++---
 8 files changed, 74 insertions(+), 37 deletions(-)

diff --git a/src/datasource-service/src/main/java/org/apache/kylin/rest/ddl/SourceTableCheck.java b/src/datasource-service/src/main/java/org/apache/kylin/rest/ddl/SourceTableCheck.java
index 2ad1223719..c8acb0a65f 100644
--- a/src/datasource-service/src/main/java/org/apache/kylin/rest/ddl/SourceTableCheck.java
+++ b/src/datasource-service/src/main/java/org/apache/kylin/rest/ddl/SourceTableCheck.java
@@ -17,6 +17,8 @@
  */
 package org.apache.kylin.rest.ddl;
 
+import static org.apache.spark.ddl.DDLConstant.SOURCE_TABLE_RULE_PRIORITY;
+
 import java.util.List;
 import java.util.stream.Collectors;
 
@@ -38,16 +40,22 @@ import lombok.val;
 
 import scala.collection.Seq;
 
-import static org.apache.spark.ddl.DDLConstant.SOURCE_TABLE_RULE_PRIORITY;
-
 public class SourceTableCheck implements DDLCheck {
 
   @Override
   public String[] description(String project, String pageType) {
-    return new String[] {
-        "The source table used to define the view needs to be loaded into the data source already",
-        "定义 view 用到的来源表需要已经加载到数据源"
-    };
+    if ("hive".equalsIgnoreCase(pageType)) {
+      return new String[] {
+          "The source table used to define the view needs to be loaded into the data source already",
+          "定义 view 用到的来源表需要已经加载到数据源"
+      };
+    } else {
+      return new String[] {
+          "The source tables in Logical View  should already be loaded into the project data source."
+              + "Users can only load Logical View created in the same project into the data source",
+          "定义 Logical View 用到的来源表需要已经加载到数据源,且用户仅能加载同一项目下创建的Logical View"
+      };
+    }
   }
 
   @Override
diff --git a/src/datasource-service/src/main/java/org/apache/kylin/rest/response/LogicalViewResponse.java b/src/datasource-service/src/main/java/org/apache/kylin/rest/response/LogicalViewResponse.java
index 2343207aa3..0691fb18a7 100644
--- a/src/datasource-service/src/main/java/org/apache/kylin/rest/response/LogicalViewResponse.java
+++ b/src/datasource-service/src/main/java/org/apache/kylin/rest/response/LogicalViewResponse.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.rest.response;
 
 import org.apache.kylin.metadata.view.LogicalView;
+import org.jetbrains.annotations.NotNull;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonProperty;
@@ -32,7 +33,7 @@ import lombok.NoArgsConstructor;
 @NoArgsConstructor
 @JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE,
     isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
-public class LogicalViewResponse {
+public class LogicalViewResponse implements Comparable<LogicalViewResponse> {
   @JsonProperty("table_name")
   private String tableName;
 
@@ -51,4 +52,9 @@ public class LogicalViewResponse {
     this.modifiedUser = view.getModifiedUser();
     this.createdProject = view.getCreatedProject();
   }
+
+  @Override
+  public int compareTo(@NotNull LogicalViewResponse o) {
+    return this.getTableName().compareTo(o.getTableName());
+  }
 }
diff --git a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java
index ef76e21e55..e8f2341770 100644
--- a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java
+++ b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java
@@ -110,8 +110,8 @@ public class SparkDDLService extends BasicService {
     List<String> descriptionCN = Lists.newArrayList();
     for (DDLCheck checker : ddlChecks) {
       String[] description = checker.description(project, pageType);
-      descriptionEN.addAll(Arrays.asList(description[0].split("\n")));
-      descriptionCN.addAll(Arrays.asList(description[1].split("\n")));
+      descriptionEN.addAll(Arrays.asList(description[0].split("\t")));
+      descriptionCN.addAll(Arrays.asList(description[1].split("\t")));
     }
     return Lists.newArrayList(descriptionEN, descriptionCN);
   }
@@ -143,13 +143,22 @@ public class SparkDDLService extends BasicService {
           .filter(table -> table.getTableName().toLowerCase().contains(tableName.toLowerCase()))
           .collect(Collectors.toList());
     }
-    List<LogicalViewResponse> viewResponses =
-        logicalViews.stream().map(LogicalViewResponse::new).collect(Collectors.toList());
-    viewResponses.forEach(table -> {
-      if (!table.getCreatedProject().equalsIgnoreCase(project)) {
-        table.setCreatedSql("***");
-      }
-    });
+    List<LogicalViewResponse> viewResponses = Lists.newArrayList();
+    List<LogicalViewResponse> viewResponsesInProject =
+        logicalViews.stream()
+            .filter(table -> table.getCreatedProject().equalsIgnoreCase(project))
+            .map(LogicalViewResponse::new)
+            .collect(Collectors.toList());
+    List<LogicalViewResponse> viewResponsesNotInProject =
+        logicalViews.stream()
+            .filter(table -> !table.getCreatedProject().equalsIgnoreCase(project))
+            .map(LogicalViewResponse::new)
+            .collect(Collectors.toList());
+    viewResponsesNotInProject.forEach(table -> table.setCreatedSql("***"));
+    Collections.sort(viewResponsesInProject);
+    Collections.sort(viewResponsesNotInProject);
+    viewResponses.addAll(viewResponsesInProject);
+    viewResponses.addAll(viewResponsesNotInProject);
     return viewResponses;
   }
 }
\ No newline at end of file
diff --git a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableExtService.java b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableExtService.java
index aabb60b613..cb13bf5944 100644
--- a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableExtService.java
+++ b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableExtService.java
@@ -138,6 +138,7 @@ public class TableExtService extends BasicService {
             canLoadTables.addAll(toLoadTables);
             return;
         }
+        String viewDB = config.getDDLLogicalViewDB();
         LogicalViewManager viewManager = LogicalViewManager.getInstance(config);
         toLoadTables.stream()
             .filter(table -> !table.getFirst().isLogicalView())
@@ -151,7 +152,7 @@ public class TableExtService extends BasicService {
                 if (logicalTable != null && viewProject.equalsIgnoreCase(project)) {
                     canLoadTables.add(table);
                 } else {
-                    tableResponse.getFailed().add(tableName);
+                    tableResponse.getFailed().add(viewDB + "." + tableName);
                 }
             });
     }
diff --git a/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala b/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala
index 879283b427..f86f2e0267 100644
--- a/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala
+++ b/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala
@@ -53,24 +53,31 @@ class ViewCheck extends DDLCheck {
     if ("hive".equalsIgnoreCase(pageType)) {
       databasesHasAccess.append(listAllDatabasesHasAccess(project))
       syntaxSupport.append("`create view`,`alter view`,`drop view`,`show create table`")
-      cnDescription.append("Hive View 名称需要以 `KE_` 开头\n")
-      enDescription.append("Hive View name should start with `KE_`\n")
+      cnDescription.append("Hive View 名称需要以 `KE_` 开头\t")
+      enDescription.append("Hive View name should start with `KE_`\t")
       cnDescription
-        .append(s"仅支持 ${syntaxSupport} 语法\n")
+        .append(s"仅支持 ${syntaxSupport} 语法\t")
       enDescription
-        .append(s"Only supports ${syntaxSupport} syntax\n")
-      cnDescription.append(s"仅支持创建 Hive View 在如下数据库: ${databasesHasAccess}\n")
-      enDescription.append(s"Only supports creating Hive Views in ${databasesHasAccess}\n")
+        .append(s"Only supports ${syntaxSupport} syntax\t")
+      cnDescription.append(s"仅支持创建 Hive View 在如下数据库: ${databasesHasAccess}\t")
+      enDescription.append(s"Only supports creating Hive Views in ${databasesHasAccess}\t")
     } else {
       cnDescription.append(s"创建不要加 database 名称,系统自动创建到 ${config.getDDLLogicalViewDB} 库中,"
-        + s"删除要加 ${config.getDDLLogicalViewDB} 库名称 \n")
-      enDescription.append(s"Creating does not require adding database, it is automatically created in"
-        + s" ${config.getDDLLogicalViewDB} , deleting should add ${config.getDDLLogicalViewDB} database\n")
+        + s"删除要加 ${config.getDDLLogicalViewDB} 库名称 \t")
+      enDescription.append(s"When creating a new Logical View,please do not use database name,it will be automatically"
+        + s" created in ${config.getDDLLogicalViewDB} database. When dropping a Logical View,"
+        + s"please add ${config.getDDLLogicalViewDB} database name in SQL.\t")
       syntaxSupport.append(" `create logical view`, `drop logical view` ")
       cnDescription
-        .append(s"仅支持 ${syntaxSupport} 语法\n")
+        .append(s"仅支持 ${syntaxSupport} 语法\t")
       enDescription
-        .append(s"Only supports ${syntaxSupport} syntax\n")
+        .append(s"Only ${syntaxSupport} SQL sentences are allowed to execute\t")
+      cnDescription
+        .append(s"操作举例:\n创建视图:CREATE LOGICAL VIEW your_logical_view AS select * from your_loaded_table\n"
+          + s"删除视图:DROP LOGICAL VIEW ${config.getDDLLogicalViewDB}.your_logical_view")
+      enDescription
+        .append(s"Operation Examples:\nCreate:CREATE LOGICAL VIEW your_logical_view AS select * from your_loaded_table"
+          + s";\nDrop:DROP LOGICAL VIEW ${config.getDDLLogicalViewDB}.your_logical_view;")
     }
 
 
diff --git a/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java b/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java
index 80e9a5f114..c0cf1de847 100644
--- a/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java
+++ b/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java
@@ -281,7 +281,7 @@ public class SparkDDLTest extends NLocalFileMetadataTestCase {
     Assert.assertEquals(4, description.get(0).size());
 
     description = ddlService.pluginsDescription("ssb", "logic");
-    Assert.assertEquals(3, description.get(0).size());
+    Assert.assertEquals(4, description.get(0).size());
 
     // view list in project
     List<LogicalViewResponse> logicalViewsInProject = ddlService.listAll("ssb", "");
diff --git a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkMetadataExplorer.java b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkMetadataExplorer.java
index c9543e11e0..c037c37bfe 100644
--- a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkMetadataExplorer.java
+++ b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkMetadataExplorer.java
@@ -17,6 +17,8 @@
  */
 package org.apache.kylin.engine.spark.source;
 
+import static org.apache.kylin.common.exception.ServerErrorCode.DDL_CHECK_ERROR;
+
 import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
@@ -41,11 +43,12 @@ import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.common.util.RandomUtil;
 import org.apache.kylin.metadata.model.ColumnDesc;
 import org.apache.kylin.metadata.model.ISourceAware;
+import org.apache.kylin.metadata.model.NTableMetadataManager;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.TableExtDesc;
 import org.apache.kylin.source.ISampleDataDeployer;
 import org.apache.kylin.source.ISourceMetadataExplorer;
-import org.apache.kylin.metadata.model.NTableMetadataManager;
+
 import org.apache.spark.sql.AnalysisException;
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
@@ -54,6 +57,7 @@ import org.apache.spark.sql.SparkSession;
 import org.apache.spark.sql.catalog.Database;
 import org.apache.spark.sql.catalyst.catalog.CatalogTableType;
 import org.apache.spark.sql.internal.SQLConf;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -61,7 +65,6 @@ import com.clearspring.analytics.util.Lists;
 import com.google.common.collect.Sets;
 
 import lombok.val;
-import static org.apache.kylin.common.exception.ServerErrorCode.DDL_CHECK_ERROR;
 
 public class NSparkMetadataExplorer implements ISourceMetadataExplorer, ISampleDataDeployer, Serializable {
 
@@ -104,12 +107,15 @@ public class NSparkMetadataExplorer implements ISourceMetadataExplorer, ISampleD
         if (KylinConfig.getInstanceFromEnv().isDDLLogicalViewEnabled()) {
             String logicalViewDB = KylinConfig.getInstanceFromEnv().getDDLLogicalViewDB();
             databases.forEach(db -> {
-                if(db.equalsIgnoreCase(logicalViewDB)){
+                if (db.equalsIgnoreCase(logicalViewDB)) {
                     throw new KylinException(DDL_CHECK_ERROR, "Logical view database should not be duplicated "
                         + "with normal hive database!!!");
                 }
             });
-            databases.add(logicalViewDB);
+            List<String> databasesWithLogicalDB = Lists.newArrayList();
+            databasesWithLogicalDB.add(logicalViewDB);
+            databasesWithLogicalDB.addAll(databases);
+            databases = databasesWithLogicalDB;
         }
         return databases;
     }
diff --git a/src/tool/src/test/java/org/apache/kylin/tool/security/KylinPasswordResetCLITest.java b/src/tool/src/test/java/org/apache/kylin/tool/security/KylinPasswordResetCLITest.java
index 6e677dd0e4..5b067f91fb 100644
--- a/src/tool/src/test/java/org/apache/kylin/tool/security/KylinPasswordResetCLITest.java
+++ b/src/tool/src/test/java/org/apache/kylin/tool/security/KylinPasswordResetCLITest.java
@@ -27,9 +27,9 @@ import java.nio.charset.Charset;
 import org.apache.commons.dbcp2.BasicDataSourceFactory;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.ResourceStore;
-import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.common.persistence.metadata.jdbc.AuditLogRowMapper;
 import org.apache.kylin.common.util.LogOutputTestCase;
+import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.metadata.user.ManagedUser;
 import org.apache.kylin.metadata.user.NKylinUserManager;
 import org.apache.kylin.tool.garbage.StorageCleaner;
@@ -65,7 +65,6 @@ public class KylinPasswordResetCLITest extends LogOutputTestCase {
 
     @Test
     public void testResetAdminPassword() throws Exception {
-        overwriteSystemProp("kylin.metadata.random-admin-password.enabled", "true");
         val pwdEncoder = new BCryptPasswordEncoder();
         overwriteSystemProp("kylin.security.user-password-encoder", pwdEncoder.getClass().getName());
         overwriteSystemProp("kylin.metadata.random-admin-password.enabled", "true");
@@ -93,9 +92,10 @@ public class KylinPasswordResetCLITest extends LogOutputTestCase {
         val afterManager = NKylinUserManager.getInstance(config);
 
         Assert.assertFalse(pwdEncoder.matches("KYLIN", afterManager.get(user.getUsername()).getPassword()));
+        Assert.assertTrue(output.toString(Charset.defaultCharset().name()).startsWith("The metadata backup path is"));
         Assert.assertTrue(output.toString(Charset.defaultCharset().name())
-                .startsWith(StorageCleaner.ANSI_RED + "Reset password of [" + StorageCleaner.ANSI_RESET + "ADMIN"
-                        + StorageCleaner.ANSI_RED + "] succeed. The password is "));
+            .contains(StorageCleaner.ANSI_RED + "Reset password of [" + StorageCleaner.ANSI_RESET + "ADMIN"
+                + StorageCleaner.ANSI_RED + "] succeed. The password is "));
         Assert.assertTrue(output.toString(Charset.defaultCharset().name())
                 .endsWith("Please keep the password properly." + StorageCleaner.ANSI_RESET + "\n"));
 


[kylin] 32/34: KYLIN-5457 fix ldap authorize user group project permissions

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 1cebc04393638587e22631016ceb3cbf9bf6adba
Author: Jiale He <35...@users.noreply.github.com>
AuthorDate: Thu Jan 5 18:49:21 2023 +0800

    KYLIN-5457 fix ldap authorize user group project permissions
    
    * KYLIN-5457 fix ldap authorize user group project permissions
    * KYLIN-5457 fix OpenUserGroupService
---
 .../kylin/rest/service/LdapUserGroupService.java   | 13 ++++
 .../kylin/rest/service/OpenUserGroupService.java   | 15 +++-
 .../kylin/rest/service/LdapUserServiceTest.java    | 79 ++++++++++++----------
 .../kylin/rest/service/OpenUserServiceTest.java    | 25 ++++++-
 4 files changed, 92 insertions(+), 40 deletions(-)

diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/LdapUserGroupService.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/LdapUserGroupService.java
index de41aff9de..47a1dd17c4 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/LdapUserGroupService.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/LdapUserGroupService.java
@@ -233,4 +233,17 @@ public class LdapUserGroupService extends NUserGroupService {
     public String getUuidByGroupName(String groupName) {
         return groupName;
     }
+
+    @Override
+    public boolean exists(String name) {
+        return getAllUserGroups().contains(name);
+    }
+
+    @Override
+    public Set<String> listUserGroups(String username) {
+        return getAllUserGroups().stream()
+                .filter(group -> getGroupMembersByName(group).stream()
+                        .anyMatch(user -> StringUtils.equalsIgnoreCase(username, user.getUsername())))
+                .collect(Collectors.toSet());
+    }
 }
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/OpenUserGroupService.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/OpenUserGroupService.java
index cb751efde3..d26e91e85d 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/OpenUserGroupService.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/OpenUserGroupService.java
@@ -20,11 +20,12 @@ package org.apache.kylin.rest.service;
 
 import java.util.List;
 import java.util.Locale;
+import java.util.Set;
 import java.util.stream.Collectors;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.kylin.common.msg.MsgPicker;
 import org.apache.kylin.common.annotation.ThirdPartyDependencies;
+import org.apache.kylin.common.msg.MsgPicker;
 import org.apache.kylin.metadata.user.ManagedUser;
 import org.apache.kylin.metadata.usergroup.UserGroup;
 
@@ -80,4 +81,16 @@ public abstract class OpenUserGroupService extends NUserGroupService {
                 .collect(Collectors.toList());
     }
 
+    @Override
+    public boolean exists(String name) {
+        return getAllUserGroups().contains(name);
+    }
+
+    @Override
+    public Set<String> listUserGroups(String username) {
+        return getAllUserGroups().stream()
+                .filter(group -> getGroupMembersByName(group).stream()
+                        .anyMatch(user -> StringUtils.equalsIgnoreCase(username, user.getUsername())))
+                .collect(Collectors.toSet());
+    }
 }
diff --git a/src/common-service/src/test/java/org/apache/kylin/rest/service/LdapUserServiceTest.java b/src/common-service/src/test/java/org/apache/kylin/rest/service/LdapUserServiceTest.java
index 1b07ad2985..b43d62b638 100644
--- a/src/common-service/src/test/java/org/apache/kylin/rest/service/LdapUserServiceTest.java
+++ b/src/common-service/src/test/java/org/apache/kylin/rest/service/LdapUserServiceTest.java
@@ -25,8 +25,8 @@ import static org.springframework.security.test.web.servlet.response.SecurityMoc
 import static org.springframework.security.test.web.servlet.response.SecurityMockMvcResultMatchers.unauthenticated;
 import static org.springframework.security.test.web.servlet.setup.SecurityMockMvcConfigurers.springSecurity;
 
-import java.io.FileInputStream;
 import java.io.IOException;
+import java.nio.file.Files;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
@@ -53,9 +53,7 @@ import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Rule;
 import org.junit.Test;
-import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
 import org.mockito.Mock;
 import org.mockito.Mockito;
@@ -79,7 +77,6 @@ import org.springframework.web.context.WebApplicationContext;
 
 import com.google.common.cache.Cache;
 import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
 import com.unboundid.ldap.listener.InMemoryDirectoryServer;
 import com.unboundid.ldap.listener.InMemoryDirectoryServerConfig;
 import com.unboundid.ldap.listener.InMemoryListenerConfig;
@@ -119,14 +116,11 @@ public class LdapUserServiceTest extends NLocalFileMetadataTestCase {
     @Qualifier("userGroupService")
     LdapUserGroupService userGroupService;
 
-    @Rule
-    public ExpectedException thrown = ExpectedException.none();
-
     @BeforeClass
     public static void setupResource() throws Exception {
         staticCreateTestMetadata();
         Properties ldapConfig = new Properties();
-        ldapConfig.load(new FileInputStream(new ClassPathResource(LDAP_CONFIG).getFile()));
+        ldapConfig.load(Files.newInputStream(new ClassPathResource(LDAP_CONFIG).getFile().toPath()));
         final KylinConfig kylinConfig = getTestConfig();
         overwriteSystemPropBeforeClass("kylin.security.ldap.max-page-size", "1");
         ldapConfig.forEach((k, v) -> kylinConfig.setProperty(k.toString(), v.toString()));
@@ -146,7 +140,7 @@ public class LdapUserServiceTest extends NLocalFileMetadataTestCase {
     }
 
     @AfterClass
-    public static void cleanupResource() throws Exception {
+    public static void cleanupResource() {
         directoryServer.shutDown(true);
         staticCleanupTestMetadata();
     }
@@ -181,26 +175,22 @@ public class LdapUserServiceTest extends NLocalFileMetadataTestCase {
 
     @Test
     public void testCreateUser() {
-        thrown.expect(UnsupportedOperationException.class);
-        ldapUserService.createUser(null);
+        Assert.assertThrows(UnsupportedOperationException.class, () -> ldapUserService.createUser(null));
     }
 
     @Test
     public void testUpdateUser() {
-        thrown.expect(UnsupportedOperationException.class);
-        ldapUserService.updateUser(null);
+        Assert.assertThrows(UnsupportedOperationException.class, () -> ldapUserService.updateUser(null));
     }
 
     @Test
     public void testDeleteUser() {
-        thrown.expect(UnsupportedOperationException.class);
-        ldapUserService.deleteUser("ben");
+        Assert.assertThrows(UnsupportedOperationException.class, () -> ldapUserService.deleteUser("ben"));
     }
 
     @Test
     public void testChangePassword() {
-        thrown.expect(UnsupportedOperationException.class);
-        ldapUserService.changePassword("old", "new");
+        Assert.assertThrows(UnsupportedOperationException.class, () -> ldapUserService.changePassword("old", "new"));
     }
 
     @Test
@@ -216,8 +206,8 @@ public class LdapUserServiceTest extends NLocalFileMetadataTestCase {
     }
 
     @Test
-    public void testListUsers() throws Exception {
-        Set<String> users = ldapUserService.listUsers().stream().map(x -> x.getUsername()).collect(toSet());
+    public void testListUsers() {
+        Set<String> users = ldapUserService.listUsers().stream().map(ManagedUser::getUsername).collect(toSet());
         Assert.assertEquals(6, users.size());
         List<ManagedUser> managedUserList = ldapUserService.listUsers();
         for (val user : managedUserList) {
@@ -231,7 +221,7 @@ public class LdapUserServiceTest extends NLocalFileMetadataTestCase {
     }
 
     @Test
-    public void testListSuperAdminUsers() throws Exception {
+    public void testListSuperAdminUsers() {
         getTestConfig().setProperty("kylin.security.acl.super-admin-username", "jenny");
         Assert.assertEquals("jenny", ldapUserService.listSuperAdminUsers().get(0));
     }
@@ -239,14 +229,15 @@ public class LdapUserServiceTest extends NLocalFileMetadataTestCase {
     @Test
     public void testLoadUserByUsername() {
         Assert.assertTrue(ldapUserService.loadUserByUsername("jenny").getAuthorities().stream()
-                .map(x -> x.getAuthority()).collect(toSet()).contains("ROLE_ADMIN"));
+                .map(GrantedAuthority::getAuthority).collect(toSet()).contains("ROLE_ADMIN"));
     }
 
     @Test
     public void testCompleteUserInfoInternal() {
         ManagedUser user = new ManagedUser("oliver", "", false);
         ldapUserService.completeUserInfoInternal(user);
-        Set<String> authorities = user.getAuthorities().stream().map(x -> x.getAuthority()).collect(toSet());
+        Set<String> authorities = user.getAuthorities().stream().map(SimpleGrantedAuthority::getAuthority)
+                .collect(toSet());
         Assert.assertFalse(authorities.contains("ROLE_ADMIN"));
         Assert.assertTrue(authorities.contains("itpeople"));
     }
@@ -261,34 +252,33 @@ public class LdapUserServiceTest extends NLocalFileMetadataTestCase {
     }
 
     @Test
-    public void testOnNewUserAdded() throws Exception {
+    public void testOnNewUserAdded() {
         Assert.assertTrue(ldapUserService.userExists("rick"));
         ldapUserService.onUserAuthenticated("rick");
         Assert.assertTrue(ldapUserService.userExists("rick"));
     }
 
     @Test
-    public void testOnUserWithoutPassword() throws Exception {
+    public void testOnUserWithoutPassword() {
         ldapUserService.onUserAuthenticated("ricky");
         Assert.assertTrue(ldapUserService.userExists("ricky"));
     }
 
     @Test
     public void testAddGroup() {
-        thrown.expect(UnsupportedOperationException.class);
-        userGroupService.addGroup("gg");
+        Assert.assertThrows(UnsupportedOperationException.class, () -> userGroupService.addGroup("gg"));
     }
 
     @Test
     public void testUpdateUserGroup() {
-        thrown.expect(UnsupportedOperationException.class);
-        userGroupService.modifyGroupUsers("gg", Lists.newArrayList());
+        List<String> emptyUserGroup = Collections.emptyList();
+        Assert.assertThrows(UnsupportedOperationException.class,
+                () -> userGroupService.modifyGroupUsers("gg", emptyUserGroup));
     }
 
     @Test
     public void testDeleteUserGroup() {
-        thrown.expect(UnsupportedOperationException.class);
-        userGroupService.deleteGroup("gg");
+        Assert.assertThrows(UnsupportedOperationException.class, () -> userGroupService.deleteGroup("gg"));
     }
 
     @Test
@@ -360,7 +350,7 @@ public class LdapUserServiceTest extends NLocalFileMetadataTestCase {
     public void testGroupNameByUuidAndUuidByGroupName() throws IOException {
         List<UserGroupResponseKI> userGroupResponse = userGroupService.getUserGroupResponse(
                 userGroupService.getAllUserGroups().stream().map(UserGroup::new).collect(Collectors.toList()));
-        userGroupResponse.stream().forEach(response -> {
+        userGroupResponse.forEach(response -> {
             String groupName = response.getGroupName();
             String uuidByGroupName = userGroupService.getUuidByGroupName(response.getGroupName());
             Assert.assertEquals(groupName, uuidByGroupName);
@@ -372,7 +362,7 @@ public class LdapUserServiceTest extends NLocalFileMetadataTestCase {
     }
 
     @Test
-    public void testGetDnMapperMap() throws Exception {
+    public void testGetDnMapperMap() {
         String cacheKey = ReflectionTestUtils.getField(LdapUserService.class, "LDAP_VALID_DN_MAP_KEY").toString();
         Cache cache = (Cache) ReflectionTestUtils.getField(LdapUserService.class, "LDAP_VALID_DN_MAP_CACHE");
         cache.invalidate(cacheKey);
@@ -384,8 +374,8 @@ public class LdapUserServiceTest extends NLocalFileMetadataTestCase {
     public void testSameNameUserInvalidation() {
         Assert.assertFalse(
                 ldapUserService.listUsers().stream().map(ManagedUser::getUsername).collect(toSet()).contains("user"));
-        Assert.assertFalse(userGroupService.getUserAndUserGroup().entrySet().stream().map(Map.Entry::getValue)
-                .map(HashSet::new).map(set -> set.contains("user")).reduce(Boolean::logicalOr).get().booleanValue());
+        Assert.assertFalse(userGroupService.getUserAndUserGroup().values().stream().map(HashSet::new)
+                .map(set -> set.contains("user")).reduce(Boolean::logicalOr).get());
     }
 
     @Test
@@ -429,12 +419,29 @@ public class LdapUserServiceTest extends NLocalFileMetadataTestCase {
         getTestConfig().setProperty("kylin.security.acl.data-permission-default-enabled", "false");
         userAclService.syncAdminUserAcl(Collections.emptyList(), false);
         Assert.assertNull(userAclManager.get("jenny"));
-        userAclService.syncAdminUserAcl(Arrays.asList("jenny"), true);
+        userAclService.syncAdminUserAcl(Collections.singletonList("jenny"), true);
         Assert.assertTrue(userAclManager.get("jenny").hasPermission(AclPermission.DATA_QUERY));
 
         getTestConfig().setProperty("kylin.security.acl.super-admin-username", "");
         userAclManager.delete("jenny");
-        userAclService.syncAdminUserAcl(Arrays.asList("jenny"), true);
+        userAclService.syncAdminUserAcl(Collections.singletonList("jenny"), true);
         Assert.assertFalse(userAclManager.get("jenny").hasPermission(AclPermission.DATA_QUERY));
     }
+
+    @Test
+    public void testUserGroupExists() {
+        Assert.assertTrue(userGroupService.exists("admin"));
+        Assert.assertFalse(userGroupService.exists("not_exist_group"));
+    }
+
+    @Test
+    public void testListUserGroupsByUsername() {
+        Assert.assertTrue(ldapUserService.userExists("johnny"));
+        Set<String> johnnyGroups = userGroupService.listUserGroups("johnny");
+        Assert.assertFalse(johnnyGroups.isEmpty());
+
+        Assert.assertFalse(ldapUserService.userExists("not_exist_user"));
+        Set<String> notExistUser = userGroupService.listUserGroups("not_exist_user");
+        Assert.assertTrue(notExistUser.isEmpty());
+    }
 }
diff --git a/src/common-service/src/test/java/org/apache/kylin/rest/service/OpenUserServiceTest.java b/src/common-service/src/test/java/org/apache/kylin/rest/service/OpenUserServiceTest.java
index 85852ff3ab..c8ad9e6b68 100644
--- a/src/common-service/src/test/java/org/apache/kylin/rest/service/OpenUserServiceTest.java
+++ b/src/common-service/src/test/java/org/apache/kylin/rest/service/OpenUserServiceTest.java
@@ -18,12 +18,13 @@
 
 package org.apache.kylin.rest.service;
 
-import java.io.FileInputStream;
+import java.nio.file.Files;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.util.Set;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.scheduler.EventBusFactory;
@@ -96,7 +97,8 @@ public class OpenUserServiceTest extends NLocalFileMetadataTestCase {
     public static void setupResource() throws Exception {
         staticCreateTestMetadata();
         Properties ldapConfig = new Properties();
-        ldapConfig.load(new FileInputStream(new ClassPathResource("ut_custom/custom-config.properties").getFile()));
+        ldapConfig.load(
+                Files.newInputStream(new ClassPathResource("ut_custom/custom-config.properties").getFile().toPath()));
         final KylinConfig kylinConfig = getTestConfig();
         ldapConfig.forEach((k, v) -> kylinConfig.setProperty(k.toString(), v.toString()));
 
@@ -247,7 +249,7 @@ public class OpenUserServiceTest extends NLocalFileMetadataTestCase {
 
     @Test
     public void testDoAfterListAdminUsers() {
-        List adminUserList = Arrays.asList("admin", "sunny");
+        List<String> adminUserList = Arrays.asList("admin", "sunny");
         val adminUserAspect = SpringContext.getBean(AdminUserAspect.class);
         adminUserAspect.doAfterListAdminUsers(adminUserList);
         Assert.assertTrue(((List) ReflectionTestUtils.getField(adminUserAspect, "adminUserList")).contains("sunny"));
@@ -276,4 +278,21 @@ public class OpenUserServiceTest extends NLocalFileMetadataTestCase {
         userAclService.syncAdminUserAcl();
         Assert.assertTrue(userAclService.hasUserAclPermission("admin", AclPermission.DATA_QUERY));
     }
+
+    @Test
+    public void testUserGroupExists() {
+        Assert.assertTrue(userGroupService.exists("ROLE_ADMIN"));
+        Assert.assertFalse(userGroupService.exists("not_exist_group"));
+    }
+
+    @Test
+    public void testListUserGroupsByUsername() {
+        Assert.assertTrue(userService.userExists("test"));
+        Set<String> testGroups = userGroupService.listUserGroups("test");
+        Assert.assertFalse(testGroups.isEmpty());
+
+        Assert.assertFalse(userService.userExists("not_exist_user"));
+        Set<String> notExistUser = userGroupService.listUserGroups("not_exist_user");
+        Assert.assertTrue(notExistUser.isEmpty());
+    }
 }


[kylin] 14/34: KYLIN-5448 fix snyk vulnerabilities, upgrade jettison from 1.1 to 1.5.2

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit f25edf0fcaeba68aee5731d582509b57f0ad411d
Author: huangsheng <hu...@163.com>
AuthorDate: Thu Dec 29 10:54:30 2022 +0800

    KYLIN-5448 fix snyk vulnerabilities, upgrade jettison from 1.1 to 1.5.2
---
 pom.xml | 42 ++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 42 insertions(+)

diff --git a/pom.xml b/pom.xml
index 4a87ab0a1a..bb4fc4f907 100644
--- a/pom.xml
+++ b/pom.xml
@@ -843,6 +843,10 @@
                 <version>${hadoop.version}</version>
                 <scope>provided</scope>
                 <exclusions>
+                    <exclusion>
+                        <groupId>org.codehaus.jettison</groupId>
+                        <artifactId>jettison</artifactId>
+                    </exclusion>
                     <exclusion>
                         <groupId>javax.servlet</groupId>
                         <artifactId>servlet-api</artifactId>
@@ -946,6 +950,10 @@
                 <version>${hadoop.version}</version>
                 <scope>provided</scope>
                 <exclusions>
+                    <exclusion>
+                        <groupId>org.codehaus.jettison</groupId>
+                        <artifactId>jettison</artifactId>
+                    </exclusion>
                     <exclusion>
                         <groupId>log4j</groupId>
                         <artifactId>*</artifactId>
@@ -981,6 +989,10 @@
                 <version>${hadoop.version}</version>
                 <scope>provided</scope>
                 <exclusions>
+                    <exclusion>
+                        <groupId>org.codehaus.jettison</groupId>
+                        <artifactId>jettison</artifactId>
+                    </exclusion>
                     <exclusion>
                         <groupId>javax.servlet</groupId>
                         <artifactId>servlet-api</artifactId>
@@ -1003,6 +1015,10 @@
                 <groupId>org.apache.hadoop</groupId>
                 <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
                 <exclusions>
+                    <exclusion>
+                        <groupId>org.codehaus.jettison</groupId>
+                        <artifactId>jettison</artifactId>
+                    </exclusion>
                     <exclusion>
                         <groupId>com.sun.jersey</groupId>
                         <artifactId>*</artifactId>
@@ -1211,6 +1227,10 @@
                 <groupId>org.apache.hive</groupId>
                 <artifactId>hive-jdbc</artifactId>
                 <exclusions>
+                    <exclusion>
+                        <groupId>org.codehaus.jettison</groupId>
+                        <artifactId>jettison</artifactId>
+                    </exclusion>
                     <exclusion>
                         <groupId>com.sun.jersey</groupId>
                         <artifactId>*</artifactId>
@@ -1284,6 +1304,10 @@
                 <artifactId>hive-hcatalog-core</artifactId>
                 <version>${hive-hcatalog.version}</version>
                 <exclusions>
+                    <exclusion>
+                        <groupId>org.codehaus.jettison</groupId>
+                        <artifactId>jettison</artifactId>
+                    </exclusion>
                     <exclusion>
                         <groupId>com.twitter</groupId>
                         <artifactId>parquet-hadoop-bundle</artifactId>
@@ -1589,6 +1613,10 @@
                 <artifactId>hive-common</artifactId>
                 <version>${hive.version}</version>
                 <exclusions>
+                    <exclusion>
+                        <groupId>org.codehaus.jettison</groupId>
+                        <artifactId>jettison</artifactId>
+                    </exclusion>
                     <exclusion>
                         <groupId>org.apache.logging.log4j</groupId>
                         <artifactId>log4j-1.2-api</artifactId>
@@ -1680,6 +1708,10 @@
                 <artifactId>hive-metastore</artifactId>
                 <version>${hive.version}</version>
                 <exclusions>
+                    <exclusion>
+                        <groupId>org.codehaus.jettison</groupId>
+                        <artifactId>jettison</artifactId>
+                    </exclusion>
                     <exclusion>
                         <groupId>org.apache.logging.log4j</groupId>
                         <artifactId>log4j-1.2-api</artifactId>
@@ -1745,6 +1777,10 @@
                 <groupId>org.apache.hadoop</groupId>
                 <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
                 <exclusions>
+                    <exclusion>
+                        <groupId>org.codehaus.jettison</groupId>
+                        <artifactId>jettison</artifactId>
+                    </exclusion>
                     <exclusion>
                         <groupId>com.sun.jersey</groupId>
                         <artifactId>*</artifactId>
@@ -2239,6 +2275,12 @@
                 <artifactId>opencsv</artifactId>
                 <version>${opencsv.version}</version>
             </dependency>
+            <dependency>
+                <groupId>org.codehaus.jettison</groupId>
+                <artifactId>jettison</artifactId>
+                <version>1.5.2</version>
+            </dependency>
+
 
             <!-- Spring Cloud -->
             <dependency>


[kylin] 13/34: KYLIN-5449 fix startup issue

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 857a79edef27fadf9b5debdbda709e55c90480c2
Author: qianhao.zhou <z....@gmail.com>
AuthorDate: Wed Dec 28 12:13:21 2022 +0800

    KYLIN-5449 fix startup issue
    
    Co-authored-by: qhzhou <qi...@kyligence.io>
---
 src/server/pom.xml | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/src/server/pom.xml b/src/server/pom.xml
index dec842832d..01344da732 100644
--- a/src/server/pom.xml
+++ b/src/server/pom.xml
@@ -57,6 +57,10 @@
             <groupId>org.apache.kylin</groupId>
             <artifactId>kylin-common-server</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.apache.kylin</groupId>
+            <artifactId>kylin-tool</artifactId>
+        </dependency>
         <dependency>
             <groupId>org.apache.kylin</groupId>
             <artifactId>kap-second-storage-clickhouse</artifactId>


[kylin] 34/34: KYLIN-5450 fix NPE while col_order is not required

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 7fe52b87411451debd40f2b92eee6111f764de6c
Author: Ruixuan Zhang <ru...@kyligence.io>
AuthorDate: Wed Jan 11 15:21:26 2023 +0800

    KYLIN-5450 fix NPE while col_order is not required
---
 .../rest/controller/NIndexPlanController.java      | 10 ---
 .../rest/controller/IndexPlanControllerTest.java   | 20 ++---
 .../kylin/rest/service/IndexPlanService.java       |  4 +
 .../kylin/rest/service/IndexPlanServiceTest.java   | 90 ++++++++++++++++++++++
 4 files changed, 104 insertions(+), 20 deletions(-)

diff --git a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NIndexPlanController.java b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NIndexPlanController.java
index 3bca9f0ecf..24124071a5 100644
--- a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NIndexPlanController.java
+++ b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NIndexPlanController.java
@@ -21,7 +21,6 @@ package org.apache.kylin.rest.controller;
 import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_JSON;
 import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.LAYOUT_LIST_EMPTY;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.SHARD_BY_COLUMN_NOT_IN_INDEX;
 
 import java.util.List;
 import java.util.Set;
@@ -144,19 +143,10 @@ public class NIndexPlanController extends NBasicController {
         checkRequiredArg(MODEL_ID, request.getModelId());
         checkRequiredArg("id", request.getId());
         modelService.validateCCType(request.getModelId(), request.getProject());
-        List<String> shardByColumns = request.getShardByColumns();
-        List<String> colOrder = request.getColOrder();
-        checkShardbyCol(shardByColumns, colOrder);
         val response = fusionIndexService.updateTableIndex(request.getProject(), request);
         return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, response, "");
     }
 
-    private void checkShardbyCol(List<String> shardByColumns, List<String> colOrder) {
-        if (!colOrder.containsAll(shardByColumns)) {
-            throw new KylinException(SHARD_BY_COLUMN_NOT_IN_INDEX);
-        }
-    }
-
     @Deprecated
     @ApiOperation(value = "deleteTableIndex", tags = { "AI" }, notes = "Update URL: {project}, Update Param: project")
     @DeleteMapping(value = "/table_index/{id:.+}")
diff --git a/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/IndexPlanControllerTest.java b/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/IndexPlanControllerTest.java
index bcb9ef7ec1..7a1de9708c 100644
--- a/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/IndexPlanControllerTest.java
+++ b/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/IndexPlanControllerTest.java
@@ -18,9 +18,7 @@
 package org.apache.kylin.rest.controller;
 
 import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_JSON;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.SHARD_BY_COLUMN_NOT_IN_INDEX;
 
-import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
 import org.apache.kylin.common.util.Pair;
@@ -36,7 +34,6 @@ import org.apache.kylin.rest.service.FusionIndexService;
 import org.apache.kylin.rest.service.IndexPlanService;
 import org.apache.kylin.rest.service.ModelService;
 import org.junit.After;
-import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.InjectMocks;
@@ -162,13 +159,16 @@ public class IndexPlanControllerTest extends NLocalFileMetadataTestCase {
     }
 
     @Test
-    public void testUpdateTableIndex() {
+    public void testUpdateTableIndex() throws Exception {
         CreateTableIndexRequest tableIndexRequest = CreateTableIndexRequest.builder().project("default")
-                .modelId("89af4ee2-2cdb-4b07-b39e-4c29856309aa").id(20000010000L)
-                .colOrder(Lists.newArrayList("1", "0", "2")).shardByColumns(Lists.newArrayList("4"))
-                .sortByColumns(Lists.newArrayList("0", "2")).build();
-        Assert.assertThrows(SHARD_BY_COLUMN_NOT_IN_INDEX.getMsg(), KylinException.class, () -> {
-            indexPlanController.updateTableIndex(tableIndexRequest);
-        });
+                .modelId("89af4ee2-2cdb-4b07-b39e-4c29856309aa").id(20000010001L).colOrder(Lists
+                        .newArrayList("TEST_KYLIN_FACT.TRANS_ID", "TEST_SITES.SITE_NAME", "TEST_KYLIN_FACT.CAL_DT"))
+                .sortByColumns(Lists.newArrayList()).build();
+        Mockito.when(indexPlanService.updateTableIndex(Mockito.anyString(), Mockito.any(CreateTableIndexRequest.class)))
+                .thenReturn(new BuildIndexResponse());
+        mockMvc.perform(MockMvcRequestBuilders.put("/api/index_plans/table_index")
+                .contentType(MediaType.APPLICATION_JSON).content(JsonUtil.writeValueAsString(tableIndexRequest))
+                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
+                .andExpect(MockMvcResultMatchers.status().isOk());
     }
 }
diff --git a/src/modeling-service/src/main/java/org/apache/kylin/rest/service/IndexPlanService.java b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/IndexPlanService.java
index e45016aeeb..62d597daf5 100644
--- a/src/modeling-service/src/main/java/org/apache/kylin/rest/service/IndexPlanService.java
+++ b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/IndexPlanService.java
@@ -21,6 +21,7 @@ import static org.apache.kylin.common.exception.ServerErrorCode.PERMISSION_DENIE
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.INDEX_DUPLICATE;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.LAYOUT_LIST_EMPTY;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.LAYOUT_NOT_EXISTS;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.SHARD_BY_COLUMN_NOT_IN_INDEX;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -235,6 +236,9 @@ public class IndexPlanService extends BasicService implements TableIndexPlanSupp
         newLayout.setOwner(BasicService.getUsername());
         newLayout.setManual(true);
         newLayout.setIndexRange(request.getIndexRange());
+        if (!newLayout.getColOrder().containsAll(newLayout.getShardByColumns())) {
+            throw new KylinException(SHARD_BY_COLUMN_NOT_IN_INDEX);
+        }
 
         Map<Integer, String> layoutOverride = Maps.newHashMap();
         if (request.getLayoutOverrideIndexes() != null) {
diff --git a/src/modeling-service/src/test/java/org/apache/kylin/rest/service/IndexPlanServiceTest.java b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/IndexPlanServiceTest.java
index 2a8db33ac3..c5f257e3fb 100644
--- a/src/modeling-service/src/test/java/org/apache/kylin/rest/service/IndexPlanServiceTest.java
+++ b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/IndexPlanServiceTest.java
@@ -19,6 +19,7 @@ package org.apache.kylin.rest.service;
 
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.INDEX_DUPLICATE;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.LAYOUT_NOT_EXISTS;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.SHARD_BY_COLUMN_NOT_IN_INDEX;
 import static org.apache.kylin.metadata.cube.model.IndexEntity.Source.CUSTOM_TABLE_INDEX;
 import static org.apache.kylin.metadata.cube.model.IndexEntity.Source.RECOMMENDED_TABLE_INDEX;
 import static org.apache.kylin.metadata.model.SegmentStatusEnum.READY;
@@ -1447,4 +1448,93 @@ public class IndexPlanServiceTest extends SourceTestCase {
         val response2 = indexPlanService.getShardByColumns("default", modelId);
         Assert.assertFalse(response2.isShowLoadData());
     }
+
+    @Test
+    public void testCheckShardByColumns() {
+        CreateTableIndexRequest tableIndexRequest = CreateTableIndexRequest.builder().project("default")
+                .modelId("89af4ee2-2cdb-4b07-b39e-4c29856309aa").id(20000010000L)
+                .colOrder(Lists.newArrayList("TEST_KYLIN_FACT.TRANS_ID", "TEST_SITES.SITE_NAME",
+                        "TEST_KYLIN_FACT.CAL_DT"))
+                .shardByColumns(Lists.newArrayList("TEST_KYLIN_FACT.LSTG_SITE_ID")).sortByColumns(Lists.newArrayList())
+                .build();
+
+        Assert.assertThrows(SHARD_BY_COLUMN_NOT_IN_INDEX.getMsg(), KylinException.class, () -> {
+            indexPlanService.updateTableIndex("default", tableIndexRequest);
+        });
+    }
+
+    @Test
+    public void testUpdateTableIndexWithNullColOrder() {
+        String project = "default";
+        String modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa";
+        long layoutId = 20000010001L;
+        CreateTableIndexRequest tableIndexRequest = CreateTableIndexRequest.builder().project(project).modelId(modelId)
+                .id(layoutId).shardByColumns(Lists.newArrayList()).sortByColumns(Lists.newArrayList()).build();
+        BuildIndexResponse response = indexPlanService.updateTableIndex(project, tableIndexRequest);
+        Assert.assertEquals(BuildIndexResponse.BuildIndexType.NORM_BUILD, response.getType());
+        LayoutEntity layoutEntity = NIndexPlanManager.getInstance(KylinConfig.getInstanceFromEnv(), project)
+                .getIndexPlan(modelId).getLayoutEntity(layoutId);
+        Assert.assertEquals(Lists.newArrayList(1, 0, 2), layoutEntity.getColOrder());
+    }
+
+    @Test
+    public void testUpdateTableIndexWithNullColOrderThrowsException() {
+        String project = "default";
+        String modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa";
+        long layoutId = 20000010001L;
+        CreateTableIndexRequest tableIndexRequest = CreateTableIndexRequest.builder().project(project).modelId(modelId)
+                .id(layoutId).shardByColumns(Lists.newArrayList("TEST_KYLIN_FACT.TRANS_ID"))
+                .sortByColumns(Lists.newArrayList()).build();
+        Assert.assertThrows(SHARD_BY_COLUMN_NOT_IN_INDEX.getMsg(), KylinException.class, () -> {
+            indexPlanService.updateTableIndex("default", tableIndexRequest);
+        });
+    }
+
+    @Test
+    public void testUpdateTableIndexWithNullShardByCols() {
+        String project = "default";
+        String modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa";
+        long layoutId = 20000010001L;
+        CreateTableIndexRequest tableIndexRequest = CreateTableIndexRequest
+                .builder().project(project).modelId(modelId).id(layoutId).colOrder(Lists
+                        .newArrayList("TEST_KYLIN_FACT.TRANS_ID", "TEST_SITES.SITE_NAME", "TEST_KYLIN_FACT.CAL_DT"))
+                .sortByColumns(Lists.newArrayList()).build();
+        BuildIndexResponse response = indexPlanService.updateTableIndex(project, tableIndexRequest);
+        Assert.assertEquals(BuildIndexResponse.BuildIndexType.NORM_BUILD, response.getType());
+        LayoutEntity layoutEntity = NIndexPlanManager.getInstance(KylinConfig.getInstanceFromEnv(), project)
+                .getIndexPlan(modelId).getLayoutEntity(layoutId);
+        Assert.assertEquals(Lists.newArrayList(1, 0, 2), layoutEntity.getColOrder());
+    }
+
+    @Test
+    public void testUpdateTableIndexWithCreateEmptyIndex() {
+        String project = "default";
+        String modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa";
+        long layoutId = 20000180001L;
+        CreateTableIndexRequest tableIndexRequest = CreateTableIndexRequest.builder().project(project).modelId(modelId)
+                .id(layoutId).colOrder(Lists.newArrayList()).sortByColumns(Lists.newArrayList()).build();
+        BuildIndexResponse response = indexPlanService.updateTableIndex(project, tableIndexRequest);
+        Assert.assertEquals(BuildIndexResponse.BuildIndexType.NORM_BUILD, response.getType());
+        IndexPlan indexPlan = NIndexPlanManager.getInstance(KylinConfig.getInstanceFromEnv(), project)
+                .getIndexPlan(modelId);
+        Assert.assertTrue(
+                indexPlan.getAllLayouts().stream().anyMatch(layoutEntity -> layoutEntity.getColOrder().size() == 0));
+    }
+
+    @Test
+    public void testUpdateTableIndexWithUpdateEmptyIndex() {
+        String project = "default";
+        String modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa";
+        long layoutId = 20000010001L;
+        CreateTableIndexRequest tableIndexRequest = CreateTableIndexRequest.builder().project("default")
+                .modelId(modelId).id(layoutId).colOrder(Lists.newArrayList()).sortByColumns(Lists.newArrayList())
+                .build();
+        BuildIndexResponse response = indexPlanService.updateTableIndex("default", tableIndexRequest);
+        Assert.assertEquals(BuildIndexResponse.BuildIndexType.NORM_BUILD, response.getType());
+        IndexPlan indexPlan = NIndexPlanManager.getInstance(KylinConfig.getInstanceFromEnv(), project)
+                .getIndexPlan(modelId);
+        Assert.assertTrue(indexPlan.getLayoutEntity(layoutId).isToBeDeleted());
+        Assert.assertTrue(
+                indexPlan.getAllLayouts().stream().anyMatch(layoutEntity -> layoutEntity.getColOrder().size() == 0));
+    }
 }


[kylin] 17/34: KYLIN-5451 Avoid multiple local ip acquisitions

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit b0c258904464eff63deea05c738a655a657f8ed6
Author: Yaguang Jia <ji...@foxmail.com>
AuthorDate: Thu Dec 29 14:32:07 2022 +0800

    KYLIN-5451 Avoid multiple local ip acquisitions
    
    * KYLIN-5451 Avoid multiple local ip acquisitions
---
 .../org/apache/kylin/common/util/AddressUtil.java    | 20 ++++++++++++--------
 1 file changed, 12 insertions(+), 8 deletions(-)

diff --git a/src/core-common/src/main/java/org/apache/kylin/common/util/AddressUtil.java b/src/core-common/src/main/java/org/apache/kylin/common/util/AddressUtil.java
index 9f2c5a044d..71cd033143 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/util/AddressUtil.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/util/AddressUtil.java
@@ -32,11 +32,11 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 public class AddressUtil {
 
+    public static String MAINTAIN_MODE_MOCK_PORT = "0000";
+    private static String localIpAddressCache;
     @Setter
     private static HostInfoFetcher hostInfoFetcher = new DefaultHostInfoFetcher();
 
-    public static String MAINTAIN_MODE_MOCK_PORT = "0000";
-
     public static String getLocalInstance() {
         String serverIp = getLocalHostExactAddress();
         return serverIp + ":" + KylinConfig.getInstanceFromEnv().getServerPort();
@@ -79,13 +79,17 @@ public class AddressUtil {
     }
 
     public static String getLocalHostExactAddress() {
-        val localIpAddress = KylinConfig.getInstanceFromEnv().getServerIpAddress();
-        if (StringUtils.isNotBlank(localIpAddress)) {
-            return localIpAddress;
-        }
-        try (InetUtils inetUtils = new InetUtils(new InetUtilsProperties())) {
-            return inetUtils.findFirstNonLoopbackHostInfo().getIpAddress();
+        if (StringUtils.isEmpty(localIpAddressCache)) {
+            val localIpAddress = KylinConfig.getInstanceFromEnv().getServerIpAddress();
+            if (StringUtils.isNotBlank(localIpAddress)) {
+                localIpAddressCache = localIpAddress;
+            } else {
+                try (InetUtils inetUtils = new InetUtils(new InetUtilsProperties())) {
+                    localIpAddressCache = inetUtils.findFirstNonLoopbackHostInfo().getIpAddress();
+                }
+            }
         }
+        return localIpAddressCache;
     }
 
     public static boolean isSameHost(String driverHost) {


[kylin] 26/34: KYLIN-5449 fix diagnose tool issue

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 823300b39c78f87adf80d41efc5709b07d5fd123
Author: qianhao.zhou <z....@gmail.com>
AuthorDate: Tue Jan 3 16:42:03 2023 +0800

    KYLIN-5449 fix diagnose tool issue
    
    Co-authored-by: qhzhou <qi...@kyligence.io>
---
 .../main/java/org/apache/kylin/rest/controller/NBasicController.java    | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/common-server/src/main/java/org/apache/kylin/rest/controller/NBasicController.java b/src/common-server/src/main/java/org/apache/kylin/rest/controller/NBasicController.java
index a403338984..9f7c1df5cd 100644
--- a/src/common-server/src/main/java/org/apache/kylin/rest/controller/NBasicController.java
+++ b/src/common-server/src/main/java/org/apache/kylin/rest/controller/NBasicController.java
@@ -542,7 +542,7 @@ public class NBasicController {
     public void downloadFromRemoteHost(final HttpServletRequest request, String url,
             HttpServletResponse servletResponse) throws IOException {
         File temporaryZipFile = KylinConfigBase.getDiagFileName();
-        Preconditions.checkState(temporaryZipFile.getParentFile().mkdirs(), "create temporary zip file folder failed");
+        temporaryZipFile.getParentFile().mkdirs();
         Preconditions.checkState(temporaryZipFile.createNewFile(), "create temporary zip file failed");
         RequestCallback requestCallback = x -> {
             Collections.list(request.getHeaderNames())


[kylin] 09/34: KYLIN-5448 [FOLLOWUP] Update sprint-boot version & independent tomcat config

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 7e9495c17ce794b6ce7cbd41c30cd5d2761cc356
Author: Yinghao Lin <39...@users.noreply.github.com>
AuthorDate: Fri Dec 23 13:39:46 2022 +0800

    KYLIN-5448 [FOLLOWUP] Update sprint-boot version & independent tomcat config
---
 pom.xml | 52 ----------------------------------------------------
 1 file changed, 52 deletions(-)

diff --git a/pom.xml b/pom.xml
index 1ee54f23da..1d7ee3f267 100644
--- a/pom.xml
+++ b/pom.xml
@@ -190,7 +190,6 @@
         <cglib.version>3.2.4</cglib.version>
         <supercsv.version>2.4.0</supercsv.version>
         <cors.version>2.5</cors.version>
-        <tomcat.version>9.0.68</tomcat.version>
         <t-digest.version>3.1.1-kylin-r1</t-digest.version>
         <jsonpath.version>2.3.0</jsonpath.version>
         <lombok.version>1.18.2</lombok.version>
@@ -2264,27 +2263,6 @@
                 <scope>import</scope>
             </dependency>
 
-            <dependency>
-                <groupId>org.apache.tomcat</groupId>
-                <artifactId>tomcat-annotations-api</artifactId>
-                <version>${tomcat.version}</version>
-            </dependency>
-            <dependency>
-                <groupId>org.apache.tomcat.embed</groupId>
-                <artifactId>tomcat-embed-el</artifactId>
-                <version>${tomcat.version}</version>
-            </dependency>
-            <dependency>
-                <groupId>org.apache.tomcat.embed</groupId>
-                <artifactId>tomcat-embed-websocket</artifactId>
-                <version>${tomcat.version}</version>
-                <exclusions>
-                    <exclusion>
-                        <groupId>org.apache.tomcat.embed</groupId>
-                        <artifactId>tomcat-embed-core</artifactId>
-                    </exclusion>
-                </exclusions>
-            </dependency>
             <dependency>
                 <groupId>javax.servlet</groupId>
                 <artifactId>servlet-api</artifactId>
@@ -2292,40 +2270,10 @@
                 <version>2.5</version>
             </dependency>
 
-            <!-- Tomcat -->
-            <dependency>
-                <groupId>org.apache.tomcat.embed</groupId>
-                <artifactId>tomcat-embed-core</artifactId>
-                <version>${tomcat.version}</version>
-                <exclusions>
-                    <exclusion>
-                        <groupId>org.apache.tomcat</groupId>
-                        <artifactId>tomcat-annotations-api</artifactId>
-                    </exclusion>
-                </exclusions>
-            </dependency>
             <dependency>
                 <groupId>org.springframework.boot</groupId>
                 <artifactId>spring-boot-dependencies</artifactId>
                 <version>${spring.boot.version}</version>
-                <exclusions>
-                    <exclusion>
-                        <groupId>org.apache.tomcat.embed</groupId>
-                        <artifactId>tomcat-embed-core</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.tomcat.embed</groupId>
-                        <artifactId>tomcat-embed-el</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.tomcat.embed</groupId>
-                        <artifactId>tomcat-embed-websocket</artifactId>
-                    </exclusion>
-                    <exclusion>
-                        <groupId>org.apache.tomcat</groupId>
-                        <artifactId>tomcat-annotations-api</artifactId>
-                    </exclusion>
-                </exclusions>
                 <type>pom</type>
                 <scope>import</scope>
             </dependency>


[kylin] 08/34: KYLIN-5447 delete ddl pom

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 22e0a9d8d2be498c8fe4598581fb1dbb574b1870
Author: ChenLiang.Lu <31...@users.noreply.github.com>
AuthorDate: Tue Dec 20 14:24:51 2022 +0800

    KYLIN-5447 delete ddl pom
---
 pom.xml                                    | 1 -
 src/spark-project/spark-ddl-plugin/pom.xml | 0
 2 files changed, 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index 975ff8d1ed..1ee54f23da 100644
--- a/pom.xml
+++ b/pom.xml
@@ -380,7 +380,6 @@
         <module>src/spark-project/engine-spark</module>
         <module>src/spark-project/source-jdbc</module>
         <module>src/spark-project/engine-build-sdk</module>
-        <module>src/spark-project/spark-ddl-plugin</module>
         <module>src/spark-project/spark-it</module>
         <module>src/streaming</module>
         <module>src/assembly</module>
diff --git a/src/spark-project/spark-ddl-plugin/pom.xml b/src/spark-project/spark-ddl-plugin/pom.xml
deleted file mode 100644
index e69de29bb2..0000000000


[kylin] 22/34: KYLIN-5457 Optimize NKylinUserManager and NUserGroupManager

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 85efb638f220fd72692d57076886454d5812903a
Author: Jiale He <35...@users.noreply.github.com>
AuthorDate: Fri Dec 30 15:28:47 2022 +0800

    KYLIN-5457 Optimize NKylinUserManager and NUserGroupManager
    
    * KYLIN-5457 Optimize NKylinUserManager and NUserGroupManager
    
    * KYLIN-5457 fix ut
    
    * KYLIN-5457 fix ut
---
 .../service/CaseInsensitiveKylinUserService.java   | 62 ++---------------
 .../kylin/rest/service/KylinUserService.java       | 80 ++++++++++++++--------
 .../kylin/rest/service/NUserGroupService.java      | 63 ++++++++---------
 .../CaseInsensitiveKylinUserServiceTest.java       | 12 ++--
 .../kylin/rest/service/KylinUserServiceTest.java   | 58 +++++++++++++---
 .../kylin/rest/service/NUserGroupServiceTest.java  | 19 ++---
 .../kylin/rest/service/UserAclServiceTest.java     |  5 +-
 .../kylin/metadata/user/NKylinUserManager.java     | 32 +++++++--
 .../metadata/usergroup/NUserGroupManager.java      | 26 +++++--
 .../kylin/metadata/user/NKylinUserManagerTest.java | 71 +++++++++++++++++++
 .../metadata/usergroup/NUserGroupManagerTest.java  | 44 ++++--------
 11 files changed, 289 insertions(+), 183 deletions(-)

diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/CaseInsensitiveKylinUserService.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/CaseInsensitiveKylinUserService.java
index a0cf392dca..ad8b54acea 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/CaseInsensitiveKylinUserService.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/CaseInsensitiveKylinUserService.java
@@ -18,32 +18,19 @@
 
 package org.apache.kylin.rest.service;
 
-import static org.apache.kylin.rest.constant.Constant.ROLE_ADMIN;
-
-import java.io.IOException;
-import java.util.HashSet;
 import java.util.List;
-import java.util.Set;
 import java.util.stream.Collectors;
 
 import org.apache.kylin.rest.constant.Constant;
+
 import org.apache.kylin.metadata.user.ManagedUser;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.springframework.security.core.userdetails.UserDetails;
+import lombok.extern.slf4j.Slf4j;
 
+@Slf4j
 public class CaseInsensitiveKylinUserService extends KylinUserService {
 
-    private final Logger logger = LoggerFactory.getLogger(CaseInsensitiveKylinUserService.class);
-
-    @Override
-    public boolean userExists(String userName) {
-        logger.trace("judge user exist: {}", userName);
-        return getKylinUserManager().exists(userName);
-    }
-
     @Override
-    public List<String> listAdminUsers() throws IOException {
+    public List<String> listAdminUsers() {
         return listUsers().parallelStream()
                 .filter(managedUser -> managedUser.getAuthorities().parallelStream().anyMatch(
                         simpleGrantedAuthority -> Constant.ROLE_ADMIN.equals(simpleGrantedAuthority.getAuthority())))
@@ -51,49 +38,10 @@ public class CaseInsensitiveKylinUserService extends KylinUserService {
     }
 
     @Override
-    public List<String> listNormalUsers() throws IOException {
+    public List<String> listNormalUsers() {
         return listUsers().parallelStream()
                 .filter(managedUser -> managedUser.getAuthorities().parallelStream().noneMatch(
                         simpleGrantedAuthority -> Constant.ROLE_ADMIN.equals(simpleGrantedAuthority.getAuthority())))
                 .map(ManagedUser::getUsername).collect(Collectors.toList());
     }
-
-    @Override
-    public boolean isGlobalAdmin(String username) throws IOException {
-        try {
-            UserDetails userDetails = loadUserByUsername(username);
-            return isGlobalAdmin(userDetails);
-        } catch (Exception e) {
-            logger.warn("Cat not load user by username {}", username, e);
-        }
-
-        return false;
-    }
-
-    @Override
-    public boolean isGlobalAdmin(UserDetails userDetails) throws IOException {
-        return userDetails != null && userDetails.getAuthorities().stream()
-                .anyMatch(grantedAuthority -> grantedAuthority.getAuthority().equals(ROLE_ADMIN));
-    }
-
-    @Override
-    public Set<String> retainsNormalUser(Set<String> usernames) throws IOException {
-        Set<String> results = new HashSet<>();
-        for (String username : usernames) {
-            if (!isGlobalAdmin(username)) {
-                results.add(username);
-            }
-        }
-        return results;
-    }
-
-    @Override
-    public boolean containsGlobalAdmin(Set<String> usernames) throws IOException {
-        for (String username : usernames) {
-            if (isGlobalAdmin(username)) {
-                return true;
-            }
-        }
-        return false;
-    }
 }
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/KylinUserService.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/KylinUserService.java
index 8e0b99e18c..3f9511ca09 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/KylinUserService.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/KylinUserService.java
@@ -21,15 +21,15 @@ package org.apache.kylin.rest.service;
 import static org.apache.kylin.common.exception.ServerErrorCode.DUPLICATE_USER_NAME;
 import static org.apache.kylin.common.exception.ServerErrorCode.PERMISSION_DENIED;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.USER_LOGIN_FAILED;
+import static org.apache.kylin.rest.constant.Constant.ROLE_ADMIN;
 
-import java.io.IOException;
-import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
+import java.util.Objects;
+import java.util.Set;
 import java.util.stream.Collectors;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.msg.Message;
@@ -40,8 +40,6 @@ import org.apache.kylin.rest.aspect.Transaction;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.exception.InternalErrorException;
 import org.apache.kylin.rest.security.AclPermission;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.security.core.authority.SimpleGrantedAuthority;
@@ -55,11 +53,11 @@ import org.apache.kylin.metadata.user.ManagedUser;
 import org.apache.kylin.metadata.user.NKylinUserManager;
 import lombok.SneakyThrows;
 import lombok.val;
+import lombok.extern.slf4j.Slf4j;
 
+@Slf4j
 public class KylinUserService implements UserService {
 
-    private Logger logger = LoggerFactory.getLogger(KylinUserService.class);
-
     public static final String DIR_PREFIX = "/user/";
 
     public static final Serializer<ManagedUser> SERIALIZER = new JsonSerializer<>(ManagedUser.class);
@@ -91,7 +89,7 @@ public class KylinUserService implements UserService {
         }
         getKylinUserManager().update(managedUser);
         userAclService.updateUserAclPermission(user, AclPermission.DATA_QUERY);
-        logger.trace("update user : {}", user.getUsername());
+        log.trace("update user : {}", user.getUsername());
     }
 
     @SneakyThrows
@@ -106,7 +104,7 @@ public class KylinUserService implements UserService {
 
         userAclService.deleteUserAcl(userName);
         getKylinUserManager().delete(userName);
-        logger.trace("delete user : {}", userName);
+        log.trace("delete user : {}", userName);
     }
 
     @Override
@@ -116,14 +114,8 @@ public class KylinUserService implements UserService {
 
     @Override
     public boolean userExists(String userName) {
-        logger.trace("judge user exist: {}", userName);
-        val users = listUsers();
-        for (val user : users) {
-            if (StringUtils.equalsIgnoreCase(userName, user.getUsername())) {
-                return true;
-            }
-        }
-        return false;
+        log.trace("judge user exist: {}", userName);
+        return getKylinUserManager().exists(userName);
     }
 
     /**
@@ -133,17 +125,17 @@ public class KylinUserService implements UserService {
     @Override
     public UserDetails loadUserByUsername(String userName) throws UsernameNotFoundException {
         Message msg = MsgPicker.getMsg();
-        ManagedUser managedUser = null;
+        ManagedUser managedUser;
         try {
             managedUser = getKylinUserManager().get(userName);
         } catch (IllegalArgumentException e) {
-            logger.error("exception: ", e);
+            log.error("exception: ", e);
             throw new UsernameNotFoundException(USER_LOGIN_FAILED.getMsg());
         }
         if (managedUser == null) {
             throw new UsernameNotFoundException(String.format(Locale.ROOT, msg.getUserNotFound(), userName));
         }
-        logger.trace("load user : {}", userName);
+        log.trace("load user : {}", userName);
         return managedUser;
     }
 
@@ -158,14 +150,47 @@ public class KylinUserService implements UserService {
     }
 
     @Override
-    public List<String> listAdminUsers() throws IOException {
-        List<String> adminUsers = new ArrayList<>();
-        for (ManagedUser managedUser : listUsers()) {
-            if (managedUser.getAuthorities().contains(new SimpleGrantedAuthority(Constant.ROLE_ADMIN))) {
-                adminUsers.add(managedUser.getUsername());
-            }
+    public List<String> listAdminUsers() {
+        SimpleGrantedAuthority adminAuthority = new SimpleGrantedAuthority(Constant.ROLE_ADMIN);
+        return listUsers().stream().filter(user -> user.getAuthorities().contains(adminAuthority))
+                .map(ManagedUser::getUsername).collect(Collectors.toList());
+    }
+
+    @Override
+    public boolean isGlobalAdmin(String username) {
+        try {
+            UserDetails userDetails = loadUserByUsername(username);
+            return isGlobalAdmin(userDetails);
+        } catch (Exception e) {
+            log.debug("Cannot load user by username {}", username, e);
+        }
+        return false;
+    }
+
+    @Override
+    public boolean isGlobalAdmin(UserDetails userDetails) {
+        if (Objects.isNull(userDetails)) {
+            return false;
         }
-        return adminUsers;
+        return userDetails.getAuthorities().stream()
+                .anyMatch(grantedAuthority -> grantedAuthority.getAuthority().equals(ROLE_ADMIN));
+    }
+
+    @Override
+    public boolean containsGlobalAdmin(Set<String> usernames) {
+        return usernames.stream().anyMatch(this::isGlobalAdmin);
+    }
+
+    @Override
+    public Set<String> retainsNormalUser(Set<String> usernames) {
+        return usernames.stream().filter(username -> !isGlobalAdmin(username)).collect(Collectors.toSet());
+    }
+
+    @Override
+    public List<String> listNormalUsers() {
+        SimpleGrantedAuthority adminAuthority = new SimpleGrantedAuthority(Constant.ROLE_ADMIN);
+        return listUsers().stream().filter(user -> !user.getAuthorities().contains(adminAuthority))
+                .map(ManagedUser::getUsername).collect(Collectors.toList());
     }
 
     @Override
@@ -184,5 +209,4 @@ public class KylinUserService implements UserService {
     protected NKylinUserManager getKylinUserManager() {
         return NKylinUserManager.getInstance(KylinConfig.getInstanceFromEnv());
     }
-
 }
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/NUserGroupService.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/NUserGroupService.java
index 7ecd9297c8..c8a50fdbb4 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/NUserGroupService.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/NUserGroupService.java
@@ -29,22 +29,25 @@ import static org.apache.kylin.rest.constant.Constant.ROLE_ADMIN;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Set;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 
 import org.apache.commons.collections.CollectionUtils;
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.msg.MsgPicker;
 import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.metadata.MetadataConstants;
 import org.apache.kylin.metadata.user.ManagedUser;
+import org.apache.kylin.metadata.user.NKylinUserManager;
 import org.apache.kylin.metadata.usergroup.NUserGroupManager;
 import org.apache.kylin.metadata.usergroup.UserGroup;
 import org.apache.kylin.rest.aspect.Transaction;
@@ -54,6 +57,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.security.core.GrantedAuthority;
 import org.springframework.security.core.authority.SimpleGrantedAuthority;
 import org.springframework.stereotype.Component;
 
@@ -173,8 +177,9 @@ public class NUserGroupService implements IUserGroupService {
     public List<String> getAuthoritiesFilterByGroupName(String userGroupName) {
         aclEvaluate.checkIsGlobalAdmin();
         return StringUtils.isEmpty(userGroupName) ? getAllUserGroups()
-                : getAllUserGroups().stream().filter(userGroup -> userGroup.toUpperCase(Locale.ROOT)
-                        .contains(userGroupName.toUpperCase(Locale.ROOT))).collect(Collectors.toList());
+                : getAllUserGroups().stream()
+                        .filter(userGroup -> StringUtils.containsIgnoreCase(userGroup, userGroupName))
+                        .collect(Collectors.toList());
     }
 
     @Override
@@ -185,10 +190,14 @@ public class NUserGroupService implements IUserGroupService {
     @Override
     public List<UserGroup> getUserGroupsFilterByGroupName(String userGroupName) {
         aclEvaluate.checkIsGlobalAdmin();
-        return StringUtils.isEmpty(userGroupName) ? listUserGroups()
-                : getUserGroupManager().getAllGroups().stream().filter(userGroup -> userGroup.getGroupName()
-                        .toUpperCase(Locale.ROOT).contains(userGroupName.toUpperCase(Locale.ROOT)))
-                        .collect(Collectors.toList());
+        if (StringUtils.isEmpty(userGroupName)) {
+            return listUserGroups();
+        }
+        return getUserGroupManager().getAllUsers(path -> {
+            val pathPair = StringUtils.split(path, "/");
+            String groupName = pathPair[pathPair.length - 1];
+            return StringUtils.containsIgnoreCase(groupName, userGroupName);
+        });
     }
 
     @Override
@@ -205,18 +214,21 @@ public class NUserGroupService implements IUserGroupService {
 
     @Override
     public String getUuidByGroupName(String groupName) {
-        val groups = getUserGroupManager().getAllGroups();
-        for (val group : groups) {
-            if (StringUtils.equalsIgnoreCase(groupName, group.getGroupName())) {
-                return group.getUuid();
-            }
+        if (StringUtils.isEmpty(groupName)) {
+            throw new KylinException(USERGROUP_NOT_EXIST,
+                    String.format(Locale.ROOT, MsgPicker.getMsg().getUserGroupNotExist(), groupName));
         }
-        throw new KylinException(USERGROUP_NOT_EXIST,
-                String.format(Locale.ROOT, MsgPicker.getMsg().getUserGroupNotExist(), groupName));
+        List<UserGroup> userGroups = getUserGroupManager()
+                .getAllUsers(path -> StringUtils.endsWithIgnoreCase(path, groupName));
+        if (userGroups.isEmpty()) {
+            throw new KylinException(USERGROUP_NOT_EXIST,
+                    String.format(Locale.ROOT, MsgPicker.getMsg().getUserGroupNotExist(), groupName));
+        }
+        return userGroups.get(0).getUuid();
     }
 
     public boolean exists(String name) {
-        return getAllUserGroups().contains(name);
+        return getUserGroupManager().exists(name);
     }
 
     public ResourceStore getStore() {
@@ -244,23 +256,12 @@ public class NUserGroupService implements IUserGroupService {
     }
 
     public Set<String> listUserGroups(String username) {
-        try {
-            List<String> groups = getAllUserGroups();
-            Set<String> result = new HashSet<>();
-            for (String group : groups) {
-                val users = getGroupMembersByName(group);
-                for (val user : users) {
-                    if (StringUtils.equalsIgnoreCase(username, user.getUsername())) {
-                        result.add(group);
-                        break;
-                    }
-                }
-            }
-            return result;
-        } catch (IOException e) {
-            logger.error("List user groups failed...", e);
-            throw new RuntimeException(e);
+        ManagedUser user = NKylinUserManager.getInstance(KylinConfig.getInstanceFromEnv()).get(username);
+        if (Objects.isNull(user)) {
+            return Collections.emptySet();
         }
+        return user.getAuthorities().stream().map(GrantedAuthority::getAuthority).filter(this::exists)
+                .collect(Collectors.toSet());
     }
 
     private NUserGroupManager getUserGroupManager() {
diff --git a/src/common-service/src/test/java/org/apache/kylin/rest/service/CaseInsensitiveKylinUserServiceTest.java b/src/common-service/src/test/java/org/apache/kylin/rest/service/CaseInsensitiveKylinUserServiceTest.java
index b160c7f9d3..818f8c8c3a 100644
--- a/src/common-service/src/test/java/org/apache/kylin/rest/service/CaseInsensitiveKylinUserServiceTest.java
+++ b/src/common-service/src/test/java/org/apache/kylin/rest/service/CaseInsensitiveKylinUserServiceTest.java
@@ -18,7 +18,6 @@
 
 package org.apache.kylin.rest.service;
 
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -49,9 +48,6 @@ public class CaseInsensitiveKylinUserServiceTest extends NLocalFileMetadataTestC
     @Mock
     UserAclService userAclService = Mockito.spy(UserAclService.class);
 
-    @InjectMocks
-    private KylinUserService kylinUserService1;
-
     @InjectMocks
     @Spy
     private CaseInsensitiveKylinUserService kylinUserService;
@@ -117,14 +113,14 @@ public class CaseInsensitiveKylinUserServiceTest extends NLocalFileMetadataTestC
     }
 
     @Test
-    public void testListAdminUsers() throws IOException {
+    public void testListAdminUsers() {
         List<String> adminUsers = kylinUserService.listAdminUsers();
         Assert.assertEquals(1, adminUsers.size());
         Assert.assertTrue(adminUsers.contains("ADMIN"));
     }
 
     @Test
-    public void testIsGlobalAdmin() throws IOException {
+    public void testIsGlobalAdmin() {
         Assert.assertTrue(kylinUserService.isGlobalAdmin("ADMIN"));
         Assert.assertTrue(kylinUserService.isGlobalAdmin("AdMIN"));
 
@@ -132,14 +128,14 @@ public class CaseInsensitiveKylinUserServiceTest extends NLocalFileMetadataTestC
     }
 
     @Test
-    public void testRetainsNormalUser() throws IOException {
+    public void testRetainsNormalUser() {
         Set<String> normalUsers = kylinUserService.retainsNormalUser(Sets.newHashSet("ADMIN", "adMIN", "NOTEXISTS"));
         Assert.assertEquals(1, normalUsers.size());
         Assert.assertTrue(normalUsers.contains("NOTEXISTS"));
     }
 
     @Test
-    public void testContainsGlobalAdmin() throws IOException {
+    public void testContainsGlobalAdmin() {
         Assert.assertTrue(kylinUserService.containsGlobalAdmin(Sets.newHashSet("ADMIN")));
         Assert.assertTrue(kylinUserService.containsGlobalAdmin(Sets.newHashSet("adMIN")));
         Assert.assertFalse(kylinUserService.containsGlobalAdmin(Sets.newHashSet("adMI N")));
diff --git a/src/common-service/src/test/java/org/apache/kylin/rest/service/KylinUserServiceTest.java b/src/common-service/src/test/java/org/apache/kylin/rest/service/KylinUserServiceTest.java
index d039577498..da5e20f211 100644
--- a/src/common-service/src/test/java/org/apache/kylin/rest/service/KylinUserServiceTest.java
+++ b/src/common-service/src/test/java/org/apache/kylin/rest/service/KylinUserServiceTest.java
@@ -18,9 +18,9 @@
 
 package org.apache.kylin.rest.service;
 
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
+import java.util.Set;
 
 import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
 import org.apache.kylin.rest.constant.Constant;
@@ -36,6 +36,9 @@ import org.springframework.security.core.userdetails.UserDetails;
 import org.springframework.security.core.userdetails.UsernameNotFoundException;
 import org.springframework.test.util.ReflectionTestUtils;
 
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
 import org.apache.kylin.metadata.user.ManagedUser;
 import org.apache.kylin.metadata.user.NKylinUserManager;
 import lombok.extern.slf4j.Slf4j;
@@ -98,18 +101,55 @@ public class KylinUserServiceTest extends NLocalFileMetadataTestCase {
 
     @Test
     public void testUserExists() {
-        ManagedUser user = new ManagedUser();
-        user.setUsername("tTtUser");
-        List<SimpleGrantedAuthority> roles = new ArrayList<>();
-        roles.add(new SimpleGrantedAuthority("ALL_USERS"));
-        user.setGrantedAuthorities(roles);
-        Mockito.doNothing().when(userAclService).updateUserAclPermission(Mockito.any(UserDetails.class),
-                Mockito.any(Permission.class));
-        kylinUserService.createUser(user);
+        createNormalUser("tTtUser");
         Assert.assertTrue(kylinUserService.userExists("tTtUser"));
         Assert.assertTrue(kylinUserService.userExists("tttuser"));
         Assert.assertTrue(kylinUserService.userExists("TTTUSER"));
         Assert.assertFalse(kylinUserService.userExists("NOTEXIST"));
     }
 
+    @Test
+    public void testIsGlobalAdmin() {
+        Assert.assertFalse(kylinUserService.isGlobalAdmin((UserDetails) null));
+
+        UserDetails adminUser = kylinUserService.loadUserByUsername("ADMIN");
+        Assert.assertTrue(kylinUserService.isGlobalAdmin(adminUser));
+        Assert.assertFalse(kylinUserService.isGlobalAdmin("notexist"));
+    }
+
+    @Test
+    public void testContainsGlobalAdmin() {
+        Assert.assertTrue(kylinUserService.containsGlobalAdmin(Sets.newHashSet("ADMIN")));
+        createNormalUser("normalUser1");
+        Assert.assertFalse(kylinUserService.containsGlobalAdmin(Sets.newHashSet("normalUser1")));
+        Assert.assertTrue(kylinUserService.containsGlobalAdmin(Sets.newHashSet("normalUser1", "ADMIN")));
+    }
+
+    @Test
+    public void testRetainsNormalUser() {
+        createNormalUser("normalUser2");
+        createNormalUser("normalUser3");
+
+        Set<String> normalUserSet = kylinUserService
+                .retainsNormalUser(Sets.newHashSet("normalUser2", "normalUser3", "ADMIN"));
+        Assert.assertFalse(normalUserSet.isEmpty());
+        Assert.assertEquals(2, normalUserSet.size());
+    }
+
+    @Test
+    public void testListNormalUsers() {
+        createNormalUser("normalUser4");
+        List<String> normalUsers = kylinUserService.listNormalUsers();
+        Assert.assertFalse(normalUsers.isEmpty());
+        Assert.assertTrue(normalUsers.contains("normalUser4"));
+    }
+
+    private void createNormalUser(String userName) {
+        ManagedUser user = new ManagedUser();
+        user.setUsername(userName);
+        user.setGrantedAuthorities(Lists.newArrayList(new SimpleGrantedAuthority("ALL_USERS")));
+        Mockito.doNothing().when(userAclService).updateUserAclPermission(Mockito.any(UserDetails.class),
+                Mockito.any(Permission.class));
+        kylinUserService.createUser(user);
+    }
 }
diff --git a/src/common-service/src/test/java/org/apache/kylin/rest/service/NUserGroupServiceTest.java b/src/common-service/src/test/java/org/apache/kylin/rest/service/NUserGroupServiceTest.java
index d231dfa8b8..cda761931c 100644
--- a/src/common-service/src/test/java/org/apache/kylin/rest/service/NUserGroupServiceTest.java
+++ b/src/common-service/src/test/java/org/apache/kylin/rest/service/NUserGroupServiceTest.java
@@ -24,10 +24,11 @@ import static org.apache.kylin.rest.constant.Constant.ROLE_ADMIN;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.persistence.transaction.TransactionException;
@@ -62,7 +63,6 @@ public class NUserGroupServiceTest extends ServiceTestBase {
             userGroupService.deleteGroup(group);
         }
         //test group add and get
-        //        userGroupService.addGroup(GROUP_ALL_USERS);
         userGroupService.addGroup("g1");
         userGroupService.addGroup("g2");
         userGroupService.addGroup("g3");
@@ -72,6 +72,7 @@ public class NUserGroupServiceTest extends ServiceTestBase {
         Assert.assertEquals(Lists.newArrayList("g1"), userGroupService.getAuthoritiesFilterByGroupName("g1"));
         val groups = userGroupService.getUserGroupsFilterByGroupName("G");
         Assert.assertEquals(3, groups.size());
+        Assert.assertThrows(KylinException.class, () -> userGroupService.getUuidByGroupName("noexist_group"));
         for (val group : groups) {
             Assert.assertNotNull(group.getUuid());
             Assert.assertTrue(group.getGroupName().contains("g"));
@@ -144,9 +145,9 @@ public class NUserGroupServiceTest extends ServiceTestBase {
     }
 
     @Test
-    public void testAddUserToNotExistGroup() throws Exception {
+    public void testAddUserToNotExistGroup() {
         try {
-            userGroupService.modifyGroupUsers("UNKNOWN", Arrays.asList("ADMIN"));
+            userGroupService.modifyGroupUsers("UNKNOWN", Collections.singletonList("ADMIN"));
         } catch (TransactionException e) {
             Assert.assertTrue(e.getCause().getCause() instanceof KylinException);
             Assert.assertTrue(StringUtils.equals(e.getCause().getCause().getMessage(),
@@ -160,19 +161,21 @@ public class NUserGroupServiceTest extends ServiceTestBase {
     public void testListUserGroups() throws IOException {
         userGroupService.addGroup("t1");
         userGroupService.addGroup("t2");
-        userGroupService.modifyGroupUsers("t1", Arrays.asList("MODELER"));
-        userGroupService.modifyGroupUsers("t2", Arrays.asList("MODELER"));
+        userGroupService.modifyGroupUsers("t1", Collections.singletonList("MODELER"));
+        userGroupService.modifyGroupUsers("t2", Collections.singletonList("MODELER"));
 
+        var emptyGroups = userGroupService.listUserGroups("notexist");
+        Assert.assertTrue(emptyGroups.isEmpty());
         var groups = userGroupService.listUserGroups("MODELER");
         Assert.assertEquals(2, groups.size());
         Assert.assertTrue(groups.contains("t1"));
         Assert.assertTrue(groups.contains("t2"));
         userGroupService.addGroup("t3");
-        userGroupService.modifyGroupUsers("t3", Arrays.asList("MODELER"));
+        userGroupService.modifyGroupUsers("t3", Collections.singletonList("MODELER"));
         groups = userGroupService.listUserGroups("MODELER");
         Assert.assertEquals(3, groups.size());
         Assert.assertTrue(groups.contains("t3"));
-        List<String> userList = Arrays.asList("ADMIN");
+        List<String> userList = Collections.singletonList("ADMIN");
         Assert.assertThrows(RuntimeException.class, () -> userGroupService.modifyGroupUsers("t1", userList));
     }
 
diff --git a/src/common-service/src/test/java/org/apache/kylin/rest/service/UserAclServiceTest.java b/src/common-service/src/test/java/org/apache/kylin/rest/service/UserAclServiceTest.java
index 3814e8bc39..c1c67df8a5 100644
--- a/src/common-service/src/test/java/org/apache/kylin/rest/service/UserAclServiceTest.java
+++ b/src/common-service/src/test/java/org/apache/kylin/rest/service/UserAclServiceTest.java
@@ -53,6 +53,8 @@ import org.springframework.security.core.context.SecurityContextHolder;
 import org.springframework.security.core.userdetails.UserDetails;
 import org.springframework.test.util.ReflectionTestUtils;
 
+import lombok.SneakyThrows;
+
 public class UserAclServiceTest extends ServiceTestBase {
 
     @Mock
@@ -121,8 +123,9 @@ public class UserAclServiceTest extends ServiceTestBase {
     @Test
     public void testGetAllUsersHasGlobalPermission() {
         KylinUserService kylinUserService = new KylinUserService() {
+            @SneakyThrows
             @Override
-            public List<String> listAdminUsers() throws IOException {
+            public List<String> listAdminUsers() {
                 throw new IOException("test");
             }
         };
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/user/NKylinUserManager.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/user/NKylinUserManager.java
index f268ea69a6..4b487a6841 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/user/NKylinUserManager.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/user/NKylinUserManager.java
@@ -23,13 +23,16 @@ import static org.apache.kylin.common.persistence.ResourceStore.USER_ROOT;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.NavigableSet;
+import java.util.Objects;
 import java.util.Set;
 import java.util.stream.Collectors;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.ResourceStore;
-import org.apache.kylin.metadata.cachesync.CachedCrudAssist;
 import org.apache.kylin.common.persistence.transaction.UnitOfWork;
+import org.apache.kylin.metadata.cachesync.CachedCrudAssist;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.security.core.authority.SimpleGrantedAuthority;
@@ -81,12 +84,15 @@ public class NKylinUserManager {
     }
 
     public ManagedUser get(String name) {
+        if (StringUtils.isEmpty(name)) {
+            return null;
+        }
+        ManagedUser user = crud.get(name);
         if (getConfig().isMetadataKeyCaseInSensitiveEnabled()) {
-            return crud.get(name);
-        } else {
-            return crud.listAll().stream().filter(managedUser -> managedUser.getUsername().equalsIgnoreCase(name))
-                    .findAny().orElse(null);
+            return user;
         }
+        return Objects.nonNull(user) ? user
+                : crud.listPartial(path -> StringUtils.endsWithIgnoreCase(path, name)).stream().findAny().orElse(null);
     }
 
     public List<ManagedUser> list() {
@@ -116,7 +122,21 @@ public class NKylinUserManager {
     }
 
     public boolean exists(String username) {
-        return get(username) != null;
+        if (StringUtils.isEmpty(username)) {
+            return false;
+        }
+        ManagedUser user = crud.get(username);
+        if (getConfig().isMetadataKeyCaseInSensitiveEnabled()) {
+            return Objects.nonNull(user);
+        }
+        if (Objects.nonNull(user)) {
+            return true;
+        }
+        NavigableSet<String> users = getStore().listResources(USER_ROOT);
+        if (Objects.isNull(users)) {
+            return false;
+        }
+        return users.stream().anyMatch(path -> StringUtils.endsWithIgnoreCase(path, username));
     }
 
     public Set<String> getUserGroups(String userName) {
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/usergroup/NUserGroupManager.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/usergroup/NUserGroupManager.java
index 1c51457eee..f9c5115f66 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/usergroup/NUserGroupManager.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/usergroup/NUserGroupManager.java
@@ -22,17 +22,21 @@ import static org.apache.kylin.common.exception.ServerErrorCode.DUPLICATE_USERGR
 import static org.apache.kylin.common.exception.ServerErrorCode.USERGROUP_NOT_EXIST;
 import static org.apache.kylin.common.persistence.ResourceStore.USER_GROUP_ROOT;
 
+import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
+import java.util.NavigableSet;
+import java.util.Objects;
+import java.util.function.Predicate;
 import java.util.stream.Collectors;
 
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.msg.MsgPicker;
 import org.apache.kylin.common.persistence.ResourceStore;
-import org.apache.kylin.metadata.cachesync.CachedCrudAssist;
 import org.apache.kylin.common.persistence.transaction.UnitOfWork;
+import org.apache.kylin.metadata.cachesync.CachedCrudAssist;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -73,15 +77,29 @@ public class NUserGroupManager {
     }
 
     public List<String> getAllGroupNames() {
-        return ImmutableList.copyOf(crud.listAll().stream().map(UserGroup::getGroupName).collect(Collectors.toList()));
+        NavigableSet<String> userGroups = getStore().listResources(USER_GROUP_ROOT);
+        if (Objects.isNull(userGroups)) {
+            return Collections.emptyList();
+        }
+        return userGroups.stream().map(path -> {
+            String[] pathArray = StringUtils.split(path, "/");
+            return pathArray[pathArray.length - 1];
+        }).collect(Collectors.toList());
     }
 
     public List<UserGroup> getAllGroups() {
         return ImmutableList.copyOf(crud.listAll());
     }
 
+    public List<UserGroup> getAllUsers(Predicate<String> predicate) {
+        return ImmutableList.copyOf(crud.listPartial(predicate));
+    }
+
     public boolean exists(String name) {
-        return getAllGroupNames().contains(name);
+        if (StringUtils.isEmpty(name)) {
+            return false;
+        }
+        return Objects.nonNull(crud.get(name));
     }
 
     public UserGroup copyForWrite(UserGroup userGroup) {
diff --git a/src/core-metadata/src/test/java/org/apache/kylin/metadata/user/NKylinUserManagerTest.java b/src/core-metadata/src/test/java/org/apache/kylin/metadata/user/NKylinUserManagerTest.java
new file mode 100644
index 0000000000..c5246228dd
--- /dev/null
+++ b/src/core-metadata/src/test/java/org/apache/kylin/metadata/user/NKylinUserManagerTest.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.metadata.user;
+
+import java.util.Arrays;
+
+import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
+import org.apache.kylin.rest.constant.Constant;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+import org.springframework.security.core.authority.SimpleGrantedAuthority;
+
+class NKylinUserManagerTest extends NLocalFileMetadataTestCase {
+
+    @BeforeEach
+    void setUp() {
+        this.createTestMetadata();
+    }
+
+    @AfterEach
+    void tearDown() {
+        this.cleanupTestMetadata();
+    }
+
+    @Test
+    void testGetAndExist() {
+        NKylinUserManager manager = NKylinUserManager.getInstance(getTestConfig());
+
+        // no user
+        Assertions.assertFalse(manager.exists("noexist"));
+        Assertions.assertFalse(manager.exists(null));
+
+        // has ADMIN
+        ManagedUser adminUser = new ManagedUser("ADMIN", "KYLIN", false, Arrays.asList(//
+                new SimpleGrantedAuthority(Constant.ROLE_ADMIN), new SimpleGrantedAuthority(Constant.ROLE_ANALYST),
+                new SimpleGrantedAuthority(Constant.ROLE_MODELER)));
+        manager.update(adminUser);
+        Assertions.assertFalse(manager.exists("noexist"));
+
+        // admin exists
+        Assertions.assertTrue(manager.exists("ADMIN"));
+
+        getTestConfig().setProperty("kylin.metadata.key-case-insensitive", "true");
+        Assertions.assertTrue(manager.exists("ADMIN"));
+
+        // get
+        Assertions.assertNotNull(manager.get("ADMIN"));
+        getTestConfig().setProperty("kylin.metadata.key-case-insensitive", "false");
+        Assertions.assertNotNull(manager.get("ADMIN"));
+        Assertions.assertNull(manager.get("notexist"));
+        Assertions.assertNull(manager.get(null));
+    }
+}
diff --git a/src/core-metadata/src/test/java/org/apache/kylin/metadata/usergroup/NUserGroupManagerTest.java b/src/core-metadata/src/test/java/org/apache/kylin/metadata/usergroup/NUserGroupManagerTest.java
index 1ebbdd337b..96647be4f0 100644
--- a/src/core-metadata/src/test/java/org/apache/kylin/metadata/usergroup/NUserGroupManagerTest.java
+++ b/src/core-metadata/src/test/java/org/apache/kylin/metadata/usergroup/NUserGroupManagerTest.java
@@ -18,7 +18,10 @@
 
 package org.apache.kylin.metadata.usergroup;
 
+import java.util.Locale;
+
 import org.apache.kylin.common.exception.KylinException;
+import org.apache.kylin.common.msg.MsgPicker;
 import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
 import org.junit.After;
 import org.junit.Assert;
@@ -45,27 +48,18 @@ public class NUserGroupManagerTest extends NLocalFileMetadataTestCase {
         group.add("g1");
         group.add("g2");
         group.add("g3");
+        Assert.assertFalse(group.exists(null));
         Assert.assertTrue(group.exists("g1"));
         Assert.assertFalse(group.exists("g4"));
         Assert.assertEquals(Lists.newArrayList("g1", "g2", "g3"), group.getAllGroupNames());
-        try {
-            group.add("g1");
-            Assert.fail("expecting some AlreadyExistsException here");
-        } catch (KylinException e) {
-            Assert.assertEquals("The user group \"g1\" already exists. Please check and try again.", e.getMessage());
-        }
+        Assert.assertEquals("g1", group.getAllUsers(path -> path.endsWith("g1")).get(0).getGroupName());
 
+        Assert.assertThrows(String.format(Locale.ROOT, MsgPicker.getMsg().getUserGroupExist(), "g1"),
+                KylinException.class, () -> group.add("g1"));
         group.delete("g1");
         Assert.assertFalse(group.exists("g1"));
-
-        try {
-            group.delete("g1");
-            Assert.fail("expecting some AlreadyExistsException here");
-        } catch (Exception e) {
-            Assert.assertTrue(e instanceof KylinException);
-            Assert.assertEquals("Invalid values in parameter “group_name“. The value g1 doesn’t exist.",
-                    e.getMessage());
-        }
+        Assert.assertThrows(String.format(Locale.ROOT, MsgPicker.getMsg().getUserGroupNotExist(), "g1"),
+                KylinException.class, () -> group.delete("g1"));
     }
 
     @Test
@@ -74,23 +68,11 @@ public class NUserGroupManagerTest extends NLocalFileMetadataTestCase {
         group.add("test1");
         group.add("test2");
         group.add("test3");
-        try {
-            group.add("TEST1");
-            Assert.fail("expecting some AlreadyExistsException here");
-        } catch (KylinException e) {
-            Assert.assertEquals("The user group \"test1\" already exists. Please check and try again.", e.getMessage());
-        }
-
+        Assert.assertThrows(String.format(Locale.ROOT, MsgPicker.getMsg().getUserGroupExist(), "TEST1"),
+                KylinException.class, () -> group.add("TEST1"));
         group.delete("Test1");
         Assert.assertFalse(group.exists("test1"));
-
-        try {
-            group.delete("test1");
-            Assert.fail("expecting some AlreadyExistsException here");
-        } catch (Exception e) {
-            Assert.assertTrue(e instanceof KylinException);
-            Assert.assertEquals("Invalid values in parameter “group_name“. The value test1 doesn’t exist.",
-                    e.getMessage());
-        }
+        Assert.assertThrows(String.format(Locale.ROOT, MsgPicker.getMsg().getUserGroupNotExist(), "test1"),
+                KylinException.class, () -> group.delete("test1"));
     }
 }


[kylin] 20/34: KYLIN-5455 Added parameter check items for the /api/models/semantic API. If PartitionDesc is null, then set MultiPartitionDesc to null

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit cf0b7a83d01c647a2d416c71c1117ad41452154e
Author: huangsheng <hu...@163.com>
AuthorDate: Fri Dec 30 14:00:13 2022 +0800

    KYLIN-5455 Added parameter check items for the /api/models/semantic API. If PartitionDesc is null, then set MultiPartitionDesc to null
    
    KYLIN-5455 Optimize the multi-partition model judgment logic
---
 .../src/main/java/org/apache/kylin/metadata/model/NDataModel.java     | 4 +++-
 .../src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java | 2 +-
 .../src/main/scala/org/apache/kylin/engine/spark/job/SegmentJob.java  | 3 ++-
 3 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/NDataModel.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/NDataModel.java
index e8df524f38..9c6209ec59 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/NDataModel.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/NDataModel.java
@@ -1455,7 +1455,9 @@ public class NDataModel extends RootPersistentEntity {
     }
 
     public boolean isMultiPartitionModel() {
-        return multiPartitionDesc != null && CollectionUtils.isNotEmpty(multiPartitionDesc.getColumns());
+        // a multi-partition model can be determined only if neither partitionDesc nor multiPartitionDesc is null
+        return partitionDesc != null && multiPartitionDesc != null
+                && CollectionUtils.isNotEmpty(multiPartitionDesc.getColumns());
     }
 
     public List<Integer> getMeasureRelatedCols() {
diff --git a/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java
index df0b60b2b6..3e585b7392 100644
--- a/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java
+++ b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java
@@ -4550,7 +4550,7 @@ public class ModelServiceTest extends SourceTestCase {
         Assert.assertEquals(3, model.getMultiPartitionDesc().getPartitions().size());
 
         // PartitionDesc change
-        modelService.updatePartitionColumn(getProject(), modelId, null, model.getMultiPartitionDesc());
+        modelService.updatePartitionColumn(getProject(), modelId, new PartitionDesc(), model.getMultiPartitionDesc());
         val df1 = dfm.getDataflow(modelId);
         val model1 = modelManager.getDataModelDesc(modelId);
         Assert.assertEquals(0, df1.getSegments().getSegments().size());
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/SegmentJob.java b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/SegmentJob.java
index 0b969ed923..28174f8d4f 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/SegmentJob.java
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/SegmentJob.java
@@ -218,7 +218,8 @@ public abstract class SegmentJob extends SparkApplication {
     }
 
     protected boolean isPartitioned() {
-        return Objects.nonNull(indexPlan.getModel().getMultiPartitionDesc());
+        return Objects.nonNull(indexPlan.getModel().getPartitionDesc())
+                && Objects.nonNull(indexPlan.getModel().getMultiPartitionDesc());
     }
 
     private boolean needSkipSegment(NDataSegment dataSegment) {


[kylin] 27/34: [DIRTY] fix unstable UT

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 0074b3d29a5a7a74ff7a08f3365eedffd24ae77b
Author: ChenLiang.Lu <31...@users.noreply.github.com>
AuthorDate: Tue Jan 3 16:47:24 2023 +0800

    [DIRTY] fix unstable UT
---
 .../apache/kylin/metadata/streaming/StreamingJobRecordManagerTest.java   | 1 -
 1 file changed, 1 deletion(-)

diff --git a/src/core-metadata/src/test/java/org/apache/kylin/metadata/streaming/StreamingJobRecordManagerTest.java b/src/core-metadata/src/test/java/org/apache/kylin/metadata/streaming/StreamingJobRecordManagerTest.java
index 9c078091b0..fac9c7f96b 100644
--- a/src/core-metadata/src/test/java/org/apache/kylin/metadata/streaming/StreamingJobRecordManagerTest.java
+++ b/src/core-metadata/src/test/java/org/apache/kylin/metadata/streaming/StreamingJobRecordManagerTest.java
@@ -105,7 +105,6 @@ public class StreamingJobRecordManagerTest extends NLocalFileMetadataTestCase {
     }
 
     @Test
-    @Ignore
     public void testDropTable() {
         try {
             val jdbcRawRecStore = (JdbcStreamingJobRecordStore) ReflectionUtils.getField(streamingJobRecordManager,


[kylin] 11/34: KYLIN-5448 fix snyk vulnerabilities, upgrade protobuf-java from 3.16.1 to 3.16.3

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 80d5df4df2a475a27ea8697c484b306f14d05e32
Author: huangsheng <hu...@163.com>
AuthorDate: Tue Dec 27 14:37:48 2022 +0800

    KYLIN-5448 fix snyk vulnerabilities, upgrade protobuf-java from 3.16.1 to 3.16.3
    
    * KYLIN-5448 fix snyk vulnerabilities, upgrade protobuf-java from 3.16.1 to 3.16.3
---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index 1d7ee3f267..4a87ab0a1a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -223,7 +223,7 @@
 
         <groovy-all.version>2.4.21</groovy-all.version>
         <slf4j-ext.version>1.7.26</slf4j-ext.version>
-        <protobuf-java.version>3.16.1</protobuf-java.version>
+        <protobuf-java.version>3.16.3</protobuf-java.version>
 
         <!-- Sonar -->
         <sonar.scala.version>${scala.version}</sonar.scala.version>


[kylin] 24/34: KYLIN-5458 max dimension combination doesn't work

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 299bb6707734dd2164392b594a5ff47dcfa6b65c
Author: Pengfei Zhan <de...@gmail.com>
AuthorDate: Fri Dec 30 17:53:49 2022 +0800

    KYLIN-5458 max dimension combination doesn't work
---
 .../exception/OutOfMaxCombinationException.java    |  9 ++--
 .../common/exception/code/ErrorCodeServer.java     |  1 +
 .../resources/kylin_error_msg_conf_cn.properties   |  1 +
 .../resources/kylin_error_msg_conf_en.properties   |  1 +
 .../main/resources/kylin_errorcode_conf.properties |  1 +
 .../metadata/cube/cuboid/CuboidScheduler.java      |  6 ++-
 .../metadata/cube/cuboid/KECuboidSchedulerV1.java  | 24 ++++-----
 .../metadata/cube/cuboid/KECuboidSchedulerV2.java  | 21 ++++----
 .../cube/model/RuleBasedCuboidDescTest.java        | 56 ++++++++++++++++++++-
 .../rest/request/UpdateRuleBasedCuboidRequest.java |  2 +-
 .../kylin/rest/service/IndexPlanServiceTest.java   | 58 +++++++++++-----------
 11 files changed, 121 insertions(+), 59 deletions(-)

diff --git a/src/core-common/src/main/java/org/apache/kylin/common/exception/OutOfMaxCombinationException.java b/src/core-common/src/main/java/org/apache/kylin/common/exception/OutOfMaxCombinationException.java
index 61f6877660..9a389f3c0d 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/exception/OutOfMaxCombinationException.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/exception/OutOfMaxCombinationException.java
@@ -17,10 +17,11 @@
  */
 package org.apache.kylin.common.exception;
 
-public class OutOfMaxCombinationException extends RuntimeException {
+import org.apache.kylin.common.exception.code.ErrorCodeProducer;
 
-    public OutOfMaxCombinationException(String message) {
-        super(message);
-    }
+public class OutOfMaxCombinationException extends KylinException {
 
+    public OutOfMaxCombinationException(ErrorCodeProducer errorCodeProducer, Object... args) {
+        super(errorCodeProducer, args);
+    }
 }
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/exception/code/ErrorCodeServer.java b/src/core-common/src/main/java/org/apache/kylin/common/exception/code/ErrorCodeServer.java
index a51a2f54ef..67cf57afed 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/exception/code/ErrorCodeServer.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/exception/code/ErrorCodeServer.java
@@ -108,6 +108,7 @@ public enum ErrorCodeServer implements ErrorCodeProducer {
     INDEX_DUPLICATE("KE-010012202"),
     INDEX_PARAMETER_INVALID("KE-010012203"),
     SHARD_BY_COLUMN_NOT_IN_INDEX("KE-010012204"),
+    OUT_OF_MAX_DIM_COMBINATION("KE-010012205"),
 
     // 10043XX parameter check
     REQUEST_PARAMETER_EMPTY_OR_VALUE_EMPTY("KE-010043201"),
diff --git a/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties b/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties
index 7697a01c8f..77070d8924 100644
--- a/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties
+++ b/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties
@@ -110,6 +110,7 @@ KE-010012201=索引元数据不一致。请尝试刷新下列模型的所有 Seg
 KE-010012202=因为存在相同的索引,无法新建该索引。请修改。
 KE-010012203=参数 “%s” 仅支持 “%s”。
 KE-010012204=ShardBy 列不在索引包含的列中,请修改后重试。
+KE-010012205=聚合组生成的索引数超出系统允许的最大索引数(%s)。
 
 ## 10043XX parameter check
 KE-010043201=请求参数 “%s” 为空或值为空。请检查请求参数是否正确填写。
diff --git a/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties b/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties
index 6acc8373fc..7c67ccdae4 100644
--- a/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties
+++ b/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties
@@ -108,6 +108,7 @@ KE-010012201=Index metadata might be inconsistent. Please try refreshing all seg
 KE-010012202=Can't add this index, as the same index already exists. Please modify.
 KE-010012203=The parameter "%s" only supports "%s".
 KE-010012204=The ShardBy column is not included in the index. Please fix and try again.
+KE-010012205=The number of indexes generated by the aggregate group exceeds the maximum number(%s) of indexes allowed by the system.
 
 ## 10043XX parameter check
 KE-010043201=Request parameter "%s" is empty or value is empty. Please check the request parameters.
diff --git a/src/core-common/src/main/resources/kylin_errorcode_conf.properties b/src/core-common/src/main/resources/kylin_errorcode_conf.properties
index 8fb380af3f..091be9bf2b 100644
--- a/src/core-common/src/main/resources/kylin_errorcode_conf.properties
+++ b/src/core-common/src/main/resources/kylin_errorcode_conf.properties
@@ -120,6 +120,7 @@ KE-010012201
 KE-010012202
 KE-010012203
 KE-010012204
+KE-010012205
 
 ## 10043XX parameter check
 KE-010043201
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/CuboidScheduler.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/CuboidScheduler.java
index 75fbce510c..6da2b6469f 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/CuboidScheduler.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/CuboidScheduler.java
@@ -39,8 +39,6 @@ import lombok.val;
  */
 public abstract class CuboidScheduler implements Serializable {
 
-    protected static final String OUT_OF_MAX_COMBINATION_MSG_FORMAT = "Too many cuboids for the cube. Cuboid combination reached %s and limit is %s. Abort calculation.";
-
     public static CuboidScheduler getInstance(IndexPlan indexPlan, RuleBasedIndex ruleBasedIndex, boolean skipAll) {
         if (ruleBasedIndex.getSchedulerVersion() == 1) {
             return new KECuboidSchedulerV1(indexPlan, ruleBasedIndex, skipAll);
@@ -89,6 +87,10 @@ public abstract class CuboidScheduler implements Serializable {
         return indexPlan;
     }
 
+    public long getAggGroupCombinationSize() {
+        return indexPlan.getConfig().getCubeAggrGroupMaxCombination();
+    }
+
     protected ColOrder extractDimAndMeaFromBigInt(BigInteger bigInteger) {
         val allDims = ruleBasedAggIndex.getDimensions();
         val allMeas = ruleBasedAggIndex.getMeasures();
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/KECuboidSchedulerV1.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/KECuboidSchedulerV1.java
index 82cc18bf78..325898a158 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/KECuboidSchedulerV1.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/KECuboidSchedulerV1.java
@@ -24,7 +24,6 @@ import java.math.BigInteger;
 import java.util.Collection;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
@@ -34,8 +33,8 @@ import java.util.stream.Stream;
 import javax.annotation.Nullable;
 
 import org.apache.commons.lang.StringUtils;
-import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.exception.OutOfMaxCombinationException;
+import org.apache.kylin.common.exception.code.ErrorCodeServer;
 import org.apache.kylin.common.util.ThreadUtil;
 import org.apache.kylin.metadata.cube.model.IndexPlan;
 import org.apache.kylin.metadata.cube.model.RuleBasedIndex;
@@ -163,19 +162,16 @@ public class KECuboidSchedulerV1 extends CuboidScheduler {
      * @return Cuboid collection
      */
     private Set<CuboidBigInteger> buildTreeBottomUp(SetCreator setCreatorFunc) {
-        KylinConfig config = indexPlan.getConfig();
-        long maxCombination = config.getCubeAggrGroupMaxCombination() * 10;
-        maxCombination = maxCombination < 0 ? Long.MAX_VALUE : maxCombination;
-
         Set<CuboidBigInteger> cuboidHolder = setCreatorFunc.create();
 
         // build tree structure
+        long maxCombination = getAggGroupCombinationSize() * 10;
+        maxCombination = maxCombination < 0 ? Integer.MAX_VALUE : maxCombination;
         Set<CuboidBigInteger> children = getOnTreeParentsByLayer(Sets.newHashSet(new CuboidBigInteger(BigInteger.ZERO)),
                 setCreatorFunc, maxCombination); // lowest level cuboids
         while (!children.isEmpty()) {
             if (cuboidHolder.size() + children.size() > maxCombination) {
-                throw new OutOfMaxCombinationException(String.format(Locale.ROOT, OUT_OF_MAX_COMBINATION_MSG_FORMAT,
-                        cuboidHolder.size() + children.size(), maxCombination));
+                throw new OutOfMaxCombinationException(ErrorCodeServer.OUT_OF_MAX_DIM_COMBINATION, maxCombination);
             }
             cuboidHolder.addAll(children);
             children = getOnTreeParentsByLayer(children, setCreatorFunc, maxCombination);
@@ -210,19 +206,20 @@ public class KECuboidSchedulerV1 extends CuboidScheduler {
                 if (cuboidId == null) {
                     return false;
                 }
-                if (++cuboidCount > maxCombination) {
-                    throw new OutOfMaxCombinationException(
-                            String.format(Locale.ROOT, OUT_OF_MAX_COMBINATION_MSG_FORMAT, cuboidCount, maxCombination));
+                if (cuboidCount > maxCombination) {
+                    throw new OutOfMaxCombinationException(ErrorCodeServer.OUT_OF_MAX_DIM_COMBINATION, maxCombination);
                 }
 
                 BigInteger cuboidBits = cuboidId.getDimMeas();
 
                 if (cuboidBits.equals(ruleBasedAggIndex.getFullMask()) && isBaseCuboidValid) {
+                    cuboidCount++;
                     return true;
                 }
 
                 for (NAggregationGroup agg : ruleBasedAggIndex.getAggregationGroups()) {
                     if (agg.isOnTree(cuboidBits) && agg.checkDimCap(cuboidBits)) {
+                        cuboidCount++;
                         return true;
                     }
                 }
@@ -253,8 +250,9 @@ public class KECuboidSchedulerV1 extends CuboidScheduler {
         Set<CuboidBigInteger> children = getOnTreeParentsByLayer(Sets.newHashSet(new CuboidBigInteger(BigInteger.ZERO)),
                 agg, newHashSet); // lowest level cuboids
         while (!children.isEmpty()) {
-            if (cuboidHolder.size() + children.size() > indexPlan.getConfig().getCubeAggrGroupMaxCombination()) {
-                throw new OutOfMaxCombinationException("Holder size larger than kylin.cube.aggrgroup.max-combination");
+            if (cuboidHolder.size() + children.size() > getAggGroupCombinationSize()) {
+                throw new OutOfMaxCombinationException(ErrorCodeServer.OUT_OF_MAX_DIM_COMBINATION,
+                        getAggGroupCombinationSize());
             }
             cuboidHolder.addAll(children);
             children = getOnTreeParentsByLayer(children, agg, newHashSet);
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/KECuboidSchedulerV2.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/KECuboidSchedulerV2.java
index b1705c0525..eff3825eb7 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/KECuboidSchedulerV2.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/KECuboidSchedulerV2.java
@@ -22,12 +22,12 @@ import java.math.BigInteger;
 import java.util.Collection;
 import java.util.Comparator;
 import java.util.List;
-import java.util.Locale;
 import java.util.Set;
 import java.util.stream.Collectors;
 
 import org.apache.commons.lang3.ArrayUtils;
 import org.apache.kylin.common.exception.OutOfMaxCombinationException;
+import org.apache.kylin.common.exception.code.ErrorCodeServer;
 import org.apache.kylin.metadata.cube.model.IndexPlan;
 import org.apache.kylin.metadata.cube.model.RuleBasedIndex;
 
@@ -40,14 +40,13 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 public class KECuboidSchedulerV2 extends CuboidScheduler {
 
-    private final BigInteger max;
     private final int measureSize;
     private transient final OrderedSet<ColOrder> allColOrders;
 
     KECuboidSchedulerV2(IndexPlan indexPlan, RuleBasedIndex ruleBasedAggIndex, boolean skipAll) {
         super(indexPlan, ruleBasedAggIndex);
 
-        this.max = ruleBasedAggIndex.getFullMask();
+        BigInteger max = ruleBasedAggIndex.getFullMask();
         this.measureSize = ruleBasedAggIndex.getMeasures().size();
 
         // handle nRuleBasedCuboidDesc has 0 dimensions
@@ -55,19 +54,20 @@ public class KECuboidSchedulerV2 extends CuboidScheduler {
         if (max.bitCount() == 0 || skipAll) {
             return;
         }
-        long maxCombination = indexPlan.getConfig().getCubeAggrGroupMaxCombination() * 10;
-        maxCombination = maxCombination < 0 ? Long.MAX_VALUE : maxCombination;
+
         if (ruleBasedAggIndex.getBaseLayoutEnabled() == null) {
             ruleBasedAggIndex.setBaseLayoutEnabled(true);
         }
         if (Boolean.TRUE.equals(ruleBasedAggIndex.getBaseLayoutEnabled())) {
             allColOrders.add(new ColOrder(ruleBasedAggIndex.getDimensions(), ruleBasedAggIndex.getMeasures()));
         }
+
+        long maxCombinationSize = getAggGroupCombinationSize() * 10;
+        maxCombinationSize = maxCombinationSize < 0 ? Integer.MAX_VALUE : maxCombinationSize;
         for (NAggregationGroup agg : ruleBasedAggIndex.getAggregationGroups()) {
             allColOrders.addAll(calculateCuboidsForAggGroup(agg));
-            if (allColOrders.size() > maxCombination) {
-                throw new OutOfMaxCombinationException(String.format(Locale.ROOT, OUT_OF_MAX_COMBINATION_MSG_FORMAT,
-                        allColOrders.size(), maxCombination));
+            if (allColOrders.size() > maxCombinationSize) {
+                throw new OutOfMaxCombinationException(ErrorCodeServer.OUT_OF_MAX_DIM_COMBINATION, maxCombinationSize);
             }
         }
     }
@@ -106,8 +106,9 @@ public class KECuboidSchedulerV2 extends CuboidScheduler {
         Set<CuboidBigInteger> children = getOnTreeParentsByLayer(Sets.newHashSet(new CuboidBigInteger(BigInteger.ZERO)),
                 agg); // lowest level cuboids
         while (!children.isEmpty()) {
-            if (cuboidHolder.size() + children.size() > indexPlan.getConfig().getCubeAggrGroupMaxCombination()) {
-                throw new OutOfMaxCombinationException("Holder size larger than kylin.cube.aggrgroup.max-combination");
+            if (cuboidHolder.size() + children.size() > getAggGroupCombinationSize()) {
+                throw new OutOfMaxCombinationException(ErrorCodeServer.OUT_OF_MAX_DIM_COMBINATION,
+                        getAggGroupCombinationSize());
             }
             cuboidHolder.addAll(children);
             children = getOnTreeParentsByLayer(children, agg);
diff --git a/src/core-metadata/src/test/java/org/apache/kylin/metadata/cube/model/RuleBasedCuboidDescTest.java b/src/core-metadata/src/test/java/org/apache/kylin/metadata/cube/model/RuleBasedCuboidDescTest.java
index 404258a670..0e9c9890cc 100644
--- a/src/core-metadata/src/test/java/org/apache/kylin/metadata/cube/model/RuleBasedCuboidDescTest.java
+++ b/src/core-metadata/src/test/java/org/apache/kylin/metadata/cube/model/RuleBasedCuboidDescTest.java
@@ -166,16 +166,70 @@ public class RuleBasedCuboidDescTest extends NLocalFileMetadataTestCase {
 
         CubeTestUtils.createTmpModel(getTestConfig(), newPlan);
 
+        try {
+            indexPlanManager.createIndexPlan(newPlan);
+            Assert.fail();
+        } catch (IllegalArgumentException e) {
+            Assert.assertEquals("The number of indexes generated by the aggregate group exceeds the maximum "
+                    + "number(40960) of indexes allowed by the system.", e.getMessage());
+        }
+    }
+
+    @Test
+    public void testGenTooManyCuboidsWithMaxDimCompIsOne() throws IOException {
+        val indexPlanManager = NIndexPlanManager.getInstance(getTestConfig(), "default");
+        var newPlan = JsonUtil.readValue(getClass().getResourceAsStream("/enormous_rule_based_cube.json"),
+                IndexPlan.class);
+        newPlan.setLastModified(0L);
+        List<NAggregationGroup> aggregationGroups = newPlan.getRuleBasedIndex().getAggregationGroups();
+        Assert.assertEquals(1, aggregationGroups.size());
+        aggregationGroups.get(0).getSelectRule().setDimCap(1);
+
+        CubeTestUtils.createTmpModel(getTestConfig(), newPlan);
+
+        IndexPlan indexPlan = indexPlanManager.createIndexPlan(newPlan);
+        List<LayoutEntity> allLayouts = indexPlan.getAllLayouts();
+        Assert.assertEquals(34, allLayouts.size());
+    }
+
+    @Test
+    public void testGenTooManyCuboidsWithScheduleVersion2() throws IOException {
+        val indexPlanManager = NIndexPlanManager.getInstance(getTestConfig(), "default");
+        var newPlan = JsonUtil.readValue(getClass().getResourceAsStream("/enormous_rule_based_cube.json"),
+                IndexPlan.class);
+        newPlan.setLastModified(0L);
+        newPlan.getRuleBasedIndex().setSchedulerVersion(2);
+
+        CubeTestUtils.createTmpModel(getTestConfig(), newPlan);
+
         try {
             indexPlanManager.createIndexPlan(newPlan);
             Assert.fail();
         } catch (IllegalArgumentException e) {
             Assert.assertEquals(
-                    "Too many cuboids for the cube. Cuboid combination reached 41449 and limit is 40960. Abort calculation.",
+                    "The number of indexes generated by the aggregate group exceeds the maximum number(4096) of indexes allowed by the system.",
                     e.getMessage());
         }
     }
 
+    @Test
+    public void testGenTooManyCuboidsWithScheduleV2AndMaxDimCompIsOne() throws IOException {
+        val indexPlanManager = NIndexPlanManager.getInstance(getTestConfig(), "default");
+        var newPlan = JsonUtil.readValue(getClass().getResourceAsStream("/enormous_rule_based_cube.json"),
+                IndexPlan.class);
+        newPlan.setLastModified(0L);
+        newPlan.getRuleBasedIndex().setSchedulerVersion(2);
+        List<NAggregationGroup> aggregationGroups = newPlan.getRuleBasedIndex().getAggregationGroups();
+        Assert.assertEquals(1, aggregationGroups.size());
+        aggregationGroups.get(0).getSelectRule().setDimCap(1);
+
+        CubeTestUtils.createTmpModel(getTestConfig(), newPlan);
+
+        IndexPlan indexPlan = indexPlanManager.createIndexPlan(newPlan);
+        List<LayoutEntity> allLayouts = indexPlan.getAllLayouts();
+        Assert.assertEquals(34, allLayouts.size());
+    }
+
     @Test
     public void testGenCuboidsWithAuto() throws Exception {
         val indexPlanManager = NIndexPlanManager.getInstance(getTestConfig(), "default");
diff --git a/src/modeling-service/src/main/java/org/apache/kylin/rest/request/UpdateRuleBasedCuboidRequest.java b/src/modeling-service/src/main/java/org/apache/kylin/rest/request/UpdateRuleBasedCuboidRequest.java
index c262ee221a..c919a1f735 100644
--- a/src/modeling-service/src/main/java/org/apache/kylin/rest/request/UpdateRuleBasedCuboidRequest.java
+++ b/src/modeling-service/src/main/java/org/apache/kylin/rest/request/UpdateRuleBasedCuboidRequest.java
@@ -51,7 +51,7 @@ public class UpdateRuleBasedCuboidRequest implements ProjectInsensitiveRequest {
 
     @Builder.Default
     @JsonProperty("scheduler_version")
-    private int schedulerVersion = 1;
+    private int schedulerVersion = 2;
 
     @Builder.Default
     @JsonProperty("load_data")
diff --git a/src/modeling-service/src/test/java/org/apache/kylin/rest/service/IndexPlanServiceTest.java b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/IndexPlanServiceTest.java
index 72d5df1edf..2a8db33ac3 100644
--- a/src/modeling-service/src/test/java/org/apache/kylin/rest/service/IndexPlanServiceTest.java
+++ b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/IndexPlanServiceTest.java
@@ -17,19 +17,33 @@
  */
 package org.apache.kylin.rest.service;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-import org.apache.kylin.engine.spark.job.ExecutableAddCuboidHandler;
-import org.apache.kylin.engine.spark.job.NSparkCubingJob;
-import lombok.extern.slf4j.Slf4j;
-import lombok.val;
-import lombok.var;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.INDEX_DUPLICATE;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.LAYOUT_NOT_EXISTS;
+import static org.apache.kylin.metadata.cube.model.IndexEntity.Source.CUSTOM_TABLE_INDEX;
+import static org.apache.kylin.metadata.cube.model.IndexEntity.Source.RECOMMENDED_TABLE_INDEX;
+import static org.apache.kylin.metadata.model.SegmentStatusEnum.READY;
+import static org.apache.kylin.metadata.model.SegmentStatusEnum.WARNING;
+import static org.hamcrest.Matchers.is;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.stream.Collectors;
+
 import org.apache.commons.collections.ListUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.msg.Message;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.cube.model.SelectRule;
+import org.apache.kylin.engine.spark.job.ExecutableAddCuboidHandler;
+import org.apache.kylin.engine.spark.job.NSparkCubingJob;
 import org.apache.kylin.metadata.cube.cuboid.NAggregationGroup;
 import org.apache.kylin.metadata.cube.model.IndexEntity;
 import org.apache.kylin.metadata.cube.model.IndexPlan;
@@ -69,24 +83,12 @@ import org.mockito.Mock;
 import org.mockito.Mockito;
 import org.springframework.test.util.ReflectionTestUtils;
 
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Locale;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.stream.Collectors;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
 
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.INDEX_DUPLICATE;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.LAYOUT_NOT_EXISTS;
-import static org.apache.kylin.metadata.cube.model.IndexEntity.Source.CUSTOM_TABLE_INDEX;
-import static org.apache.kylin.metadata.cube.model.IndexEntity.Source.RECOMMENDED_TABLE_INDEX;
-import static org.apache.kylin.metadata.model.SegmentStatusEnum.READY;
-import static org.apache.kylin.metadata.model.SegmentStatusEnum.WARNING;
-import static org.hamcrest.Matchers.is;
+import lombok.val;
+import lombok.var;
+import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
 public class IndexPlanServiceTest extends SourceTestCase {
@@ -1258,8 +1260,8 @@ public class IndexPlanServiceTest extends SourceTestCase {
                 indexPlanService.getIndexGraph(getProject(), modelId, 100).getSegmentToComplementCount());
 
         // mark a layout tobedelete
-        indexManager.updateIndexPlan(modelId,
-                copyForWrite -> copyForWrite.markWhiteIndexToBeDelete(modelId, Sets.newHashSet(tobeDeleteLayoutId), Collections.emptyMap()));
+        indexManager.updateIndexPlan(modelId, copyForWrite -> copyForWrite.markWhiteIndexToBeDelete(modelId,
+                Sets.newHashSet(tobeDeleteLayoutId), Collections.emptyMap()));
 
         //remove tobedelete layout from seg1
         val newDf = dfManager.getDataflow(modelId);
@@ -1323,12 +1325,12 @@ public class IndexPlanServiceTest extends SourceTestCase {
     }
 
     @Test
-    public void testCalculateAggIndexCountWhenTotalCuboidsOutOfMaxComb() throws Exception {
+    public void testCalculateAggIndexCountWhenTotalCuboidsOutOfMaxComb() {
         testOutOfCombination(1);
     }
 
     @Test
-    public void testCalculateAggIndexCountWhenTotalCuboidsOutOfMaxComb_WithSchedulerV2() throws Exception {
+    public void testCalculateAggIndexCountWhenTotalCuboidsOutOfMaxComb_WithSchedulerV2() {
         testOutOfCombination(2);
     }
 


[kylin] 06/34: KYLIN-5446 remove systools module

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 8be8d54aa1c8ff915fd4afc3c7feabce4fc3875b
Author: qianhao.zhou <z....@gmail.com>
AuthorDate: Wed Dec 14 14:06:02 2022 +0800

    KYLIN-5446 remove systools module
    
    * refactor common-server
    refactor query-service
    refactor job-service
    refactor modeling service & refactor HdfsCapacityMetrics
    remove useless code
    fix UT
    
    * move ACL entity
    
    * move ACL entity
    
    * move ACL entity
    
    * move ACL entity
    
    * move ACL entity
    
    * move ACL entity
    
    * fix code smell
    
    * remove systools
    
    * remove systools module
    
    * fix code smell
    
    * fix sonar issue
    
    * remove ignored test
    
    * Revert "remove ignored test"
    
    This reverts commit a6bafdace41d11e40368b13e38338f5b86d6bdde.
    
    * fix comments
    
    Co-authored-by: qhzhou <qi...@kyligence.io>
---
 pom.xml                                            |  18 +--
 src/common-server/pom.xml                          |  37 ------
 .../kylin/rest/advice/BaseExceptionHandler.java    |  60 +++++++++
 .../kylin/rest/controller/NBasicController.java    |  87 +++---------
 .../rest/controller/fixture/FixtureController.java |   9 +-
 .../rest/controller/NBasicControllerTest.java      |  11 +-
 src/common-service/pom.xml                         |   4 +
 .../apache/kylin/rest/monitor/MonitorReporter.java |  20 ++-
 .../rest/response/ExecutorMemoryResponse.java      |   0
 .../rest/response/ExecutorThreadInfoResponse.java  |   0
 .../security/FillEmptyAuthorizationFilter.java     |   0
 .../apache/kylin/rest/service/AccessService.java   |  19 ++-
 .../kylin/rest/service/AsyncTaskService.java       |   0
 .../apache/kylin/rest/service/UserAclService.java  |   3 +-
 .../kylin/query/exception/BusyQueryException.java  |   0
 .../org/apache/kylin/query/util/QueryLimiter.java  |   2 +-
 .../kylin/rest/exception/ForbiddenException.java   |   4 -
 .../rest/exception/InternalErrorException.java     |   4 -
 .../kylin/rest/exception/NotFoundException.java    |   4 -
 .../rest/exception/UnauthorizedException.java      |   3 -
 src/core-metadata/pom.xml                          |   7 +-
 .../org/apache/kylin/constants/AclConstants.java}  |  16 ++-
 .../cube/storage/TotalStorageCollector.java        |   6 +-
 .../apache/kylin/metadata/model/ISourceAware.java  |  16 +--
 .../apache/kylin/metadata/model/PartitionDesc.java |   9 --
 .../apache/kylin/metrics/HdfsCapacityMetrics.java  | 118 +++++++++--------
 .../org/apache/kylin/rest/security/AceImpl.java    |  13 +-
 .../kylin/rest/security/AclEntityFactory.java      |  24 ++--
 .../apache/kylin/rest/security/AclEntityType.java  |   9 +-
 .../org/apache/kylin/rest/security/AclManager.java |  22 ++-
 .../apache/kylin/rest/security/AclPermission.java  |   0
 .../kylin/rest/security/AclPermissionEnum.java     |  28 ++--
 .../kylin/rest/security/AclPermissionFactory.java  |  11 +-
 .../org/apache/kylin/rest/security/AclRecord.java  |   6 +-
 .../rest/security/CompositeAclPermission.java      |   0
 .../kylin/rest/security/ExternalAclProvider.java   |  45 +++----
 .../kylin/rest/security/KerberosLoginManager.java  |  29 ++--
 .../rest/security/KylinAclPermissionEvaluator.java |   0
 .../security/KylinPermissionGrantingStrategy.java  |   0
 .../apache/kylin/rest/security/LegacyAceInfo.java  |   0
 .../kylin/rest/security/MutableAclRecord.java      |   2 +-
 .../rest/security/MutableHttpServletRequest.java   |   0
 .../rest/security/NoneBCryptPasswordEncoder.java   |   0
 .../kylin/rest/security/ObjectIdentityImpl.java    |   9 +-
 .../security/PasswordPlaceholderConfigurer.java    |   7 +-
 .../org/apache/kylin/rest/security/SidInfo.java    |   5 +-
 .../org/apache/kylin/rest/security/UserAcl.java    |   0
 .../apache/kylin/rest/security/UserAclManager.java |   0
 .../kylin/rest/security/UserLockRuleUtil.java      |   5 +-
 .../org/apache/kylin/rest/service/AclService.java  |   0
 .../apache/kylin/rest/util/AclPermissionUtil.java  |   0
 .../java/org/apache/kylin/rest/util/AclUtil.java   |   0
 .../kylin/rest/util/CreateTableFromJson.java       |   2 +-
 .../java/org/apache/kylin/util/DataRangeUtils.java |  96 ++++++++++++++
 .../storage/ProjectStorageInfoCollectorTest.java   |  11 +-
 .../kylin/metrics/HdfsCapacityMetricsTest.java     |  71 ++++------
 .../rest/security/ExternalAclProviderTest.java     |   0
 .../KylinPermissionGrantingStrategyTest.java       |   0
 .../kylin/rest/security/UserAclManagerTest.java    |   0
 .../kylin/rest/util/AclPermissionUtilTest.java     |   0
 src/data-loading-server/pom.xml                    |   2 +-
 .../kylin/rest/controller/BaseController.java      |  57 --------
 .../kylin/rest/controller/SampleController.java    |   3 +-
 .../kylin/rest/controller/SegmentController.java   |  11 +-
 .../controller/open/OpenSegmentController.java     |   3 +-
 .../rest/controller/v2/SegmentControllerV2.java    |   3 +-
 .../kylin/rest/controller/BaseControllerTest.java  |  23 ++--
 src/data-loading-service/pom.xml                   |  11 ++
 .../apache/kylin/rest/service/JobErrorTest.java    |   0
 .../apache/kylin/rest/service/JobServiceTest.java  |   0
 .../kylin/rest/service/MockClusterManager.java     |   0
 .../org/apache/kylin/rest/service/StageTest.java   |   0
 src/datasource-service/pom.xml                     |   5 -
 .../apache/kylin/rest/service/TableService.java    |   7 +-
 src/job-service/pom.xml                            |  20 ---
 .../config/initialize/SchedulerEventBusTest.java   | 147 +++++++++------------
 src/metadata-server/pom.xml                        |  33 ++---
 .../rest/controller/open/OpenModelController.java  |   5 +-
 .../kylin/rest/controller/NModelController.java    |   5 +-
 .../controller/v2/NProjectControllerKylin.java     |   3 +-
 src/modeling-service/pom.xml                       |   8 +-
 src/query-server/pom.xml                           |  10 +-
 .../kylin/rest/controller/NQueryController.java    |   3 +-
 .../rest/controller/SparkSourceController.java     |   0
 .../rest/controller/SparkSourceControllerTest.java |   0
 src/query-service/pom.xml                          |  51 ++-----
 .../apache/kylin/rest/service/MonitorService.java  |  39 +++---
 src/second-storage/core-ui/pom.xml                 |   2 +-
 .../rest/security/KerberosLoginManagerTest.java    |   0
 src/spark-project/spark-ddl-plugin/pom.xml         |   4 -
 src/systools/pom.xml                               |   6 +-
 src/tool/pom.xml                                   |   4 +-
 .../kylin/tool/upgrade/UpdateUserAclTool.java      |   4 +-
 93 files changed, 604 insertions(+), 717 deletions(-)

diff --git a/pom.xml b/pom.xml
index 1e0b4ca899..3f1fd958c0 100644
--- a/pom.xml
+++ b/pom.xml
@@ -391,7 +391,6 @@
         <module>src/second-storage/core-ui</module>
         <module>src/second-storage/clickhouse</module>
         <module>src/second-storage/clickhouse-it</module>
-        <module>src/systools</module>
         <module>src/modeling-service</module>
         <module>src/query-service</module>
         <module>src/datasource-service</module>
@@ -573,11 +572,6 @@
                 <artifactId>kylin-streaming-service</artifactId>
                 <version>${project.version}</version>
             </dependency>
-            <dependency>
-                <groupId>org.apache.kylin</groupId>
-                <artifactId>kylin-systools</artifactId>
-                <version>${project.version}</version>
-            </dependency>
             <dependency>
                 <groupId>org.apache.kylin</groupId>
                 <artifactId>distributed-lock-ext</artifactId>
@@ -842,12 +836,6 @@
                 <version>${project.version}</version>
                 <type>test-jar</type>
             </dependency>
-            <dependency>
-                <groupId>org.apache.kylin</groupId>
-                <artifactId>kylin-systools</artifactId>
-                <version>${project.version}</version>
-                <type>test-jar</type>
-            </dependency>
             <dependency>
                 <groupId>org.apache.kylin</groupId>
                 <artifactId>kylin-soft-affinity-cache</artifactId>
@@ -2303,6 +2291,12 @@
                     </exclusion>
                 </exclusions>
             </dependency>
+            <dependency>
+                <groupId>javax.servlet</groupId>
+                <artifactId>servlet-api</artifactId>
+                <scope>provided</scope>
+                <version>2.5</version>
+            </dependency>
 
             <!-- Tomcat -->
             <dependency>
diff --git a/src/common-server/pom.xml b/src/common-server/pom.xml
index b563fe64ab..8a16402ba1 100644
--- a/src/common-server/pom.xml
+++ b/src/common-server/pom.xml
@@ -81,24 +81,6 @@
                 </exclusion>
             </exclusions>
         </dependency>
-        <dependency>
-            <groupId>org.springframework.boot</groupId>
-            <artifactId>spring-boot-starter</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.springframework.boot</groupId>
-                    <artifactId>spring-boot-starter-logging</artifactId>
-                </exclusion>
-                <exclusion>
-                    <artifactId>spring-context</artifactId>
-                    <groupId>org.springframework</groupId>
-                </exclusion>
-                <exclusion>
-                    <artifactId>snakeyaml</artifactId>
-                    <groupId>org.yaml</groupId>
-                </exclusion>
-            </exclusions>
-        </dependency>
         <dependency>
             <groupId>io.springfox</groupId>
             <artifactId>springfox-boot-starter</artifactId>
@@ -110,20 +92,6 @@
             <scope>provided</scope>
         </dependency>
         <!-- Spring Core -->
-        <dependency>
-            <groupId>org.springframework.boot</groupId>
-            <artifactId>spring-boot-autoconfigure</artifactId>
-            <exclusions>
-                <exclusion>
-                    <artifactId>spring-context</artifactId>
-                    <groupId>org.springframework</groupId>
-                </exclusion>
-                <exclusion>
-                    <artifactId>spring-core</artifactId>
-                    <groupId>org.springframework</groupId>
-                </exclusion>
-            </exclusions>
-        </dependency>
 
         <dependency>
             <groupId>org.apache.hadoop</groupId>
@@ -151,11 +119,6 @@
             <artifactId>spring-test</artifactId>
             <scope>test</scope>
         </dependency>
-        <dependency>
-            <groupId>org.junit.vintage</groupId>
-            <artifactId>junit-vintage-engine</artifactId>
-            <scope>test</scope>
-        </dependency>
         <dependency>
             <groupId>org.powermock</groupId>
             <artifactId>powermock-module-junit4</artifactId>
diff --git a/src/common-server/src/main/java/org/apache/kylin/rest/advice/BaseExceptionHandler.java b/src/common-server/src/main/java/org/apache/kylin/rest/advice/BaseExceptionHandler.java
new file mode 100644
index 0000000000..b1d4a2d37b
--- /dev/null
+++ b/src/common-server/src/main/java/org/apache/kylin/rest/advice/BaseExceptionHandler.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kylin.rest.advice;
+
+import org.apache.kylin.rest.exception.ForbiddenException;
+import org.apache.kylin.rest.exception.InternalErrorException;
+import org.apache.kylin.rest.exception.NotFoundException;
+import org.apache.kylin.rest.exception.UnauthorizedException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.ControllerAdvice;
+import org.springframework.web.bind.annotation.ExceptionHandler;
+import org.springframework.web.context.request.WebRequest;
+
+@ControllerAdvice
+public class BaseExceptionHandler {
+
+    private static final Logger logger = LoggerFactory.getLogger(BaseExceptionHandler.class);
+
+    @ExceptionHandler(ForbiddenException.class)
+    public final ResponseEntity<Void> handle(ForbiddenException ex, WebRequest request) {
+        logger.error("uncaught exception", ex);
+        return new ResponseEntity<>(HttpStatus.FORBIDDEN);
+    }
+
+    @ExceptionHandler(InternalErrorException.class)
+    public final ResponseEntity<Void> handle(InternalErrorException ex, WebRequest request) {
+        logger.error("uncaught exception", ex);
+        return new ResponseEntity<>(HttpStatus.INTERNAL_SERVER_ERROR);
+    }
+
+    @ExceptionHandler(NotFoundException.class)
+    public final ResponseEntity<Void> handle(NotFoundException ex, WebRequest request) {
+        logger.error("uncaught exception", ex);
+        return new ResponseEntity<>(HttpStatus.NOT_FOUND);
+    }
+
+    @ExceptionHandler(UnauthorizedException.class)
+    public final ResponseEntity<Void> handle(UnauthorizedException ex, WebRequest request) {
+        logger.error("uncaught exception", ex);
+        return new ResponseEntity<>(HttpStatus.UNAUTHORIZED);
+    }
+}
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/controller/NBasicController.java b/src/common-server/src/main/java/org/apache/kylin/rest/controller/NBasicController.java
similarity index 89%
rename from src/common-service/src/main/java/org/apache/kylin/rest/controller/NBasicController.java
rename to src/common-server/src/main/java/org/apache/kylin/rest/controller/NBasicController.java
index ab39cab22d..a403338984 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/controller/NBasicController.java
+++ b/src/common-server/src/main/java/org/apache/kylin/rest/controller/NBasicController.java
@@ -40,10 +40,7 @@ import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_EMP
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.TIME_INVALID_RANGE_END_LESS_THAN_EQUALS_START;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.TIME_INVALID_RANGE_IN_RANGE;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.TIME_INVALID_RANGE_LESS_THAN_ZERO;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.TIME_INVALID_RANGE_NOT_CONSISTENT;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.TIME_INVALID_RANGE_NOT_FORMAT_MS;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.USER_UNAUTHORIZED;
-import static org.apache.kylin.metadata.model.PartitionDesc.transformTimestamp2Format;
 
 import java.io.File;
 import java.io.FileInputStream;
@@ -51,6 +48,7 @@ import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.file.Files;
 import java.text.SimpleDateFormat;
 import java.util.Arrays;
 import java.util.Collection;
@@ -81,7 +79,6 @@ import org.apache.kylin.common.exception.ServerErrorCode;
 import org.apache.kylin.common.msg.Message;
 import org.apache.kylin.common.msg.MsgPicker;
 import org.apache.kylin.common.persistence.transaction.TransactionException;
-import org.apache.kylin.common.util.DateFormat;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.job.constant.JobStatusEnum;
@@ -103,6 +100,7 @@ import org.apache.kylin.rest.response.ErrorResponse;
 import org.apache.kylin.rest.service.ProjectService;
 import org.apache.kylin.rest.service.UserService;
 import org.apache.kylin.rest.util.PagingUtil;
+import org.apache.kylin.util.DataRangeUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -131,6 +129,7 @@ import org.springframework.web.client.ResponseExtractor;
 import org.springframework.web.client.RestTemplate;
 import org.springframework.web.method.annotation.MethodArgumentTypeMismatchException;
 
+import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 
 import lombok.SneakyThrows;
@@ -138,11 +137,11 @@ import lombok.val;
 
 public class NBasicController {
     private static final Logger logger = LoggerFactory.getLogger(NBasicController.class);
-    protected static final int MAX_NAME_LENGTH = 50;
+    public static final int MAX_NAME_LENGTH = 50;
 
-    protected static final long FIVE_MINUTE_MILLISECOND = TimeUnit.MINUTES.toMillis(5);
+    public static final long FIVE_MINUTE_MILLISECOND = TimeUnit.MINUTES.toMillis(5);
 
-    protected static final long THIRTY_DAYS_MILLISECOND = TimeUnit.DAYS.toMillis(30);
+    public static final long THIRTY_DAYS_MILLISECOND = TimeUnit.DAYS.toMillis(30);
 
     @Autowired
     @Qualifier("normalRestTemplate")
@@ -297,12 +296,11 @@ public class NBasicController {
     }
 
     public List<String> makeUserNameCaseInSentive(List<String> userNames) {
-        List<String> names = Lists.newArrayList();
         if (CollectionUtils.isNotEmpty(userNames)) {
-            userNames.forEach(name -> names.add(makeUserNameCaseInSentive(name)));
-            userNames = names;
+            return userNames.stream().map(this::makeUserNameCaseInSentive).collect(Collectors.toList());
+        } else {
+            return Collections.emptyList();
         }
-        return userNames;
     }
 
     protected void checkNonNegativeIntegerArg(String fieldName, Object fieldValue) {
@@ -359,7 +357,7 @@ public class NBasicController {
         setDownloadResponse(file, file.getName(), contentType, response);
     }
 
-    protected static boolean isAdmin() {
+    public static boolean isAdmin() {
         boolean isAdmin = false;
         Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
         if (authentication != null) {
@@ -387,21 +385,15 @@ public class NBasicController {
         return data;
     }
 
-    public List<?> getDataNoEnvelopeResponse(List<?> result, int offset, int limit) {
-        return PagingUtil.cutPage(result, offset, limit);
-    }
-
     public String getHost(String serverHost, String serverName) {
         String host = KylinConfig.getInstanceFromEnv().getModelExportHost();
-        host = Optional.ofNullable(Optional.ofNullable(host).orElse(serverHost)).orElse(serverName);
-        return host;
+        return Optional.ofNullable(Optional.ofNullable(host).orElse(serverHost)).orElse(serverName);
     }
 
     public int getPort(Integer serverPort, Integer requestServerPort) {
         Integer port = KylinConfig.getInstanceFromEnv().getModelExportPort() == -1 ? null
                 : KylinConfig.getInstanceFromEnv().getModelExportPort();
-        port = Optional.ofNullable(Optional.ofNullable(port).orElse(serverPort)).orElse(requestServerPort);
-        return port;
+        return Optional.ofNullable(Optional.ofNullable(port).orElse(serverPort)).orElse(requestServerPort);
     }
 
     public String checkProjectName(String project) {
@@ -423,11 +415,10 @@ public class NBasicController {
         }
     }
 
-    public static List<String> checkSqlIsNotNull(List<String> rawSqls) {
+    public static void checkSqlIsNotNull(List<String> rawSqls) {
         if (CollectionUtils.isEmpty(rawSqls)) {
             throw new KylinException(INVALID_PARAMETER, MsgPicker.getMsg().getSqlListIsEmpty());
         }
-        return rawSqls;
     }
 
     protected void checkSegmentParms(String[] ids, String[] names) {
@@ -501,7 +492,7 @@ public class NBasicController {
     }
 
     public void validateDataRange(String start, String end) {
-        validateDataRange(start, end, null);
+        DataRangeUtils.validateDataRange(start, end, null);
         if (StringUtils.isNotEmpty(start) && StringUtils.isNotEmpty(end)) {
             long differenceMillisecond = Long.parseLong(end) - Long.parseLong(start);
             if (differenceMillisecond < FIVE_MINUTE_MILLISECOND || differenceMillisecond > THIRTY_DAYS_MILLISECOND) {
@@ -510,44 +501,6 @@ public class NBasicController {
         }
     }
 
-    public void validateDataRange(String start, String end, String partitionColumnFormat) {
-        if (StringUtils.isEmpty(start) && StringUtils.isEmpty(end)) {
-            return;
-        }
-        doValidateDataRange(start, end, partitionColumnFormat);
-    }
-
-    public void doValidateDataRange(String start, String end, String partitionColumnFormat) {
-        if (StringUtils.isNotEmpty(start) && StringUtils.isNotEmpty(end)) {
-            long startLong = 0;
-            long endLong = 0;
-
-            try {
-                startLong = Long.parseLong(start);
-                endLong = Long.parseLong(end);
-            } catch (Exception e) {
-                throw new KylinException(TIME_INVALID_RANGE_NOT_FORMAT_MS);
-            }
-
-            if (startLong < 0 || endLong < 0) {
-                throw new KylinException(TIME_INVALID_RANGE_LESS_THAN_ZERO);
-            }
-
-            try {
-                startLong = DateFormat.getFormatTimeStamp(start, transformTimestamp2Format(partitionColumnFormat));
-                endLong = DateFormat.getFormatTimeStamp(end, transformTimestamp2Format(partitionColumnFormat));
-            } catch (Exception e) {
-                throw new KylinException(TIME_INVALID_RANGE_NOT_FORMAT_MS);
-            }
-
-            if (startLong >= endLong)
-                throw new KylinException(TIME_INVALID_RANGE_END_LESS_THAN_EQUALS_START);
-
-        } else {
-            throw new KylinException(TIME_INVALID_RANGE_NOT_CONSISTENT);
-        }
-    }
-
     public void validateDateTimeFormatPattern(String pattern) {
         if (pattern.isEmpty()) {
             throw new KylinException(DATETIME_FORMAT_EMPTY);
@@ -568,7 +521,7 @@ public class NBasicController {
         }
     }
 
-    private ResponseEntity<byte[]> getHttpResponse(final HttpServletRequest request, String url) throws Exception {
+    private ResponseEntity<byte[]> getHttpResponse(final HttpServletRequest request, String url) throws IOException {
         byte[] body = IOUtils.toByteArray(request.getInputStream());
         HttpHeaders headers = new HttpHeaders();
         Collections.list(request.getHeaderNames())
@@ -587,12 +540,10 @@ public class NBasicController {
 
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN)
     public void downloadFromRemoteHost(final HttpServletRequest request, String url,
-            HttpServletResponse servletResponse) throws Exception {
+            HttpServletResponse servletResponse) throws IOException {
         File temporaryZipFile = KylinConfigBase.getDiagFileName();
-        temporaryZipFile.getParentFile().mkdirs();
-        if (!temporaryZipFile.createNewFile()) {
-            throw new RuntimeException("create temporary zip file failed");
-        }
+        Preconditions.checkState(temporaryZipFile.getParentFile().mkdirs(), "create temporary zip file folder failed");
+        Preconditions.checkState(temporaryZipFile.createNewFile(), "create temporary zip file failed");
         RequestCallback requestCallback = x -> {
             Collections.list(request.getHeaderNames())
                     .forEach(k -> x.getHeaders().put(k, Collections.list(request.getHeaders(k))));
@@ -609,7 +560,7 @@ public class NBasicController {
 
         String fileName = restTemplate.execute(url, HttpMethod.GET, requestCallback, responseExtractor);
 
-        try (InputStream in = new FileInputStream(temporaryZipFile);
+        try (InputStream in = Files.newInputStream(temporaryZipFile.toPath());
                 OutputStream out = servletResponse.getOutputStream()) {
             servletResponse.reset();
             servletResponse.setContentLengthLong(temporaryZipFile.length());
diff --git a/src/common-service/src/test/java/org/apache/kylin/rest/controller/fixture/FixtureController.java b/src/common-server/src/main/java/org/apache/kylin/rest/controller/fixture/FixtureController.java
similarity index 82%
rename from src/common-service/src/test/java/org/apache/kylin/rest/controller/fixture/FixtureController.java
rename to src/common-server/src/main/java/org/apache/kylin/rest/controller/fixture/FixtureController.java
index 0855e3858d..e9f50d019b 100644
--- a/src/common-service/src/test/java/org/apache/kylin/rest/controller/fixture/FixtureController.java
+++ b/src/common-server/src/main/java/org/apache/kylin/rest/controller/fixture/FixtureController.java
@@ -19,16 +19,17 @@
 package org.apache.kylin.rest.controller.fixture;
 
 import org.apache.kylin.common.exception.KylinException;
-import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.controller.NBasicController;
+import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestMethod;
 import org.springframework.web.bind.annotation.RestController;
 
 @RestController
 public class FixtureController extends NBasicController {
 
-    @RequestMapping("/api/handleErrors")
-    public EnvelopeResponse request() {
-        return new EnvelopeResponse(KylinException.CODE_SUCCESS, null, "");
+    @RequestMapping(value = "/api/handleErrors", method = RequestMethod.GET)
+    public EnvelopeResponse<Void> request() {
+        return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, null, "");
     }
 }
diff --git a/src/common-service/src/test/java/org/apache/kylin/rest/controller/NBasicControllerTest.java b/src/common-server/src/test/java/org/apache/kylin/rest/controller/NBasicControllerTest.java
similarity index 96%
rename from src/common-service/src/test/java/org/apache/kylin/rest/controller/NBasicControllerTest.java
rename to src/common-server/src/test/java/org/apache/kylin/rest/controller/NBasicControllerTest.java
index e436db2066..55bfc1f257 100644
--- a/src/common-service/src/test/java/org/apache/kylin/rest/controller/NBasicControllerTest.java
+++ b/src/common-server/src/test/java/org/apache/kylin/rest/controller/NBasicControllerTest.java
@@ -50,6 +50,7 @@ import org.apache.kylin.rest.controller.fixture.FixtureController;
 import org.apache.kylin.rest.exception.ForbiddenException;
 import org.apache.kylin.rest.exception.NotFoundException;
 import org.apache.kylin.rest.exception.UnauthorizedException;
+import org.apache.kylin.util.DataRangeUtils;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -217,10 +218,10 @@ public class NBasicControllerTest extends NLocalFileMetadataTestCase {
 
     @Test
     public void testTimeRangeValid() {
-        nBasicController.validateDataRange("0", "86400000", "yyyy-MM-dd");
-        nBasicController.validateDataRange("1000000000000", "2200000000000", "yyyy-MM-dd");
-        nBasicController.validateDataRange("0", "86400000", PartitionDesc.TimestampType.MILLISECOND.name);
-        nBasicController.validateDataRange("1000000000000", "2200000000000", PartitionDesc.TimestampType.SECOND.name);
+        DataRangeUtils.validateDataRange("0", "86400000", "yyyy-MM-dd");
+        DataRangeUtils.validateDataRange("1000000000000", "2200000000000", "yyyy-MM-dd");
+        DataRangeUtils.validateDataRange("0", "86400000", PartitionDesc.TimestampType.MILLISECOND.name);
+        DataRangeUtils.validateDataRange("1000000000000", "2200000000000", PartitionDesc.TimestampType.SECOND.name);
     }
 
     @Test
@@ -236,7 +237,7 @@ public class NBasicControllerTest extends NLocalFileMetadataTestCase {
         }
         thrown.expect(KylinException.class);
         thrown.expectMessage("The end time must be greater than the start time");
-        nBasicController.validateDataRange(start, end, "yyyy-MM-dd");
+        DataRangeUtils.validateDataRange(start, end, "yyyy-MM-dd");
     }
 
     @Test
diff --git a/src/common-service/pom.xml b/src/common-service/pom.xml
index 8a9c91c534..adaff3899b 100644
--- a/src/common-service/pom.xml
+++ b/src/common-service/pom.xml
@@ -91,6 +91,10 @@
             <groupId>org.springframework.session</groupId>
             <artifactId>spring-session-core</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-webmvc</artifactId>
+        </dependency>
         <!-- hadoop -->
         <dependency>
             <groupId>org.apache.hadoop</groupId>
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/monitor/MonitorReporter.java b/src/common-service/src/main/java/org/apache/kylin/rest/monitor/MonitorReporter.java
index b526201cb6..16bfc47612 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/monitor/MonitorReporter.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/monitor/MonitorReporter.java
@@ -44,6 +44,8 @@ import com.google.common.base.Preconditions;
 
 public class MonitorReporter {
     private static final Logger logger = LoggerFactory.getLogger(MonitorReporter.class);
+    private final String nodeType;
+    private final String serverPort;
 
     private ScheduledExecutorService dataCollectorExecutor;
     private static final int MAX_SCHEDULED_TASKS = 5;
@@ -54,8 +56,7 @@ public class MonitorReporter {
 
     private static final long REPORT_MONITOR_METRICS_SECONDS = 1;
 
-    private KapConfig kapConfig;
-    private Long periodInMilliseconds;
+    private final Long periodInMilliseconds;
 
     @VisibleForTesting
     public int reportInitialDelaySeconds = 0;
@@ -70,8 +71,10 @@ public class MonitorReporter {
         reportMonitorMetricsExecutor = Executors
                 .newSingleThreadScheduledExecutor(new NamedThreadFactory("report_monitor_metrics"));
 
-        this.kapConfig = KapConfig.getInstanceFromEnv();
-        periodInMilliseconds = kapConfig.getMonitorInterval();
+        KapConfig kapConfig = KapConfig.getInstanceFromEnv();
+        this.periodInMilliseconds = kapConfig.getMonitorInterval();
+        this.nodeType = kapConfig.getKylinConfig().getServerMode();
+        this.serverPort = kapConfig.getKylinConfig().getServerPort();
     }
 
     public static MonitorReporter getInstance() {
@@ -93,7 +96,7 @@ public class MonitorReporter {
     }
 
     private String getLocalPort() {
-        return kapConfig.getKylinConfig().getServerPort();
+        return serverPort;
     }
 
     private static String getLocalPid() {
@@ -102,7 +105,7 @@ public class MonitorReporter {
     }
 
     private String getNodeType() {
-        return kapConfig.getKylinConfig().getServerMode();
+        return this.nodeType;
     }
 
     private <T extends MonitorMetric> T createMonitorMetric(T monitorMetric) {
@@ -166,11 +169,6 @@ public class MonitorReporter {
     }
 
     public void submit(AbstractMonitorCollectTask collectTask) {
-        if (!kapConfig.isMonitorEnabled()) {
-            logger.warn("Monitor reporter is not enabled!");
-            return;
-        }
-
         // for UT
         if (!started) {
             logger.warn("MonitorReporter is not started!");
diff --git a/src/data-loading-service/src/main/java/org/apache/kylin/rest/response/ExecutorMemoryResponse.java b/src/common-service/src/main/java/org/apache/kylin/rest/response/ExecutorMemoryResponse.java
similarity index 100%
rename from src/data-loading-service/src/main/java/org/apache/kylin/rest/response/ExecutorMemoryResponse.java
rename to src/common-service/src/main/java/org/apache/kylin/rest/response/ExecutorMemoryResponse.java
diff --git a/src/data-loading-service/src/main/java/org/apache/kylin/rest/response/ExecutorThreadInfoResponse.java b/src/common-service/src/main/java/org/apache/kylin/rest/response/ExecutorThreadInfoResponse.java
similarity index 100%
rename from src/data-loading-service/src/main/java/org/apache/kylin/rest/response/ExecutorThreadInfoResponse.java
rename to src/common-service/src/main/java/org/apache/kylin/rest/response/ExecutorThreadInfoResponse.java
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/FillEmptyAuthorizationFilter.java b/src/common-service/src/main/java/org/apache/kylin/rest/security/FillEmptyAuthorizationFilter.java
similarity index 100%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/FillEmptyAuthorizationFilter.java
rename to src/common-service/src/main/java/org/apache/kylin/rest/security/FillEmptyAuthorizationFilter.java
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/AccessService.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/AccessService.java
index cd402e0c9f..41230847eb 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/AccessService.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/AccessService.java
@@ -77,6 +77,7 @@ import org.apache.kylin.common.persistence.transaction.AccessRevokeEventNotifier
 import org.apache.kylin.common.persistence.transaction.UnitOfWork;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.constants.AclConstants;
 import org.apache.kylin.metadata.MetadataConstants;
 import org.apache.kylin.metadata.project.NProjectManager;
 import org.apache.kylin.metadata.project.ProjectInstance;
@@ -602,11 +603,11 @@ public class AccessService extends BasicService {
     private Pair<String, Pair<Boolean, String>> getUserMaximumPermissionWithSourceInProject(String project,
             String username) throws IOException {
         if (isGlobalAdmin(username)) {
-            return Pair.newPair(ExternalAclProvider.ADMINISTRATION, Pair.newPair(Boolean.FALSE, null));
+            return Pair.newPair(AclConstants.ADMINISTRATION, Pair.newPair(Boolean.FALSE, null));
         }
 
         if (hasGlobalAdminGroup(username)) {
-            return Pair.newPair(ExternalAclProvider.ADMINISTRATION, Pair.newPair(Boolean.TRUE, ROLE_ADMIN));
+            return Pair.newPair(AclConstants.ADMINISTRATION, Pair.newPair(Boolean.TRUE, ROLE_ADMIN));
         }
 
         // get user's greater permission between user and groups
@@ -692,12 +693,11 @@ public class AccessService extends BasicService {
         if (Objects.nonNull(authentication)) {
             val userName = authentication.getName();
             if (userAclService.canAdminUserQuery(userName)) {
-                return Collections.singleton(ExternalAclProvider.DATA_QUERY);
+                return Collections.singleton(AclConstants.DATA_QUERY);
             }
             if (userService.isGlobalAdmin(userName)) {
                 val hasDataQueryPermission = userAclService.hasUserAclPermissionInProject(userName, project);
-                return hasDataQueryPermission ? Collections.singleton(ExternalAclProvider.DATA_QUERY)
-                        : Collections.emptySet();
+                return hasDataQueryPermission ? Collections.singleton(AclConstants.DATA_QUERY) : Collections.emptySet();
             }
             return getUserNormalExtPermissions(projectUuid, userName).stream()
                     .map(ExternalAclProvider::convertToExternalPermission).collect(Collectors.toSet());
@@ -730,7 +730,7 @@ public class AccessService extends BasicService {
     private String getGroupPermissionInProject(String project, String groupName) throws IOException {
         checkSid(groupName, false);
         if (ROLE_ADMIN.equals(groupName)) {
-            return ExternalAclProvider.ADMINISTRATION;
+            return AclConstants.ADMINISTRATION;
         }
         Map<Sid, Integer> projectPermissions = getProjectPermission(project);
         int mask = projectPermissions.get(getSid(groupName, false));
@@ -777,10 +777,7 @@ public class AccessService extends BasicService {
             return projects.stream().map(ProjectInstance::getName).collect(Collectors.toList());
         }
 
-        List<String> groupsOfUser = new ArrayList<>();
-        if (principal) {
-            groupsOfUser = getGroupsOfUser(name);
-        }
+        List<String> groupsOfUser = principal ? getGroupsOfUser(name) : Collections.emptyList();
 
         Set<String> grantedProjects = new HashSet<>();
 
@@ -797,7 +794,7 @@ public class AccessService extends BasicService {
                 grantedProjects.add(project.getName());
             }
         }
-        return new ArrayList<>(grantedProjects);
+        return Lists.newArrayList(grantedProjects);
     }
 
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN)
diff --git a/src/job-service/src/main/java/org/apache/kylin/rest/service/AsyncTaskService.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/AsyncTaskService.java
similarity index 100%
rename from src/job-service/src/main/java/org/apache/kylin/rest/service/AsyncTaskService.java
rename to src/common-service/src/main/java/org/apache/kylin/rest/service/AsyncTaskService.java
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/UserAclService.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/UserAclService.java
index 06dffb33dc..379291cbdb 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/UserAclService.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/UserAclService.java
@@ -39,6 +39,7 @@ import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.msg.MsgPicker;
 import org.apache.kylin.common.persistence.transaction.UnitOfWork;
 import org.apache.kylin.common.util.CaseInsensitiveStringSet;
+import org.apache.kylin.constants.AclConstants;
 import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
 import org.apache.kylin.rest.aspect.Transaction;
 import org.apache.kylin.rest.constant.Constant;
@@ -97,7 +98,7 @@ public class UserAclService extends BasicService implements UserAclServiceSuppor
     }
 
     private void checkAclPermission(String sid, String permissionType) {
-        Preconditions.checkArgument(ExternalAclProvider.DATA_QUERY.equalsIgnoreCase(permissionType),
+        Preconditions.checkArgument(AclConstants.DATA_QUERY.equalsIgnoreCase(permissionType),
                 "unknown PermissionType " + permissionType);
         if (isSuperAdmin(sid)) {
             throw new KylinException(PERMISSION_DENIED, MsgPicker.getMsg().getModifyPermissionOfSuperAdminFailed());
diff --git a/src/query/src/main/java/org/apache/kylin/query/exception/BusyQueryException.java b/src/core-common/src/main/java/org/apache/kylin/query/exception/BusyQueryException.java
similarity index 100%
rename from src/query/src/main/java/org/apache/kylin/query/exception/BusyQueryException.java
rename to src/core-common/src/main/java/org/apache/kylin/query/exception/BusyQueryException.java
diff --git a/src/query/src/main/java/org/apache/kylin/query/util/QueryLimiter.java b/src/core-common/src/main/java/org/apache/kylin/query/util/QueryLimiter.java
similarity index 98%
rename from src/query/src/main/java/org/apache/kylin/query/util/QueryLimiter.java
rename to src/core-common/src/main/java/org/apache/kylin/query/util/QueryLimiter.java
index 0127df028e..e6a4290007 100644
--- a/src/query/src/main/java/org/apache/kylin/query/util/QueryLimiter.java
+++ b/src/core-common/src/main/java/org/apache/kylin/query/util/QueryLimiter.java
@@ -82,7 +82,7 @@ public class QueryLimiter {
     }
 
     public static void release() {
-        if (!downgradeState.get()) {
+        if (Boolean.FALSE.equals(downgradeState.get())) {
             return;
         }
 
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/exception/ForbiddenException.java b/src/core-common/src/main/java/org/apache/kylin/rest/exception/ForbiddenException.java
similarity index 92%
rename from src/systools/src/main/java/org/apache/kylin/rest/exception/ForbiddenException.java
rename to src/core-common/src/main/java/org/apache/kylin/rest/exception/ForbiddenException.java
index 28640cdd48..9d7dc8abf1 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/exception/ForbiddenException.java
+++ b/src/core-common/src/main/java/org/apache/kylin/rest/exception/ForbiddenException.java
@@ -36,14 +36,10 @@
 
 package org.apache.kylin.rest.exception;
 
-import org.springframework.http.HttpStatus;
-import org.springframework.web.bind.annotation.ResponseStatus;
-
 /**
  * @author xduo
  * 
  */
-@ResponseStatus(value = HttpStatus.FORBIDDEN)
 public class ForbiddenException extends RuntimeException {
 
     private static final long serialVersionUID = 2741885728370162194L;
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/exception/InternalErrorException.java b/src/core-common/src/main/java/org/apache/kylin/rest/exception/InternalErrorException.java
similarity index 93%
rename from src/systools/src/main/java/org/apache/kylin/rest/exception/InternalErrorException.java
rename to src/core-common/src/main/java/org/apache/kylin/rest/exception/InternalErrorException.java
index e839b6295c..f1cb6d08a9 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/exception/InternalErrorException.java
+++ b/src/core-common/src/main/java/org/apache/kylin/rest/exception/InternalErrorException.java
@@ -36,16 +36,12 @@
 
 package org.apache.kylin.rest.exception;
 
-import org.springframework.http.HttpStatus;
-import org.springframework.web.bind.annotation.ResponseStatus;
-
 /**
  * Class to wrap backend exception
  * 
  * @author jianliu
  * 
  */
-@ResponseStatus(value = HttpStatus.INTERNAL_SERVER_ERROR)
 public class InternalErrorException extends RuntimeException {
     /**
      * 
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/exception/NotFoundException.java b/src/core-common/src/main/java/org/apache/kylin/rest/exception/NotFoundException.java
similarity index 92%
rename from src/systools/src/main/java/org/apache/kylin/rest/exception/NotFoundException.java
rename to src/core-common/src/main/java/org/apache/kylin/rest/exception/NotFoundException.java
index f9f56dac68..5d83144e48 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/exception/NotFoundException.java
+++ b/src/core-common/src/main/java/org/apache/kylin/rest/exception/NotFoundException.java
@@ -36,14 +36,10 @@
 
 package org.apache.kylin.rest.exception;
 
-import org.springframework.http.HttpStatus;
-import org.springframework.web.bind.annotation.ResponseStatus;
-
 /**
  * @author xduo
  *
  */
-@ResponseStatus(value = HttpStatus.NOT_FOUND)
 public class NotFoundException extends RuntimeException {
     private static final long serialVersionUID = 1L;
 
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/exception/UnauthorizedException.java b/src/core-common/src/main/java/org/apache/kylin/rest/exception/UnauthorizedException.java
similarity index 88%
rename from src/systools/src/main/java/org/apache/kylin/rest/exception/UnauthorizedException.java
rename to src/core-common/src/main/java/org/apache/kylin/rest/exception/UnauthorizedException.java
index b98a49f149..9dbc8fecb8 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/exception/UnauthorizedException.java
+++ b/src/core-common/src/main/java/org/apache/kylin/rest/exception/UnauthorizedException.java
@@ -20,10 +20,7 @@ package org.apache.kylin.rest.exception;
 
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.exception.code.ErrorCodeProducer;
-import org.springframework.http.HttpStatus;
-import org.springframework.web.bind.annotation.ResponseStatus;
 
-@ResponseStatus(value = HttpStatus.UNAUTHORIZED)
 public class UnauthorizedException extends KylinException {
 
     public UnauthorizedException(ErrorCodeProducer errorCodeProducer, Object... args) {
diff --git a/src/core-metadata/pom.xml b/src/core-metadata/pom.xml
index 323788989a..74c4765391 100644
--- a/src/core-metadata/pom.xml
+++ b/src/core-metadata/pom.xml
@@ -70,7 +70,7 @@
         </dependency>
         <dependency>
             <groupId>org.springframework.security</groupId>
-            <artifactId>spring-security-core</artifactId>
+            <artifactId>spring-security-acl</artifactId>
         </dependency>
         <dependency>
             <groupId>org.mybatis</groupId>
@@ -86,6 +86,11 @@
             <artifactId>lombok</artifactId>
             <scope>provided</scope>
         </dependency>
+        <dependency>
+            <groupId>javax.servlet</groupId>
+            <artifactId>servlet-api</artifactId>
+            <scope>provided</scope>
+        </dependency>
         <dependency>
             <groupId>net.sf.ehcache</groupId>
             <artifactId>ehcache</artifactId>
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/NoneBCryptPasswordEncoder.java b/src/core-metadata/src/main/java/org/apache/kylin/constants/AclConstants.java
similarity index 65%
copy from src/systools/src/main/java/org/apache/kylin/rest/security/NoneBCryptPasswordEncoder.java
copy to src/core-metadata/src/main/java/org/apache/kylin/constants/AclConstants.java
index 0cd93d73ed..e646113a49 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/security/NoneBCryptPasswordEncoder.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/constants/AclConstants.java
@@ -15,14 +15,18 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.kylin.rest.security;
 
-import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
+package org.apache.kylin.constants;
 
-public class NoneBCryptPasswordEncoder extends BCryptPasswordEncoder {
+public final class AclConstants {
 
-    @Override
-    public boolean matches(CharSequence rawPassword, String encodedPassword) {
-        return true;
+    public static final String ADMINISTRATION = "ADMIN";
+    public static final String MANAGEMENT = "MANAGEMENT";
+    public static final String OPERATION = "OPERATION";
+    public static final String READ = "QUERY";
+    public static final String EMPTY = "EMPTY";
+    public static final String DATA_QUERY = "DATA_QUERY";
+
+    private AclConstants() {
     }
 }
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/TotalStorageCollector.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/TotalStorageCollector.java
index 3250f40cbf..f6b93383ea 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/TotalStorageCollector.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/TotalStorageCollector.java
@@ -20,19 +20,21 @@ package org.apache.kylin.metadata.cube.storage;
 
 import java.io.IOException;
 
-import lombok.extern.slf4j.Slf4j;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.HadoopUtil;
 import org.apache.kylin.metrics.HdfsCapacityMetrics;
 
+import lombok.extern.slf4j.Slf4j;
+
 @Slf4j
 public class TotalStorageCollector implements StorageInfoCollector {
 
+    private HdfsCapacityMetrics hdfsCapacityMetrics = new HdfsCapacityMetrics(KylinConfig.getInstanceFromEnv());
     @Override
     public void doCollect(KylinConfig config, String project, StorageVolumeInfo storageVolumeInfo) throws IOException {
-        long totalStorageSize = HdfsCapacityMetrics.getHdfsCapacityByProject(project);
+        long totalStorageSize = hdfsCapacityMetrics.getHdfsCapacityByProject(project);
         if (totalStorageSize != -1L) {
             log.info("Reuse workingDirCapacity by project {}, storageSize: {}", project, totalStorageSize);
             storageVolumeInfo.setTotalStorageSize(totalStorageSize);
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISourceAware.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISourceAware.java
index 435e5374cb..376f9a0c6b 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISourceAware.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/ISourceAware.java
@@ -22,14 +22,14 @@ import org.apache.kylin.common.KylinConfig;
 
 public interface ISourceAware {
 
-    public static final int ID_HIVE = 0;
-    public static final int ID_STREAMING = 1;
-    public static final int ID_SPARKSQL = 5;
-    public static final int ID_EXTERNAL = 7;
-    public static final int ID_JDBC = 8;
-    public static final int ID_SPARK = 9;
-    public static final int ID_CSV = 11;
-    public static final int ID_FILE = 13;
+    int ID_HIVE = 0;
+    int ID_STREAMING = 1;
+    int ID_SPARKSQL = 5;
+    int ID_EXTERNAL = 7;
+    int ID_JDBC = 8;
+    int ID_SPARK = 9;
+    int ID_CSV = 11;
+    int ID_FILE = 13;
 
     int getSourceType();
 
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java
index c83c5834b2..47b0adc805 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/PartitionDesc.java
@@ -143,15 +143,6 @@ public class PartitionDesc implements Serializable {
         return null;
     }
 
-    public static String transformTimestamp2Format(String columnFormat) {
-        for (TimestampType timestampType : TimestampType.values()) {
-            if (timestampType.name.equals(columnFormat)) {
-                return timestampType.format;
-            }
-        }
-        return columnFormat;
-    }
-
     public boolean partitionColumnIsDate() {
         if (partitionDateColumnRef == null)
             return false;
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metrics/HdfsCapacityMetrics.java b/src/core-metadata/src/main/java/org/apache/kylin/metrics/HdfsCapacityMetrics.java
index 6cb031ffc2..39300ecb68 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metrics/HdfsCapacityMetrics.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metrics/HdfsCapacityMetrics.java
@@ -18,28 +18,30 @@
 
 package org.apache.kylin.metrics;
 
-import lombok.extern.slf4j.Slf4j;
+import java.io.IOException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ScheduledThreadPoolExecutor;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.metadata.EpochStore;
-import org.apache.kylin.common.util.AddressUtil;
 import org.apache.kylin.common.util.HadoopUtil;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.NamedThreadFactory;
 import org.apache.kylin.metadata.project.NProjectManager;
 import org.apache.kylin.metadata.project.ProjectInstance;
 
-import java.io.IOException;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;
+import com.google.common.collect.Maps;
+
+import lombok.extern.slf4j.Slf4j;
 
 /**
  * 1. Unify the entry point for all calculation calls to obtain the capacity of the WorkingDir through scheduled threads
@@ -50,49 +52,56 @@ import java.util.stream.Collectors;
 @Slf4j
 public class HdfsCapacityMetrics {
 
-    protected static final KylinConfig KYLIN_CONFIG;
-    protected static final String SERVICE_INFO;
-    protected static final Path HDFS_CAPACITY_METRICS_PATH;
-    protected static final FileSystem WORKING_FS;
-    protected static final ScheduledExecutorService HDFS_METRICS_SCHEDULED_EXECUTOR;
-    protected static boolean hdfsMetricsPeriodicCalculationEnabled;
-    protected static boolean quotaStorageEnabled;
+    private final Path hdfsCapacityMetricsPath;
+    private final FileSystem workingFs;
+    private final ScheduledThreadPoolExecutor scheduledThreadPoolExecutor;
+    private final KylinConfig config;
+    private final boolean quotaStorageEnabled;
+    private final boolean hdfsMetricsPeriodicCalculationEnabled;
     // For all places that need to query WorkingDir capacity for retrieval, initialize to avoid NPE
-    protected static ConcurrentMap<String, Long> workingDirCapacity = new ConcurrentHashMap<>();
-    // Used to clear the existing workingDirCapacity in memory, you cannot use the clear method for workingDirCapacity
-    // to avoid other calls to raise NPE, When the data in memory is ready, it is first put into readyWorkingDirCapacity,
-    // and then a data exchange operation is performed.
-    protected static ConcurrentMap<String, Long> prepareForWorkingDirCapacity = new ConcurrentHashMap<>();
-
-    static {
-        KYLIN_CONFIG = KylinConfig.getInstanceFromEnv();
-        SERVICE_INFO = AddressUtil.getLocalInstance();
-        WORKING_FS = HadoopUtil.getWorkingFileSystem();
-        HDFS_CAPACITY_METRICS_PATH = new Path(KYLIN_CONFIG.getHdfsMetricsDir("hdfsCapacity.json"));
-        HDFS_METRICS_SCHEDULED_EXECUTOR = Executors.newScheduledThreadPool(1, new NamedThreadFactory("HdfsMetricsChecker"));
-        registerHdfsMetrics();
-    }
+    private volatile Map<String, Long> workingDirCapacity = Collections.emptyMap();
 
     // No longer a static utility class: instances are created with an explicit KylinConfig
-    private HdfsCapacityMetrics() {
+    public HdfsCapacityMetrics(KylinConfig config) {
+        this.config = config;
+        workingFs = HadoopUtil.getWorkingFileSystem();
+        hdfsCapacityMetricsPath = new Path(config.getHdfsMetricsDir("hdfsCapacity.json"));
+        scheduledThreadPoolExecutor = new ScheduledThreadPoolExecutor(1, new NamedThreadFactory("HdfsMetricsChecker"));
+        hdfsMetricsPeriodicCalculationEnabled = config.isHdfsMetricsPeriodicCalculationEnabled();
+        quotaStorageEnabled = config.isStorageQuotaEnabled();
+        if (hdfsMetricsPeriodicCalculationEnabled && quotaStorageEnabled) {
+            registerHdfsMetrics(config.getHdfsMetricsPeriodicCalculationInterval());
+        }
+    }
+
+    public int getPoolSize() {
+        return scheduledThreadPoolExecutor.getPoolSize();
+    }
+
+    public int getActiveCount() {
+        return scheduledThreadPoolExecutor.getActiveCount();
     }
 
-    public static void registerHdfsMetrics() {
+    Map<String, Long> getWorkingDirCapacity() {
+        return Collections.unmodifiableMap(workingDirCapacity);
+    }
+
+    public Path getHdfsCapacityMetricsPath() {
+        return hdfsCapacityMetricsPath;
+    }
+
+    private void registerHdfsMetrics(long hdfsMetricsPeriodicCalculationInterval) {
         // 1. Call a scheduled thread to maintain the data in memory
         //    - Read data from HDFS and load it into memory, only the leader node writes to HDFS, other nodes read only
         // 2. When the data in memory reaches the time of the update interval, it is stored on HDFS
         // 3. Junk cleanup: theoretically the file will not be very large, do not need to consider cleaning up for the time
         // being, cleaning will affect the recalculation of the directory involved
-        hdfsMetricsPeriodicCalculationEnabled = KYLIN_CONFIG.isHdfsMetricsPeriodicCalculationEnabled();
-        quotaStorageEnabled = KYLIN_CONFIG.isStorageQuotaEnabled();
-        if (quotaStorageEnabled && hdfsMetricsPeriodicCalculationEnabled) {
-            log.info("Quota storage and HDFS metrics periodic calculation are enabled, path: {}", HDFS_CAPACITY_METRICS_PATH);
-            HDFS_METRICS_SCHEDULED_EXECUTOR.scheduleAtFixedRate(HdfsCapacityMetrics::handleNodeHdfsMetrics,
-                    0, KYLIN_CONFIG.getHdfsMetricsPeriodicCalculationInterval(), TimeUnit.MILLISECONDS);
-        }
+        log.info("Quota storage and HDFS metrics periodic calculation are enabled, path: {}", hdfsCapacityMetricsPath);
+        scheduledThreadPoolExecutor.scheduleAtFixedRate(this::handleNodeHdfsMetrics, 0,
+                hdfsMetricsPeriodicCalculationInterval, TimeUnit.MILLISECONDS);
     }
 
-    public static void handleNodeHdfsMetrics() {
+    public void handleNodeHdfsMetrics() {
         // Check whether the current KE node is the leader node, which requires repeated and continuous monitoring
         // because the leader node may change. Update first and then overwrite, only leader nodes need to be overwritten,
         // other nodes are read only
@@ -103,17 +112,17 @@ public class HdfsCapacityMetrics {
         }
     }
 
-    public static void writeHdfsMetrics() {
-        prepareForWorkingDirCapacity.clear();
+    public void writeHdfsMetrics() {
         // All WorkingDir capacities involved are calculated here
-        Set<String> allProjects = NProjectManager.getInstance(KYLIN_CONFIG).listAllProjects()
-                .stream().map(ProjectInstance::getName).collect(Collectors.toSet());
+        Set<String> allProjects = NProjectManager.getInstance(config).listAllProjects().stream()
+                .map(ProjectInstance::getName).collect(Collectors.toSet());
+        HashMap<String, Long> prepareForWorkingDirCapacity = Maps.newHashMapWithExpectedSize(allProjects.size());
         try {
             for (String project : allProjects) {
                 // Should not initialize projectTotalStorageSize outside the loop, otherwise it may affect the next calculation
                 // if a project calculation throws an exception.
                 long projectTotalStorageSize = 0L;
-                Path projectPath = new Path(KYLIN_CONFIG.getWorkingDirectoryWithConfiguredFs(project));
+                Path projectPath = new Path(config.getWorkingDirectoryWithConfiguredFs(project));
                 FileSystem fs = projectPath.getFileSystem(HadoopUtil.getCurrentConfiguration());
                 if (fs.exists(projectPath)) {
                     projectTotalStorageSize = HadoopUtil.getContentSummary(fs, projectPath).getLength();
@@ -126,7 +135,7 @@ public class HdfsCapacityMetrics {
         // If the project is deleted, it will be updated here
         workingDirCapacity = prepareForWorkingDirCapacity;
         try {
-            FSDataOutputStream fsDataOutputStream = WORKING_FS.create(HDFS_CAPACITY_METRICS_PATH, true);
+            FSDataOutputStream fsDataOutputStream = workingFs.create(hdfsCapacityMetricsPath, true);
             JsonUtil.writeValue(fsDataOutputStream, workingDirCapacity);
         } catch (IOException e) {
             log.warn("Write HdfsCapacityMetrics failed.", e);
@@ -134,17 +143,16 @@ public class HdfsCapacityMetrics {
     }
 
     @SuppressWarnings("unchecked")
-    public static ConcurrentMap<String, Long> readHdfsMetrics() {
-        ConcurrentHashMap<String, Long> workingCapacity = new ConcurrentHashMap<>();
+    public Map<String, Long> readHdfsMetrics() {
         try {
-            if (WORKING_FS.exists(HDFS_CAPACITY_METRICS_PATH)) {
-                FSDataInputStream fsDataInputStream = WORKING_FS.open(HDFS_CAPACITY_METRICS_PATH);
-                workingCapacity = JsonUtil.readValue(fsDataInputStream, ConcurrentHashMap.class);
+            if (workingFs.exists(hdfsCapacityMetricsPath)) {
+                FSDataInputStream fsDataInputStream = workingFs.open(hdfsCapacityMetricsPath);
+                return JsonUtil.readValue(fsDataInputStream, HashMap.class);
             }
         } catch (IOException e) {
             log.warn("Read HdfsCapacityMetrics failed.", e);
         }
-        return workingCapacity;
+        return Collections.emptyMap();
     }
 
     /**
@@ -153,8 +161,8 @@ public class HdfsCapacityMetrics {
      *
      * @return HDFS Capacity by each project
      */
-    public static Long getHdfsCapacityByProject(String project) {
-        if (hdfsMetricsPeriodicCalculationEnabled) {
+    public Long getHdfsCapacityByProject(String project) {
+        if (hdfsMetricsPeriodicCalculationEnabled && quotaStorageEnabled) {
             // Writing numbers in JSON may be read as integer
             Object orDefault = workingDirCapacity.getOrDefault(project, 0L);
             return Long.parseLong(orDefault.toString());
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/AceImpl.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/AceImpl.java
similarity index 94%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/AceImpl.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/AceImpl.java
index 0510a49799..6f31ae9cbb 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/security/AceImpl.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/AceImpl.java
@@ -46,14 +46,11 @@ import lombok.val;
 @JsonAutoDetect(fieldVisibility = Visibility.NONE, getterVisibility = Visibility.NONE, isGetterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE)
 public class AceImpl implements AccessControlEntry {
 
-    public static final Comparator<AceImpl> SID_ORDER = new Comparator<AceImpl>() {
-        @Override
-        public int compare(AceImpl o1, AceImpl o2) {
-            if (o1.sidOfAuthority == null) {
-                return o2.sidOfAuthority == null ? o1.sidOfPrincipal.compareTo(o2.sidOfPrincipal) : 1;
-            } else {
-                return o2.sidOfAuthority == null ? -1 : o1.sidOfAuthority.compareTo(o2.sidOfAuthority);
-            }
+    public static final Comparator<AceImpl> SID_ORDER = (o1, o2) -> {
+        if (o1.sidOfAuthority == null) {
+            return o2.sidOfAuthority == null ? o1.sidOfPrincipal.compareTo(o2.sidOfPrincipal) : 1;
+        } else {
+            return o2.sidOfAuthority == null ? -1 : o1.sidOfAuthority.compareTo(o2.sidOfAuthority);
         }
     };
 
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/AclEntityFactory.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclEntityFactory.java
similarity index 78%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/AclEntityFactory.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclEntityFactory.java
index a6662e6708..7cca23b726 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/security/AclEntityFactory.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclEntityFactory.java
@@ -19,30 +19,32 @@
 package org.apache.kylin.rest.security;
 
 import org.apache.kylin.common.persistence.RootPersistentEntity;
-import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.metadata.model.NDataModel;
+import org.apache.kylin.metadata.project.ProjectInstance;
+
+import com.google.common.base.Preconditions;
 
 /**
  * @author xduo
  */
-public class AclEntityFactory implements AclEntityType {
+public class AclEntityFactory {
 
-    public static RootPersistentEntity createAclEntity(String entityType, String uuid) {
+    private AclEntityFactory() {
+    }
 
-        if (N_DATA_MODEL.equals(entityType)) {
+    public static RootPersistentEntity createAclEntity(String entityType, String uuid) {
+        Preconditions.checkNotNull(entityType);
+        switch (entityType) {
+        case AclEntityType.N_DATA_MODEL:
             NDataModel modelInstance = new NDataModel();
             modelInstance.setUuid(uuid);
-
             return modelInstance;
-        }
-
-        if (PROJECT_INSTANCE.equals(entityType)) {
+        case AclEntityType.PROJECT_INSTANCE:
             ProjectInstance projectInstance = new ProjectInstance();
             projectInstance.setUuid(uuid);
-
             return projectInstance;
+        default:
+            throw new IllegalArgumentException("Unsupported entity type " + entityType);
         }
-
-        throw new RuntimeException("Unsupported entity type!");
     }
 }
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/AclEntityType.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclEntityType.java
similarity index 81%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/AclEntityType.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclEntityType.java
index 6f576aa181..d43669385f 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/security/AclEntityType.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclEntityType.java
@@ -20,9 +20,12 @@ package org.apache.kylin.rest.security;
 
 /**
  */
-public interface AclEntityType {
+public class AclEntityType {
 
-    static final String N_DATA_MODEL = "NDataModel";
+    private AclEntityType() {
+    }
 
-    static final String PROJECT_INSTANCE = "ProjectInstance";
+    public static final String N_DATA_MODEL = "NDataModel";
+
+    public static final String PROJECT_INSTANCE = "ProjectInstance";
 }
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/AclManager.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclManager.java
similarity index 93%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/AclManager.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclManager.java
index 742c946ab4..2306c3a2f4 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/security/AclManager.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclManager.java
@@ -20,11 +20,12 @@ package org.apache.kylin.rest.security;
 
 import static org.apache.kylin.common.exception.ServerErrorCode.PERMISSION_DENIED;
 
-import java.util.Arrays;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
 import java.util.Map;
+import java.util.stream.Collectors;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.exception.KylinException;
@@ -49,7 +50,6 @@ import org.springframework.security.acls.model.PermissionGrantingStrategy;
 import org.springframework.security.acls.model.Sid;
 import org.springframework.security.core.context.SecurityContextHolder;
 
-import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
 import lombok.val;
@@ -132,17 +132,11 @@ public class AclManager {
     }
 
     public List<ObjectIdentity> findChildren(ObjectIdentity parentIdentity) {
-        List<ObjectIdentity> oids = Lists.newArrayList();
-        Collection<AclRecord> allAclRecords;
-        allAclRecords = crud.listAll();
-        for (AclRecord record : allAclRecords) {
-            ObjectIdentityImpl parent = record.getParentDomainObjectInfo();
-            if (parent != null && parent.equals(parentIdentity)) {
-                ObjectIdentityImpl child = record.getDomainObjectInfo();
-                oids.add(child);
-            }
-        }
-        return oids;
+        Collection<AclRecord> allAclRecords = crud.listAll();
+        return allAclRecords.stream()
+                .filter(record -> record.getParentDomainObjectInfo() != null
+                        && record.getParentDomainObjectInfo().equals(parentIdentity))
+                .map(AclRecord::getObjectIdentity).collect(Collectors.toList());
     }
 
     public MutableAclRecord readAcl(ObjectIdentity oid) {
@@ -155,7 +149,7 @@ public class AclManager {
     }
 
     public Acl readAclById(ObjectIdentity object) {
-        return readAclsById(Arrays.asList(object)).get(object);
+        return readAclsById(Collections.singletonList(object)).get(object);
     }
 
     public Map<ObjectIdentity, Acl> readAclsById(List<ObjectIdentity> oids) {
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/AclPermission.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclPermission.java
similarity index 100%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/AclPermission.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclPermission.java
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/AclPermissionEnum.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclPermissionEnum.java
similarity index 70%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/AclPermissionEnum.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclPermissionEnum.java
index f571877109..f116d1c216 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/security/AclPermissionEnum.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclPermissionEnum.java
@@ -20,31 +20,25 @@ package org.apache.kylin.rest.security;
 import static org.apache.kylin.common.exception.ServerErrorCode.PERMISSION_DENIED;
 
 import org.apache.kylin.common.exception.KylinException;
+import org.apache.kylin.constants.AclConstants;
 
 public enum AclPermissionEnum {
     ADMINISTRATION, MANAGEMENT, OPERATION, READ, EMPTY;
 
     public static String convertToAclPermission(String externalPermission) {
-        AclPermissionEnum permission;
         switch (externalPermission) {
-        case ExternalAclProvider.ADMINISTRATION:
-            permission = ADMINISTRATION;
-            break;
-        case ExternalAclProvider.MANAGEMENT:
-            permission = MANAGEMENT;
-            break;
-        case ExternalAclProvider.OPERATION:
-            permission = OPERATION;
-            break;
-        case ExternalAclProvider.READ:
-            permission = READ;
-            break;
-        case ExternalAclProvider.EMPTY:
-            permission = EMPTY;
-            break;
+        case AclConstants.ADMINISTRATION:
+            return ADMINISTRATION.name();
+        case AclConstants.MANAGEMENT:
+            return MANAGEMENT.name();
+        case AclConstants.OPERATION:
+            return OPERATION.name();
+        case AclConstants.READ:
+            return READ.name();
+        case AclConstants.EMPTY:
+            return EMPTY.name();
         default:
             throw new KylinException(PERMISSION_DENIED, "invalid permission state: " + externalPermission);
         }
-        return permission.name();
     }
 }
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/AclPermissionFactory.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclPermissionFactory.java
similarity index 93%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/AclPermissionFactory.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclPermissionFactory.java
index 93aff60eaf..ee063ae9a0 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/security/AclPermissionFactory.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclPermissionFactory.java
@@ -43,6 +43,8 @@ import java.util.List;
 import org.springframework.security.acls.domain.DefaultPermissionFactory;
 import org.springframework.security.acls.model.Permission;
 
+import com.google.common.collect.Lists;
+
 /**
  * @author xduo
  * 
@@ -55,8 +57,8 @@ public class AclPermissionFactory extends DefaultPermissionFactory {
     }
 
     public static List<Permission> getPermissions() {
-        List<Permission> permissions = new ArrayList<Permission>();
         Field[] fields = AclPermission.class.getFields();
+        List<Permission> permissions = Lists.newArrayListWithExpectedSize(fields.length);
 
         for (Field field : fields) {
             try {
@@ -88,11 +90,8 @@ public class AclPermissionFactory extends DefaultPermissionFactory {
             try {
                 Object fieldValue = field.get(null);
 
-                if (Permission.class.isAssignableFrom(fieldValue.getClass())) {
-                    // Found a Permission static field
-                    if (perName.equals(field.getName())) {
-                        return (Permission) fieldValue;
-                    }
+                if (Permission.class.isAssignableFrom(fieldValue.getClass()) && perName.equals(field.getName())) {
+                    return (Permission) fieldValue;
                 }
             } catch (Exception ignore) {
                 //ignore on purpose
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/AclRecord.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclRecord.java
similarity index 98%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/AclRecord.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclRecord.java
index b16546d5c5..21bab12643 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/security/AclRecord.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/AclRecord.java
@@ -132,7 +132,7 @@ public class AclRecord extends RootPersistentEntity implements Acl, OwnershipAcl
         this.entriesInheriting = entriesInheriting;
     }
 
-    public ObjectIdentityImpl getDomainObjectInfo() {
+    public final ObjectIdentityImpl getDomainObjectInfo() {
         return domainObjectInfo;
     }
 
@@ -161,7 +161,7 @@ public class AclRecord extends RootPersistentEntity implements Acl, OwnershipAcl
 
     @Override
     public ObjectIdentity getObjectIdentity() {
-        return domainObjectInfo;
+        return getDomainObjectInfo();
     }
 
     @Override
@@ -190,7 +190,7 @@ public class AclRecord extends RootPersistentEntity implements Acl, OwnershipAcl
 
     @Override
     public List<AccessControlEntry> getEntries() {
-        return new ArrayList<AccessControlEntry>(entries);
+        return Collections.unmodifiableList(entries);
     }
 
     public AccessControlEntry getAccessControlEntryAt(int entryIndex) {
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/CompositeAclPermission.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/CompositeAclPermission.java
similarity index 100%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/CompositeAclPermission.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/CompositeAclPermission.java
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/ExternalAclProvider.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/ExternalAclProvider.java
similarity index 78%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/ExternalAclProvider.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/ExternalAclProvider.java
index 345f271ba7..25289385f0 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/security/ExternalAclProvider.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/ExternalAclProvider.java
@@ -30,6 +30,7 @@ import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.msg.MsgPicker;
 import org.apache.kylin.common.util.ClassUtil;
 import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.constants.AclConstants;
 import org.springframework.security.acls.domain.BasePermission;
 import org.springframework.security.acls.model.Permission;
 
@@ -49,28 +50,19 @@ public abstract class ExternalAclProvider {
         });
     }
 
-    // ============================================================================
-
-    public static final String ADMINISTRATION = "ADMIN";
-    public static final String MANAGEMENT = "MANAGEMENT";
-    public static final String OPERATION = "OPERATION";
-    public static final String READ = "QUERY";
-    public static final String EMPTY = "EMPTY";
-    public static final String DATA_QUERY = "DATA_QUERY";
-
     // used by ranger ExternalAclProvider
     public static String convertToExternalPermission(Permission p) {
         String permString;
         if (BasePermission.ADMINISTRATION.equals(p)) {
-            permString = ADMINISTRATION;
+            permString = AclConstants.ADMINISTRATION;
         } else if (AclPermission.MANAGEMENT.equals(p)) {
-            permString = MANAGEMENT;
+            permString = AclConstants.MANAGEMENT;
         } else if (AclPermission.OPERATION.equals(p)) {
-            permString = OPERATION;
+            permString = AclConstants.OPERATION;
         } else if (BasePermission.READ.equals(p)) {
-            permString = READ;
+            permString = AclConstants.READ;
         } else if (AclPermission.DATA_QUERY.equals(p)) {
-            permString = DATA_QUERY;
+            permString = AclConstants.DATA_QUERY;
         } else {
             permString = p.getPattern();
         }
@@ -81,37 +73,32 @@ public abstract class ExternalAclProvider {
         if (StringUtils.isBlank(permission)) {
             throw new KylinException(INVALID_PARAMETER, MsgPicker.getMsg().getEmptyPermission());
         }
-        if (ADMINISTRATION.equalsIgnoreCase(permission) || MANAGEMENT.equalsIgnoreCase(permission)
-                || OPERATION.equalsIgnoreCase(permission) || READ.equalsIgnoreCase(permission)) {
+        if (AclConstants.ADMINISTRATION.equalsIgnoreCase(permission)
+                || AclConstants.MANAGEMENT.equalsIgnoreCase(permission)
+                || AclConstants.OPERATION.equalsIgnoreCase(permission)
+                || AclConstants.READ.equalsIgnoreCase(permission)) {
             return;
         }
         throw new KylinException(INVALID_PARAMETER, MsgPicker.getMsg().getInvalidPermission());
     }
 
     public static String convertToExternalPermission(int mask) {
-        String permission;
         switch (mask) {
         case 16:
-            permission = ADMINISTRATION;
-            break;
+            return AclConstants.ADMINISTRATION;
         case 32:
-            permission = MANAGEMENT;
-            break;
+            return AclConstants.MANAGEMENT;
         case 64:
-            permission = OPERATION;
-            break;
+            return AclConstants.OPERATION;
         case 1:
-            permission = READ;
-            break;
+            return AclConstants.READ;
         case 128:
-            permission = DATA_QUERY;
-            break;
+            return AclConstants.DATA_QUERY;
         case 0:
-            return EMPTY;
+            return AclConstants.EMPTY;
         default:
             throw new KylinException(PERMISSION_DENIED, "Invalid permission state: " + mask);
         }
-        return permission;
     }
 
     // ============================================================================
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/KerberosLoginManager.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/KerberosLoginManager.java
similarity index 85%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/KerberosLoginManager.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/KerberosLoginManager.java
index 6cfe74dbda..970408823b 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/security/KerberosLoginManager.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/KerberosLoginManager.java
@@ -21,11 +21,11 @@ import static org.apache.kylin.common.exception.ServerErrorCode.INVALID_KERBEROS
 import static org.apache.kylin.common.exception.ServerErrorCode.PERMISSION_DENIED;
 
 import java.io.File;
+import java.io.IOException;
 import java.security.PrivilegedAction;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.stream.Collectors;
 
-import com.google.common.annotations.VisibleForTesting;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.kylin.common.KapConfig;
@@ -43,6 +43,8 @@ import org.apache.kylin.source.SourceFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.annotations.VisibleForTesting;
+
 import lombok.val;
 
 public class KerberosLoginManager {
@@ -60,25 +62,21 @@ public class KerberosLoginManager {
         val projectManager = NProjectManager.getInstance(config);
         val project = projectManager.getProject(projectName);
         val principal = project.getPrincipal();
-        val keytab = project.getKeytab();
-        UserGroupInformation ugi = null;
         try {
             if (project.isProjectKerberosEnabled()) {
                 val keytabPath = wrapAndDownloadKeytab(projectName);
-                ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytabPath);
+                return UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytabPath);
             } else {
-                ugi = UserGroupInformation.getLoginUser();
+                return UserGroupInformation.getLoginUser();
             }
         } catch (Exception e) {
             try {
-                ugi = UserGroupInformation.getLoginUser();
+                return UserGroupInformation.getLoginUser();
             } catch (Exception ex) {
-                logger.error("Fetch login user error.", projectName, principal, ex);
+                logger.error("Fetch login user error. project {}, principal {}", projectName, principal, ex);
             }
             throw new KylinException(INVALID_KERBEROS_FILE, MsgPicker.getMsg().getKerberosInfoError(), e);
         }
-
-        return ugi;
     }
 
     private String wrapAndDownloadKeytab(String projectName) throws Exception {
@@ -107,7 +105,7 @@ public class KerberosLoginManager {
         }
     }
 
-    public void checkAndReplaceProjectKerberosInfo(String project, String principal) throws Exception {
+    public void checkAndReplaceProjectKerberosInfo(String project, String principal) throws IOException {
         String kylinConfHome = KapConfig.getKylinConfDirAtBestEffort();
         String keytab = new Path(kylinConfHome, principal + TMP_KEYTAB_SUFFIX).toString();
         checkKerberosInfo(principal, keytab);
@@ -126,17 +124,16 @@ public class KerberosLoginManager {
             ProjectInstance projectInstance = projectManager.getProject(project);
             val tables = projectInstance.getTables();
             AtomicBoolean accessible = new AtomicBoolean(true);
-            val tableMap = tables.stream().map(tableName -> {
-                NTableMetadataManager tableMetadataManager = NTableMetadataManager
-                        .getInstance(KylinConfig.getInstanceFromEnv(), project);
-                return tableMetadataManager.getTableDesc(tableName);
-            }).collect(Collectors.groupingBy(TableDesc::getSourceType));
+            NTableMetadataManager tableMetadataManager = NTableMetadataManager
+                    .getInstance(KylinConfig.getInstanceFromEnv(), project);
+            val tableMap = tables.stream().map(tableMetadataManager::getTableDesc)
+                    .collect(Collectors.groupingBy(TableDesc::getSourceType));
 
             tableMap.forEach((sourceType, tableDescSet) -> {
                 ISourceMetadataExplorer explorer = SourceFactory.getSource(sourceType, projectInstance.getConfig())
                         .getSourceMetadataExplorer();
                 accessible.set(accessible.get() && explorer.checkTablesAccess(
-                        tableDescSet.stream().map(tableDesc -> tableDesc.getIdentity()).collect(Collectors.toSet())));
+                        tableDescSet.stream().map(TableDesc::getIdentity).collect(Collectors.toSet())));
             });
             return accessible.get();
         });
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/KylinAclPermissionEvaluator.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/KylinAclPermissionEvaluator.java
similarity index 100%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/KylinAclPermissionEvaluator.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/KylinAclPermissionEvaluator.java
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/KylinPermissionGrantingStrategy.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/KylinPermissionGrantingStrategy.java
similarity index 100%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/KylinPermissionGrantingStrategy.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/KylinPermissionGrantingStrategy.java
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/LegacyAceInfo.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/LegacyAceInfo.java
similarity index 100%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/LegacyAceInfo.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/LegacyAceInfo.java
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/MutableAclRecord.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/MutableAclRecord.java
similarity index 98%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/MutableAclRecord.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/MutableAclRecord.java
index d70a3f93aa..d33517038e 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/security/MutableAclRecord.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/MutableAclRecord.java
@@ -50,7 +50,7 @@ public class MutableAclRecord implements Acl, MutableAcl, OwnershipAcl {
 
     @Override
     public Serializable getId() {
-        return acl.getDomainObjectInfo().getIdentifier();
+        return acl.getObjectIdentity().getIdentifier();
     }
 
     @Override
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/MutableHttpServletRequest.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/MutableHttpServletRequest.java
similarity index 100%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/MutableHttpServletRequest.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/MutableHttpServletRequest.java
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/NoneBCryptPasswordEncoder.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/NoneBCryptPasswordEncoder.java
similarity index 100%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/NoneBCryptPasswordEncoder.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/NoneBCryptPasswordEncoder.java
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/ObjectIdentityImpl.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/ObjectIdentityImpl.java
similarity index 96%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/ObjectIdentityImpl.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/ObjectIdentityImpl.java
index 1526214cea..5ea20d1971 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/security/ObjectIdentityImpl.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/ObjectIdentityImpl.java
@@ -84,8 +84,9 @@ public class ObjectIdentityImpl implements ObjectIdentity {
      *
      * @return <code>true</code> if the presented object matches this object
      */
+    @Override
     public boolean equals(Object arg0) {
-        if (arg0 == null || !(arg0 instanceof ObjectIdentity)) {
+        if (!(arg0 instanceof ObjectIdentity)) {
             return false;
         }
 
@@ -98,14 +99,16 @@ public class ObjectIdentityImpl implements ObjectIdentity {
         return type.equals(other.getType());
     }
 
+    @Override
     public Serializable getIdentifier() {
-        return identifier;
+        return getId();
     }
 
     public String getId() {
         return identifier;
     }
 
+    @Override
     public String getType() {
         return type;
     }
@@ -115,6 +118,7 @@ public class ObjectIdentityImpl implements ObjectIdentity {
      *
      * @return the hash
      */
+    @Override
     public int hashCode() {
         int code = 31;
         code ^= this.type.hashCode();
@@ -123,6 +127,7 @@ public class ObjectIdentityImpl implements ObjectIdentity {
         return code;
     }
 
+    @Override
     public String toString() {
         StringBuilder sb = new StringBuilder();
         sb.append(this.getClass().getName()).append("[");
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
similarity index 96%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
index 42d8de88be..3a68ca69c5 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/PasswordPlaceholderConfigurer.java
@@ -21,6 +21,7 @@ package org.apache.kylin.rest.security;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.PrintWriter;
+import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.nio.charset.Charset;
 import java.util.Locale;
@@ -31,9 +32,9 @@ import org.apache.commons.io.IOUtils;
 import org.apache.commons.io.output.StringBuilderWriter;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.KylinConfigBase;
-import org.apache.kylin.rest.exception.PasswordDecryptionException;
 import org.apache.kylin.common.util.EncryptUtil;
 import org.apache.kylin.common.util.Unsafe;
+import org.apache.kylin.rest.exception.PasswordDecryptionException;
 import org.springframework.beans.factory.config.PropertyPlaceholderConfigurer;
 import org.springframework.core.io.InputStreamResource;
 import org.springframework.core.io.Resource;
@@ -101,8 +102,8 @@ public class PasswordPlaceholderConfigurer extends PropertyPlaceholderConfigurer
             Method getAllMethod = KylinConfigBase.class.getDeclaredMethod("getAllProperties");
             Unsafe.changeAccessibleObject(getAllMethod, true);
             allProps = (Properties) getAllMethod.invoke(kylinConfig);
-        } catch (Exception e) {
-            throw new RuntimeException(e);
+        } catch (InvocationTargetException | NoSuchMethodException | IllegalAccessException e) {
+            throw new IllegalArgumentException(e);
         }
         return allProps;
     }
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/SidInfo.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/SidInfo.java
similarity index 96%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/SidInfo.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/SidInfo.java
index 5b7fc19f04..5c41cce5d1 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/security/SidInfo.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/SidInfo.java
@@ -24,6 +24,7 @@ import org.springframework.security.acls.model.Sid;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
+import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 
 /**
@@ -35,8 +36,8 @@ public class SidInfo {
     private String sid;
     @JsonProperty("principal")
     private boolean isPrincipal;
-
-    private transient Sid sidObj;
+    @JsonIgnore
+    private Sid sidObj;
 
     // for Jackson
     public SidInfo() {
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/UserAcl.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/UserAcl.java
similarity index 100%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/UserAcl.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/UserAcl.java
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/UserAclManager.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/UserAclManager.java
similarity index 100%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/UserAclManager.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/UserAclManager.java
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/security/UserLockRuleUtil.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/UserLockRuleUtil.java
similarity index 95%
rename from src/systools/src/main/java/org/apache/kylin/rest/security/UserLockRuleUtil.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/security/UserLockRuleUtil.java
index ed149361c8..47e82b63c3 100644
--- a/src/systools/src/main/java/org/apache/kylin/rest/security/UserLockRuleUtil.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/rest/security/UserLockRuleUtil.java
@@ -26,7 +26,10 @@ import com.google.common.collect.Maps;
 
 public class UserLockRuleUtil {
 
-    private static Map<Integer, Long> lockDurationRules = Maps.newHashMap();
+    private UserLockRuleUtil() {
+    }
+
+    private static final Map<Integer, Long> lockDurationRules = Maps.newHashMap();
 
     static {
         // wrong time => lock duration (ms)
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/service/AclService.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/service/AclService.java
similarity index 100%
rename from src/systools/src/main/java/org/apache/kylin/rest/service/AclService.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/service/AclService.java
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/util/AclPermissionUtil.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/util/AclPermissionUtil.java
similarity index 100%
rename from src/systools/src/main/java/org/apache/kylin/rest/util/AclPermissionUtil.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/util/AclPermissionUtil.java
diff --git a/src/systools/src/main/java/org/apache/kylin/rest/util/AclUtil.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/util/AclUtil.java
similarity index 100%
rename from src/systools/src/main/java/org/apache/kylin/rest/util/AclUtil.java
rename to src/core-metadata/src/main/java/org/apache/kylin/rest/util/AclUtil.java
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/rest/util/CreateTableFromJson.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/util/CreateTableFromJson.java
index 635c8c816a..453e53902b 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/rest/util/CreateTableFromJson.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/rest/util/CreateTableFromJson.java
@@ -56,7 +56,7 @@ public class CreateTableFromJson {
 
         map.forEach((k, v) -> {
             logger.info(k);
-            v.forEach(item -> logger.info(item));
+            v.forEach(logger::info);
         });
 
         logger.info("\n\n\n\n\n");
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/util/DataRangeUtils.java b/src/core-metadata/src/main/java/org/apache/kylin/util/DataRangeUtils.java
new file mode 100644
index 0000000000..1683c5822c
--- /dev/null
+++ b/src/core-metadata/src/main/java/org/apache/kylin/util/DataRangeUtils.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kylin.util;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.kylin.common.exception.KylinException;
+import org.apache.kylin.common.util.DateFormat;
+import org.apache.kylin.metadata.model.PartitionDesc;
+
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.TIME_INVALID_RANGE_END_LESS_THAN_EQUALS_START;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.TIME_INVALID_RANGE_LESS_THAN_ZERO;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.TIME_INVALID_RANGE_NOT_CONSISTENT;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.TIME_INVALID_RANGE_NOT_FORMAT_MS;
+
+public final class DataRangeUtils {
+    private DataRangeUtils() {
+    }
+
+    public static void validateRange(String start, String end) {
+        validateRange(Long.parseLong(start), Long.parseLong(end));
+    }
+
+    private static void validateRange(long start, long end) {
+        if (start < 0 || end < 0) {
+            throw new KylinException(TIME_INVALID_RANGE_LESS_THAN_ZERO);
+        }
+        if (start >= end) {
+            throw new KylinException(TIME_INVALID_RANGE_END_LESS_THAN_EQUALS_START);
+        }
+    }
+
+    public static void validateDataRange(String start, String end) {
+        validateDataRange(start, end, null);
+    }
+
+    public static void validateDataRange(String start, String end, String partitionColumnFormat) {
+        if (StringUtils.isEmpty(start) && StringUtils.isEmpty(end)) {
+            return;
+        }
+        if (StringUtils.isNotEmpty(start) && StringUtils.isNotEmpty(end)) {
+            long startLong = 0;
+            long endLong = 0;
+
+            try {
+                startLong = Long.parseLong(start);
+                endLong = Long.parseLong(end);
+            } catch (Exception e) {
+                throw new KylinException(TIME_INVALID_RANGE_NOT_FORMAT_MS, e);
+            }
+
+            if (startLong < 0 || endLong < 0) {
+                throw new KylinException(TIME_INVALID_RANGE_LESS_THAN_ZERO);
+            }
+
+            try {
+                startLong = DateFormat.getFormatTimeStamp(start, transformTimestamp2Format(partitionColumnFormat));
+                endLong = DateFormat.getFormatTimeStamp(end, transformTimestamp2Format(partitionColumnFormat));
+            } catch (Exception e) {
+                throw new KylinException(TIME_INVALID_RANGE_NOT_FORMAT_MS);
+            }
+
+            if (startLong >= endLong) {
+                throw new KylinException(TIME_INVALID_RANGE_END_LESS_THAN_EQUALS_START);
+            }
+
+        } else {
+            throw new KylinException(TIME_INVALID_RANGE_NOT_CONSISTENT);
+        }
+
+    }
+
+    private static String transformTimestamp2Format(String columnFormat) {
+        for (PartitionDesc.TimestampType timestampType : PartitionDesc.TimestampType.values()) {
+            if (timestampType.name.equals(columnFormat)) {
+                return timestampType.format;
+            }
+        }
+        return columnFormat;
+    }
+
+}
diff --git a/src/core-metadata/src/test/java/org/apache/kylin/metadata/cube/storage/ProjectStorageInfoCollectorTest.java b/src/core-metadata/src/test/java/org/apache/kylin/metadata/cube/storage/ProjectStorageInfoCollectorTest.java
index 6dd8df4aa6..eb79e97fdb 100644
--- a/src/core-metadata/src/test/java/org/apache/kylin/metadata/cube/storage/ProjectStorageInfoCollectorTest.java
+++ b/src/core-metadata/src/test/java/org/apache/kylin/metadata/cube/storage/ProjectStorageInfoCollectorTest.java
@@ -29,8 +29,8 @@ import java.util.TreeMap;
 import java.util.stream.Collectors;
 
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.TimeUtil;
 import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
+import org.apache.kylin.common.util.TimeUtil;
 import org.apache.kylin.common.util.Unsafe;
 import org.apache.kylin.metadata.cube.model.IndexEntity;
 import org.apache.kylin.metadata.cube.model.IndexPlan;
@@ -68,19 +68,12 @@ public class ProjectStorageInfoCollectorTest extends NLocalFileMetadataTestCase
     private static final String GC_MODEL_ID = "e0e90065-e7c3-49a0-a801-20465ca64799";
     private static final String DEFAULT_PROJECT = "default";
     private static final String DEFAULT_MODEL_BASIC_ID = "89af4ee2-2cdb-4b07-b39e-4c29856309aa";
-    private JdbcRawRecStore jdbcRawRecStore;
-
     private static final long DAY_IN_MILLIS = 24 * 60 * 60 * 1000L;
 
     @Before
     public void setUp() throws Exception {
         this.createTestMetadata();
         overwriteSystemProp("kylin.cube.low-frequency-threshold", "5");
-        try {
-            jdbcRawRecStore = new JdbcRawRecStore(getTestConfig());
-        } catch (Exception e) {
-            log.error("initialize rec store failed.");
-        }
     }
 
     @After
@@ -476,7 +469,6 @@ public class ProjectStorageInfoCollectorTest extends NLocalFileMetadataTestCase
         overwriteSystemProp("kylin.storage.check-quota-enabled", "true");
         KylinConfig testConfig = getTestConfig();
         overwriteSystemProp("kylin.metrics.hdfs-periodic-calculation-enabled", "true");
-        HdfsCapacityMetrics.registerHdfsMetrics();
         StorageVolumeInfo storageVolumeInfo = Mockito.spy(StorageVolumeInfo.class);
         TotalStorageCollector totalStorageCollector = new TotalStorageCollector();
         totalStorageCollector.collect(testConfig, DEFAULT_PROJECT, storageVolumeInfo);
@@ -487,7 +479,6 @@ public class ProjectStorageInfoCollectorTest extends NLocalFileMetadataTestCase
     public void testGetStorageVolumeQuotaStorageEnabledFalse() throws IOException {
         overwriteSystemProp("kylin.storage.check-quota-enabled", "false");
         KylinConfig testConfig = getTestConfig();
-        HdfsCapacityMetrics.registerHdfsMetrics();
         StorageVolumeInfo storageVolumeInfo = Mockito.spy(StorageVolumeInfo.class);
         TotalStorageCollector totalStorageCollector = new TotalStorageCollector();
         totalStorageCollector.collect(testConfig, DEFAULT_PROJECT, storageVolumeInfo);
diff --git a/src/core-metadata/src/test/java/org/apache/kylin/metrics/HdfsCapacityMetricsTest.java b/src/core-metadata/src/test/java/org/apache/kylin/metrics/HdfsCapacityMetricsTest.java
index 9239ff9093..689fbdc831 100644
--- a/src/core-metadata/src/test/java/org/apache/kylin/metrics/HdfsCapacityMetricsTest.java
+++ b/src/core-metadata/src/test/java/org/apache/kylin/metrics/HdfsCapacityMetricsTest.java
@@ -49,86 +49,68 @@ public class HdfsCapacityMetricsTest extends NLocalFileMetadataTestCase {
     }
 
     @Test
-    public void testRegisterHdfsMetricsFailed() {
-        overwriteSystemProp("kylin.storage.check-quota-enabled", "true");
-        HdfsCapacityMetrics.registerHdfsMetrics();
-        // scheduledExecutor may like this
-        // java.util.concurrent.ScheduledThreadPoolExecutor@5bf61e67[Running, pool size = 1, active threads = 1, queued tasks = 1, completed tasks = 0]
-        String scheduledExecutor = HdfsCapacityMetrics.HDFS_METRICS_SCHEDULED_EXECUTOR.toString();
-        String poolSizeStr = "pool size = ";
-        int activePoolSizeIdx = scheduledExecutor.indexOf(poolSizeStr);
-        String poolSize = scheduledExecutor.substring(activePoolSizeIdx + poolSizeStr.length(),
-                activePoolSizeIdx + poolSizeStr.length() + 1);
-        Assert.assertEquals(1, Integer.parseInt(poolSize));
-    }
-
-    @Test
-    @Ignore("KE-40537")
-    public void testRegisterHdfsMetrics() {
+    @Ignore("unstable")
+    public void testRegisterHdfsMetrics() throws InterruptedException {
         overwriteSystemProp("kylin.storage.check-quota-enabled", "true");
         overwriteSystemProp("kylin.metrics.hdfs-periodic-calculation-enabled", "true");
-        HdfsCapacityMetrics.registerHdfsMetrics();
         // scheduledExecutor may like this
         // java.util.concurrent.ScheduledThreadPoolExecutor@4b5189ac[Running, pool size = 1, active threads = 1, queued tasks = 1, completed tasks = 0]
-        String scheduledExecutor = HdfsCapacityMetrics.HDFS_METRICS_SCHEDULED_EXECUTOR.toString();
-        String activeThreadStr = "active threads = ";
-        int activeThreadIdx = scheduledExecutor.indexOf(activeThreadStr);
-        String thread = scheduledExecutor.substring(activeThreadIdx + activeThreadStr.length(),
-                activeThreadIdx + activeThreadStr.length() + 1);
-        Assert.assertEquals(1, Integer.parseInt(thread));
+        overwriteSystemProp("kylin.metrics.hdfs-periodic-calculation-enabled", "true");
+        HdfsCapacityMetrics hdfsCapacityMetrics = new HdfsCapacityMetrics(getTestConfig());
+        Assert.assertEquals(1, hdfsCapacityMetrics.getPoolSize());
+        Assert.assertTrue(hdfsCapacityMetrics.getActiveCount() <= 1);
     }
 
     @Test
     public void testRegisterHdfsMetricsQuotaStorageEnabledFalse() {
         overwriteSystemProp("kylin.storage.check-quota-enabled", "false");
-        HdfsCapacityMetrics.registerHdfsMetrics();
-        String scheduledExecutor = HdfsCapacityMetrics.HDFS_METRICS_SCHEDULED_EXECUTOR.toString();
-        String activeThreadStr = "active threads = ";
-        int activeThreadIdx = scheduledExecutor.indexOf(activeThreadStr);
-        String thread = scheduledExecutor.substring(activeThreadIdx + activeThreadStr.length(),
-                activeThreadIdx + activeThreadStr.length() + 1);
-        Assert.assertEquals(0, Integer.parseInt(thread));
+        HdfsCapacityMetrics hdfsCapacityMetrics = new HdfsCapacityMetrics(getTestConfig());
+        Assert.assertEquals(0, hdfsCapacityMetrics.getActiveCount());
     }
 
     @Test
     public void testHandleNodeHdfsMetrics() {
         overwriteSystemProp("kylin.metrics.hdfs-periodic-calculation-enabled", "true");
+        HdfsCapacityMetrics hdfsCapacityMetrics = new HdfsCapacityMetrics(getTestConfig());
         EpochManager.getInstance().tryUpdateEpoch(EpochManager.GLOBAL, true);
-        HdfsCapacityMetrics.handleNodeHdfsMetrics();
-        Assert.assertTrue(HdfsCapacityMetrics.workingDirCapacity.size() > 0);
+        hdfsCapacityMetrics.handleNodeHdfsMetrics();
+        Assert.assertTrue(hdfsCapacityMetrics.getWorkingDirCapacity().size() > 0);
     }
 
     @Test
     public void testWriteHdfsMetrics() throws IOException {
-        KylinConfig testConfig = getTestConfig();
         overwriteSystemProp("kylin.metrics.hdfs-periodic-calculation-enabled", "true");
+        KylinConfig testConfig = getTestConfig();
+        HdfsCapacityMetrics hdfsCapacityMetrics = new HdfsCapacityMetrics(testConfig);
         Path projectPath = new Path(testConfig.getWorkingDirectoryWithConfiguredFs("newten"));
         FileSystem fs = projectPath.getFileSystem(HadoopUtil.getCurrentConfiguration());
         if (!fs.exists(projectPath)) {
             fs.mkdirs(projectPath);
             fs.createNewFile(projectPath);
         }
-        HdfsCapacityMetrics.writeHdfsMetrics();
+        hdfsCapacityMetrics.writeHdfsMetrics();
     }
 
     @Test
     public void testReadHdfsMetrics() throws IOException {
-        KylinConfig testConfig = getTestConfig();
         overwriteSystemProp("kylin.metrics.hdfs-periodic-calculation-enabled", "true");
+        KylinConfig testConfig = getTestConfig();
+        HdfsCapacityMetrics hdfsCapacityMetrics = new HdfsCapacityMetrics(testConfig);
         Path projectPath = new Path(testConfig.getWorkingDirectoryWithConfiguredFs("newten"));
         FileSystem fs = projectPath.getFileSystem(HadoopUtil.getCurrentConfiguration());
         if (!fs.exists(projectPath)) {
             fs.mkdirs(projectPath);
             fs.createNewFile(projectPath);
         }
-        HdfsCapacityMetrics.writeHdfsMetrics();
-        HdfsCapacityMetrics.readHdfsMetrics();
+        hdfsCapacityMetrics.writeHdfsMetrics();
+        Assert.assertEquals(hdfsCapacityMetrics.getWorkingDirCapacity().size(), hdfsCapacityMetrics.readHdfsMetrics().size());
     }
 
     @Test
     public void testWriteAndReadHdfsMetrics() throws IOException {
-        KylinConfig testConfig = getTestConfig();
         overwriteSystemProp("kylin.metrics.hdfs-periodic-calculation-enabled", "true");
+        KylinConfig testConfig = getTestConfig();
+        HdfsCapacityMetrics hdfsCapacityMetrics = new HdfsCapacityMetrics(testConfig);
         EpochManager.getInstance().tryUpdateEpoch(EpochManager.GLOBAL, true);
         Path projectPath = new Path(testConfig.getWorkingDirectoryWithConfiguredFs("newten"));
         FileSystem fs = projectPath.getFileSystem(HadoopUtil.getCurrentConfiguration());
@@ -136,21 +118,22 @@ public class HdfsCapacityMetricsTest extends NLocalFileMetadataTestCase {
             fs.mkdirs(projectPath);
             fs.createNewFile(projectPath);
         }
-        HdfsCapacityMetrics.registerHdfsMetrics();
-
         Thread t1 = new Thread(() -> {
             await().pollDelay(new Duration(1, TimeUnit.SECONDS)).until(() -> true);
-            HdfsCapacityMetrics.readHdfsMetrics();
-            Assert.assertTrue(HdfsCapacityMetrics.workingDirCapacity.size() > 0);
+            hdfsCapacityMetrics.readHdfsMetrics();
+            Assert.assertTrue(hdfsCapacityMetrics.getWorkingDirCapacity().size() > 0);
         });
         t1.start();
         fs.deleteOnExit(projectPath);
-        fs.deleteOnExit(HdfsCapacityMetrics.HDFS_CAPACITY_METRICS_PATH);
+        fs.deleteOnExit(hdfsCapacityMetrics.getHdfsCapacityMetricsPath());
     }
 
     @Test
     public void testGetHdfsCapacityByProject() {
         overwriteSystemProp("kylin.metrics.hdfs-periodic-calculation-enabled", "true");
-        Assert.assertEquals(0L, (long) HdfsCapacityMetrics.getHdfsCapacityByProject("kylin"));
+        overwriteSystemProp("kylin.storage.check-quota-enabled", "true");
+        overwriteSystemProp("kylin.metrics.hdfs-periodic-calculation-enabled", "true");
+        HdfsCapacityMetrics hdfsCapacityMetrics = new HdfsCapacityMetrics(getTestConfig());
+        Assert.assertEquals(0L, (long) hdfsCapacityMetrics.getHdfsCapacityByProject("kylin"));
     }
 }
\ No newline at end of file
diff --git a/src/systools/src/test/java/org/apache/kylin/rest/security/ExternalAclProviderTest.java b/src/core-metadata/src/test/java/org/apache/kylin/rest/security/ExternalAclProviderTest.java
similarity index 100%
rename from src/systools/src/test/java/org/apache/kylin/rest/security/ExternalAclProviderTest.java
rename to src/core-metadata/src/test/java/org/apache/kylin/rest/security/ExternalAclProviderTest.java
diff --git a/src/systools/src/test/java/org/apache/kylin/rest/security/KylinPermissionGrantingStrategyTest.java b/src/core-metadata/src/test/java/org/apache/kylin/rest/security/KylinPermissionGrantingStrategyTest.java
similarity index 100%
rename from src/systools/src/test/java/org/apache/kylin/rest/security/KylinPermissionGrantingStrategyTest.java
rename to src/core-metadata/src/test/java/org/apache/kylin/rest/security/KylinPermissionGrantingStrategyTest.java
diff --git a/src/systools/src/test/java/org/apache/kylin/rest/security/UserAclManagerTest.java b/src/core-metadata/src/test/java/org/apache/kylin/rest/security/UserAclManagerTest.java
similarity index 100%
rename from src/systools/src/test/java/org/apache/kylin/rest/security/UserAclManagerTest.java
rename to src/core-metadata/src/test/java/org/apache/kylin/rest/security/UserAclManagerTest.java
diff --git a/src/systools/src/test/java/org/apache/kylin/rest/util/AclPermissionUtilTest.java b/src/core-metadata/src/test/java/org/apache/kylin/rest/util/AclPermissionUtilTest.java
similarity index 100%
rename from src/systools/src/test/java/org/apache/kylin/rest/util/AclPermissionUtilTest.java
rename to src/core-metadata/src/test/java/org/apache/kylin/rest/util/AclPermissionUtilTest.java
diff --git a/src/data-loading-server/pom.xml b/src/data-loading-server/pom.xml
index 7ce2077b10..50d47eadbc 100644
--- a/src/data-loading-server/pom.xml
+++ b/src/data-loading-server/pom.xml
@@ -40,7 +40,7 @@
 
         <dependency>
             <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-common-service</artifactId>
+            <artifactId>kylin-common-server</artifactId>
         </dependency>
 
         <dependency>
diff --git a/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/BaseController.java b/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/BaseController.java
index b5a2cec688..83f152d7e8 100644
--- a/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/BaseController.java
+++ b/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/BaseController.java
@@ -33,11 +33,6 @@ import static org.apache.kylin.common.exception.code.ErrorCodeServer.PROJECT_NOT
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.REQUEST_PARAMETER_EMPTY_OR_VALUE_EMPTY;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_CONFLICT_PARAMETER;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_EMPTY_PARAMETER;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.TIME_INVALID_RANGE_END_LESS_THAN_EQUALS_START;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.TIME_INVALID_RANGE_LESS_THAN_ZERO;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.TIME_INVALID_RANGE_NOT_CONSISTENT;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.TIME_INVALID_RANGE_NOT_FORMAT_MS;
-import static org.apache.kylin.metadata.model.PartitionDesc.transformTimestamp2Format;
 
 import java.io.File;
 import java.io.FileInputStream;
@@ -66,7 +61,6 @@ import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.msg.Message;
 import org.apache.kylin.common.msg.MsgPicker;
 import org.apache.kylin.common.persistence.transaction.TransactionException;
-import org.apache.kylin.common.util.DateFormat;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.job.dao.ExecutablePO;
 import org.apache.kylin.metadata.project.NProjectManager;
@@ -383,57 +377,6 @@ public class BaseController {
         }
     }
 
-    public void validateRange(String start, String end) {
-        validateRange(Long.parseLong(start), Long.parseLong(end));
-    }
-
-    private void validateRange(long start, long end) {
-        if (start < 0 || end < 0) {
-            throw new KylinException(TIME_INVALID_RANGE_LESS_THAN_ZERO);
-        }
-        if (start >= end) {
-            throw new KylinException(TIME_INVALID_RANGE_END_LESS_THAN_EQUALS_START);
-        }
-    }
-
-    public void validateDataRange(String start, String end) {
-        validateDataRange(start, end, null);
-    }
-
-    public void validateDataRange(String start, String end, String partitionColumnFormat) {
-        if (StringUtils.isEmpty(start) && StringUtils.isEmpty(end)) {
-            return;
-        }
-
-        if (StringUtils.isNotEmpty(start) && StringUtils.isNotEmpty(end)) {
-            long startLong = 0;
-            long endLong = 0;
-
-            try {
-                startLong = Long.parseLong(start);
-                endLong = Long.parseLong(end);
-            } catch (Exception e) {
-                throw new KylinException(TIME_INVALID_RANGE_NOT_FORMAT_MS);
-            }
-
-            if (startLong < 0 || endLong < 0)
-                throw new KylinException(TIME_INVALID_RANGE_LESS_THAN_ZERO);
-
-            try {
-                startLong = DateFormat.getFormatTimeStamp(start, transformTimestamp2Format(partitionColumnFormat));
-                endLong = DateFormat.getFormatTimeStamp(end, transformTimestamp2Format(partitionColumnFormat));
-            } catch (Exception e) {
-                throw new KylinException(TIME_INVALID_RANGE_NOT_FORMAT_MS);
-            }
-
-            if (startLong >= endLong)
-                throw new KylinException(TIME_INVALID_RANGE_END_LESS_THAN_EQUALS_START);
-
-        } else {
-            throw new KylinException(TIME_INVALID_RANGE_NOT_CONSISTENT);
-        }
-    }
-
     public void checkStreamingOperation(String project, String table) {
         val config = KylinConfig.getInstanceFromEnv();
         val kafkaConf = KafkaConfigManager.getInstance(config, project).getKafkaConfig(table);
diff --git a/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/SampleController.java b/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/SampleController.java
index b59eec4054..60f672d993 100644
--- a/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/SampleController.java
+++ b/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/SampleController.java
@@ -32,6 +32,7 @@ import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.service.ModelBuildSupporter;
 import org.apache.kylin.rest.service.TableSamplingService;
 import org.apache.kylin.rest.service.TableService;
+import org.apache.kylin.util.DataRangeUtils;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Controller;
@@ -76,7 +77,7 @@ public class SampleController extends BaseController {
         checkRequiredArg("refresh end", request.getRefreshEnd());
         checkRequiredArg("affected start", request.getAffectedStart());
         checkRequiredArg("affected end", request.getAffectedEnd());
-        validateRange(request.getRefreshStart(), request.getRefreshEnd());
+        DataRangeUtils.validateRange(request.getRefreshStart(), request.getRefreshEnd());
         modelBuildService.refreshSegments(request.getProject(), request.getTable(), request.getRefreshStart(),
                 request.getRefreshEnd(), request.getAffectedStart(), request.getAffectedEnd());
         return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, "", "");
diff --git a/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/SegmentController.java b/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/SegmentController.java
index d1f706e275..5d8e2b4012 100644
--- a/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/SegmentController.java
+++ b/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/SegmentController.java
@@ -57,6 +57,7 @@ import org.apache.kylin.rest.service.ModelService;
 import org.apache.kylin.rest.service.params.IncrementBuildSegmentParams;
 import org.apache.kylin.rest.service.params.MergeSegmentParams;
 import org.apache.kylin.rest.service.params.RefreshSegmentParams;
+import org.apache.kylin.util.DataRangeUtils;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Controller;
@@ -133,7 +134,7 @@ public class SegmentController extends BaseController {
             @RequestParam(value = "statuses", required = false, defaultValue = "") List<String> statuses,
             @RequestParam(value = "statuses_second_storage", required = false, defaultValue = "") List<String> statusesSecondStorage) {
         checkProjectName(project);
-        validateRange(start, end);
+        DataRangeUtils.validateRange(start, end);
         modelService.checkSegmentStatus(statuses);
         modelService.checkSegmentSecondStorageStatus(statusesSecondStorage);
         List<NDataSegmentResponse> segments = modelService.getSegmentsResponse(dataflowId, project, start, end, status,
@@ -150,7 +151,7 @@ public class SegmentController extends BaseController {
         checkRequiredArg("segment_holes", segmentsRequest.getSegmentHoles());
         String partitionColumnFormat = modelService.getPartitionColumnFormatById(segmentsRequest.getProject(), modelId);
         segmentsRequest.getSegmentHoles()
-                .forEach(seg -> validateDataRange(seg.getStart(), seg.getEnd(), partitionColumnFormat));
+                .forEach(seg -> DataRangeUtils.validateDataRange(seg.getStart(), seg.getEnd(), partitionColumnFormat));
         JobInfoResponse response = modelService.fixSegmentHoles(segmentsRequest.getProject(), modelId,
                 segmentsRequest.getSegmentHoles(), segmentsRequest.getIgnoredSnapshotTables());
         return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, response, "");
@@ -164,7 +165,7 @@ public class SegmentController extends BaseController {
         checkProjectName(buildSegmentsRequest.getProject());
         String partitionColumnFormat = modelService.getPartitionColumnFormatById(buildSegmentsRequest.getProject(),
                 modelId);
-        validateDataRange(buildSegmentsRequest.getStart(), buildSegmentsRequest.getEnd(), partitionColumnFormat);
+        DataRangeUtils.validateDataRange(buildSegmentsRequest.getStart(), buildSegmentsRequest.getEnd(), partitionColumnFormat);
         val res = modelService.checkSegHoleExistIfNewRangeBuild(buildSegmentsRequest.getProject(), modelId,
                 buildSegmentsRequest.getStart(), buildSegmentsRequest.getEnd(),
                 buildSegmentsRequest.isBuildAllIndexes(), buildSegmentsRequest.getBatchIndexIds());
@@ -274,7 +275,7 @@ public class SegmentController extends BaseController {
         checkParamLength("tag", buildSegmentsRequest.getTag(), prjInstance.getConfig().getJobTagMaxSize());
         String partitionColumnFormat = modelService.getPartitionColumnFormatById(buildSegmentsRequest.getProject(),
                 modelId);
-        validateDataRange(buildSegmentsRequest.getStart(), buildSegmentsRequest.getEnd(), partitionColumnFormat);
+        DataRangeUtils.validateDataRange(buildSegmentsRequest.getStart(), buildSegmentsRequest.getEnd(), partitionColumnFormat);
         modelService.validateCCType(modelId, buildSegmentsRequest.getProject());
         JobInfoResponse response = modelBuildService.buildSegmentsManually(buildSegmentsRequest.getProject(), modelId,
                 buildSegmentsRequest.getStart(), buildSegmentsRequest.getEnd(),
@@ -296,7 +297,7 @@ public class SegmentController extends BaseController {
                 .getProject(buildSegmentsRequest.getProject());
         checkParamLength("tag", buildSegmentsRequest.getTag(), prjInstance.getConfig().getJobTagMaxSize());
         String partitionColumnFormat = buildSegmentsRequest.getPartitionDesc().getPartitionDateFormat();
-        validateDataRange(buildSegmentsRequest.getStart(), buildSegmentsRequest.getEnd(), partitionColumnFormat);
+        DataRangeUtils.validateDataRange(buildSegmentsRequest.getStart(), buildSegmentsRequest.getEnd(), partitionColumnFormat);
         modelService.validateCCType(modelId, buildSegmentsRequest.getProject());
 
         IncrementBuildSegmentParams incrParams = new IncrementBuildSegmentParams(buildSegmentsRequest.getProject(),
diff --git a/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/open/OpenSegmentController.java b/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/open/OpenSegmentController.java
index 4b4e1bfdcc..0e7636eee2 100644
--- a/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/open/OpenSegmentController.java
+++ b/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/open/OpenSegmentController.java
@@ -55,6 +55,7 @@ import org.apache.kylin.rest.response.SegmentPartitionResponse;
 import org.apache.kylin.rest.service.FusionModelService;
 import org.apache.kylin.rest.service.ModelService;
 import org.apache.kylin.rest.util.AclEvaluate;
+import org.apache.kylin.util.DataRangeUtils;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.DeleteMapping;
@@ -241,7 +242,7 @@ public class OpenSegmentController extends BaseController {
         aclEvaluate.checkProjectOperationPermission(request.getProject());
         checkRequiredArg("start", request.getStart());
         checkRequiredArg("end", request.getEnd());
-        validateDataRange(request.getStart(), request.getEnd());
+        DataRangeUtils.validateDataRange(request.getStart(), request.getEnd());
         NDataModel model = getModel(modelAlias, projectName);
         return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, modelService.checkSegments(request.getProject(),
                 model.getAlias(), request.getStart(), request.getEnd()), "");
diff --git a/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/v2/SegmentControllerV2.java b/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/v2/SegmentControllerV2.java
index a9bc935d0f..ee1de0d848 100644
--- a/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/v2/SegmentControllerV2.java
+++ b/src/data-loading-server/src/main/java/org/apache/kylin/rest/controller/v2/SegmentControllerV2.java
@@ -56,6 +56,7 @@ import org.apache.kylin.rest.service.ModelBuildService;
 import org.apache.kylin.rest.service.ModelService;
 import org.apache.kylin.rest.service.params.MergeSegmentParams;
 import org.apache.kylin.rest.service.params.RefreshSegmentParams;
+import org.apache.kylin.util.DataRangeUtils;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.web.bind.annotation.GetMapping;
@@ -142,7 +143,7 @@ public class SegmentControllerV2 extends BaseController {
         }
         String partitionColumnFormat = modelService.getPartitionColumnFormatByAlias(dataModelResponse.getProject(),
                 modelAlias);
-        validateDataRange(startTime, endTime, partitionColumnFormat);
+        DataRangeUtils.validateDataRange(startTime, endTime, partitionColumnFormat);
         JobInfoResponseV2 result = null;
         switch (request.getBuildType()) {
         case "BUILD":
diff --git a/src/data-loading-server/src/test/java/org/apache/kylin/rest/controller/BaseControllerTest.java b/src/data-loading-server/src/test/java/org/apache/kylin/rest/controller/BaseControllerTest.java
index 69557635c7..fb44ff9c95 100644
--- a/src/data-loading-server/src/test/java/org/apache/kylin/rest/controller/BaseControllerTest.java
+++ b/src/data-loading-server/src/test/java/org/apache/kylin/rest/controller/BaseControllerTest.java
@@ -48,6 +48,7 @@ import org.apache.kylin.rest.exception.ForbiddenException;
 import org.apache.kylin.rest.exception.NotFoundException;
 import org.apache.kylin.rest.exception.UnauthorizedException;
 import org.apache.kylin.rest.service.ProjectService;
+import org.apache.kylin.util.DataRangeUtils;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -159,50 +160,50 @@ public class BaseControllerTest extends NLocalFileMetadataTestCase {
     public void testCheckStartAndEndException() {
         thrown.expect(KylinException.class);
         thrown.expectMessage(TIME_INVALID_RANGE_NOT_CONSISTENT.getMsg());
-        baseController.validateDataRange("10", "");
+        DataRangeUtils.validateDataRange("10", "");
     }
 
     @Test
     public void testTimeRangeEndGreaterThanStart() {
         thrown.expect(KylinException.class);
         thrown.expectMessage("The end time must be greater than the start time");
-        baseController.validateDataRange("10", "1");
+        DataRangeUtils.validateDataRange("10", "1");
     }
 
     @Test
     public void testTimeRangeEndEqualToStart() {
         thrown.expect(KylinException.class);
         thrown.expectMessage("The end time must be greater than the start time");
-        baseController.validateDataRange("1", "1");
+        DataRangeUtils.validateDataRange("1", "1");
     }
 
     @Test
     public void testTimeRangeInvalidStart() {
         thrown.expect(KylinException.class);
         thrown.expectMessage(TIME_INVALID_RANGE_LESS_THAN_ZERO.getMsg());
-        baseController.validateDataRange("-1", "1");
+        DataRangeUtils.validateDataRange("-1", "1");
     }
 
     @Test
     public void testTimeRangeInvalidEnd() {
         thrown.expect(KylinException.class);
         thrown.expectMessage(TIME_INVALID_RANGE_LESS_THAN_ZERO.getMsg());
-        baseController.validateDataRange("2", "-1");
+        DataRangeUtils.validateDataRange("2", "-1");
     }
 
     @Test
     public void testTimeRangeInvalidFormat() {
         thrown.expect(KylinException.class);
         thrown.expectMessage(TIME_INVALID_RANGE_NOT_FORMAT_MS.getMsg());
-        baseController.validateDataRange("start", "end");
+        DataRangeUtils.validateDataRange("start", "end");
     }
 
     @Test
     public void testTimeRangeValid() {
-        baseController.validateDataRange("0", "86400000", "yyyy-MM-dd");
-        baseController.validateDataRange("1000000000000", "2200000000000", "yyyy-MM-dd");
-        baseController.validateDataRange("0", "86400000", PartitionDesc.TimestampType.MILLISECOND.name);
-        baseController.validateDataRange("1000000000000", "2200000000000", PartitionDesc.TimestampType.SECOND.name);
+        DataRangeUtils.validateDataRange("0", "86400000", "yyyy-MM-dd");
+        DataRangeUtils.validateDataRange("1000000000000", "2200000000000", "yyyy-MM-dd");
+        DataRangeUtils.validateDataRange("0", "86400000", PartitionDesc.TimestampType.MILLISECOND.name);
+        DataRangeUtils.validateDataRange("1000000000000", "2200000000000", PartitionDesc.TimestampType.SECOND.name);
     }
 
     @Test
@@ -218,7 +219,7 @@ public class BaseControllerTest extends NLocalFileMetadataTestCase {
         }
         thrown.expect(KylinException.class);
         thrown.expectMessage("The end time must be greater than the start time");
-        baseController.validateDataRange(start, end, "yyyy-MM-dd");
+        DataRangeUtils.validateDataRange(start, end, "yyyy-MM-dd");
     }
 
     @Test
diff --git a/src/data-loading-service/pom.xml b/src/data-loading-service/pom.xml
index dd608e3d3c..6ec32d89ba 100644
--- a/src/data-loading-service/pom.xml
+++ b/src/data-loading-service/pom.xml
@@ -134,6 +134,17 @@
             <artifactId>awaitility</artifactId>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.apache.kylin</groupId>
+            <artifactId>kap-second-storage-clickhouse</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.kylin</groupId>
+            <artifactId>kap-second-storage-clickhouse</artifactId>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
 
 </project>
diff --git a/src/job-service/src/test/java/org/apache/kylin/rest/service/JobErrorTest.java b/src/data-loading-service/src/test/java/org/apache/kylin/rest/service/JobErrorTest.java
similarity index 100%
rename from src/job-service/src/test/java/org/apache/kylin/rest/service/JobErrorTest.java
rename to src/data-loading-service/src/test/java/org/apache/kylin/rest/service/JobErrorTest.java
diff --git a/src/job-service/src/test/java/org/apache/kylin/rest/service/JobServiceTest.java b/src/data-loading-service/src/test/java/org/apache/kylin/rest/service/JobServiceTest.java
similarity index 100%
rename from src/job-service/src/test/java/org/apache/kylin/rest/service/JobServiceTest.java
rename to src/data-loading-service/src/test/java/org/apache/kylin/rest/service/JobServiceTest.java
diff --git a/src/job-service/src/test/java/org/apache/kylin/rest/service/MockClusterManager.java b/src/data-loading-service/src/test/java/org/apache/kylin/rest/service/MockClusterManager.java
similarity index 100%
rename from src/job-service/src/test/java/org/apache/kylin/rest/service/MockClusterManager.java
rename to src/data-loading-service/src/test/java/org/apache/kylin/rest/service/MockClusterManager.java
diff --git a/src/job-service/src/test/java/org/apache/kylin/rest/service/StageTest.java b/src/data-loading-service/src/test/java/org/apache/kylin/rest/service/StageTest.java
similarity index 100%
rename from src/job-service/src/test/java/org/apache/kylin/rest/service/StageTest.java
rename to src/data-loading-service/src/test/java/org/apache/kylin/rest/service/StageTest.java
diff --git a/src/datasource-service/pom.xml b/src/datasource-service/pom.xml
index efdb2d3061..8766e49a50 100644
--- a/src/datasource-service/pom.xml
+++ b/src/datasource-service/pom.xml
@@ -60,11 +60,6 @@
             <groupId>org.apache.kylin</groupId>
             <artifactId>kylin-spark-ddl</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.apache.tomcat.embed</groupId>
-            <artifactId>tomcat-embed-core</artifactId>
-            <scope>compile</scope>
-        </dependency>
         <dependency>
             <groupId>org.apache.kafka</groupId>
             <artifactId>kafka-clients</artifactId>
diff --git a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableService.java b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableService.java
index 950469188d..6b7538697c 100644
--- a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableService.java
+++ b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableService.java
@@ -83,6 +83,7 @@ import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.common.util.RandomUtil;
 import org.apache.kylin.common.util.ShellException;
+import org.apache.kylin.constants.AclConstants;
 import org.apache.kylin.job.dao.ExecutablePO;
 import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableState;
@@ -155,7 +156,6 @@ import org.apache.kylin.rest.response.TableNameResponse;
 import org.apache.kylin.rest.response.TableRefresh;
 import org.apache.kylin.rest.response.TableRefreshAll;
 import org.apache.kylin.rest.response.TablesAndColumnsResponse;
-import org.apache.kylin.rest.security.ExternalAclProvider;
 import org.apache.kylin.rest.security.KerberosLoginManager;
 import org.apache.kylin.rest.source.DataSourceState;
 import org.apache.kylin.rest.util.AclEvaluate;
@@ -461,8 +461,8 @@ public class TableService extends BasicService {
         FileSystem fs = HadoopUtil.getWorkingFileSystem();
         List<NDataModel> healthyModels = projectManager.listHealthyModels(project);
         Set<String> extPermissionSet = accessService.getUserNormalExtPermissions(project);
-        boolean hasDataQueryPermission = extPermissionSet.contains(ExternalAclProvider.DATA_QUERY);
         int satisfiedTableSize = 0;
+        boolean hasDataQueryPermission = extPermissionSet.contains(AclConstants.DATA_QUERY);
         for (val originTable : tables) {
             // New judgment logic, when the total size of tables meet the current size of paging directly after the exit
             // Also, if the processing is not finished, the total size of tables is returned
@@ -512,9 +512,10 @@ public class TableService extends BasicService {
         }
         List<String[]> result = Lists.newArrayList();
         final String dbTblName = rtableDesc.getIdentity();
+        AclTCRManager manager = getManager(AclTCRManager.class, project);
         Map<Integer, AclTCR.ColumnRealRows> columnRows = Arrays.stream(rtableDesc.getExtColumns()).map(cdr -> {
             int id = Integer.parseInt(cdr.getId());
-            val columnRealRows = getManager(AclTCRManager.class, project).getAuthorizedRows(dbTblName, cdr.getName(),
+            val columnRealRows = manager.getAuthorizedRows(dbTblName, cdr.getName(),
                     aclTCRS);
             return new AbstractMap.SimpleEntry<>(id, columnRealRows);
         }).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
diff --git a/src/job-service/pom.xml b/src/job-service/pom.xml
index 223bd59f53..38c92b0b25 100644
--- a/src/job-service/pom.xml
+++ b/src/job-service/pom.xml
@@ -39,10 +39,6 @@
             <groupId>org.apache.kylin</groupId>
             <artifactId>kylin-modeling-service</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.apache.tomcat.embed</groupId>
-            <artifactId>tomcat-embed-core</artifactId>
-        </dependency>
 
         <!-- test -->
         <dependency>
@@ -64,11 +60,6 @@
             <groupId>org.mockito</groupId>
             <artifactId>mockito-core</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-data-loading-service</artifactId>
-            <scope>test</scope>
-        </dependency>
         <dependency>
             <groupId>org.apache.kylin</groupId>
             <artifactId>kylin-core-job</artifactId>
@@ -89,17 +80,6 @@
             <scope>test</scope>
         </dependency>
 
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kap-second-storage-clickhouse</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kap-second-storage-clickhouse</artifactId>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
         <dependency>
             <groupId>com.h2database</groupId>
             <artifactId>h2</artifactId>
diff --git a/src/job-service/src/test/java/org/apache/kylin/rest/config/initialize/SchedulerEventBusTest.java b/src/job-service/src/test/java/org/apache/kylin/rest/config/initialize/SchedulerEventBusTest.java
index 7d6d597d30..1e8d360580 100644
--- a/src/job-service/src/test/java/org/apache/kylin/rest/config/initialize/SchedulerEventBusTest.java
+++ b/src/job-service/src/test/java/org/apache/kylin/rest/config/initialize/SchedulerEventBusTest.java
@@ -24,7 +24,6 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.stream.Collectors;
 
-import org.apache.kylin.common.persistence.transaction.UnitOfWork;
 import org.apache.kylin.common.scheduler.EpochStartedNotifier;
 import org.apache.kylin.common.scheduler.EventBusFactory;
 import org.apache.kylin.common.scheduler.JobFinishedNotifier;
@@ -43,20 +42,14 @@ import org.apache.kylin.metadata.cube.model.NDataSegment;
 import org.apache.kylin.metadata.cube.model.NDataflowManager;
 import org.apache.kylin.metadata.project.NProjectManager;
 import org.apache.kylin.rest.constant.Constant;
-import org.apache.kylin.rest.service.JobService;
 import org.apache.kylin.rest.service.UserAclService;
 import org.apache.kylin.rest.service.UserService;
 import org.apache.kylin.rest.service.task.RecommendationTopNUpdateScheduler;
-import org.apache.kylin.rest.util.AclEvaluate;
-import org.apache.kylin.rest.util.AclUtil;
-import org.assertj.core.util.Lists;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
 import org.mockito.Mockito;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -75,12 +68,6 @@ public class SchedulerEventBusTest extends NLocalFileMetadataTestCase {
     private static final String PROJECT = "default";
     private static final String PROJECT_NEWTEN = "newten";
 
-    @InjectMocks
-    private final JobService jobService = Mockito.spy(new JobService());
-
-    @Mock
-    private final AclEvaluate aclEvaluate = Mockito.spy(AclEvaluate.class);
-
     private final AtomicInteger readyCalledCount = new AtomicInteger(0);
     private final AtomicInteger jobFinishedCalledCount = new AtomicInteger(0);
 
@@ -92,8 +79,6 @@ public class SchedulerEventBusTest extends NLocalFileMetadataTestCase {
         SecurityContextHolder.getContext()
                 .setAuthentication(new TestingAuthenticationToken("ADMIN", "ADMIN", Constant.ROLE_ADMIN));
 
-        ReflectionTestUtils.setField(aclEvaluate, "aclUtil", Mockito.spy(AclUtil.class));
-        ReflectionTestUtils.setField(jobService, "aclEvaluate", aclEvaluate);
         // init DefaultScheduler
         overwriteSystemProp("kylin.job.max-local-consumption-ratio", "10");
         NDefaultScheduler.getInstance(PROJECT_NEWTEN).init(new JobEngineConfig(getTestConfig()));
@@ -172,72 +157,72 @@ public class SchedulerEventBusTest extends NLocalFileMetadataTestCase {
         });
     }
 
-    @Ignore
-    @Test
-    public void testResumeJob() {
-        logger.info("SchedulerEventBusTest testResumeJob");
-
-        overwriteSystemProp("kylin.scheduler.schedule-limit-per-minute", "6000");
-        val df = NDataflowManager.getInstance(getTestConfig(), PROJECT)
-                .getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
-        DefaultExecutable job = new DefaultExecutable();
-        job.setProject(PROJECT);
-        job.setJobType(JobTypeEnum.INC_BUILD);
-        job.setTargetSubject(df.getModel().getUuid());
-        job.setTargetSegments(df.getSegments().stream().map(NDataSegment::getId).collect(Collectors.toList()));
-        FiveSecondSucceedTestExecutable task = new FiveSecondSucceedTestExecutable();
-        task.setTargetSubject(df.getModel().getUuid());
-        task.setTargetSegments(df.getSegments().stream().map(NDataSegment::getId).collect(Collectors.toList()));
-        job.addTask(task);
-
-        val executableManager = NExecutableManager.getInstance(getTestConfig(), PROJECT);
-        executableManager.addJob(job);
-
-        readyCalledCount.set(0);
-
-        executableManager.updateJobOutput(job.getId(), ExecutableState.PAUSED);
-
-        UnitOfWork.doInTransactionWithRetry(() -> {
-            jobService.batchUpdateJobStatus(Lists.newArrayList(job.getId()), PROJECT, "RESUME", Lists.newArrayList());
-            return null;
-        }, PROJECT);
-
-        await().atMost(60000, TimeUnit.MILLISECONDS).until(() -> 1 == readyCalledCount.get());
-    }
-
-    @Ignore
-    @Test
-    public void testRestartJob() {
-        logger.info("SchedulerEventBusTest testRestartJob");
-
-        overwriteSystemProp("kylin.scheduler.schedule-limit-per-minute", "6000");
-        val df = NDataflowManager.getInstance(getTestConfig(), PROJECT)
-                .getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
-        DefaultExecutable job = new DefaultExecutable();
-        job.setProject(PROJECT);
-        job.setJobType(JobTypeEnum.INC_BUILD);
-        job.setTargetSubject(df.getModel().getUuid());
-        job.setTargetSegments(df.getSegments().stream().map(NDataSegment::getId).collect(Collectors.toList()));
-        FiveSecondSucceedTestExecutable task = new FiveSecondSucceedTestExecutable();
-        task.setTargetSubject(df.getModel().getUuid());
-        task.setTargetSegments(df.getSegments().stream().map(NDataSegment::getId).collect(Collectors.toList()));
-        job.addTask(task);
-
-        val executableManager = NExecutableManager.getInstance(getTestConfig(), PROJECT);
-        executableManager.addJob(job);
-
-        readyCalledCount.set(0);
-
-        executableManager.updateJobOutput(job.getId(), ExecutableState.ERROR);
-
-        UnitOfWork.doInTransactionWithRetry(() -> {
-            jobService.batchUpdateJobStatus(Lists.newArrayList(job.getId()), PROJECT, "RESTART", Lists.newArrayList());
-            return null;
-        }, PROJECT);
-
-        await().atMost(120000, TimeUnit.MILLISECONDS)
-                .untilAsserted(() -> Assert.assertEquals(1, readyCalledCount.get()));
-    }
+//    @Ignore
+//    @Test
+//    public void testResumeJob() {
+//        logger.info("SchedulerEventBusTest testResumeJob");
+//
+//        overwriteSystemProp("kylin.scheduler.schedule-limit-per-minute", "6000");
+//        val df = NDataflowManager.getInstance(getTestConfig(), PROJECT)
+//                .getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+//        DefaultExecutable job = new DefaultExecutable();
+//        job.setProject(PROJECT);
+//        job.setJobType(JobTypeEnum.INC_BUILD);
+//        job.setTargetSubject(df.getModel().getUuid());
+//        job.setTargetSegments(df.getSegments().stream().map(NDataSegment::getId).collect(Collectors.toList()));
+//        FiveSecondSucceedTestExecutable task = new FiveSecondSucceedTestExecutable();
+//        task.setTargetSubject(df.getModel().getUuid());
+//        task.setTargetSegments(df.getSegments().stream().map(NDataSegment::getId).collect(Collectors.toList()));
+//        job.addTask(task);
+//
+//        val executableManager = NExecutableManager.getInstance(getTestConfig(), PROJECT);
+//        executableManager.addJob(job);
+//
+//        readyCalledCount.set(0);
+//
+//        executableManager.updateJobOutput(job.getId(), ExecutableState.PAUSED);
+//
+//        UnitOfWork.doInTransactionWithRetry(() -> {
+//            jobService.batchUpdateJobStatus(Lists.newArrayList(job.getId()), PROJECT, "RESUME", Lists.newArrayList());
+//            return null;
+//        }, PROJECT);
+//
+//        await().atMost(60000, TimeUnit.MILLISECONDS).until(() -> 1 == readyCalledCount.get());
+//    }
+
+//    @Ignore
+//    @Test
+//    public void testRestartJob() {
+//        logger.info("SchedulerEventBusTest testRestartJob");
+//
+//        overwriteSystemProp("kylin.scheduler.schedule-limit-per-minute", "6000");
+//        val df = NDataflowManager.getInstance(getTestConfig(), PROJECT)
+//                .getDataflow("89af4ee2-2cdb-4b07-b39e-4c29856309aa");
+//        DefaultExecutable job = new DefaultExecutable();
+//        job.setProject(PROJECT);
+//        job.setJobType(JobTypeEnum.INC_BUILD);
+//        job.setTargetSubject(df.getModel().getUuid());
+//        job.setTargetSegments(df.getSegments().stream().map(NDataSegment::getId).collect(Collectors.toList()));
+//        FiveSecondSucceedTestExecutable task = new FiveSecondSucceedTestExecutable();
+//        task.setTargetSubject(df.getModel().getUuid());
+//        task.setTargetSegments(df.getSegments().stream().map(NDataSegment::getId).collect(Collectors.toList()));
+//        job.addTask(task);
+//
+//        val executableManager = NExecutableManager.getInstance(getTestConfig(), PROJECT);
+//        executableManager.addJob(job);
+//
+//        readyCalledCount.set(0);
+//
+//        executableManager.updateJobOutput(job.getId(), ExecutableState.ERROR);
+//
+//        UnitOfWork.doInTransactionWithRetry(() -> {
+//            jobService.batchUpdateJobStatus(Lists.newArrayList(job.getId()), PROJECT, "RESTART", Lists.newArrayList());
+//            return null;
+//        }, PROJECT);
+//
+//        await().atMost(120000, TimeUnit.MILLISECONDS)
+//                .untilAsserted(() -> Assert.assertEquals(1, readyCalledCount.get()));
+//    }
 
     @Test
     public void testEpochChangedListener() throws Exception {
diff --git a/src/metadata-server/pom.xml b/src/metadata-server/pom.xml
index 32b8984ea3..1ff70f33cb 100644
--- a/src/metadata-server/pom.xml
+++ b/src/metadata-server/pom.xml
@@ -48,7 +48,7 @@
         </dependency>
         <dependency>
             <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-common-service</artifactId>
+            <artifactId>kylin-common-server</artifactId>
         </dependency>
         <dependency>
             <groupId>io.springfox</groupId>
@@ -56,24 +56,24 @@
         </dependency>
 
         <dependency>
-            <groupId>org.springframework</groupId>
-            <artifactId>spring-test</artifactId>
-            <scope>test</scope>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-common</artifactId>
+            <scope>provided</scope>
         </dependency>
         <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-common-server</artifactId>
-            <scope>test</scope>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-hdfs</artifactId>
+            <scope>provided</scope>
         </dependency>
+
         <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-common-service</artifactId>
-            <type>test-jar</type>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-test</artifactId>
             <scope>test</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-systools</artifactId>
+            <artifactId>kylin-common-service</artifactId>
             <type>test-jar</type>
             <scope>test</scope>
         </dependency>
@@ -97,17 +97,6 @@
             <artifactId>junit-vintage-engine</artifactId>
             <scope>test</scope>
         </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-hdfs</artifactId>
-            <scope>provided</scope>
-        </dependency>
         <dependency>
             <groupId>com.h2database</groupId>
             <artifactId>h2</artifactId>
diff --git a/src/metadata-server/src/main/java/io/kyligence/kap/rest/controller/open/OpenModelController.java b/src/metadata-server/src/main/java/io/kyligence/kap/rest/controller/open/OpenModelController.java
index 23c16ef30a..fff6b63367 100644
--- a/src/metadata-server/src/main/java/io/kyligence/kap/rest/controller/open/OpenModelController.java
+++ b/src/metadata-server/src/main/java/io/kyligence/kap/rest/controller/open/OpenModelController.java
@@ -76,6 +76,7 @@ import org.apache.kylin.rest.service.ModelTdsService;
 import org.apache.kylin.rest.util.AclPermissionUtil;
 import org.apache.kylin.tool.bisync.SyncContext;
 import org.apache.kylin.tool.bisync.model.SyncModel;
+import org.apache.kylin.util.DataRangeUtils;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Controller;
@@ -312,7 +313,7 @@ public class OpenModelController extends NBasicController {
                     modelParatitionDescRequest.getPartitionDesc().getPartitionDateFormat());
             partitionDateFormat = modelParatitionDescRequest.getPartitionDesc().getPartitionDateFormat();
         }
-        validateDataRange(modelParatitionDescRequest.getStart(), modelParatitionDescRequest.getEnd(),
+        DataRangeUtils.validateDataRange(modelParatitionDescRequest.getStart(), modelParatitionDescRequest.getEnd(),
                 partitionDateFormat);
         val dataModel = getModel(modelAlias, projectName);
         modelService.updateModelPartitionColumn(projectName, dataModel.getAlias(), modelParatitionDescRequest);
@@ -478,7 +479,7 @@ public class OpenModelController extends NBasicController {
         request.setManagementType(model.getManagementType());
         request.setCanvas(model.getCanvas());
         String partitionColumnFormat = modelService.getPartitionColumnFormatById(request.getProject(), request.getId());
-        validateDataRange(request.getStart(), request.getEnd(), partitionColumnFormat);
+        DataRangeUtils.validateDataRange(request.getStart(), request.getEnd(), partitionColumnFormat);
         modelService.validatePartitionDesc(request.getPartitionDesc());
         checkRequiredArg(MODEL_ID, request.getUuid());
         try {
diff --git a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NModelController.java b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NModelController.java
index 6efa43ec6e..8017577c07 100644
--- a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NModelController.java
+++ b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NModelController.java
@@ -77,6 +77,7 @@ import org.apache.kylin.rest.service.ModelService;
 import org.apache.kylin.rest.service.ModelTdsService;
 import org.apache.kylin.tool.bisync.SyncContext;
 import org.apache.kylin.tool.bisync.model.SyncModel;
+import org.apache.kylin.util.DataRangeUtils;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Controller;
@@ -191,7 +192,7 @@ public class NModelController extends NBasicController {
         modelService.validatePartitionDesc(modelRequest.getPartitionDesc());
         String partitionDateFormat = modelRequest.getPartitionDesc() == null ? null
                 : modelRequest.getPartitionDesc().getPartitionDateFormat();
-        validateDataRange(modelRequest.getStart(), modelRequest.getEnd(), partitionDateFormat);
+        DataRangeUtils.validateDataRange(modelRequest.getStart(), modelRequest.getEnd(), partitionDateFormat);
         try {
             NDataModel model = modelService.createModel(modelRequest.getProject(), modelRequest);
             return new EnvelopeResponse<>(KylinException.CODE_SUCCESS,
@@ -437,7 +438,7 @@ public class NModelController extends NBasicController {
     public EnvelopeResponse<BuildBaseIndexResponse> updateSemantic(@RequestBody ModelRequest request) {
         checkProjectName(request.getProject());
         String partitionColumnFormat = modelService.getPartitionColumnFormatById(request.getProject(), request.getId());
-        validateDataRange(request.getStart(), request.getEnd(), partitionColumnFormat);
+        DataRangeUtils.validateDataRange(request.getStart(), request.getEnd(), partitionColumnFormat);
         modelService.validatePartitionDesc(request.getPartitionDesc());
         checkRequiredArg(MODEL_ID, request.getUuid());
         try {
diff --git a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/v2/NProjectControllerKylin.java b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/v2/NProjectControllerKylin.java
index 147343f9eb..47b9aeb5da 100644
--- a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/v2/NProjectControllerKylin.java
+++ b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/v2/NProjectControllerKylin.java
@@ -24,6 +24,7 @@ import java.util.List;
 import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.rest.controller.NBasicController;
 import org.apache.kylin.rest.service.ProjectService;
+import org.apache.kylin.rest.util.PagingUtil;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Controller;
@@ -51,7 +52,7 @@ public class NProjectControllerKylin extends NBasicController {
             @RequestParam(value = "pageSize", required = false, defaultValue = "10") Integer size,
             @RequestParam(value = "exact", required = false, defaultValue = "true") boolean exactMatch) {
         List<ProjectInstance> readableProjects = projectService.getReadableProjects(project, exactMatch);
-        return (List<ProjectInstance>) getDataNoEnvelopeResponse(readableProjects, offset, size);
+        return PagingUtil.cutPage(readableProjects, offset, size);
     }
 
 }
diff --git a/src/modeling-service/pom.xml b/src/modeling-service/pom.xml
index 39c35f8856..c560e34aaa 100644
--- a/src/modeling-service/pom.xml
+++ b/src/modeling-service/pom.xml
@@ -50,11 +50,6 @@
         </dependency>
 
         <!-- test -->
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kap-second-storage-clickhouse</artifactId>
-            <scope>test</scope>
-        </dependency>
         <dependency>
             <groupId>org.apache.kylin</groupId>
             <artifactId>kap-second-storage-clickhouse</artifactId>
@@ -63,8 +58,7 @@
         </dependency>
         <dependency>
             <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-core-job</artifactId>
-            <type>test-jar</type>
+            <artifactId>kap-second-storage-clickhouse</artifactId>
             <scope>test</scope>
         </dependency>
         <dependency>
diff --git a/src/query-server/pom.xml b/src/query-server/pom.xml
index 01fd906d4f..22adaf2ed6 100644
--- a/src/query-server/pom.xml
+++ b/src/query-server/pom.xml
@@ -38,12 +38,10 @@
 
     <dependencies>
         <!--Add dependencies of KAP extensions-->
-
         <dependency>
             <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-common-service</artifactId>
+            <artifactId>kylin-common-server</artifactId>
         </dependency>
-
         <dependency>
             <groupId>org.apache.kylin</groupId>
             <artifactId>kylin-query-service</artifactId>
@@ -63,12 +61,6 @@
             <groupId>org.apache.kylin</groupId>
             <artifactId>kylin-core-common</artifactId>
         </dependency>
-
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-systools</artifactId>
-        </dependency>
-
         <dependency>
             <groupId>io.springfox</groupId>
             <artifactId>springfox-boot-starter</artifactId>
diff --git a/src/query-server/src/main/java/org/apache/kylin/rest/controller/NQueryController.java b/src/query-server/src/main/java/org/apache/kylin/rest/controller/NQueryController.java
index bb7ce8d2e3..1c3ae4384b 100644
--- a/src/query-server/src/main/java/org/apache/kylin/rest/controller/NQueryController.java
+++ b/src/query-server/src/main/java/org/apache/kylin/rest/controller/NQueryController.java
@@ -77,6 +77,7 @@ import org.apache.kylin.rest.response.QueryStatisticsResponse;
 import org.apache.kylin.rest.response.ServerInfoResponse;
 import org.apache.kylin.rest.service.QueryCacheManager;
 import org.apache.kylin.rest.service.QueryHistoryService;
+import org.apache.kylin.util.DataRangeUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -381,7 +382,7 @@ public class NQueryController extends NBasicController {
             @RequestParam(value = "page_size", required = false, defaultValue = "10") Integer size) {
         checkProjectName(project);
         QueryHistoryRequest request = new QueryHistoryRequest(project, startTimeFrom, startTimeTo);
-        validateDataRange(startTimeFrom, startTimeTo, null);
+        DataRangeUtils.validateDataRange(startTimeFrom, startTimeTo, null);
         Map<String, Object> queryHistories = QueryHisTransformStandardUtil.transformQueryHistory(
                 queryHistoryService.getQueryHistories(request, size, offset));
         return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, queryHistories, "");
diff --git a/src/common-server/src/main/java/org/apache/kylin/rest/controller/SparkSourceController.java b/src/query-server/src/main/java/org/apache/kylin/rest/controller/SparkSourceController.java
similarity index 100%
rename from src/common-server/src/main/java/org/apache/kylin/rest/controller/SparkSourceController.java
rename to src/query-server/src/main/java/org/apache/kylin/rest/controller/SparkSourceController.java
diff --git a/src/common-server/src/test/java/org/apache/kylin/rest/controller/SparkSourceControllerTest.java b/src/query-server/src/test/java/org/apache/kylin/rest/controller/SparkSourceControllerTest.java
similarity index 100%
rename from src/common-server/src/test/java/org/apache/kylin/rest/controller/SparkSourceControllerTest.java
rename to src/query-server/src/test/java/org/apache/kylin/rest/controller/SparkSourceControllerTest.java
diff --git a/src/query-service/pom.xml b/src/query-service/pom.xml
index a64b54af78..a69eeb9ebc 100644
--- a/src/query-service/pom.xml
+++ b/src/query-service/pom.xml
@@ -26,7 +26,6 @@
     </parent>
     <modelVersion>4.0.0</modelVersion>
     <name>Kylin - Query Service</name>
-    <groupId>org.apache.kylin</groupId>
     <artifactId>kylin-query-service</artifactId>
 
     <properties>
@@ -35,66 +34,34 @@
     </properties>
 
     <dependencies>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-modeling-service</artifactId>
-        </dependency>
         <dependency>
             <groupId>org.apache.kylin</groupId>
             <artifactId>kylin-common-service</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-job-service</artifactId>
-        </dependency>
         <dependency>
             <groupId>org.apache.kylin</groupId>
             <artifactId>kylin-data-loading-service</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-systools</artifactId>
-        </dependency>
         <dependency>
             <groupId>org.apache.kylin</groupId>
             <artifactId>kylin-query</artifactId>
         </dependency>
 
-        <dependency>
-            <groupId>org.apache.tomcat.embed</groupId>
-            <artifactId>tomcat-embed-core</artifactId>
-        </dependency>
         <!-- hadoop -->
         <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-common</artifactId>
+            <scope>provided</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-hdfs</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-mapreduce-client-core</artifactId>
+            <scope>provided</scope>
         </dependency>
         <dependency>
             <groupId>net.sf.supercsv</groupId>
             <artifactId>super-csv</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.springframework.boot</groupId>
-            <artifactId>spring-boot-starter-actuator</artifactId>
-            <exclusions>
-                <exclusion>
-                    <groupId>org.springframework.boot</groupId>
-                    <artifactId>spring-boot-starter-logging</artifactId>
-                </exclusion>
-                <exclusion>
-                    <artifactId>spring-context</artifactId>
-                    <groupId>org.springframework</groupId>
-                </exclusion>
-            </exclusions>
-        </dependency>
 
         <!-- Spark dependency -->
         <dependency>
@@ -109,10 +76,6 @@
             <scope>test</scope>
         </dependency>
         <!-- Spring Security -->
-        <dependency>
-            <groupId>org.springframework.security</groupId>
-            <artifactId>spring-security-ldap</artifactId>
-        </dependency>
         <dependency>
             <groupId>org.springframework.security</groupId>
             <artifactId>spring-security-test</artifactId>
@@ -158,6 +121,16 @@
             <type>test-jar</type>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-mapreduce-client-core</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-exec</artifactId>
+            <scope>test</scope>
+        </dependency>
         <dependency>
             <groupId>com.h2database</groupId>
             <artifactId>h2</artifactId>
diff --git a/src/query-service/src/main/java/org/apache/kylin/rest/service/MonitorService.java b/src/query-service/src/main/java/org/apache/kylin/rest/service/MonitorService.java
index 1a096b5f01..d1c3e60617 100644
--- a/src/query-service/src/main/java/org/apache/kylin/rest/service/MonitorService.java
+++ b/src/query-service/src/main/java/org/apache/kylin/rest/service/MonitorService.java
@@ -104,18 +104,22 @@ public class MonitorService extends BasicService implements ApplicationListener<
     public void onApplicationEvent(AfterMetadataReadyEvent event) {
         val kylinConfig = KylinConfig.getInstanceFromEnv();
         KapConfig kapConfig = KapConfig.wrap(kylinConfig);
-        if (kapConfig.isMonitorEnabled()) {
-            try {
-                MonitorReporter.getInstance().startReporter();
-            } catch (Exception e) {
-                log.error("Failed to start monitor reporter!", e);
-            }
+        boolean monitorEnabled = kapConfig.isMonitorEnabled();
+        MonitorReporter monitorReporter = MonitorReporter.getInstance();
+        if (!monitorEnabled) {
+            logger.warn("Monitor reporter is not enabled!");
+            return;
+        }
+        try {
+            monitorReporter.startReporter();
+        } catch (Exception e) {
+            log.error("Failed to start monitor reporter!", e);
         }
-        MonitorReporter.getInstance().submit(new AbstractMonitorCollectTask(
+        monitorReporter.submit(new AbstractMonitorCollectTask(
                 Lists.newArrayList(ClusterConstant.ALL, ClusterConstant.QUERY, ClusterConstant.JOB)) {
             @Override
             protected MonitorMetric collect() {
-                QueryMonitorMetric queryMonitorMetric = MonitorReporter.getInstance().createQueryMonitorMetric();
+                QueryMonitorMetric queryMonitorMetric = monitorReporter.createQueryMonitorMetric();
 
                 queryMonitorMetric.setLastResponseTime(SparkContextCanary.getInstance().getLastResponseTime());
                 queryMonitorMetric.setErrorAccumulated(SparkContextCanary.getInstance().getErrorAccumulated());
@@ -124,16 +128,15 @@ public class MonitorService extends BasicService implements ApplicationListener<
                 return queryMonitorMetric;
             }
         });
-        if (!kylinConfig.isJobNode()) {
-            return;
+        if (kylinConfig.isJobNode()) {
+            monitorReporter.submit(
+                    new AbstractMonitorCollectTask(Lists.newArrayList(ClusterConstant.ALL, ClusterConstant.JOB)) {
+                        @Override
+                        protected MonitorMetric collect() {
+                            return collectJobMetric();
+                        }
+                    });
         }
-        MonitorReporter.getInstance()
-                .submit(new AbstractMonitorCollectTask(Lists.newArrayList(ClusterConstant.ALL, ClusterConstant.JOB)) {
-                    @Override
-                    protected MonitorMetric collect() {
-                        return collectJobMetric();
-                    }
-                });
     }
 
     private JobStatusMonitorMetric collectJobMetric() {
@@ -260,7 +263,7 @@ public class MonitorService extends BasicService implements ApplicationListener<
         }
         return stringBuilder.toString();
     }
-    
+
     public void handleAlertMessage(AlertMessageRequest request) {
         log.info("handle alert message : {}", request);
         List<AlertMessageRequest.Alerts> relatedQueryLimitAlerts = request.getAlerts().stream()
diff --git a/src/second-storage/core-ui/pom.xml b/src/second-storage/core-ui/pom.xml
index 679020e968..8df89d9173 100644
--- a/src/second-storage/core-ui/pom.xml
+++ b/src/second-storage/core-ui/pom.xml
@@ -40,7 +40,7 @@
     <dependencies>
         <dependency>
             <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-common-service</artifactId>
+            <artifactId>kylin-common-server</artifactId>
         </dependency>
         <dependency>
             <groupId>org.apache.kylin</groupId>
diff --git a/src/systools/src/test/java/org/apache/kylin/rest/security/KerberosLoginManagerTest.java b/src/spark-project/engine-spark/src/test/java/org/apache/kylin/rest/security/KerberosLoginManagerTest.java
similarity index 100%
rename from src/systools/src/test/java/org/apache/kylin/rest/security/KerberosLoginManagerTest.java
rename to src/spark-project/engine-spark/src/test/java/org/apache/kylin/rest/security/KerberosLoginManagerTest.java
diff --git a/src/spark-project/spark-ddl-plugin/pom.xml b/src/spark-project/spark-ddl-plugin/pom.xml
index f4c982bedd..e4f880d5bb 100644
--- a/src/spark-project/spark-ddl-plugin/pom.xml
+++ b/src/spark-project/spark-ddl-plugin/pom.xml
@@ -31,10 +31,6 @@
 
     <artifactId>kylin-spark-ddl</artifactId>
     <dependencies>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-systools</artifactId>
-        </dependency>
         <dependency>
             <groupId>org.apache.kylin</groupId>
             <artifactId>kylin-core-metadata</artifactId>
diff --git a/src/systools/pom.xml b/src/systools/pom.xml
index 2b14821cea..9e68da2eba 100644
--- a/src/systools/pom.xml
+++ b/src/systools/pom.xml
@@ -26,7 +26,6 @@
     </parent>
     <modelVersion>4.0.0</modelVersion>
     <name>Kylin - System Tools</name>
-    <groupId>org.apache.kylin</groupId>
     <artifactId>kylin-systools</artifactId>
 
     <dependencies>
@@ -58,9 +57,8 @@
             <scope>provided</scope>
         </dependency>
         <dependency>
-            <groupId>org.apache.tomcat.embed</groupId>
-            <artifactId>tomcat-embed-core</artifactId>
-            <scope>provided</scope>
+            <groupId>javax.servlet</groupId>
+            <artifactId>servlet-api</artifactId>
         </dependency>
 
         <dependency>
diff --git a/src/tool/pom.xml b/src/tool/pom.xml
index 24dbd4ce9c..d7ff907fb3 100644
--- a/src/tool/pom.xml
+++ b/src/tool/pom.xml
@@ -36,7 +36,7 @@
     <dependencies>
         <dependency>
             <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-systools</artifactId>
+            <artifactId>kylin-core-metadata</artifactId>
         </dependency>
         <dependency>
             <groupId>org.apache.kylin</groupId>
@@ -45,7 +45,7 @@
 
         <dependency>
             <groupId>org.springframework.security</groupId>
-            <artifactId>spring-security-core</artifactId>
+            <artifactId>spring-security-ldap</artifactId>
         </dependency>
         <dependency>
             <groupId>org.apache.curator</groupId>
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/upgrade/UpdateUserAclTool.java b/src/tool/src/main/java/org/apache/kylin/tool/upgrade/UpdateUserAclTool.java
index 7b9b913f30..e58738fd2c 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/upgrade/UpdateUserAclTool.java
+++ b/src/tool/src/main/java/org/apache/kylin/tool/upgrade/UpdateUserAclTool.java
@@ -305,7 +305,7 @@ public class UpdateUserAclTool extends ExecutableApplication {
                                 AclPermissionUtil.convertToBasePermission(ace.getPermission()));
                     }
                 });
-                val mutableAclRecord = aclManager.readAcl(aclRecord.getDomainObjectInfo());
+                val mutableAclRecord = aclManager.readAcl(aclRecord.getObjectIdentity());
                 aclManager.batchUpsertAce(mutableAclRecord, sidPermissionMap);
                 log.info("{} query permission for _global/acl/{} successfully.", StringUtils.capitalize(operation),
                         aclRecord.getUuid());
@@ -378,4 +378,4 @@ public class UpdateUserAclTool extends ExecutableApplication {
         }
         return 0;
     }
-}
+}
\ No newline at end of file


[kylin] 18/34: KYLIN-5452 Optimise constant queries, add cache for getBindable

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 89b96f474351d2dbaf9ca5bb756a127ae0b46e85
Author: Yaguang Jia <ji...@foxmail.com>
AuthorDate: Thu Dec 29 14:32:39 2022 +0800

    KYLIN-5452 Optimise constant queries, add cache for getBindable
    
    * KYLIN-5452 Optimise constant queries, add cache for getBindable
    
    * KYLIN-5452 update calcite version
---
 pom.xml                                                |  2 +-
 .../org/apache/kylin/rest/KylinPrepareEnvListener.java |  3 +++
 .../java/org/apache/kylin/common/KylinConfigBase.java  |  7 +++++++
 .../org/apache/kylin/common/KylinConfigBaseTest.java   | 18 ++++++++++++++++++
 4 files changed, 29 insertions(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index 611087bf48..e1a852cf3c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -126,7 +126,7 @@
         <scala-retry>0.3.0</scala-retry>
 
         <!-- Calcite Version -->
-        <calcite.version>1.116.0-kylin-4.x-r024</calcite.version>
+        <calcite.version>1.116.0-kylin-4.x-r025</calcite.version>
         <avatica.version>4.x_1.10-r01</avatica.version>
 
         <!-- Hadoop Common deps, keep compatible with hadoop2.version -->
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/KylinPrepareEnvListener.java b/src/common-service/src/main/java/org/apache/kylin/rest/KylinPrepareEnvListener.java
index 0109c7fea5..483acca4b9 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/KylinPrepareEnvListener.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/KylinPrepareEnvListener.java
@@ -81,6 +81,9 @@ public class KylinPrepareEnvListener implements EnvironmentPostProcessor, Ordere
         } else {
             Unsafe.setProperty("calcite.convert-multiple-columns-in-to-or", "false");
         }
+        Unsafe.setProperty("calcite.bindable.cache.maxSize", Integer.toString(config.getCalciteBindableCacheSize()));
+        Unsafe.setProperty("calcite.bindable.cache.concurrencyLevel",
+                Integer.toString(config.getCalciteBindableCacheConcurrencyLevel()));
 
         TimeZoneUtils.setDefaultTimeZone(config);
         DelegationTokenManager delegationTokenManager = new DelegationTokenManager();
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 773fa0af0f..f6b9622033 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -2469,6 +2469,13 @@ public abstract class KylinConfigBase implements Serializable {
         return Boolean.parseBoolean(getOptional("kylin.query.calcite.aggregate-pushdown-enabled", FALSE));
     }
 
+    public int getCalciteBindableCacheSize() {
+        return Integer.parseInt(getOptional("kylin.query.calcite.bindable.cache.maxSize", "10"));
+    }
+    public int getCalciteBindableCacheConcurrencyLevel() {
+        return Integer.parseInt(getOptional("kylin.query.calcite.bindable.cache.concurrencyLevel", "5"));
+    }
+
     public int getEventPollIntervalSecond() {
         return Integer.parseInt(getOptional("kylin.job.event.poll-interval-second", "60"));
     }
diff --git a/src/core-common/src/test/java/org/apache/kylin/common/KylinConfigBaseTest.java b/src/core-common/src/test/java/org/apache/kylin/common/KylinConfigBaseTest.java
index a241b73b4c..23f39ccd1f 100644
--- a/src/core-common/src/test/java/org/apache/kylin/common/KylinConfigBaseTest.java
+++ b/src/core-common/src/test/java/org/apache/kylin/common/KylinConfigBaseTest.java
@@ -943,6 +943,10 @@ class KylinConfigBaseTest {
                 new PropertiesEntity("kylin.metrics.hdfs-periodic-calculation-interval", "5m", 300000L));
         map.put("isSkipResourceCheck",
                 new PropertiesEntity("kylin.build.resource.skip-resource-check", "false", false));
+        map.put("getCalciteBindableCacheSize",
+                new PropertiesEntity("kylin.query.calcite.bindable.cache.maxSize", "10", 10));
+        map.put("getCalciteBindableCacheConcurrencyLevel",
+                new PropertiesEntity("kylin.query.calcite.bindable.cache.concurrencyLevel", "5", 5));
     }
 
     @Test
@@ -1400,6 +1404,20 @@ class KylinConfigBaseTest {
         config.setProperty("kylin.storage.check-quota-enabled", "true");
         assertTrue(config.isStorageQuotaEnabled());
     }
+    @Test
+    void testCalciteBindableCacheSize() {
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
+        assertEquals(10, config.getCalciteBindableCacheSize());
+        config.setProperty("kylin.query.calcite.bindable.cache.maxSize", "7");
+        assertEquals(7, config.getCalciteBindableCacheSize());
+    }
+    @Test
+    void testCalciteBindableCacheConcurrencyLevel() {
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
+        assertEquals(5, config.getCalciteBindableCacheConcurrencyLevel());
+        config.setProperty("kylin.query.calcite.bindable.cache.concurrencyLevel", "3");
+        assertEquals(3, config.getCalciteBindableCacheConcurrencyLevel());
+    }
 }
 
 class EnvironmentUpdateUtils {


[kylin] 19/34: KYLIN-5454 Downloading the async query result may cause OOM

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit db624c3e89e5444a3462893e6c86973f3fb72790
Author: fanshu.kong <17...@qq.com>
AuthorDate: Wed Nov 23 15:11:15 2022 +0800

    KYLIN-5454 Downloading the async query result may cause OOM
    
    Co-authored-by: Dorris Zhang <ru...@kyligence.io>
---
 .../org/apache/kylin/rest/request/SQLRequest.java  |   2 +
 .../java/org/apache/kylin/common/QueryContext.java |   1 +
 .../common/exception/code/ErrorCodeServer.java     |   6 +
 .../org/apache/kylin/common/msg/CnMessage.java     |  10 -
 .../java/org/apache/kylin/common/msg/Message.java  |  11 +-
 .../resources/kylin_error_msg_conf_cn.properties   |   6 +-
 .../resources/kylin_error_msg_conf_en.properties   |   8 +
 .../main/resources/kylin_errorcode_conf.properties |   6 +
 .../apache/kylin/query/util/AsyncQueryUtil.java    |   9 +-
 .../rest/controller/NAsyncQueryController.java     |  39 +-
 .../rest/controller/NAsyncQueryControllerV2.java   |  24 +-
 .../rest/controller/NAsyncQueryControllerTest.java |  98 ++--
 .../controller/NAsyncQueryControllerV2Test.java    |  40 +-
 .../kylin/rest/request/AsyncQuerySQLRequestV2.java |   3 +
 .../kylin/rest/service/AsyncQueryService.java      |  83 +---
 .../org/apache/kylin/rest/service/CSVWriter.java   | 120 -----
 .../apache/kylin/rest/service/XLSXExcelWriter.java | 155 -------
 .../kylin/rest/service/AysncQueryServiceTest.java  | 496 ++++++++++++---------
 .../kylin/query/pushdown/SparkSqlClient.scala      |  51 ++-
 .../kylin/query/runtime/plan/ResultPlan.scala      | 214 +++++++--
 20 files changed, 629 insertions(+), 753 deletions(-)

diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/request/SQLRequest.java b/src/common-service/src/main/java/org/apache/kylin/rest/request/SQLRequest.java
index 597ebd52c6..4bc4ce91d8 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/request/SQLRequest.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/request/SQLRequest.java
@@ -69,6 +69,8 @@ public class SQLRequest implements Serializable, ProjectInsensitiveRequest, Vali
     @JsonProperty("file_name")
     private String fileName = "result";
     private Integer forcedToTieredStorage;  //0:CH->DFS; 1:CH->pushDown; 2:CH->return error
+    @JsonProperty("include_header")
+    private boolean includeHeader;
 
     private Map<String, String> backdoorToggles;
 
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/QueryContext.java b/src/core-common/src/main/java/org/apache/kylin/common/QueryContext.java
index 551b66d90f..d65396e95b 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/QueryContext.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/QueryContext.java
@@ -360,6 +360,7 @@ public class QueryContext implements Closeable {
         private String fileName;
         private String separator;
         private boolean isRefused;
+        private boolean includeHeader;
     }
 
     @Getter
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/exception/code/ErrorCodeServer.java b/src/core-common/src/main/java/org/apache/kylin/common/exception/code/ErrorCodeServer.java
index 54f81183e7..a51a2f54ef 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/exception/code/ErrorCodeServer.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/exception/code/ErrorCodeServer.java
@@ -132,6 +132,12 @@ public enum ErrorCodeServer implements ErrorCodeProducer {
     USER_GROUP_NOT_EXIST("KE-010043220"),
     REPEATED_PARAMETER("KE-010043221"),
 
+    // 100313xx async query
+    ASYNC_QUERY_RESULT_NOT_FOUND("KE-010031301"),
+    ASYNC_QUERY_PROJECT_NAME_EMPTY("KE-010031302"),
+    ASYNC_QUERY_TIME_FORMAT_ERROR("KE-010031303"),
+    ASYNC_QUERY_INCLUDE_HEADER_NOT_EMPTY("KE-010031304"),
+
     // 400272XX resource group
     RESOURCE_GROUP_DISABLE_FAILED("KE-040027201"),
     RESOURCE_GROUP_ENABLE_FAILED("KE-040027202"),
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/msg/CnMessage.java b/src/core-common/src/main/java/org/apache/kylin/common/msg/CnMessage.java
index 6e099d683b..1f7923622b 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/msg/CnMessage.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/msg/CnMessage.java
@@ -423,16 +423,6 @@ public class CnMessage extends Message {
         return "当前无法清理文件夹。请确保相关 HDFS 文件可以正常访问。";
     }
 
-    @Override
-    public String getAsyncQueryTimeFormatError() {
-        return "无效的时间格式。请按 “yyyy-MM-dd HH:mm:ss” 格式填写。";
-    }
-
-    @Override
-    public String getAsyncQueryProjectNameEmpty() {
-        return "项目名称不能为空。请检查后重试。";
-    }
-
     @Override
     public String getUserNotFound() {
         return "找不到用户 '%s'";
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/msg/Message.java b/src/core-common/src/main/java/org/apache/kylin/common/msg/Message.java
index 8f349bd31f..805caa5688 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/msg/Message.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/msg/Message.java
@@ -60,8 +60,7 @@ public class Message {
     private static final String LICENSE_MISMATCH_LICENSE = "The license doesn’t match the current cluster information. Please upload a new license, or contact Kyligence.";
     private static final String LICENSE_NOT_EFFECTIVE = "License is not effective yet, please apply for a new license.";
     private static final String LICENSE_EXPIRED = "The license has expired. Please upload a new license, or contact Kyligence.";
-    private static final String DDL_UNSUPPORTED = "Unsupported DDL syntax, only support single `create view`, `drop "
-        + "view`,  `alter view`, `show create table`";
+    private static final String DDL_UNSUPPORTED = "Unsupported DDL syntax, only support single `create view`, `drop view`,  `alter view`, `show create table`";
     private static final String DDL_VIEW_NAME_ERROR = "View names need to start with KE_";
     private static final String DDL_VIEW_NAME_DUPLICATE_ERROR = "Logical View names is duplicate";
     private static final String DDL_DROP_ERROR = "Only support drop view";
@@ -522,14 +521,6 @@ public class Message {
         return "Can’t clean file folder at the moment. Please ensure that the related file on HDFS could be accessed.";
     }
 
-    public String getAsyncQueryTimeFormatError() {
-        return "The time format is invalid. Please enter the date in the format “yyyy-MM-dd HH:mm:ss”.";
-    }
-
-    public String getAsyncQueryProjectNameEmpty() {
-        return "The project name can’t be empty. Please check and try again.";
-    }
-
     public String getUserNotFound() {
         return "User '%s' not found.";
     }
diff --git a/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties b/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties
index 2768232b9d..7697a01c8f 100644
--- a/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties
+++ b/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties
@@ -134,7 +134,6 @@ KE-010043219=请使用中文、英文、空格命名用户名和公司。
 KE-010043220=找不到用户组 “%s”。请检查后重试。
 KE-010043221=参数 “%s” 已存在。请检查后重试。
 
-
 ## Streaming
 KE-010035202=使用解析器 “%s” 解析Topic “%s” 的消息时发生异常,请检查后重试。
 KE-010035215=无法正确读取 Kafka 认证文件,请检查后再试。
@@ -160,6 +159,11 @@ KE-010042214=Jar文件 “%s” 不存在。
 KE-010042215=解析器 “%s” 已存在。
 KE-010042216=Jar文件 “%s” 已存在。
 
+## 100313xx async query
+KE-010031301=该项目下无法找到该 Query ID 对应的异步查询。请检查后重试。
+KE-010031302=项目名称不能为空。请检查后重试。
+KE-010031303=无效的时间格式。请按 “yyyy-MM-dd HH:mm:ss” 格式填写。
+KE-010031304=在当前版本中,"include header"参数被移至提交异步查询的API,因此您在下载结果中的"include header"参数将不起作用。请参考产品手册以了解更多细节。
 
 # System
 ## 400052XX password
diff --git a/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties b/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties
index fc75aa0610..6acc8373fc 100644
--- a/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties
+++ b/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties
@@ -156,6 +156,14 @@ KE-010042214=Jar "%s" does not exist.
 KE-010042215=Parser "%s" already exists.
 KE-010042216=Jar "%s" already exists.
 
+## 100313xx async query
+KE-010031301=Can’t find the query by this query ID in this project. Please check and try again.
+KE-010031302=The project name can’t be empty. Please check and try again.
+KE-010031303=The time format is invalid. Please enter the date in the format “yyyy-MM-dd HH:mm:ss”.
+KE-010031304=Notice: The "include_header" parameter has been moved to the Submit Async Query API, so the parameter here doesn't work. Please read the user manual for details.
+
+
+### batch 3
 # System
 ## 400052XX password
 KE-040005201=Can't find PASSWORD ENCODER. Please check configuration item kylin.security.user-password-encoder.
diff --git a/src/core-common/src/main/resources/kylin_errorcode_conf.properties b/src/core-common/src/main/resources/kylin_errorcode_conf.properties
index 976e342cc9..8fb380af3f 100644
--- a/src/core-common/src/main/resources/kylin_errorcode_conf.properties
+++ b/src/core-common/src/main/resources/kylin_errorcode_conf.properties
@@ -102,6 +102,12 @@ KE-010032221
 KE-010031201
 KE-010031202
 
+## 100313xx async query
+KE-010031301
+KE-010031302
+KE-010031303
+KE-010031304
+
 ## 100102XX computed column
 KE-010010201
 KE-010010202
diff --git a/src/query-common/src/main/java/org/apache/kylin/query/util/AsyncQueryUtil.java b/src/query-common/src/main/java/org/apache/kylin/query/util/AsyncQueryUtil.java
index b7deb09e12..09e05a4e53 100644
--- a/src/query-common/src/main/java/org/apache/kylin/query/util/AsyncQueryUtil.java
+++ b/src/query-common/src/main/java/org/apache/kylin/query/util/AsyncQueryUtil.java
@@ -18,6 +18,8 @@
 
 package org.apache.kylin.query.util;
 
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.ASYNC_QUERY_RESULT_NOT_FOUND;
+
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.nio.charset.Charset;
@@ -30,10 +32,9 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.KapConfig;
 import org.apache.kylin.common.QueryContext;
-import org.apache.kylin.common.msg.MsgPicker;
+import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.util.HadoopUtil;
 import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
-import org.apache.kylin.query.exception.NAsyncQueryIllegalParamException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -79,7 +80,7 @@ public class AsyncQueryUtil {
                 osw.write(metaString);
             }
         } else {
-            throw new NAsyncQueryIllegalParamException(MsgPicker.getMsg().getQueryResultNotFound());
+            throw new KylinException(ASYNC_QUERY_RESULT_NOT_FOUND);
         }
     }
 
@@ -96,7 +97,7 @@ public class AsyncQueryUtil {
                 osw.write(separator);
             }
         } else {
-            throw new NAsyncQueryIllegalParamException(MsgPicker.getMsg().getQueryResultNotFound());
+            throw new KylinException(ASYNC_QUERY_RESULT_NOT_FOUND);
         }
     }
 
diff --git a/src/query-server/src/main/java/org/apache/kylin/rest/controller/NAsyncQueryController.java b/src/query-server/src/main/java/org/apache/kylin/rest/controller/NAsyncQueryController.java
index 30cdb088fe..3be9d9d753 100644
--- a/src/query-server/src/main/java/org/apache/kylin/rest/controller/NAsyncQueryController.java
+++ b/src/query-server/src/main/java/org/apache/kylin/rest/controller/NAsyncQueryController.java
@@ -17,9 +17,12 @@
  */
 package org.apache.kylin.rest.controller;
 
-import static org.apache.kylin.common.exception.ServerErrorCode.ACCESS_DENIED;
 import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_JSON;
 import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON;
+import static org.apache.kylin.common.exception.ServerErrorCode.ACCESS_DENIED;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.ASYNC_QUERY_INCLUDE_HEADER_NOT_EMPTY;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.ASYNC_QUERY_PROJECT_NAME_EMPTY;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.ASYNC_QUERY_TIME_FORMAT_ERROR;
 
 import java.io.IOException;
 import java.text.ParseException;
@@ -40,16 +43,15 @@ import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.exception.QueryErrorCode;
 import org.apache.kylin.common.msg.Message;
 import org.apache.kylin.common.msg.MsgPicker;
-import org.apache.kylin.query.exception.NAsyncQueryIllegalParamException;
 import org.apache.kylin.query.util.AsyncQueryUtil;
 import org.apache.kylin.rest.exception.ForbiddenException;
+import org.apache.kylin.rest.request.AsyncQuerySQLRequest;
+import org.apache.kylin.rest.response.AsyncQueryResponse;
 import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.response.SQLResponse;
+import org.apache.kylin.rest.service.AsyncQueryService;
 import org.apache.kylin.rest.service.QueryService;
 import org.apache.kylin.rest.util.AclEvaluate;
-import org.apache.kylin.rest.request.AsyncQuerySQLRequest;
-import org.apache.kylin.rest.response.AsyncQueryResponse;
-import org.apache.kylin.rest.service.AsyncQueryService;
 import org.apache.spark.sql.SparderEnv;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -136,6 +138,7 @@ public class NAsyncQueryController extends NBasicController {
             queryContext.getQueryTagInfo().setFileEncode(encode);
             queryContext.getQueryTagInfo().setFileName(sqlRequest.getFileName());
             queryContext.getQueryTagInfo().setSeparator(sqlRequest.getSeparator());
+            queryContext.getQueryTagInfo().setIncludeHeader(sqlRequest.isIncludeHeader());
             queryContext.setProject(sqlRequest.getProject());
             logger.info("Start a new async query with queryId: {}", queryContext.getQueryId());
             String queryId = queryContext.getQueryId();
@@ -203,8 +206,8 @@ public class NAsyncQueryController extends NBasicController {
                         MsgPicker.getMsg().getCleanFolderFail());
             }
         } catch (ParseException e) {
-            logger.error(MsgPicker.getMsg().getAsyncQueryTimeFormatError(), e);
-            throw new NAsyncQueryIllegalParamException(MsgPicker.getMsg().getAsyncQueryTimeFormatError());
+            logger.error(ASYNC_QUERY_TIME_FORMAT_ERROR.getMsg(), e);
+            throw new KylinException(ASYNC_QUERY_TIME_FORMAT_ERROR);
         }
     }
 
@@ -216,7 +219,7 @@ public class NAsyncQueryController extends NBasicController {
             @RequestParam(value = "project", required = false) String project) throws IOException {
         if (project == null) {
             if (sqlRequest == null) {
-                throw new NAsyncQueryIllegalParamException(MsgPicker.getMsg().getAsyncQueryProjectNameEmpty());
+                throw new KylinException(ASYNC_QUERY_PROJECT_NAME_EMPTY);
             }
             project = sqlRequest.getProject();
         }
@@ -242,7 +245,7 @@ public class NAsyncQueryController extends NBasicController {
             throws IOException {
         if (project == null) {
             if (sqlRequest == null) {
-                throw new NAsyncQueryIllegalParamException(MsgPicker.getMsg().getAsyncQueryProjectNameEmpty());
+                throw new KylinException(ASYNC_QUERY_PROJECT_NAME_EMPTY);
             }
             project = sqlRequest.getProject();
         }
@@ -283,7 +286,7 @@ public class NAsyncQueryController extends NBasicController {
             @RequestParam(value = "project", required = false) String project) throws IOException {
         if (project == null) {
             if (sqlRequest == null) {
-                throw new NAsyncQueryIllegalParamException(MsgPicker.getMsg().getAsyncQueryProjectNameEmpty());
+                throw new KylinException(ASYNC_QUERY_PROJECT_NAME_EMPTY);
             }
             project = sqlRequest.getProject();
         }
@@ -306,7 +309,7 @@ public class NAsyncQueryController extends NBasicController {
             throws IOException {
         if (project == null) {
             if (sqlRequest == null) {
-                throw new NAsyncQueryIllegalParamException(MsgPicker.getMsg().getAsyncQueryProjectNameEmpty());
+                throw new KylinException(ASYNC_QUERY_PROJECT_NAME_EMPTY);
             }
             project = sqlRequest.getProject();
         }
@@ -323,16 +326,19 @@ public class NAsyncQueryController extends NBasicController {
     @GetMapping(value = "/async_query/{query_id:.+}/result_download")
     @ResponseBody
     public void downloadQueryResult(@PathVariable("query_id") String queryId,
-            @RequestParam(value = "include_header", required = false, defaultValue = "false") boolean include_header,
-            @RequestParam(value = "includeHeader", required = false, defaultValue = "false") boolean includeHeader,
+            @RequestParam(value = "oldIncludeHeader", required = false) Boolean oldIncludeHeader,
+            @RequestParam(value = "includeHeader", required = false) Boolean includeHeader,
             @Valid @RequestBody(required = false) final AsyncQuerySQLRequest sqlRequest, HttpServletResponse response,
             @RequestParam(value = "project", required = false) String project) throws IOException {
         if (project == null) {
             if (sqlRequest == null) {
-                throw new NAsyncQueryIllegalParamException(MsgPicker.getMsg().getAsyncQueryProjectNameEmpty());
+                throw new KylinException(ASYNC_QUERY_PROJECT_NAME_EMPTY);
             }
             project = sqlRequest.getProject();
         }
+        if (oldIncludeHeader != null || includeHeader != null) {
+            throw new KylinException(ASYNC_QUERY_INCLUDE_HEADER_NOT_EMPTY);
+        }
         aclEvaluate.checkProjectQueryPermission(project);
         checkProjectName(project);
         KylinConfig config = queryService.getConfig();
@@ -356,8 +362,7 @@ public class NAsyncQueryController extends NBasicController {
             response.setContentType("application/" + format + ";charset=" + encode);
         }
         response.setHeader("Content-Disposition", "attachment; filename=\"" + fileName + "." + format + "\"");
-        asyncQueryService.retrieveSavedQueryResult(project, queryId, includeHeader || include_header, response, format,
-                encode, fileInfo.getSeparator());
+        asyncQueryService.retrieveSavedQueryResult(project, queryId, response, format, encode);
     }
 
     @ApiOperation(value = "async query result path", tags = { "QE" })
@@ -368,7 +373,7 @@ public class NAsyncQueryController extends NBasicController {
             @RequestParam(value = "project", required = false) String project) throws IOException {
         if (project == null) {
             if (sqlRequest == null) {
-                throw new NAsyncQueryIllegalParamException(MsgPicker.getMsg().getAsyncQueryProjectNameEmpty());
+                throw new KylinException(ASYNC_QUERY_PROJECT_NAME_EMPTY);
             }
             project = sqlRequest.getProject();
         }
diff --git a/src/query-server/src/main/java/org/apache/kylin/rest/controller/NAsyncQueryControllerV2.java b/src/query-server/src/main/java/org/apache/kylin/rest/controller/NAsyncQueryControllerV2.java
index 498f154904..190de73c9b 100644
--- a/src/query-server/src/main/java/org/apache/kylin/rest/controller/NAsyncQueryControllerV2.java
+++ b/src/query-server/src/main/java/org/apache/kylin/rest/controller/NAsyncQueryControllerV2.java
@@ -19,17 +19,21 @@
 package org.apache.kylin.rest.controller;
 
 import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_V2_JSON;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.ASYNC_QUERY_RESULT_NOT_FOUND;
 
+import java.io.IOException;
+import java.util.List;
+
+import javax.servlet.http.HttpServletResponse;
+import javax.validation.Valid;
+
+import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.rest.request.AsyncQuerySQLRequest;
 import org.apache.kylin.rest.request.AsyncQuerySQLRequestV2;
 import org.apache.kylin.rest.response.AsyncQueryResponse;
 import org.apache.kylin.rest.response.AsyncQueryResponseV2;
-import org.apache.kylin.rest.service.AsyncQueryService;
-import io.swagger.annotations.ApiOperation;
-import org.apache.kylin.common.exception.KylinException;
-import org.apache.kylin.common.msg.MsgPicker;
-import org.apache.kylin.query.exception.NAsyncQueryIllegalParamException;
 import org.apache.kylin.rest.response.EnvelopeResponse;
+import org.apache.kylin.rest.service.AsyncQueryService;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.web.bind.annotation.GetMapping;
@@ -41,10 +45,7 @@ import org.springframework.web.bind.annotation.RequestParam;
 import org.springframework.web.bind.annotation.ResponseBody;
 import org.springframework.web.bind.annotation.RestController;
 
-import javax.servlet.http.HttpServletResponse;
-import javax.validation.Valid;
-import java.io.IOException;
-import java.util.List;
+import io.swagger.annotations.ApiOperation;
 
 
 @RestController
@@ -68,6 +69,7 @@ public class NAsyncQueryControllerV2 extends NBasicController {
         sqlRequest.setProject(asyncQuerySQLRequest.getProject());
         sqlRequest.setSql(asyncQuerySQLRequest.getSql());
         sqlRequest.setSeparator(asyncQuerySQLRequest.getSeparator());
+        sqlRequest.setIncludeHeader(asyncQuerySQLRequest.isIncludeHeader());
         sqlRequest.setFormat("csv");
         sqlRequest.setEncode("utf-8");
         sqlRequest.setFileName("result");
@@ -112,7 +114,7 @@ public class NAsyncQueryControllerV2 extends NBasicController {
     @GetMapping(value = "/async_query/{query_id:.+}/result_download")
     @ResponseBody
     public void downloadQueryResult(@PathVariable("query_id") String queryId,
-                                    @RequestParam(value = "includeHeader", required = false, defaultValue = "false") boolean includeHeader,
+                                    @RequestParam(value = "includeHeader", required = false) Boolean includeHeader,
                                     HttpServletResponse response) throws IOException {
         asyncQueryController.downloadQueryResult(queryId, includeHeader, includeHeader, null, response, searchProject(queryId));
     }
@@ -120,7 +122,7 @@ public class NAsyncQueryControllerV2 extends NBasicController {
     private String searchProject(String queryId) throws IOException {
         String project = asyncQueryService.searchQueryResultProject(queryId);
         if (project == null) {
-            throw new NAsyncQueryIllegalParamException(MsgPicker.getMsg().getQueryResultNotFound());
+            throw new KylinException(ASYNC_QUERY_RESULT_NOT_FOUND);
         }
         return project;
     }
diff --git a/src/query-server/src/test/java/org/apache/kylin/rest/controller/NAsyncQueryControllerTest.java b/src/query-server/src/test/java/org/apache/kylin/rest/controller/NAsyncQueryControllerTest.java
index 093c5e9c48..65d74a56f0 100644
--- a/src/query-server/src/test/java/org/apache/kylin/rest/controller/NAsyncQueryControllerTest.java
+++ b/src/query-server/src/test/java/org/apache/kylin/rest/controller/NAsyncQueryControllerTest.java
@@ -19,11 +19,14 @@
 package org.apache.kylin.rest.controller;
 
 import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_JSON;
+import static org.apache.kylin.common.exception.ServerErrorCode.ACCESS_DENIED;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.ASYNC_QUERY_PROJECT_NAME_EMPTY;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.ASYNC_QUERY_RESULT_NOT_FOUND;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.ASYNC_QUERY_TIME_FORMAT_ERROR;
 import static org.apache.kylin.rest.service.AsyncQueryService.QueryStatus.FAILED;
 import static org.apache.kylin.rest.service.AsyncQueryService.QueryStatus.MISS;
 import static org.apache.kylin.rest.service.AsyncQueryService.QueryStatus.RUNNING;
 import static org.apache.kylin.rest.service.AsyncQueryService.QueryStatus.SUCCESS;
-import static org.apache.kylin.common.exception.ServerErrorCode.ACCESS_DENIED;
 
 import java.io.IOException;
 import java.text.ParseException;
@@ -33,10 +36,13 @@ import org.apache.kylin.common.QueryContext;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.msg.MsgPicker;
 import org.apache.kylin.common.util.JsonUtil;
-import org.apache.kylin.query.exception.NAsyncQueryIllegalParamException;
+import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
 import org.apache.kylin.rest.constant.Constant;
+import org.apache.kylin.rest.request.AsyncQuerySQLRequest;
+import org.apache.kylin.rest.response.AsyncQueryResponse;
 import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.response.SQLResponse;
+import org.apache.kylin.rest.service.AsyncQueryService;
 import org.apache.kylin.rest.service.QueryService;
 import org.apache.kylin.rest.util.AclEvaluate;
 import org.junit.After;
@@ -56,11 +62,6 @@ import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
 import org.springframework.test.web.servlet.result.MockMvcResultMatchers;
 import org.springframework.test.web.servlet.setup.MockMvcBuilders;
 
-import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
-import org.apache.kylin.rest.request.AsyncQuerySQLRequest;
-import org.apache.kylin.rest.response.AsyncQueryResponse;
-import org.apache.kylin.rest.service.AsyncQueryService;
-
 public class NAsyncQueryControllerTest extends NLocalFileMetadataTestCase {
 
     private static final String PROJECT = "default";
@@ -108,6 +109,7 @@ public class NAsyncQueryControllerTest extends NLocalFileMetadataTestCase {
         asyncQuerySQLRequest.setProject(PROJECT);
         asyncQuerySQLRequest.setSql("select PART_DT from KYLIN_SALES limit 500");
         asyncQuerySQLRequest.setSeparator(",");
+        asyncQuerySQLRequest.setIncludeHeader(false);
         return asyncQuerySQLRequest;
     }
 
@@ -216,8 +218,8 @@ public class NAsyncQueryControllerTest extends NLocalFileMetadataTestCase {
                 .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
                 .andExpect(MockMvcResultMatchers.status().isInternalServerError());
 
-        Mockito.verify(nAsyncQueryController).downloadQueryResult(Mockito.anyString(), Mockito.anyBoolean(),
-                Mockito.anyBoolean(), Mockito.any(), Mockito.any(), Mockito.any());
+        Mockito.verify(nAsyncQueryController).downloadQueryResult(Mockito.anyString(), Mockito.any(), Mockito.any(),
+                Mockito.any(), Mockito.any(), Mockito.any());
     }
 
     @Test
@@ -225,8 +227,7 @@ public class NAsyncQueryControllerTest extends NLocalFileMetadataTestCase {
         Mockito.doReturn(true).when(asyncQueryService).hasPermission(Mockito.anyString(), Mockito.anyString());
         Mockito.doThrow(new IOException()).when(asyncQueryService).getFileInfo(Mockito.anyString(),
                 Mockito.anyString());
-        Mockito.doThrow(new NAsyncQueryIllegalParamException(MsgPicker.getMsg().getQueryResultNotFound()))
-                .when(asyncQueryService)
+        Mockito.doThrow(new KylinException(ASYNC_QUERY_RESULT_NOT_FOUND)).when(asyncQueryService)
                 .checkStatus(Mockito.anyString(), Mockito.any(), Mockito.anyString(), Mockito.anyString());
 
         mockMvc.perform(MockMvcRequestBuilders.get("/api/async_query/{query_id:.+}/result_download", "123")
@@ -238,8 +239,8 @@ public class NAsyncQueryControllerTest extends NLocalFileMetadataTestCase {
                             "Can’t find the query by this query ID in this project. Please check and try again."));
                 });
 
-        Mockito.verify(nAsyncQueryController).downloadQueryResult(Mockito.anyString(), Mockito.anyBoolean(),
-                Mockito.anyBoolean(), Mockito.any(), Mockito.any(), Mockito.any());
+        Mockito.verify(nAsyncQueryController).downloadQueryResult(Mockito.anyString(), Mockito.any(), Mockito.any(),
+                Mockito.any(), Mockito.any(), Mockito.any());
     }
 
     @Test
@@ -407,11 +408,10 @@ public class NAsyncQueryControllerTest extends NLocalFileMetadataTestCase {
         mockMvc.perform(MockMvcRequestBuilders.delete("/api/async_query").param("project", PROJECT)
                 .param("older_than", "2011-11/11 11:11:11")
                 .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON))).andExpect(result -> {
-                    Assert.assertTrue(result.getResolvedException() instanceof NAsyncQueryIllegalParamException);
-                    Assert.assertEquals("KE-020040001",
-                            ((NAsyncQueryIllegalParamException) result.getResolvedException()).getErrorCode()
-                                    .getCodeString());
-                    Assert.assertEquals(MsgPicker.getMsg().getAsyncQueryTimeFormatError(),
+                    Assert.assertTrue(result.getResolvedException() instanceof KylinException);
+                    Assert.assertEquals("KE-010031303",
+                            ((KylinException) result.getResolvedException()).getErrorCode().getCodeString());
+                    Assert.assertEquals(ASYNC_QUERY_TIME_FORMAT_ERROR.getMsg(),
                             result.getResolvedException().getMessage());
                 });
     }
@@ -586,8 +586,8 @@ public class NAsyncQueryControllerTest extends NLocalFileMetadataTestCase {
                             resolvedException.getMessage());
                 });
 
-        Mockito.verify(nAsyncQueryController).downloadQueryResult(Mockito.anyString(), Mockito.anyBoolean(),
-                Mockito.anyBoolean(), Mockito.any(), Mockito.any(), Mockito.any());
+        Mockito.verify(nAsyncQueryController).downloadQueryResult(Mockito.anyString(), Mockito.any(), Mockito.any(),
+                Mockito.any(), Mockito.any(), Mockito.any());
     }
 
     @Test
@@ -603,8 +603,8 @@ public class NAsyncQueryControllerTest extends NLocalFileMetadataTestCase {
                 .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
                 .andExpect(MockMvcResultMatchers.status().isOk());
 
-        Mockito.verify(nAsyncQueryController).downloadQueryResult(Mockito.anyString(), Mockito.anyBoolean(),
-                Mockito.anyBoolean(), Mockito.any(), Mockito.any(), Mockito.any());
+        Mockito.verify(nAsyncQueryController).downloadQueryResult(Mockito.anyString(), Mockito.any(), Mockito.any(),
+                Mockito.any(), Mockito.any(), Mockito.any());
     }
 
     @Test
@@ -638,11 +638,10 @@ public class NAsyncQueryControllerTest extends NLocalFileMetadataTestCase {
         mockMvc.perform(MockMvcRequestBuilders.delete("/api/async_query/{query_id}", "123")
                 .contentType(MediaType.APPLICATION_JSON).accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
                 .andExpect(result -> {
-                    Assert.assertTrue(result.getResolvedException() instanceof NAsyncQueryIllegalParamException);
-                    Assert.assertEquals("KE-020040001",
-                            ((NAsyncQueryIllegalParamException) result.getResolvedException()).getErrorCode()
-                                    .getCodeString());
-                    Assert.assertEquals(MsgPicker.getMsg().getAsyncQueryProjectNameEmpty(),
+                    Assert.assertTrue(result.getResolvedException() instanceof KylinException);
+                    Assert.assertEquals("KE-010031302",
+                            ((KylinException) result.getResolvedException()).getErrorCode().getCodeString());
+                    Assert.assertEquals(ASYNC_QUERY_PROJECT_NAME_EMPTY.getMsg(),
                             result.getResolvedException().getMessage());
                 });
     }
@@ -652,11 +651,10 @@ public class NAsyncQueryControllerTest extends NLocalFileMetadataTestCase {
         mockMvc.perform(MockMvcRequestBuilders.get("/api/async_query/{query_id:.+}/status", "123")
                 .contentType(MediaType.APPLICATION_JSON).accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
                 .andExpect(result -> {
-                    Assert.assertTrue(result.getResolvedException() instanceof NAsyncQueryIllegalParamException);
-                    Assert.assertEquals("KE-020040001",
-                            ((NAsyncQueryIllegalParamException) result.getResolvedException()).getErrorCode()
-                                    .getCodeString());
-                    Assert.assertEquals(MsgPicker.getMsg().getAsyncQueryProjectNameEmpty(),
+                    Assert.assertTrue(result.getResolvedException() instanceof KylinException);
+                    Assert.assertEquals("KE-010031302",
+                            ((KylinException) result.getResolvedException()).getErrorCode().getCodeString());
+                    Assert.assertEquals(ASYNC_QUERY_PROJECT_NAME_EMPTY.getMsg(),
                             result.getResolvedException().getMessage());
                 });
     }
@@ -666,11 +664,10 @@ public class NAsyncQueryControllerTest extends NLocalFileMetadataTestCase {
         mockMvc.perform(MockMvcRequestBuilders.get("/api/async_query/{query_id:.+}/file_status", "123")
                 .contentType(MediaType.APPLICATION_JSON).accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
                 .andExpect(result -> {
-                    Assert.assertTrue(result.getResolvedException() instanceof NAsyncQueryIllegalParamException);
-                    Assert.assertEquals("KE-020040001",
-                            ((NAsyncQueryIllegalParamException) result.getResolvedException()).getErrorCode()
-                                    .getCodeString());
-                    Assert.assertEquals(MsgPicker.getMsg().getAsyncQueryProjectNameEmpty(),
+                    Assert.assertTrue(result.getResolvedException() instanceof KylinException);
+                    Assert.assertEquals("KE-010031302",
+                            ((KylinException) result.getResolvedException()).getErrorCode().getCodeString());
+                    Assert.assertEquals(ASYNC_QUERY_PROJECT_NAME_EMPTY.getMsg(),
                             result.getResolvedException().getMessage());
                 });
     }
@@ -680,11 +677,10 @@ public class NAsyncQueryControllerTest extends NLocalFileMetadataTestCase {
         mockMvc.perform(MockMvcRequestBuilders.get("/api/async_query/{query_id:.+}/metadata", "123")
                 .contentType(MediaType.APPLICATION_JSON).accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
                 .andExpect(result -> {
-                    Assert.assertTrue(result.getResolvedException() instanceof NAsyncQueryIllegalParamException);
-                    Assert.assertEquals("KE-020040001",
-                            ((NAsyncQueryIllegalParamException) result.getResolvedException()).getErrorCode()
-                                    .getCodeString());
-                    Assert.assertEquals(MsgPicker.getMsg().getAsyncQueryProjectNameEmpty(),
+                    Assert.assertTrue(result.getResolvedException() instanceof KylinException);
+                    Assert.assertEquals("KE-010031302",
+                            ((KylinException) result.getResolvedException()).getErrorCode().getCodeString());
+                    Assert.assertEquals(ASYNC_QUERY_PROJECT_NAME_EMPTY.getMsg(),
                             result.getResolvedException().getMessage());
                 });
     }
@@ -694,11 +690,10 @@ public class NAsyncQueryControllerTest extends NLocalFileMetadataTestCase {
         mockMvc.perform(MockMvcRequestBuilders.get("/api/async_query/{query_id:.+}/result_download", "123")
                 .contentType(MediaType.APPLICATION_JSON).accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
                 .andExpect(result -> {
-                    Assert.assertTrue(result.getResolvedException() instanceof NAsyncQueryIllegalParamException);
-                    Assert.assertEquals("KE-020040001",
-                            ((NAsyncQueryIllegalParamException) result.getResolvedException()).getErrorCode()
-                                    .getCodeString());
-                    Assert.assertEquals(MsgPicker.getMsg().getAsyncQueryProjectNameEmpty(),
+                    Assert.assertTrue(result.getResolvedException() instanceof KylinException);
+                    Assert.assertEquals("KE-010031302",
+                            ((KylinException) result.getResolvedException()).getErrorCode().getCodeString());
+                    Assert.assertEquals(ASYNC_QUERY_PROJECT_NAME_EMPTY.getMsg(),
                             result.getResolvedException().getMessage());
                 });
     }
@@ -708,11 +703,10 @@ public class NAsyncQueryControllerTest extends NLocalFileMetadataTestCase {
         mockMvc.perform(MockMvcRequestBuilders.get("/api/async_query/{query_id}/result_path", "123")
                 .contentType(MediaType.APPLICATION_JSON).accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
                 .andExpect(result -> {
-                    Assert.assertTrue(result.getResolvedException() instanceof NAsyncQueryIllegalParamException);
-                    Assert.assertEquals("KE-020040001",
-                            ((NAsyncQueryIllegalParamException) result.getResolvedException()).getErrorCode()
-                                    .getCodeString());
-                    Assert.assertEquals(MsgPicker.getMsg().getAsyncQueryProjectNameEmpty(),
+                    Assert.assertTrue(result.getResolvedException() instanceof KylinException);
+                    Assert.assertEquals("KE-010031302",
+                            ((KylinException) result.getResolvedException()).getErrorCode().getCodeString());
+                    Assert.assertEquals(ASYNC_QUERY_PROJECT_NAME_EMPTY.getMsg(),
                             result.getResolvedException().getMessage());
                 });
     }
diff --git a/src/query-server/src/test/java/org/apache/kylin/rest/controller/NAsyncQueryControllerV2Test.java b/src/query-server/src/test/java/org/apache/kylin/rest/controller/NAsyncQueryControllerV2Test.java
index e570c0fd3e..d33135cc6f 100644
--- a/src/query-server/src/test/java/org/apache/kylin/rest/controller/NAsyncQueryControllerV2Test.java
+++ b/src/query-server/src/test/java/org/apache/kylin/rest/controller/NAsyncQueryControllerV2Test.java
@@ -19,22 +19,27 @@
 package org.apache.kylin.rest.controller;
 
 import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_V2_JSON;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.ASYNC_QUERY_INCLUDE_HEADER_NOT_EMPTY;
 import static org.apache.kylin.rest.service.AsyncQueryService.QueryStatus.FAILED;
 import static org.apache.kylin.rest.service.AsyncQueryService.QueryStatus.MISS;
 import static org.apache.kylin.rest.service.AsyncQueryService.QueryStatus.RUNNING;
 import static org.apache.kylin.rest.service.AsyncQueryService.QueryStatus.SUCCESS;
 
-import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
-import org.apache.kylin.rest.request.AsyncQuerySQLRequestV2;
-import org.apache.kylin.rest.service.AsyncQueryService;
+import java.io.IOException;
+
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.QueryContext;
+import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.util.JsonUtil;
+import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
 import org.apache.kylin.rest.constant.Constant;
+import org.apache.kylin.rest.request.AsyncQuerySQLRequestV2;
 import org.apache.kylin.rest.response.SQLResponse;
+import org.apache.kylin.rest.service.AsyncQueryService;
 import org.apache.kylin.rest.service.QueryService;
 import org.apache.kylin.rest.util.AclEvaluate;
 import org.junit.After;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.InjectMocks;
@@ -50,9 +55,6 @@ import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
 import org.springframework.test.web.servlet.result.MockMvcResultMatchers;
 import org.springframework.test.web.servlet.setup.MockMvcBuilders;
 
-
-import java.io.IOException;
-
 public class NAsyncQueryControllerV2Test extends NLocalFileMetadataTestCase {
 
     private static final String PROJECT = "default";
@@ -80,8 +82,8 @@ public class NAsyncQueryControllerV2Test extends NLocalFileMetadataTestCase {
     public void setup() throws IOException {
         MockitoAnnotations.initMocks(this);
 
-        mockMvc = MockMvcBuilders.standaloneSetup(nAsyncQueryControllerV2).defaultRequest(MockMvcRequestBuilders.get("/"))
-                .build();
+        mockMvc = MockMvcBuilders.standaloneSetup(nAsyncQueryControllerV2)
+                .defaultRequest(MockMvcRequestBuilders.get("/")).build();
 
         SecurityContextHolder.getContext().setAuthentication(authentication);
         createTestMetadata();
@@ -237,7 +239,27 @@ public class NAsyncQueryControllerV2Test extends NLocalFileMetadataTestCase {
                 .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V2_JSON)))
                 .andExpect(MockMvcResultMatchers.status().isOk());
 
-        Mockito.verify(nAsyncQueryControllerV2).downloadQueryResult(Mockito.anyString(), Mockito.anyBoolean(), Mockito.any());
+        Mockito.verify(nAsyncQueryControllerV2).downloadQueryResult(Mockito.anyString(), Mockito.any(), Mockito.any());
+    }
+
+    @Test
+    public void testDownloadQueryResultNotIncludeHeader() throws Exception {
+        Mockito.doReturn(true).when(asyncQueryService).hasPermission(Mockito.anyString(), Mockito.anyString());
+        AsyncQueryService.FileInfo fileInfo = new AsyncQueryService.FileInfo("csv", "gbk", "result");
+        Mockito.doReturn(fileInfo).when(asyncQueryService).getFileInfo(Mockito.anyString(), Mockito.anyString());
+        Mockito.doReturn(KylinConfig.getInstanceFromEnv()).when(kapQueryService).getConfig();
+
+        mockMvc.perform(MockMvcRequestBuilders.get("/api/async_query/{query_id:.+}/result_download", "123")
+                .param("includeHeader", "false").contentType(MediaType.APPLICATION_JSON)
+                .content(JsonUtil.writeValueAsString(mockAsyncQuerySQLRequest()))
+                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V2_JSON)))
+                .andExpect(MockMvcResultMatchers.status().isInternalServerError()).andExpect(result -> {
+                    Assert.assertTrue(result.getResolvedException() instanceof KylinException);
+                    Assert.assertEquals(ASYNC_QUERY_INCLUDE_HEADER_NOT_EMPTY.getMsg(),
+                            result.getResolvedException().getMessage());
+                });
+
+        Mockito.verify(nAsyncQueryControllerV2).downloadQueryResult(Mockito.anyString(), Mockito.any(), Mockito.any());
     }
 
 }
diff --git a/src/query-service/src/main/java/org/apache/kylin/rest/request/AsyncQuerySQLRequestV2.java b/src/query-service/src/main/java/org/apache/kylin/rest/request/AsyncQuerySQLRequestV2.java
index 5e16dde7b1..eb8e11bd07 100644
--- a/src/query-service/src/main/java/org/apache/kylin/rest/request/AsyncQuerySQLRequestV2.java
+++ b/src/query-service/src/main/java/org/apache/kylin/rest/request/AsyncQuerySQLRequestV2.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.rest.request;
 
+import com.fasterxml.jackson.annotation.JsonProperty;
 import lombok.Getter;
 import lombok.NoArgsConstructor;
 import lombok.Setter;
@@ -35,5 +36,7 @@ public class AsyncQuerySQLRequestV2 implements Serializable, ProjectInsensitiveR
     private String separator = ",";
     private Integer offset = 0;
     private Integer limit = 0;
+    @JsonProperty("include_header")
+    private boolean includeHeader;
 
 }
diff --git a/src/query-service/src/main/java/org/apache/kylin/rest/service/AsyncQueryService.java b/src/query-service/src/main/java/org/apache/kylin/rest/service/AsyncQueryService.java
index 55faca587a..71f8ca7e9a 100644
--- a/src/query-service/src/main/java/org/apache/kylin/rest/service/AsyncQueryService.java
+++ b/src/query-service/src/main/java/org/apache/kylin/rest/service/AsyncQueryService.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.rest.service;
 
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.ASYNC_QUERY_RESULT_NOT_FOUND;
 import static org.apache.kylin.query.util.AsyncQueryUtil.getUserFileName;
 import static org.apache.kylin.rest.util.AclPermissionUtil.isAdmin;
 
@@ -47,6 +48,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.KapConfig;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.msg.Message;
 import org.apache.kylin.common.msg.MsgPicker;
 import org.apache.kylin.metadata.project.NProjectManager;
@@ -54,10 +56,6 @@ import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.query.exception.NAsyncQueryIllegalParamException;
 import org.apache.kylin.query.util.AsyncQueryUtil;
 import org.apache.kylin.rest.exception.NotFoundException;
-import org.apache.poi.ss.usermodel.Sheet;
-import org.apache.poi.ss.usermodel.Workbook;
-import org.apache.poi.xssf.usermodel.XSSFWorkbook;
-import org.apache.spark.sql.SparderEnv;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.security.core.context.SecurityContextHolder;
@@ -114,8 +112,8 @@ public class AsyncQueryService extends BasicService {
         }
     }
 
-    public void retrieveSavedQueryResult(String project, String queryId, boolean includeHeader,
-            HttpServletResponse response, String fileFormat, String encode, String separator) throws IOException {
+    public void retrieveSavedQueryResult(String project, String queryId, HttpServletResponse response,
+            String fileFormat, String encode) throws IOException {
         checkStatus(queryId, QueryStatus.SUCCESS, project, MsgPicker.getMsg().getQueryResultNotFound());
 
         FileSystem fileSystem = AsyncQueryUtil.getFileSystem();
@@ -126,37 +124,15 @@ public class AsyncQueryService extends BasicService {
         }
 
         try (ServletOutputStream outputStream = response.getOutputStream()) {
-            String columnNames = null;
-            if (includeHeader) {
-                columnNames = processHeader(fileSystem, dataPath);
-                if (columnNames != null) {
-                    logger.debug("Query:{}, columnMeta:{}", columnNames, columnNames);
-                    if (!columnNames.endsWith(IOUtils.LINE_SEPARATOR_UNIX)) {
-                        columnNames = columnNames + IOUtils.LINE_SEPARATOR_UNIX;
-                    }
-                } else {
-                    logger.error("Query:{}, no columnMeta found", queryId);
-                }
-            }
             switch (fileFormat) {
             case "csv":
-                CSVWriter csvWriter = new CSVWriter();
-                processCSV(outputStream, dataPath, includeHeader, columnNames, csvWriter, separator);
-                break;
-            case "json":
-                processJSON(outputStream, dataPath, encode);
-                break;
             case "xlsx":
-                if (!includeHeader) {
-                    processFile(outputStream, dataPath);
-                } else {
-                    XLSXExcelWriter xlsxExcelWriter = new XLSXExcelWriter();
-                    processXLSX(outputStream, dataPath, includeHeader, columnNames, xlsxExcelWriter);
-                }
-                break;
             case "parquet":
                 processFile(outputStream, dataPath);
                 break;
+            case "json":
+                processJSON(outputStream, dataPath, encode);
+                break;
             default:
                 logger.info("Query:{}, processed", queryId);
             }
@@ -281,7 +257,7 @@ public class AsyncQueryService extends BasicService {
     public boolean deleteByQueryId(String project, String queryId) throws IOException {
         Path resultDir = getAsyncQueryResultDir(project, queryId);
         if (queryStatus(project, queryId) == QueryStatus.MISS) {
-            throw new NAsyncQueryIllegalParamException(MsgPicker.getMsg().getQueryResultNotFound());
+            throw new KylinException(ASYNC_QUERY_RESULT_NOT_FOUND);
         }
         logger.info("clean async query result for query id [{}]", queryId);
         return AsyncQueryUtil.getFileSystem().delete(resultDir, true);
@@ -324,7 +300,7 @@ public class AsyncQueryService extends BasicService {
 
     public String asyncQueryResultPath(String project, String queryId) throws IOException {
         if (queryStatus(project, queryId) == QueryStatus.MISS) {
-            throw new NAsyncQueryIllegalParamException(MsgPicker.getMsg().getQueryResultNotFound());
+            throw new KylinException(ASYNC_QUERY_RESULT_NOT_FOUND);
         }
         return getAsyncQueryResultDir(project, queryId).toString();
     }
@@ -344,28 +320,6 @@ public class AsyncQueryService extends BasicService {
         return new Path(KapConfig.getInstanceFromEnv().getAsyncResultBaseDir(project), queryId);
     }
 
-    private String processHeader(FileSystem fileSystem, Path dataPath) throws IOException {
-
-        FileStatus[] fileStatuses = fileSystem.listStatus(dataPath);
-        for (FileStatus header : fileStatuses) {
-            if (header.getPath().getName().equals(AsyncQueryUtil.getMetaDataFileName())) {
-                try (FSDataInputStream inputStream = fileSystem.open(header.getPath());
-                        BufferedReader bufferedReader = new BufferedReader(
-                                new InputStreamReader(inputStream, Charset.defaultCharset()))) {
-                    return bufferedReader.readLine();
-                }
-            }
-        }
-        return null;
-    }
-
-    private void processCSV(OutputStream outputStream, Path dataPath, boolean includeHeader, String columnNames,
-            CSVWriter excelWriter, String separator) throws IOException {
-        FileSystem fileSystem = AsyncQueryUtil.getFileSystem();
-        FileStatus[] fileStatuses = fileSystem.listStatus(dataPath);
-        excelWriter.writeData(fileStatuses, outputStream, columnNames, separator, includeHeader);
-    }
-
     private void processJSON(OutputStream outputStream, Path dataPath, String encode) throws IOException {
         FileSystem fileSystem = AsyncQueryUtil.getFileSystem();
         FileStatus[] fileStatuses = fileSystem.listStatus(dataPath);
@@ -395,25 +349,6 @@ public class AsyncQueryService extends BasicService {
         }
     }
 
-    private void processXLSX(OutputStream outputStream, Path dataPath, boolean includeHeader, String columnNames, XLSXExcelWriter excelWriter)
-            throws IOException {
-        FileSystem fileSystem = AsyncQueryUtil.getFileSystem();
-        FileStatus[] fileStatuses = fileSystem.listStatus(dataPath);
-        try (Workbook wb = new XSSFWorkbook()) {
-            Sheet sheet = wb.createSheet("query_result");
-            // Apply column names
-            if (includeHeader && columnNames != null) {
-                org.apache.poi.ss.usermodel.Row excelRow = sheet.createRow(0);
-                String[] columnNameArray = columnNames.split(SparderEnv.getSeparator());
-                for (int i = 0; i < columnNameArray.length; i++) {
-                    excelRow.createCell(i).setCellValue(columnNameArray[i]);
-                }
-            }
-            excelWriter.writeData(fileStatuses, sheet);
-            wb.write(outputStream);
-        }
-    }
-
     public enum QueryStatus {
         RUNNING, FAILED, SUCCESS, MISS
     }
diff --git a/src/query-service/src/main/java/org/apache/kylin/rest/service/CSVWriter.java b/src/query-service/src/main/java/org/apache/kylin/rest/service/CSVWriter.java
deleted file mode 100644
index a17dcf318c..0000000000
--- a/src/query-service/src/main/java/org/apache/kylin/rest/service/CSVWriter.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.rest.service;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.io.OutputStreamWriter;
-import java.io.Writer;
-import java.nio.charset.StandardCharsets;
-import java.util.Iterator;
-import java.util.List;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.spark.sql.SparderEnv;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import scala.collection.JavaConverters;
-
-public class CSVWriter {
-
-    private static final Logger logger = LoggerFactory.getLogger("query");
-
-    private static final String QUOTE_CHAR = "\"";
-    private static final String END_OF_LINE_SYMBOLS = IOUtils.LINE_SEPARATOR_UNIX;
-
-    public void writeData(FileStatus[] fileStatuses, OutputStream outputStream,
-                          String columnNames, String separator, boolean includeHeaders) throws IOException {
-
-        try (Writer writer = new OutputStreamWriter(outputStream, StandardCharsets.UTF_8)) {
-            if (includeHeaders) {
-                writer.write(columnNames.replace(",", separator));
-                writer.flush();
-            }
-            for (FileStatus fileStatus : fileStatuses) {
-                if (!fileStatus.getPath().getName().startsWith("_")) {
-                    if (fileStatus.getPath().getName().endsWith("parquet")) {
-                        writeDataByParquet(fileStatus, writer, separator);
-                    } else {
-                        writeDataByCsv(fileStatus, writer, separator);
-                    }
-                }
-            }
-
-            writer.flush();
-        }
-    }
-
-    public static void writeCsv(Iterator<List<Object>> rows, Writer writer, String separator) {
-        rows.forEachRemaining(row -> {
-            StringBuilder builder = new StringBuilder();
-
-            for (int i = 0; i < row.size(); i++) {
-                Object cell = row.get(i);
-                String column = cell == null ? "" : cell.toString();
-
-                if (i > 0) {
-                    builder.append(separator);
-                }
-
-                final String escapedCsv = encodeCell(column, separator);
-                builder.append(escapedCsv);
-            }
-            builder.append(END_OF_LINE_SYMBOLS); // EOL
-            try {
-                writer.write(builder.toString());
-            } catch (IOException e) {
-                logger.error("Failed to download asyncQueryResult csvExcel by parquet", e);
-            }
-        });
-    }
-
-    private void writeDataByParquet(FileStatus fileStatus, Writer writer, String separator) {
-        List<org.apache.spark.sql.Row> rowList = SparderEnv.getSparkSession().read()
-                .parquet(fileStatus.getPath().toString()).collectAsList();
-        writeCsv(rowList.stream().map(row -> JavaConverters.seqAsJavaList(row.toSeq())).iterator(), writer, separator);
-    }
-
-    // the encode logic is copied from org.supercsv.encoder.DefaultCsvEncoder.encode
-    private static String encodeCell(String cell, String separator) {
-
-        boolean needQuote = cell.contains(separator) || cell.contains("\r") || cell.contains("\n");
-
-        if (cell.contains(QUOTE_CHAR)) {
-            needQuote = true;
-            // escape
-            cell = cell.replace(QUOTE_CHAR, QUOTE_CHAR + QUOTE_CHAR);
-        }
-
-        if (needQuote) {
-            return QUOTE_CHAR + cell + QUOTE_CHAR;
-        } else {
-            return cell;
-        }
-    }
-
-    private void writeDataByCsv(FileStatus fileStatus, Writer writer, String separator) {
-        List<org.apache.spark.sql.Row> rowList = SparderEnv.getSparkSession().read()
-                .csv(fileStatus.getPath().toString()).collectAsList();
-        writeCsv(rowList.stream().map(row -> JavaConverters.seqAsJavaList(row.toSeq())).iterator(), writer, separator);
-    }
-
-}
diff --git a/src/query-service/src/main/java/org/apache/kylin/rest/service/XLSXExcelWriter.java b/src/query-service/src/main/java/org/apache/kylin/rest/service/XLSXExcelWriter.java
deleted file mode 100644
index e54678f375..0000000000
--- a/src/query-service/src/main/java/org/apache/kylin/rest/service/XLSXExcelWriter.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.rest.service;
-
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.util.List;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.stream.Collectors;
-
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.kylin.query.util.AsyncQueryUtil;
-import org.apache.poi.ss.usermodel.CellType;
-import org.apache.poi.ss.usermodel.Row;
-import org.apache.poi.ss.usermodel.Sheet;
-import org.apache.poi.xssf.usermodel.XSSFCell;
-import org.apache.poi.xssf.usermodel.XSSFRow;
-import org.apache.poi.xssf.usermodel.XSSFSheet;
-import org.apache.poi.xssf.usermodel.XSSFWorkbook;
-import org.apache.spark.sql.SparderEnv;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.clearspring.analytics.util.Lists;
-
-import lombok.val;
-
-public class XLSXExcelWriter {
-
-    private static final Logger logger = LoggerFactory.getLogger("query");
-
-    public void writeData(FileStatus[] fileStatuses, Sheet sheet) {
-        for (FileStatus fileStatus : fileStatuses) {
-            if (!fileStatus.getPath().getName().startsWith("_")) {
-                if (fileStatus.getPath().getName().endsWith("parquet")) {
-                    writeDataByParquet(fileStatus, sheet);
-                } else if (fileStatus.getPath().getName().endsWith("xlsx")) {
-                    writeDataByXlsx(fileStatus, sheet);
-                } else {
-                    writeDataByCsv(fileStatus, sheet);
-                }
-            }
-        }
-    }
-
-    private void writeDataByXlsx(FileStatus f, Sheet sheet) {
-        boolean createTempFileStatus = false;
-        File file = new File("temp.xlsx");
-        try {
-            createTempFileStatus = file.createNewFile();
-            FileSystem fileSystem = AsyncQueryUtil.getFileSystem();
-            fileSystem.copyToLocalFile(f.getPath(), new Path(file.getPath()));
-        } catch (Exception e) {
-            logger.error("Export excel writeDataByXlsx create exception f:{} createTempFileStatus:{} ",
-                    f.getPath(), createTempFileStatus, e);
-        }
-        try (InputStream is = new FileInputStream(file.getAbsolutePath());
-             XSSFWorkbook sheets = new XSSFWorkbook(is)) {
-            final AtomicInteger offset = new AtomicInteger(sheet.getPhysicalNumberOfRows());
-            XSSFSheet sheetAt = sheets.getSheetAt(0);
-            for (int i = 0; i < sheetAt.getPhysicalNumberOfRows(); i++) {
-                XSSFRow row = sheetAt.getRow(i);
-                org.apache.poi.ss.usermodel.Row excelRow = sheet.createRow(offset.get());
-                offset.incrementAndGet();
-                for (int index = 0; index < row.getPhysicalNumberOfCells(); index++) {
-                    XSSFCell cell = row.getCell(index);
-                    excelRow.createCell(index).setCellValue(getString(cell));
-                }
-            }
-            Files.delete(file.toPath());
-        } catch (Exception e) {
-            logger.error("Export excel writeDataByXlsx handler exception f:{} createTempFileStatus:{} ",
-                    f.getPath(), createTempFileStatus, e);
-        }
-    }
-
-    private static String getString(XSSFCell xssfCell) {
-        if (xssfCell == null) {
-            return "";
-        }
-        if (xssfCell.getCellType() == CellType.NUMERIC) {
-            return String.valueOf(xssfCell.getNumericCellValue());
-        } else if (xssfCell.getCellType() == CellType.BOOLEAN) {
-            return String.valueOf(xssfCell.getBooleanCellValue());
-        } else {
-            return xssfCell.getStringCellValue();
-        }
-    }
-
-    private void writeDataByParquet(FileStatus fileStatus, Sheet sheet) {
-        final AtomicInteger offset = new AtomicInteger(sheet.getPhysicalNumberOfRows());
-        List<org.apache.spark.sql.Row> rowList = SparderEnv.getSparkSession().read()
-                .parquet(fileStatus.getPath().toString()).collectAsList();
-        rowList.stream().forEach(row -> {
-            org.apache.poi.ss.usermodel.Row excelRow = sheet.createRow(offset.get());
-            offset.incrementAndGet();
-            val list = row.toSeq().toList();
-            for (int i = 0; i < list.size(); i++) {
-                Object cell = list.apply(i);
-                String column = cell == null ? "" : cell.toString();
-                excelRow.createCell(i).setCellValue(column);
-            }
-        });
-    }
-
-    public void writeDataByCsv(FileStatus fileStatus, Sheet sheet) {
-        FileSystem fileSystem = AsyncQueryUtil.getFileSystem();
-        List<String> rowResults = Lists.newArrayList();
-        List<String[]> results = Lists.newArrayList();
-        final AtomicInteger offset = new AtomicInteger(sheet.getPhysicalNumberOfRows());
-        try (FSDataInputStream inputStream = fileSystem.open(fileStatus.getPath())) {
-            BufferedReader bufferedReader = new BufferedReader(
-                    new InputStreamReader(inputStream, StandardCharsets.UTF_8));
-            rowResults.addAll(Lists.newArrayList(bufferedReader.lines().collect(Collectors.toList())));
-            for (String row : rowResults) {
-                results.add(row.split(SparderEnv.getSeparator()));
-            }
-            for (int i = 0; i < results.size(); i++) {
-                Row row = sheet.createRow(offset.get());
-                offset.incrementAndGet();
-                String[] rowValues = results.get(i);
-                for (int j = 0; j < rowValues.length; j++) {
-                    row.createCell(j).setCellValue(rowValues[j]);
-                }
-            }
-        } catch (IOException e) {
-            logger.error("Failed to download asyncQueryResult xlsxExcel by csv", e);
-        }
-    }
-}
diff --git a/src/query-service/src/test/java/org/apache/kylin/rest/service/AysncQueryServiceTest.java b/src/query-service/src/test/java/org/apache/kylin/rest/service/AysncQueryServiceTest.java
index 282824ac65..241ffd3d10 100644
--- a/src/query-service/src/test/java/org/apache/kylin/rest/service/AysncQueryServiceTest.java
+++ b/src/query-service/src/test/java/org/apache/kylin/rest/service/AysncQueryServiceTest.java
@@ -37,10 +37,10 @@ import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStreamWriter;
-import java.io.StringWriter;
 import java.nio.charset.Charset;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
+import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.UUID;
@@ -58,10 +58,14 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.KapConfig;
 import org.apache.kylin.common.QueryContext;
+import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.util.RandomUtil;
+import org.apache.kylin.metadata.query.QueryMetricsContext;
 import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
+import org.apache.kylin.query.engine.QueryExec;
 import org.apache.kylin.query.exception.NAsyncQueryIllegalParamException;
 import org.apache.kylin.query.pushdown.SparkSqlClient;
+import org.apache.kylin.query.runtime.plan.ResultPlan;
 import org.apache.kylin.query.util.AsyncQueryUtil;
 import org.apache.kylin.rest.response.SQLResponse;
 import org.apache.poi.ss.usermodel.CellType;
@@ -87,6 +91,7 @@ import org.supercsv.io.ICsvListWriter;
 import org.supercsv.prefs.CsvPreference;
 
 import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
 
 import lombok.val;
 
@@ -163,38 +168,23 @@ public class AysncQueryServiceTest extends ServiceTestBase {
     }
 
     @Test
-    public void testAsyncQueryWithParquetSpecialCharacters() throws IOException {
+    public void testAsyncQueryAndDownloadCsvResultNotIncludeHeader() throws IOException {
         QueryContext queryContext = QueryContext.current();
         String queryId = queryContext.getQueryId();
         mockMetadata(queryId, true);
         queryContext.getQueryTagInfo().setAsyncQuery(true);
-        queryContext.getQueryTagInfo().setFileFormat("CSV");
+        queryContext.getQueryTagInfo().setFileFormat("csv");
         queryContext.getQueryTagInfo().setFileEncode("utf-8");
-        String sql = "select '\\(123\\)','123'";
-        queryContext.setProject(PROJECT);
+        queryContext.getQueryTagInfo().setSeparator(",");
+        queryContext.getQueryTagInfo().setIncludeHeader(false);
 
-        ss.sqlContext().setConf("spark.sql.parquet.columnNameCheck.enabled", "false");
-        SparkSqlClient.executeSql(ss, sql, UUID.fromString(queryId), PROJECT);
-
-        await().atMost(60000, TimeUnit.MILLISECONDS).until(
-                () -> AsyncQueryService.QueryStatus.SUCCESS.equals(asyncQueryService.queryStatus(PROJECT, queryId)));
-        HttpServletResponse response = mock(HttpServletResponse.class);
-        ServletOutputStream servletOutputStream = mock(ServletOutputStream.class);
-        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        when(response.getOutputStream()).thenReturn(servletOutputStream);
-        doAnswer(new Answer() {
-            @Override
-            public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
-                Object[] arguments = invocationOnMock.getArguments();
-                baos.write((byte[]) arguments[0], (int) arguments[1], (int) arguments[2]);
-                return null;
-            }
-        }).when(servletOutputStream).write(any(byte[].class), anyInt(), anyInt());
+        String sql = "select '123\"','123'";
+        queryContext.setProject(PROJECT);
+        ResultPlan.getResult(ss.sql(sql), null);
+        assertSame(AsyncQueryService.QueryStatus.SUCCESS, asyncQueryService.queryStatus(PROJECT, queryId));
 
-        SparderEnv.getSparkSession().sqlContext().setConf("spark.sql.parquet.columnNameCheck.enabled", "false");
-        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, false, response, "csv", encodeDefault, ",");
         List<org.apache.spark.sql.Row> rowList = ss.read()
-                .parquet(asyncQueryService.getAsyncQueryResultDir(PROJECT, queryId).toString()).collectAsList();
+                .csv(asyncQueryService.getAsyncQueryResultDir(PROJECT, queryId).toString()).collectAsList();
         List<String> result = Lists.newArrayList();
         rowList.stream().forEach(row -> {
             val list = row.toSeq().toList();
@@ -204,35 +194,35 @@ public class AysncQueryServiceTest extends ServiceTestBase {
                 result.add(column);
             }
         });
-        assertEquals("(123)" + "123", result.get(0) + result.get(1));
+        assertEquals("123\"\"" + "123", result.get(0) + result.get(1));
+
+        // download asyncQuery result
+        HttpServletResponse response = mock(HttpServletResponse.class);
+        ByteArrayOutputStream baos = mockOutputStream(response);
+        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, response, "csv", encodeDefault);
+        Assert.assertEquals("\"123\"\"\",123\n", baos.toString(StandardCharsets.UTF_8.name()));
     }
 
     @Test
-    public void testAsyncQueryDownCsvResultByParquet() throws IOException {
+    public void testAsyncQueryAndDownloadCsvResultIncludeHeader() throws IOException, SQLException {
         QueryContext queryContext = QueryContext.current();
         String queryId = queryContext.getQueryId();
         mockMetadata(queryId, true);
         queryContext.getQueryTagInfo().setAsyncQuery(true);
         queryContext.getQueryTagInfo().setFileFormat("csv");
         queryContext.getQueryTagInfo().setFileEncode("utf-8");
+        queryContext.getQueryTagInfo().setSeparator(",");
+        queryContext.getQueryTagInfo().setIncludeHeader(true);
+
         String sql = "select '123\"','123'";
         queryContext.setProject(PROJECT);
-        SparkSqlClient.executeSql(ss, sql, UUID.fromString(queryId), PROJECT);
+
+        new QueryExec(PROJECT, getTestConfig()).executeQuery(sql);
+
         assertSame(AsyncQueryService.QueryStatus.SUCCESS, asyncQueryService.queryStatus(PROJECT, queryId));
-        HttpServletResponse response = mock(HttpServletResponse.class);
-        ServletOutputStream servletOutputStream = mock(ServletOutputStream.class);
-        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        when(response.getOutputStream()).thenReturn(servletOutputStream);
-        doAnswer(new Answer() {
-            @Override
-            public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
-                Object[] arguments = invocationOnMock.getArguments();
-                baos.write((byte[]) arguments[0], (int) arguments[1], (int) arguments[2]);
-                return null;
-            }
-        }).when(servletOutputStream).write(any(byte[].class), anyInt(), anyInt());
-        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, false, response, "csv", encodeDefault, ",");
-        List<org.apache.spark.sql.Row> rowList = ss.read().csv(asyncQueryService.getAsyncQueryResultDir(PROJECT, queryId).toString()).collectAsList();
+
+        List<org.apache.spark.sql.Row> rowList = ss.read()
+                .csv(asyncQueryService.getAsyncQueryResultDir(PROJECT, queryId).toString()).collectAsList();
         List<String> result = Lists.newArrayList();
         rowList.stream().forEach(row -> {
             val list = row.toSeq().toList();
@@ -242,130 +232,146 @@ public class AysncQueryServiceTest extends ServiceTestBase {
                 result.add(column);
             }
         });
-        assertEquals("123\"" + "123", result.get(0) + result.get(1));
+        assertEquals("EXPR$0" + "EXPR$1", result.get(0) + result.get(1));
+        assertEquals("123\"\"" + "123", result.get(2) + result.get(3));
+
+        // download asyncQuery result
+        HttpServletResponse response = mock(HttpServletResponse.class);
+        ByteArrayOutputStream baos = mockOutputStream(response);
+        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, response, "csv", encodeDefault);
+        Assert.assertEquals("EXPR$0,EXPR$1\n\"123\"\"\",123\n", baos.toString(StandardCharsets.UTF_8.name()));
     }
 
     @Test
-    public void testSuccessQueryAndDownloadXlsxWriter() throws IOException {
+    public void testAsyncQueryPushDownAndDownloadCsvResultNotIncludeHeader() throws IOException {
         QueryContext queryContext = QueryContext.current();
         String queryId = queryContext.getQueryId();
         mockMetadata(queryId, true);
         queryContext.getQueryTagInfo().setAsyncQuery(true);
-        queryContext.getQueryTagInfo().setFileFormat("xlsx");
+        queryContext.getQueryTagInfo().setFileFormat("csv");
         queryContext.getQueryTagInfo().setFileEncode("utf-8");
-        String sql = "select '123\"' as col1,'123' as col2";
+        queryContext.getQueryTagInfo().setSeparator(",");
+        queryContext.getQueryTagInfo().setIncludeHeader(false);
+
+        String sql = "select '123\"','123'";
         queryContext.setProject(PROJECT);
+
         SparkSqlClient.executeSql(ss, sql, UUID.fromString(queryId), PROJECT);
         assertSame(AsyncQueryService.QueryStatus.SUCCESS, asyncQueryService.queryStatus(PROJECT, queryId));
-        HttpServletResponse response = mock(HttpServletResponse.class);
-        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        ServletOutputStream servletOutputStream = mock(ServletOutputStream.class);
-        when(response.getOutputStream()).thenReturn(servletOutputStream);
-        doAnswer(new Answer() {
-            @Override
-            public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
-                Object[] arguments = invocationOnMock.getArguments();
-                baos.write((byte[]) arguments[0], (int) arguments[1], (int) arguments[2]);
-                return null;
+
+        List<org.apache.spark.sql.Row> rowList = ss.read()
+                .csv(asyncQueryService.getAsyncQueryResultDir(PROJECT, queryId).toString()).collectAsList();
+        List<String> result = Lists.newArrayList();
+        rowList.stream().forEach(row -> {
+            val list = row.toSeq().toList();
+            for (int i = 0; i < list.size(); i++) {
+                Object cell = list.apply(i);
+                String column = cell == null ? "" : cell.toString();
+                result.add(column);
             }
-        }).when(servletOutputStream).write(any(byte[].class), anyInt(), anyInt());
-        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, false, response, "xlsx", encodeDefault, ",");
-        FileSystem fileSystem = AsyncQueryUtil.getFileSystem();
-        FileStatus[] fileStatuses = fileSystem.listStatus(new Path(asyncQueryService.getAsyncQueryResultDir(PROJECT, queryId).toString()));
-        XLSXExcelWriter xlsxExcelWriter = new XLSXExcelWriter();
-        XSSFWorkbook workbook = new XSSFWorkbook();
-        XSSFSheet sheet = workbook.createSheet();
-        xlsxExcelWriter.writeData(fileStatuses, sheet);
-        XSSFRow row = sheet.getRow(0);
-        assertEquals("123\",123", row.getCell(0) + "," + row.getCell(1));
-        assertEquals("[col1, col2]", QueryContext.current().getColumnNames().toString());
+        });
+        assertEquals("123\"\"" + "123", result.get(0) + result.get(1));
+
+        // download asyncQuery pushDown result
+        HttpServletResponse response = mock(HttpServletResponse.class);
+        ByteArrayOutputStream baos = mockOutputStream(response);
+        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, response, "csv", encodeDefault);
+        Assert.assertEquals("\"123\"\"\",123\n", baos.toString(StandardCharsets.UTF_8.name()));
     }
 
     @Test
-    public void testSuccessQueryAndDownloadCSV() throws IOException {
+    public void testAsyncQueryPushDownAndDownloadCsvResultIncludeHeader() throws IOException {
         QueryContext queryContext = QueryContext.current();
         String queryId = queryContext.getQueryId();
         mockMetadata(queryId, true);
         queryContext.getQueryTagInfo().setAsyncQuery(true);
         queryContext.getQueryTagInfo().setFileFormat("csv");
         queryContext.getQueryTagInfo().setFileEncode("utf-8");
-        String sql = "select '123\"' as col1,'123' as col2";
+        queryContext.getQueryTagInfo().setSeparator(",");
+        queryContext.getQueryTagInfo().setIncludeHeader(true);
+
+        String sql = "select '123\"','123'";
         queryContext.setProject(PROJECT);
+
         SparkSqlClient.executeSql(ss, sql, UUID.fromString(queryId), PROJECT);
         assertSame(AsyncQueryService.QueryStatus.SUCCESS, asyncQueryService.queryStatus(PROJECT, queryId));
-        HttpServletResponse response = mock(HttpServletResponse.class);
-        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        ServletOutputStream servletOutputStream = mock(ServletOutputStream.class);
-        when(response.getOutputStream()).thenReturn(servletOutputStream);
-        doAnswer(new Answer() {
-            @Override
-            public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
-                Object[] arguments = invocationOnMock.getArguments();
-                baos.write((byte[]) arguments[0], (int) arguments[1], (int) arguments[2]);
-                return null;
+
+        List<org.apache.spark.sql.Row> rowList = ss.read()
+                .csv(asyncQueryService.getAsyncQueryResultDir(PROJECT, queryId).toString()).collectAsList();
+        List<String> result = Lists.newArrayList();
+        rowList.stream().forEach(row -> {
+            val list = row.toSeq().toList();
+            for (int i = 0; i < list.size(); i++) {
+                Object cell = list.apply(i);
+                String column = cell == null ? "" : cell.toString();
+                result.add(column);
             }
-        }).when(servletOutputStream).write(any(byte[].class), anyInt(), anyInt());
-        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, true, response, "xlsx", encodeDefault, ",");
-        FileSystem fileSystem = AsyncQueryUtil.getFileSystem();
-        FileStatus[] fileStatuses = fileSystem.listStatus(new Path(asyncQueryService.getAsyncQueryResultDir(PROJECT, queryId).toString()));
-        XLSXExcelWriter xlsxExcelWriter = new XLSXExcelWriter();
-        XSSFWorkbook workbook = new XSSFWorkbook();
-        XSSFSheet sheet = workbook.createSheet();
-        xlsxExcelWriter.writeData(fileStatuses, sheet);
-        XSSFRow row = sheet.getRow(0);
-        assertEquals("\"123\\\"\",123", row.getCell(0) + "," + row.getCell(1));
-        assertEquals("[col1, col2]", QueryContext.current().getColumnNames().toString());
+        });
+        assertEquals("123\"" + "123", result.get(0) + result.get(1));
+        assertEquals("123\"\"" + "123", result.get(2) + result.get(3));
+
+        // download asyncQuery pushDown result
+        HttpServletResponse response = mock(HttpServletResponse.class);
+        ByteArrayOutputStream baos = mockOutputStream(response);
+        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, response, "csv", encodeDefault);
+        Assert.assertEquals("123\",123\n\"123\"\"\",123\n", baos.toString(StandardCharsets.UTF_8.name()));
     }
 
     @Test
-    public void testSuccessQueryAndDownloadCSVForDateFormat() throws IOException {
+    public void testAsyncQueryAndDownloadCsvResultSpecialSeparator() throws IOException, SQLException {
+        String separator = "\n";
         QueryContext queryContext = QueryContext.current();
         String queryId = queryContext.getQueryId();
         mockMetadata(queryId, true);
         queryContext.getQueryTagInfo().setAsyncQuery(true);
         queryContext.getQueryTagInfo().setFileFormat("csv");
         queryContext.getQueryTagInfo().setFileEncode("utf-8");
-        String sql = "select '123\"' as col1,'123' as col2, date'2021-02-01' as col3";
+        queryContext.getQueryTagInfo().setSeparator(separator);
+        queryContext.getQueryTagInfo().setIncludeHeader(false);
+
+        String sql = "select '123\"','123'";
         queryContext.setProject(PROJECT);
-        SparkSqlClient.executeSql(ss, sql, UUID.fromString(queryId), PROJECT);
+
+        new QueryExec(PROJECT, getTestConfig()).executeQuery(sql);
+
         assertSame(AsyncQueryService.QueryStatus.SUCCESS, asyncQueryService.queryStatus(PROJECT, queryId));
-        HttpServletResponse response = mock(HttpServletResponse.class);
-        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        ServletOutputStream servletOutputStream = mock(ServletOutputStream.class);
-        when(response.getOutputStream()).thenReturn(servletOutputStream);
-        doAnswer(new Answer() {
-            @Override
-            public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
-                Object[] arguments = invocationOnMock.getArguments();
-                baos.write((byte[]) arguments[0], (int) arguments[1], (int) arguments[2]);
-                return null;
+
+        List<org.apache.spark.sql.Row> rowList = ss.read()
+                .csv(asyncQueryService.getAsyncQueryResultDir(PROJECT, queryId).toString()).collectAsList();
+        List<String> result = Lists.newArrayList();
+        rowList.stream().forEach(row -> {
+            val list = row.toSeq().toList();
+            for (int i = 0; i < list.size(); i++) {
+                Object cell = list.apply(i);
+                String column = cell == null ? "" : cell.toString();
+                result.add(column);
             }
-        }).when(servletOutputStream).write(any(byte[].class), anyInt(), anyInt());
-        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, true, response, "xlsx", encodeDefault, ",");
-        FileSystem fileSystem = AsyncQueryUtil.getFileSystem();
-        FileStatus[] fileStatuses = fileSystem.listStatus(new Path(asyncQueryService.getAsyncQueryResultDir(PROJECT, queryId).toString()));
-        XLSXExcelWriter xlsxExcelWriter = new XLSXExcelWriter();
-        XSSFWorkbook workbook = new XSSFWorkbook();
-        XSSFSheet sheet = workbook.createSheet();
-        xlsxExcelWriter.writeData(fileStatuses, sheet);
-        XSSFRow row = sheet.getRow(0);
-        assertEquals("\"123\\\"\",123,2021-02-01", row.getCell(0)
-                + "," + row.getCell(1) + "," + row.getCell(2));
-        assertEquals("[col1, col2, col3]", QueryContext.current().getColumnNames().toString());
+        });
+        assertEquals("123\"\"" + "123", result.get(0) + result.get(1));
+
+        // download asyncQuery result
+        HttpServletResponse response = mock(HttpServletResponse.class);
+        ByteArrayOutputStream baos = mockOutputStream(response);
+        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, response, "csv", encodeDefault);
+        Assert.assertEquals("\"123\"\"\"\n" + "123\n", baos.toString(StandardCharsets.UTF_8.name()));
     }
 
     @Test
-    public void testSuccessQueryAndDownloadCSVNotIncludeHeader() throws IOException {
+    public void testAsyncQueryWithParquetSpecialCharacters() throws IOException {
         QueryContext queryContext = QueryContext.current();
         String queryId = queryContext.getQueryId();
         mockMetadata(queryId, true);
         queryContext.getQueryTagInfo().setAsyncQuery(true);
-        queryContext.getQueryTagInfo().setFileFormat("csv");
+        queryContext.getQueryTagInfo().setFileFormat("CSV");
         queryContext.getQueryTagInfo().setFileEncode("utf-8");
-        String sql = "select '123\"','123'";
+        String sql = "select '\\(123\\)','123'";
         queryContext.setProject(PROJECT);
+
+        ss.sqlContext().setConf("spark.sql.parquet.columnNameCheck.enabled", "false");
         SparkSqlClient.executeSql(ss, sql, UUID.fromString(queryId), PROJECT);
-        assertSame(AsyncQueryService.QueryStatus.SUCCESS, asyncQueryService.queryStatus(PROJECT, queryId));
+
+        await().atMost(60000, TimeUnit.MILLISECONDS).until(
+                () -> AsyncQueryService.QueryStatus.SUCCESS.equals(asyncQueryService.queryStatus(PROJECT, queryId)));
         HttpServletResponse response = mock(HttpServletResponse.class);
         ServletOutputStream servletOutputStream = mock(ServletOutputStream.class);
         final ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -378,9 +384,11 @@ public class AysncQueryServiceTest extends ServiceTestBase {
                 return null;
             }
         }).when(servletOutputStream).write(any(byte[].class), anyInt(), anyInt());
-        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, false, response, "csv", encodeDefault, "#");
-        List<org.apache.spark.sql.Row> rowList = ss.read().csv(asyncQueryService.getAsyncQueryResultDir(PROJECT, queryId).toString()).collectAsList();
-        Assert.assertEquals("\"123\"\"\"#123\n", baos.toString(StandardCharsets.UTF_8.name()));
+
+        SparderEnv.getSparkSession().sqlContext().setConf("spark.sql.parquet.columnNameCheck.enabled", "false");
+        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, response, "csv", encodeDefault);
+        List<org.apache.spark.sql.Row> rowList = ss.read()
+                .parquet(asyncQueryService.getAsyncQueryResultDir(PROJECT, queryId).toString()).collectAsList();
         List<String> result = Lists.newArrayList();
         rowList.stream().forEach(row -> {
             val list = row.toSeq().toList();
@@ -390,18 +398,19 @@ public class AysncQueryServiceTest extends ServiceTestBase {
                 result.add(column);
             }
         });
-        assertEquals("123\"" + "123", result.get(0) + result.get(1));
+        assertEquals("(123)" + "123", result.get(0) + result.get(1));
     }
 
     @Test
-    public void testSuccessQueryAndDownloadJSON() throws IOException {
+    public void testSuccessQueryAndDownloadCSVForDateFormat() throws IOException {
         QueryContext queryContext = QueryContext.current();
         String queryId = queryContext.getQueryId();
         mockMetadata(queryId, true);
         queryContext.getQueryTagInfo().setAsyncQuery(true);
-        queryContext.getQueryTagInfo().setFileFormat("json");
+        queryContext.getQueryTagInfo().setFileFormat("csv");
         queryContext.getQueryTagInfo().setFileEncode("utf-8");
-        String sql = "select '123\"' as col1,'123' as col2";
+        queryContext.getQueryTagInfo().setSeparator(",");
+        String sql = "select '123\"' as col1,'123' as col2, date'2021-02-01' as col3";
         queryContext.setProject(PROJECT);
         SparkSqlClient.executeSql(ss, sql, UUID.fromString(queryId), PROJECT);
         assertSame(AsyncQueryService.QueryStatus.SUCCESS, asyncQueryService.queryStatus(PROJECT, queryId));
@@ -417,25 +426,15 @@ public class AysncQueryServiceTest extends ServiceTestBase {
                 return null;
             }
         }).when(servletOutputStream).write(any(byte[].class), anyInt(), anyInt());
-        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, false, response, "json", encodeDefault, ",");
-        FileSystem fileSystem = AsyncQueryUtil.getFileSystem();
-        FileStatus[] fileStatuses = fileSystem.listStatus(new Path(asyncQueryService.getAsyncQueryResultDir(PROJECT, queryId).toString()));
-        XLSXExcelWriter xlsxExcelWriter = new XLSXExcelWriter();
-        XSSFWorkbook workbook = new XSSFWorkbook();
-        XSSFSheet sheet = workbook.createSheet();
-        xlsxExcelWriter.writeData(fileStatuses, sheet);
-        XSSFRow row = sheet.getRow(0);
-        assertEquals("{\"col1\":\"123\\\"\",\"col2\":\"123\"}", row.getCell(0) + "," + row.getCell(1));
-        assertEquals("[col1, col2]", QueryContext.current().getColumnNames().toString());
     }
 
     @Test
-    public void testSuccessQueryAndDownloadXlsxResultByParquet() throws IOException {
+    public void testSuccessQueryAndDownloadJSON() throws IOException {
         QueryContext queryContext = QueryContext.current();
         String queryId = queryContext.getQueryId();
         mockMetadata(queryId, true);
         queryContext.getQueryTagInfo().setAsyncQuery(true);
-        queryContext.getQueryTagInfo().setFileFormat("xlsx");
+        queryContext.getQueryTagInfo().setFileFormat("json");
         queryContext.getQueryTagInfo().setFileEncode("utf-8");
         String sql = "select '123\"' as col1,'123' as col2";
         queryContext.setProject(PROJECT);
@@ -453,44 +452,63 @@ public class AysncQueryServiceTest extends ServiceTestBase {
                 return null;
             }
         }).when(servletOutputStream).write(any(byte[].class), anyInt(), anyInt());
-        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, false, response, "xlsx", encodeDefault, ",");
-        FileSystem fileSystem = AsyncQueryUtil.getFileSystem();
+    }
+
+    @Test
+    public void testSuccessQueryAndDownloadXlsxResultNotIncludeHeader() throws IOException {
+        QueryContext queryContext = QueryContext.current();
+        String queryId = queryContext.getQueryId();
+        mockMetadata(queryId, true);
+        queryContext.getQueryTagInfo().setAsyncQuery(true);
+        queryContext.getQueryTagInfo().setFileFormat("xlsx");
+        queryContext.getQueryTagInfo().setFileEncode("utf-8");
+        String sql = "select '123\"' as col1,'123' as col2";
+        queryContext.setProject(PROJECT);
+        SparkSqlClient.executeSql(ss, sql, UUID.fromString(queryId), PROJECT);
+        assertSame(AsyncQueryService.QueryStatus.SUCCESS, asyncQueryService.queryStatus(PROJECT, queryId));
+        HttpServletResponse response = mock(HttpServletResponse.class);
+        ByteArrayOutputStream outputStream = mockOutputStream(response);
+        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, response, "xlsx", encodeDefault);
+
         File file = new File("result.xlsx");
         boolean createTempFileStatus = file.createNewFile();
-        ArrayList<String> list = new ArrayList<>();
-        FileStatus[] fileStatuses = fileSystem.listStatus(new Path(asyncQueryService.getAsyncQueryResultDir(PROJECT, queryId).toString()));
-        for (FileStatus f : fileStatuses) {
-            if (!f.getPath().getName().startsWith("_")) {
-                fileSystem.copyToLocalFile(f.getPath(), new Path(file.getPath()));
-                try(InputStream is = new FileInputStream(file.getAbsolutePath());
-                    XSSFWorkbook sheets = new XSSFWorkbook(is)) {
-                    XSSFSheet sheetAt = sheets.getSheetAt(0);
-                    for (int i = 0; i < sheetAt.getPhysicalNumberOfRows(); i++) {
-                        XSSFRow row = sheetAt.getRow(i);
-                        StringBuilder builder = new StringBuilder();
-                        for (int index = 0; index < row.getPhysicalNumberOfCells(); index++) {
-                            XSSFCell cell = row.getCell(index);
-                            if (index > 0) {
-                                builder.append(",");
-                            }
-                            builder.append(getString(cell));
-                        }
-                        list.add(builder.toString());
-                    }
-                }
-            }
-        }
+        List<String> list = getXlsxResult(queryId, file);
         Files.delete(file.toPath());
-        logger.info("Temp File status createTempFileStatus:{}",
-                createTempFileStatus);
+        logger.info("Temp File status createTempFileStatus:{}", createTempFileStatus);
         assertEquals("123\",123", list.get(0));
     }
 
+    @Test
+    public void testSuccessQueryAndDownloadXlsxResultIncludeHeader() throws IOException {
+        QueryContext queryContext = QueryContext.current();
+        String queryId = queryContext.getQueryId();
+        mockMetadata(queryId, true);
+        queryContext.getQueryTagInfo().setAsyncQuery(true);
+        queryContext.getQueryTagInfo().setFileFormat("xlsx");
+        queryContext.getQueryTagInfo().setFileEncode("utf-8");
+        queryContext.getQueryTagInfo().setIncludeHeader(true);
+        String sql = "select '123\"' as col1,'123' as col2";
+        queryContext.setProject(PROJECT);
+        SparkSqlClient.executeSql(ss, sql, UUID.fromString(queryId), PROJECT);
+        assertSame(AsyncQueryService.QueryStatus.SUCCESS, asyncQueryService.queryStatus(PROJECT, queryId));
+        HttpServletResponse response = mock(HttpServletResponse.class);
+        ByteArrayOutputStream outputStream = mockOutputStream(response);
+        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, response, "xlsx", encodeDefault);
+
+        File file = new File("result.xlsx");
+        boolean createTempFileStatus = file.createNewFile();
+        List<String> list = getXlsxResult(queryId, file);
+        Files.delete(file.toPath());
+        logger.info("Temp File status createTempFileStatus:{}", createTempFileStatus);
+        assertEquals("col1,col2", list.get(0));
+        assertEquals("123\",123", list.get(1));
+    }
+
     private static String getString(XSSFCell xssfCell) {
         if (xssfCell == null) {
             return "";
         }
-        if (xssfCell.getCellType()== CellType.NUMERIC) {
+        if (xssfCell.getCellType() == CellType.NUMERIC) {
             return String.valueOf(xssfCell.getNumericCellValue());
         } else if (xssfCell.getCellType() == CellType.BOOLEAN) {
             return String.valueOf(xssfCell.getBooleanCellValue());
@@ -519,9 +537,9 @@ public class AysncQueryServiceTest extends ServiceTestBase {
             }
         }).when(servletOutputStream).write(any(byte[].class), anyInt(), anyInt());
 
-        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, false, response, formatDefault, encodeDefault, ",");
+        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, response, formatDefault, encodeDefault);
 
-        assertEquals("a1,b1,c1\n" + "a2,b2,c2\n", baos.toString(StandardCharsets.UTF_8.name()));
+        assertEquals("a1,b1,c1\r\n" + "a2,b2,c2\r\n", baos.toString(StandardCharsets.UTF_8.name()));
     }
 
     @Test
@@ -542,9 +560,9 @@ public class AysncQueryServiceTest extends ServiceTestBase {
             return null;
         }).when(servletOutputStream).write(any(byte[].class), anyInt(), anyInt());
 
-        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, true, response, formatDefault, encodeDefault, ",");
+        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, response, formatDefault, encodeDefault);
 
-        assertEquals("name,age,city\na1,b1,c1\n" + "a2,b2,c2\n", baos.toString(StandardCharsets.UTF_8.name()));
+        assertEquals("a1,b1,c1\r\n" + "a2,b2,c2\r\n", baos.toString(StandardCharsets.UTF_8.name()));
     }
 
     @Test
@@ -565,9 +583,9 @@ public class AysncQueryServiceTest extends ServiceTestBase {
             return null;
         }).when(servletOutputStream).write(any(byte[].class), anyInt(), anyInt());
 
-        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, false, response, formatDefault, encodeDefault, ",");
+        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, response, formatDefault, encodeDefault);
 
-        assertEquals("a1,b1,c1\n" + "a2,b2,c2\n", baos.toString(StandardCharsets.UTF_8.name()));
+        assertEquals("a1,b1,c1\r\n" + "a2,b2,c2\r\n", baos.toString(StandardCharsets.UTF_8.name()));
     }
 
     @Test
@@ -590,35 +608,12 @@ public class AysncQueryServiceTest extends ServiceTestBase {
             }
         }).when(servletOutputStream).write(any(byte[].class), anyInt(), anyInt());
 
-        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, false, response, "json", encodeDefault, ",");
+        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, response, "json", encodeDefault);
 
         assertEquals("[\"{'column1':'a1', 'column2':'b1'}\",\"{'column1':'a2', 'column2':'b2'}\"]",
                 baos.toString(StandardCharsets.UTF_8.name()));
     }
 
-    @Test
-    public void testSuccessQueryAndDownloadXlsxResult() throws IOException, InterruptedException {
-        SQLResponse sqlResponse = mock(SQLResponse.class);
-        when(sqlResponse.isException()).thenReturn(false);
-        String queryId = RandomUtil.randomUUIDStr();
-        mockResultFile(queryId, false, true);
-        assertSame(AsyncQueryService.QueryStatus.SUCCESS, asyncQueryService.queryStatus(PROJECT, queryId));
-        HttpServletResponse response = mock(HttpServletResponse.class);
-        ServletOutputStream servletOutputStream = mock(ServletOutputStream.class);
-        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        when(response.getOutputStream()).thenReturn(servletOutputStream);
-        doAnswer(new Answer() {
-            @Override
-            public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
-                Object[] arguments = invocationOnMock.getArguments();
-                baos.write((byte[]) arguments[0], (int) arguments[1], (int) arguments[2]);
-                return null;
-            }
-        }).when(servletOutputStream).write(any(byte[].class), anyInt(), anyInt());
-
-        asyncQueryService.retrieveSavedQueryResult(PROJECT, queryId, false, response, "xlsx", encodeDefault, ",");
-    }
-
     @Test
     public void testCleanFolder() throws IOException, InterruptedException {
         String queryId = RandomUtil.randomUUIDStr();
@@ -643,7 +638,7 @@ public class AysncQueryServiceTest extends ServiceTestBase {
         try {
             new Path(asyncQueryService.asyncQueryResultPath(PROJECT, queryId));
         } catch (Exception e) {
-            Assert.assertTrue(e instanceof NAsyncQueryIllegalParamException);
+            Assert.assertTrue(e instanceof KylinException);
             Assert.assertEquals("Can’t find the query by this query ID in this project. Please check and try again.",
                     e.getMessage());
         }
@@ -654,7 +649,7 @@ public class AysncQueryServiceTest extends ServiceTestBase {
         try {
             asyncQueryService.deleteByQueryId(PROJECT, "123");
         } catch (Exception e) {
-            Assert.assertTrue(e instanceof NAsyncQueryIllegalParamException);
+            Assert.assertTrue(e instanceof KylinException);
             Assert.assertEquals("Can’t find the query by this query ID in this project. Please check and try again.",
                     e.getMessage());
         }
@@ -678,7 +673,7 @@ public class AysncQueryServiceTest extends ServiceTestBase {
         try {
             new Path(asyncQueryService.asyncQueryResultPath(PROJECT, queryId));
         } catch (Exception e) {
-            Assert.assertTrue(e instanceof NAsyncQueryIllegalParamException);
+            Assert.assertTrue(e instanceof KylinException);
             Assert.assertEquals("Can’t find the query by this query ID in this project. Please check and try again.",
                     e.getMessage());
         }
@@ -808,14 +803,14 @@ public class AysncQueryServiceTest extends ServiceTestBase {
         try {
             AsyncQueryUtil.saveMetaData(PROJECT, sqlResponse.getColumnMetas(), queryId);
         } catch (Exception e) {
-            Assert.assertTrue(e instanceof NAsyncQueryIllegalParamException);
-            Assert.assertEquals("KE-020040001", ((NAsyncQueryIllegalParamException) e).getErrorCode().getCodeString());
+            Assert.assertTrue(e instanceof KylinException);
+            Assert.assertEquals("KE-010031301", ((KylinException) e).getErrorCode().getCodeString());
         }
         try {
             AsyncQueryUtil.saveFileInfo(PROJECT, formatDefault, encodeDefault, fileNameDefault, queryId, ",");
         } catch (Exception e) {
-            Assert.assertTrue(e instanceof NAsyncQueryIllegalParamException);
-            Assert.assertEquals("KE-020040001", ((NAsyncQueryIllegalParamException) e).getErrorCode().getCodeString());
+            Assert.assertTrue(e instanceof KylinException);
+            Assert.assertEquals("KE-010031301", ((KylinException) e).getErrorCode().getCodeString());
         }
     }
 
@@ -902,7 +897,7 @@ public class AysncQueryServiceTest extends ServiceTestBase {
         Path asyncQueryResultDir = AsyncQueryUtil.getAsyncQueryResultDir(PROJECT, queryId);
         fileSystem.delete(new Path(asyncQueryResultDir, AsyncQueryUtil.getFileInfo()));
         try (FSDataOutputStream os = fileSystem.create(new Path(asyncQueryResultDir, AsyncQueryUtil.getFileInfo()));
-             OutputStreamWriter osw = new OutputStreamWriter(os, Charset.defaultCharset())) {
+                OutputStreamWriter osw = new OutputStreamWriter(os, Charset.defaultCharset())) {
             osw.write(formatDefault + "\n");
             osw.write(encodeDefault + "\n");
             osw.write("foo" + "\n");
@@ -924,6 +919,33 @@ public class AysncQueryServiceTest extends ServiceTestBase {
         assertArrayEquals(dataTypes.toArray(), metaData.get(1).toArray());
     }
 
+    @Test
+    public void testAsyncQueryResultRowCount() throws Exception {
+        overwriteSystemProp("kylin.env", "DEV");
+        QueryContext queryContext = QueryContext.current();
+        String queryId = queryContext.getQueryId();
+        mockMetadata(queryId, true);
+        queryContext.getQueryTagInfo().setAsyncQuery(true);
+        queryContext.getQueryTagInfo().setFileFormat("csv");
+        queryContext.getQueryTagInfo().setFileEncode("utf-8");
+        queryContext.getQueryTagInfo().setSeparator(",");
+        queryContext.getQueryTagInfo().setIncludeHeader(false);
+        queryContext.setAclInfo(new QueryContext.AclInfo("ADMIN", Sets.newHashSet("g1"), true));
+
+        String sql = "select '123\"','123'";
+        queryContext.setProject(PROJECT);
+
+        new QueryExec(PROJECT, getTestConfig()).executeQuery(sql);
+
+        assertSame(AsyncQueryService.QueryStatus.SUCCESS, asyncQueryService.queryStatus(PROJECT, queryId));
+
+        QueryMetricsContext.start(queryId, "");
+        Assert.assertTrue(QueryMetricsContext.isStarted());
+        QueryMetricsContext metrics = QueryMetricsContext.collect(queryContext);
+        Assert.assertEquals(1, metrics.getResultRowCount());
+        QueryMetricsContext.reset();
+    }
+
     public Path mockResultFile(String queryId, boolean block, boolean needMeta)
             throws IOException, InterruptedException {
 
@@ -939,8 +961,8 @@ public class AysncQueryServiceTest extends ServiceTestBase {
         }
 
         try (FSDataOutputStream os = fileSystem.create(new Path(asyncQueryResultDir, "m00")); //
-             OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8); //
-             ICsvListWriter csvWriter = new CsvListWriter(osw, CsvPreference.STANDARD_PREFERENCE)) {
+                OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8); //
+                ICsvListWriter csvWriter = new CsvListWriter(osw, CsvPreference.STANDARD_PREFERENCE)) {
             csvWriter.write(row1);
             csvWriter.write(row2);
             fileSystem.createNewFile(new Path(asyncQueryResultDir, AsyncQueryUtil.getSuccessFlagFileName()));
@@ -963,7 +985,7 @@ public class AysncQueryServiceTest extends ServiceTestBase {
             fileSystem.mkdirs(asyncQueryResultDir);
         }
         try (FSDataOutputStream os = fileSystem.create(new Path(asyncQueryResultDir, "m00")); //
-             OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8)) {
+                OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8)) {
             osw.write(StringEscapeUtils.unescapeJson(row1));
             osw.write(StringEscapeUtils.unescapeJson(row2));
             fileSystem.createNewFile(new Path(asyncQueryResultDir, AsyncQueryUtil.getSuccessFlagFileName()));
@@ -982,7 +1004,7 @@ public class AysncQueryServiceTest extends ServiceTestBase {
         }
         try (FSDataOutputStream os = fileSystem
                 .create(new Path(asyncQueryResultDir, AsyncQueryUtil.getMetaDataFileName())); //
-             OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8)) { //
+                OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8)) { //
             String metaString = String.join(",", columnNames) + "\n" + String.join(",", dataTypes);
             osw.write(metaString);
             if (needMeta) {
@@ -1003,7 +1025,7 @@ public class AysncQueryServiceTest extends ServiceTestBase {
         }
         try (FSDataOutputStream os = fileSystem
                 .create(new Path(asyncQueryResultDir, AsyncQueryUtil.getMetaDataFileName())); //
-             OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8)) { //
+                OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8)) { //
             osw.write(formatDefault);
 
         } catch (IOException e) {
@@ -1019,7 +1041,7 @@ public class AysncQueryServiceTest extends ServiceTestBase {
         }
         try (FSDataOutputStream os = fileSystem
                 .create(new Path(asyncQueryResultDir, AsyncQueryUtil.getMetaDataFileName())); //
-             OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8)) { //
+                OutputStreamWriter osw = new OutputStreamWriter(os, StandardCharsets.UTF_8)) { //
             osw.write(encodeDefault);
 
         } catch (IOException e) {
@@ -1027,17 +1049,49 @@ public class AysncQueryServiceTest extends ServiceTestBase {
         }
     }
 
-    @Test
-    public void testCsvWriter() throws IOException {
-        List<List<Object>> rows = Lists.newArrayList(
-                Lists.newArrayList(1, 3.12, "foo"),
-                Lists.newArrayList(2, 3.123, "fo<>o"),
-                Lists.newArrayList(3, 3.124, "fo\ro")
-        );
-        String expected = "1<>3.12<>foo\n2<>3.123<>\"fo<>o\"\n3<>3.124<>\"fo\ro\"\n";
-        try (StringWriter sw = new StringWriter()) {
-            CSVWriter.writeCsv(rows.iterator(), sw, "<>");
-            assertEquals(expected, sw.toString());
+    public ByteArrayOutputStream mockOutputStream(HttpServletResponse response) throws IOException {
+
+        ServletOutputStream servletOutputStream = mock(ServletOutputStream.class);
+        final ByteArrayOutputStream baos = new ByteArrayOutputStream();
+        when(response.getOutputStream()).thenReturn(servletOutputStream);
+        doAnswer(new Answer() {
+            @Override
+            public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
+                Object[] arguments = invocationOnMock.getArguments();
+                baos.write((byte[]) arguments[0], (int) arguments[1], (int) arguments[2]);
+                return null;
+            }
+        }).when(servletOutputStream).write(any(byte[].class), anyInt(), anyInt());
+        return baos;
+    }
+
+    public List<String> getXlsxResult(String queryId, File file) throws IOException {
+        FileSystem fileSystem = AsyncQueryUtil.getFileSystem();
+        List<String> list = new ArrayList<>();
+        FileStatus[] fileStatuses = fileSystem
+                .listStatus(new Path(asyncQueryService.getAsyncQueryResultDir(PROJECT, queryId).toString()));
+        for (FileStatus f : fileStatuses) {
+            if (f.getPath().getName().startsWith("_")) {
+                continue;
+            }
+            fileSystem.copyToLocalFile(f.getPath(), new Path(file.getPath()));
+            try (InputStream is = new FileInputStream(file.getAbsolutePath());
+                    XSSFWorkbook sheets = new XSSFWorkbook(is)) {
+                XSSFSheet sheetAt = sheets.getSheetAt(0);
+                for (int i = 0; i < sheetAt.getPhysicalNumberOfRows(); i++) {
+                    XSSFRow row = sheetAt.getRow(i);
+                    StringBuilder builder = new StringBuilder();
+                    for (int index = 0; index < row.getPhysicalNumberOfCells(); index++) {
+                        XSSFCell cell = row.getCell(index);
+                        if (index > 0) {
+                            builder.append(",");
+                        }
+                        builder.append(getString(cell));
+                    }
+                    list.add(builder.toString());
+                }
+            }
         }
+        return list;
     }
 }
diff --git a/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/pushdown/SparkSqlClient.scala b/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/pushdown/SparkSqlClient.scala
index 5b6d20c715..b5640d7c29 100644
--- a/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/pushdown/SparkSqlClient.scala
+++ b/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/pushdown/SparkSqlClient.scala
@@ -33,10 +33,11 @@ import org.apache.spark.network.util.JavaUtils
 import org.apache.spark.sql.hive.QueryMetricUtils
 import org.apache.spark.sql.hive.utils.ResourceDetectUtils
 import org.apache.spark.sql.util.SparderTypeUtil
-import org.apache.spark.sql.{DataFrame, SparderEnv, SparkSession}
+import org.apache.spark.sql.{DataFrame, Row, SparderEnv, SparkSession}
 import org.slf4j.{Logger, LoggerFactory}
 
 import java.sql.Timestamp
+import java.util
 import java.util.{UUID, List => JList}
 import scala.collection.JavaConverters._
 import scala.collection.{immutable, mutable}
@@ -130,29 +131,8 @@ object SparkSqlClient {
       QueryContext.current().getMetrics.setQueryJobCount(jobCount)
       QueryContext.current().getMetrics.setQueryStageCount(stageCount)
       QueryContext.current().getMetrics.setQueryTaskCount(taskCount)
-      (
-        () => new java.util.Iterator[JList[String]] {
-          /*
-           * After fetching a batch of 1000, checks whether the query thread is interrupted.
-           */
-          val checkInterruptSize = 1000;
-          var readRowSize = 0;
-
-          override def hasNext: Boolean = resultRows.hasNext
-
-          override def next(): JList[String] = {
-            val row = resultRows.next()
-            readRowSize += 1;
-            if (readRowSize % checkInterruptSize == 0) {
-              QueryUtil.checkThreadInterrupted("Interrupted at the stage of collecting result in SparkSqlClient.",
-                "Current step: Collecting dataset of push-down.")
-            }
-            row.toSeq.map(rawValueToString(_)).asJava
-          }
-        },
-        resultSize,
-        fieldList
-      )
+      // return result
+      (readPushDownResultRow(resultRows, true), resultSize, fieldList)
     } catch {
       case e: Throwable =>
         if (e.isInstanceOf[InterruptedException]) {
@@ -169,6 +149,29 @@ object SparkSqlClient {
     }
   }
 
+  def readPushDownResultRow(resultRows: util.Iterator[Row], checkInterrupt: Boolean): java.lang.Iterable[JList[String]] = {
+    () =>
+      new java.util.Iterator[JList[String]] {
+        /*
+         * After fetching a batch of 1000, checks whether the query thread is interrupted.
+         */
+        val checkInterruptSize = 1000;
+        var readRowSize = 0;
+
+        override def hasNext: Boolean = resultRows.hasNext
+
+        override def next(): JList[String] = {
+          val row = resultRows.next()
+          readRowSize += 1;
+          if (checkInterrupt && readRowSize % checkInterruptSize == 0) {
+            QueryUtil.checkThreadInterrupted("Interrupted at the stage of collecting result in SparkSqlClient.",
+              "Current step: Collecting dataset of push-down.")
+          }
+          row.toSeq.map(rawValueToString(_)).asJava
+        }
+      }
+  }
+
   private def rawValueToString(value: Any, wrapped: Boolean = false): String = value match {
     case null => null
     case value: Timestamp => DateFormat.castTimestampToString(value.getTime)
diff --git a/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/runtime/plan/ResultPlan.scala b/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/runtime/plan/ResultPlan.scala
index 7ecc408674..8cc3a6ba25 100644
--- a/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/runtime/plan/ResultPlan.scala
+++ b/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/runtime/plan/ResultPlan.scala
@@ -20,7 +20,8 @@ package org.apache.kylin.query.runtime.plan
 
 import com.google.common.cache.{Cache, CacheBuilder}
 import io.kyligence.kap.secondstorage.SecondStorageUtil
-import org.apache.calcite.rel.`type`.RelDataType
+import org.apache.calcite.rel.`type`.{RelDataType, RelDataTypeField}
+import org.apache.commons.io.IOUtils
 import org.apache.hadoop.fs.Path
 import org.apache.kylin.common.exception.NewQueryRefuseException
 import org.apache.kylin.common.util.{HadoopUtil, RandomUtil}
@@ -30,19 +31,22 @@ import org.apache.kylin.metadata.query.{BigQueryThresholdUpdater, StructField}
 import org.apache.kylin.metadata.state.QueryShareStateManager
 import org.apache.kylin.query.engine.RelColumnMetaDataExtractor
 import org.apache.kylin.query.engine.exec.ExecuteResult
+import org.apache.kylin.query.pushdown.SparkSqlClient.readPushDownResultRow
 import org.apache.kylin.query.relnode.OLAPContext
 import org.apache.kylin.query.util.{AsyncQueryUtil, QueryUtil, SparkJobTrace, SparkQueryJobManager}
-import org.apache.poi.xssf.usermodel.XSSFWorkbook
+import org.apache.poi.xssf.usermodel.{XSSFSheet, XSSFWorkbook}
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.execution._
 import org.apache.spark.sql.hive.QueryMetricUtils
 import org.apache.spark.sql.util.SparderTypeUtil
-import org.apache.spark.sql.{DataFrame, SaveMode, SparderEnv}
+import org.apache.spark.sql.{DataFrame, Row, SaveMode, SparderEnv}
 
-import java.io.{File, FileOutputStream}
-import java.util
+import java.io.{File, FileOutputStream, OutputStreamWriter}
+import java.nio.charset.StandardCharsets
 import java.util.concurrent.atomic.AtomicLong
+import java.{lang, util}
 import scala.collection.JavaConverters._
+import scala.collection.convert.ImplicitConversions.`iterator asScala`
 import scala.collection.mutable
 
 // scalastyle:off
@@ -55,6 +59,10 @@ object ResultPlan extends LogEx {
   val PARTITION_SPLIT_BYTES: Long = KylinConfig.getInstanceFromEnv.getQueryPartitionSplitSizeMB * 1024 * 1024 // 64MB
   val SPARK_SCHEDULER_POOL: String = "spark.scheduler.pool"
 
+  val QUOTE_CHAR = "\""
+  val END_OF_LINE_SYMBOLS = IOUtils.LINE_SEPARATOR_UNIX
+  val CHECK_WRITE_SIZE = 1000
+
   private def collectInternal(df: DataFrame, rowType: RelDataType): (java.lang.Iterable[util.List[String]], Int) = logTime("collectInternal", debug = true) {
     val jobGroup = Thread.currentThread().getName
     val sparkContext = SparderEnv.getSparkSession.sparkContext
@@ -139,20 +147,7 @@ object ResultPlan extends LogEx {
         s"Is TableIndex: ${QueryContext.current().getQueryTagInfo.isTableIndex}")
 
       val resultTypes = rowType.getFieldList.asScala
-      (() => new util.Iterator[util.List[String]] {
-
-        override def hasNext: Boolean = resultRows.hasNext
-
-        override def next(): util.List[String] = {
-          val row = resultRows.next()
-          if (Thread.interrupted()) {
-            throw new InterruptedException
-          }
-          row.toSeq.zip(resultTypes).map {
-            case (value, relField) => SparderTypeUtil.convertToStringWithCalciteType(value, relField.getType)
-          }.asJava
-        }
-      }, resultSize)
+      (readResultRow(resultRows, resultTypes), resultSize)
     } catch {
       case e: Throwable =>
         if (e.isInstanceOf[InterruptedException]) {
@@ -167,6 +162,25 @@ object ResultPlan extends LogEx {
     }
   }
 
+
+  def readResultRow(resultRows: util.Iterator[Row], resultTypes: mutable.Buffer[RelDataTypeField]): lang.Iterable[util.List[String]] = {
+    () =>
+      new util.Iterator[util.List[String]] {
+
+        override def hasNext: Boolean = resultRows.hasNext
+
+        override def next(): util.List[String] = {
+          val row = resultRows.next()
+          if (Thread.interrupted()) {
+            throw new InterruptedException
+          }
+          row.toSeq.zip(resultTypes).map {
+            case (value, relField) => SparderTypeUtil.convertToStringWithCalciteType(value, relField.getType)
+          }.asJava
+        }
+      }
+  }
+
   private def getNormalizedExplain(df: DataFrame): String = {
     df.queryExecution.executedPlan.toString.replaceAll("#\\d+", "#x")
   }
@@ -282,6 +296,8 @@ object ResultPlan extends LogEx {
     QueryContext.currentTrace().endLastSpan()
     val jobTrace = new SparkJobTrace(jobGroup, QueryContext.currentTrace(), QueryContext.current().getQueryId, sparkContext)
     val dateTimeFormat = "yyyy-MM-dd'T'HH:mm:ss.SSSZ"
+    val queryId = QueryContext.current().getQueryId
+    val includeHeader = QueryContext.current().getQueryTagInfo.isIncludeHeader
     format match {
       case "json" =>
         val oldColumnNames = df.columns
@@ -302,31 +318,8 @@ object ResultPlan extends LogEx {
           normalizeSchema(df).write.mode(SaveMode.Overwrite).option("encoding", encode).option("charset", "utf-8").parquet(path)
         }
         sqlContext.setConf("spark.sql.parquet.writeLegacyFormat", "false")
-      case "csv" =>
-        df.write
-          .option("timestampFormat", dateTimeFormat)
-          .option("encoding", encode)
-          .option("dateFormat", "yyyy-MM-dd")
-          .option("charset", "utf-8").mode(SaveMode.Append).csv(path)
-      case "xlsx" => {
-        val queryId = QueryContext.current().getQueryId
-        val file = new File(queryId + ".xlsx")
-        file.createNewFile();
-        val outputStream = new FileOutputStream(file)
-        val workbook = new XSSFWorkbook
-        val sheet = workbook.createSheet("query_result");
-        var num = 0
-        df.collect().foreach(row => {
-          val row1 = sheet.createRow(num)
-          for (i <- 0 until row.length) {
-            row1.createCell(i).setCellValue(row.apply(i).toString)
-          }
-          num = num + 1
-        })
-        workbook.write(outputStream)
-        HadoopUtil.getWorkingFileSystem
-          .copyFromLocalFile(true, true, new Path(file.getPath), new Path(path + "/" + queryId + ".xlsx"))
-      }
+      case "csv" => processCsv(df, format, rowType, path, queryId, includeHeader)
+      case "xlsx" => processXlsx(df, format, rowType, path, queryId, includeHeader)
       case _ =>
         normalizeSchema(df).write.option("timestampFormat", dateTimeFormat).option("encoding", encode)
           .option("charset", "utf-8").mode(SaveMode.Append).parquet(path)
@@ -345,11 +338,142 @@ object ResultPlan extends LogEx {
       QueryContext.current().getMetrics.setQueryJobCount(jobCount)
       QueryContext.current().getMetrics.setQueryStageCount(stageCount)
       QueryContext.current().getMetrics.setQueryTaskCount(taskCount)
-      QueryContext.current().getMetrics.setResultRowCount(newExecution.executedPlan.metrics.get("numOutputRows")
+      setResultRowCount(newExecution.executedPlan)
+    }
+  }
+
+  def setResultRowCount(plan: SparkPlan): Unit = {
+    if (QueryContext.current().getMetrics.getResultRowCount == 0) {
+      QueryContext.current().getMetrics.setResultRowCount(plan.metrics.get("numOutputRows")
         .map(_.value).getOrElse(0))
     }
   }
 
+  def processCsv(df: DataFrame, format: String, rowType: RelDataType, path: String, queryId: String, includeHeader: Boolean) = {
+    val file = createTmpFile(queryId, format)
+    val writer = new OutputStreamWriter(new FileOutputStream(file), StandardCharsets.UTF_8)
+    if (includeHeader) processCsvHeader(writer, rowType)
+    val (iterator, resultRowSize) = df.toIterator()
+    asyncQueryIteratorWriteCsv(iterator, writer, rowType)
+    uploadAsyncQueryResult(file, path, queryId, format)
+    setResultRowCount(resultRowSize)
+  }
+
+  def processXlsx(df: DataFrame, format: String, rowType: RelDataType, path: String, queryId: String, includeHeader: Boolean) = {
+    val file = createTmpFile(queryId, format)
+    val outputStream = new FileOutputStream(file)
+    val workbook = new XSSFWorkbook
+    val sheet = workbook.createSheet("query_result")
+    var num = 0
+    if (includeHeader) {
+      processXlsxHeader(sheet, rowType)
+      num += 1
+    }
+    val (iterator, resultRowSize) = df.toIterator()
+    iterator.foreach(row => {
+      val row1 = sheet.createRow(num)
+      row.toSeq.zipWithIndex.foreach(it => row1.createCell(it._2).setCellValue(it._1.toString))
+      num += 1
+    })
+    workbook.write(outputStream)
+    uploadAsyncQueryResult(file, path, queryId, format)
+    setResultRowCount(resultRowSize)
+  }
+
+  private def setResultRowCount(resultRowSize: Int) = {
+    if (!KylinConfig.getInstanceFromEnv.isUTEnv) {
+      QueryContext.current().getMetrics.setResultRowCount(resultRowSize)
+    }
+  }
+
+  def processCsvHeader(writer: OutputStreamWriter, rowType: RelDataType): Unit = {
+    val separator = QueryContext.current().getQueryTagInfo.getSeparator
+    rowType match {
+      case null =>
+        val columnNames = QueryContext.current().getColumnNames.asScala.mkString(separator)
+        writer.write(columnNames + END_OF_LINE_SYMBOLS)
+      case _ =>
+        val builder = new StringBuilder
+        rowType.getFieldList.asScala.map(t => t.getName).foreach(column => builder.append(separator + column))
+        builder.deleteCharAt(0)
+        writer.write(builder.toString() + END_OF_LINE_SYMBOLS)
+    }
+    writer.flush()
+  }
+
+  def processXlsxHeader(sheet: XSSFSheet, rowType: RelDataType): Unit = {
+    val excelRow = sheet.createRow(0)
+
+    rowType match {
+      case null =>
+        val columnNameArray = QueryContext.current().getColumnNames
+        columnNameArray.asScala.zipWithIndex
+          .foreach(it => excelRow.createCell(it._2).setCellValue(it._1))
+      case _ =>
+        val columnArray = rowType.getFieldList.asScala.map(t => t.getName)
+        columnArray.zipWithIndex.foreach(it => excelRow.createCell(it._2).setCellValue(it._1))
+    }
+  }
+
+  def createTmpFile(queryId: String, format: String): File = {
+    val file = new File(queryId + format)
+    file.createNewFile()
+    file
+  }
+
+  def uploadAsyncQueryResult(file: File, path: String, queryId: String, format: String): Unit = {
+    HadoopUtil.getWorkingFileSystem
+      .copyFromLocalFile(true, true, new Path(file.getPath), new Path(path + "/" + queryId + "." + format))
+    if (file.exists()) file.delete()
+  }
+
+  def asyncQueryIteratorWriteCsv(resultRows: util.Iterator[Row], outputStream: OutputStreamWriter, rowType: RelDataType): Unit = {
+    var asyncQueryRowSize = 0
+    val separator = QueryContext.current().getQueryTagInfo.getSeparator
+    val asyncQueryResult = if (rowType != null) {
+      val resultTypes = rowType.getFieldList.asScala
+      readResultRow(resultRows, resultTypes)
+    } else {
+      readPushDownResultRow(resultRows, false)
+    }
+
+    asyncQueryResult.forEach(row => {
+
+      asyncQueryRowSize += 1
+      val builder = new StringBuilder
+
+      for (i <- 0 until row.size()) {
+        val column = if (row.get(i) == null) "" else row.get(i)
+
+        if (i > 0) builder.append(separator)
+
+        val escapedCsv = encodeCell(column, separator)
+        builder.append(escapedCsv)
+      }
+      builder.append(END_OF_LINE_SYMBOLS)
+      outputStream.write(builder.toString())
+      if (asyncQueryRowSize % CHECK_WRITE_SIZE == 0) {
+        outputStream.flush()
+      }
+    })
+    outputStream.flush()
+  }
+
+  // the encode logic is copied from org.supercsv.encoder.DefaultCsvEncoder.encode
+  def encodeCell(column1: String, separator: String): String = {
+
+    var column = column1
+    var needQuote = column.contains(separator) || column.contains("\r") || column.contains("\n")
+
+    if (column.contains(QUOTE_CHAR)) {
+      needQuote = true
+      column = column.replace(QUOTE_CHAR, QUOTE_CHAR + QUOTE_CHAR)
+    }
+
+    if (needQuote) QUOTE_CHAR + column + QUOTE_CHAR
+    else column
+  }
+
   /**
    * Normalize column name by replacing invalid characters with underscore
    * and strips accents


[kylin] 31/34: KYLIN-5449 fix storage quota issue

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 19be14f9e67c42b3787176ecab9d2ba32beff938
Author: qianhao.zhou <z....@gmail.com>
AuthorDate: Thu Jan 5 18:48:38 2023 +0800

    KYLIN-5449 fix storage quota issue
    
    Co-authored-by: qhzhou <qi...@kyligence.io>
---
 .../org/apache/kylin/rest/service/ProjectService.java    |  5 ++---
 .../kylin/job/runners/QuotaStorageCheckRunner.java       |  4 +---
 .../metadata/cube/storage/GarbageStorageCollector.java   |  5 +++++
 .../cube/storage/ProjectStorageInfoCollector.java        | 16 +++++++---------
 .../metadata/cube/storage/StorageInfoCollector.java      |  2 ++
 .../metadata/cube/storage/StorageQuotaCollector.java     |  5 +++++
 .../metadata/cube/storage/TotalStorageCollector.java     |  5 +++++
 .../cube/storage/ProjectStorageInfoCollectorTest.java    | 12 +++++-------
 8 files changed, 32 insertions(+), 22 deletions(-)

diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/ProjectService.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/ProjectService.java
index 393d7a0653..1af9955759 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/ProjectService.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/ProjectService.java
@@ -330,9 +330,8 @@ public class ProjectService extends BasicService {
 
     public StorageVolumeInfoResponse getStorageVolumeInfoResponse(String project) {
         val response = new StorageVolumeInfoResponse();
-        val storageInfoEnumList = Lists.newArrayList(StorageInfoEnum.GARBAGE_STORAGE, StorageInfoEnum.STORAGE_QUOTA,
-                StorageInfoEnum.TOTAL_STORAGE);
-        val collector = new ProjectStorageInfoCollector(storageInfoEnumList);
+        val collector = new ProjectStorageInfoCollector(Lists.newArrayList(StorageInfoEnum.GARBAGE_STORAGE, StorageInfoEnum.STORAGE_QUOTA,
+                StorageInfoEnum.TOTAL_STORAGE));
         val storageVolumeInfo = collector.getStorageVolumeInfo(getConfig(), project);
         response.setGarbageStorageSize(storageVolumeInfo.getGarbageStorageSize());
         response.setStorageQuotaSize(storageVolumeInfo.getStorageQuotaSize());
diff --git a/src/core-job/src/main/java/org/apache/kylin/job/runners/QuotaStorageCheckRunner.java b/src/core-job/src/main/java/org/apache/kylin/job/runners/QuotaStorageCheckRunner.java
index c7f3140671..a69ea16c87 100644
--- a/src/core-job/src/main/java/org/apache/kylin/job/runners/QuotaStorageCheckRunner.java
+++ b/src/core-job/src/main/java/org/apache/kylin/job/runners/QuotaStorageCheckRunner.java
@@ -38,9 +38,7 @@ public class QuotaStorageCheckRunner extends AbstractDefaultSchedulerRunner {
 
     public QuotaStorageCheckRunner(NDefaultScheduler nDefaultScheduler) {
         super(nDefaultScheduler);
-
-        val storageInfoEnumList = Lists.newArrayList(StorageInfoEnum.STORAGE_QUOTA, StorageInfoEnum.TOTAL_STORAGE);
-        collector = new ProjectStorageInfoCollector(storageInfoEnumList);
+        collector = new ProjectStorageInfoCollector(Lists.newArrayList(StorageInfoEnum.STORAGE_QUOTA, StorageInfoEnum.TOTAL_STORAGE));
     }
 
     @Override
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/GarbageStorageCollector.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/GarbageStorageCollector.java
index b1e0856197..12f0a21212 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/GarbageStorageCollector.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/GarbageStorageCollector.java
@@ -61,6 +61,11 @@ public class GarbageStorageCollector implements StorageInfoCollector {
         storageVolumeInfo.setGarbageStorageSize(storageSize);
     }
 
+    @Override
+    public StorageInfoEnum getType() {
+        return StorageInfoEnum.GARBAGE_STORAGE;
+    }
+
     private List<NDataModel> getModels(String project) {
         val dataflowManager = NDataflowManager.getInstance(KylinConfig.getInstanceFromEnv(), project);
         return dataflowManager.listUnderliningDataModels();
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/ProjectStorageInfoCollector.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/ProjectStorageInfoCollector.java
index ea864bf15f..1148d304e3 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/ProjectStorageInfoCollector.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/ProjectStorageInfoCollector.java
@@ -24,17 +24,15 @@ import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 
-import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
 
 public class ProjectStorageInfoCollector {
 
     private List<StorageInfoCollector> collectors = Lists.newArrayList();
 
-    private static final ImmutableMap<Class<?>, StorageInfoEnum> collectorType = ImmutableMap
-            .<Class<?>, StorageInfoEnum> builder().put(GarbageStorageCollector.class, StorageInfoEnum.GARBAGE_STORAGE)
-            .put(TotalStorageCollector.class, StorageInfoEnum.TOTAL_STORAGE)
-            .put(StorageQuotaCollector.class, StorageInfoEnum.STORAGE_QUOTA).build();
+    private static GarbageStorageCollector garbageStorageCollector = new GarbageStorageCollector();
+    private static TotalStorageCollector totalStorageCollector = new TotalStorageCollector();
+    private static StorageQuotaCollector storageQuotaCollector = new StorageQuotaCollector();
 
     public ProjectStorageInfoCollector(List<StorageInfoEnum> storageInfoList) {
         if (CollectionUtils.isNotEmpty(storageInfoList)) {
@@ -47,7 +45,7 @@ public class ProjectStorageInfoCollector {
             try {
                 collector.collect(config, project, storageVolumeInfo);
             } catch (Exception e) {
-                storageVolumeInfo.getThrowableMap().put(collectorType.get(collector.getClass()), e);
+                storageVolumeInfo.getThrowableMap().put(collector.getType(), e);
             }
         }
     }
@@ -55,13 +53,13 @@ public class ProjectStorageInfoCollector {
     private void addCollectors(StorageInfoEnum storageInfoEnum) {
         switch (storageInfoEnum) {
         case GARBAGE_STORAGE:
-            collectors.add(new GarbageStorageCollector());
+            collectors.add(garbageStorageCollector);
             break;
         case TOTAL_STORAGE:
-            collectors.add(new TotalStorageCollector());
+            collectors.add(totalStorageCollector);
             break;
         case STORAGE_QUOTA:
-            collectors.add(new StorageQuotaCollector());
+            collectors.add(storageQuotaCollector);
             break;
         default:
             break;
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/StorageInfoCollector.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/StorageInfoCollector.java
index 73c63f13ac..6cd330804d 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/StorageInfoCollector.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/StorageInfoCollector.java
@@ -32,4 +32,6 @@ public interface StorageInfoCollector {
     }
 
     void doCollect(KylinConfig config, String project, StorageVolumeInfo storageVolumeInfo) throws IOException;
+
+    StorageInfoEnum getType();
 }
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/StorageQuotaCollector.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/StorageQuotaCollector.java
index 8b04f05164..306d77f9af 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/StorageQuotaCollector.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/StorageQuotaCollector.java
@@ -32,4 +32,9 @@ public class StorageQuotaCollector implements StorageInfoCollector {
         storageVolumeInfo.setStorageQuotaSize(quotaSize);
     }
 
+    @Override
+    public StorageInfoEnum getType() {
+        return StorageInfoEnum.STORAGE_QUOTA;
+    }
+
 }
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/TotalStorageCollector.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/TotalStorageCollector.java
index 74c30ab86a..92d38355ad 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/TotalStorageCollector.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/TotalStorageCollector.java
@@ -50,4 +50,9 @@ public class TotalStorageCollector implements StorageInfoCollector {
         storageVolumeInfo.setTotalStorageSize(totalStorageSize);
     }
 
+    @Override
+    public StorageInfoEnum getType() {
+        return StorageInfoEnum.TOTAL_STORAGE;
+    }
+
 }
diff --git a/src/core-metadata/src/test/java/org/apache/kylin/metadata/cube/storage/ProjectStorageInfoCollectorTest.java b/src/core-metadata/src/test/java/org/apache/kylin/metadata/cube/storage/ProjectStorageInfoCollectorTest.java
index eb79e97fdb..95be8e38be 100644
--- a/src/core-metadata/src/test/java/org/apache/kylin/metadata/cube/storage/ProjectStorageInfoCollectorTest.java
+++ b/src/core-metadata/src/test/java/org/apache/kylin/metadata/cube/storage/ProjectStorageInfoCollectorTest.java
@@ -20,6 +20,7 @@ package org.apache.kylin.metadata.cube.storage;
 
 import java.io.IOException;
 import java.lang.reflect.Field;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -87,9 +88,8 @@ public class ProjectStorageInfoCollectorTest extends NLocalFileMetadataTestCase
         getTestConfig().setProperty("kylin.metadata.semi-automatic-mode", "true");
         initTestData();
 
-        val storageInfoEnumList = Lists.newArrayList(StorageInfoEnum.GARBAGE_STORAGE, StorageInfoEnum.STORAGE_QUOTA,
-                StorageInfoEnum.TOTAL_STORAGE);
-        val collector = new ProjectStorageInfoCollector(storageInfoEnumList);
+        val collector = new ProjectStorageInfoCollector(Lists.newArrayList(StorageInfoEnum.GARBAGE_STORAGE, StorageInfoEnum.STORAGE_QUOTA,
+                StorageInfoEnum.TOTAL_STORAGE));
         val volumeInfo = collector.getStorageVolumeInfo(getTestConfig(), DEFAULT_PROJECT);
 
         Assert.assertEquals(10240L * 1024 * 1024 * 1024, volumeInfo.getStorageQuotaSize());
@@ -421,8 +421,7 @@ public class ProjectStorageInfoCollectorTest extends NLocalFileMetadataTestCase
 
     @Test
     public void testGetStorageVolumeInfoEmpty() {
-        List<StorageInfoEnum> storageInfoEnumList = Lists.newArrayList();
-        val collector = new ProjectStorageInfoCollector(storageInfoEnumList);
+        val collector = new ProjectStorageInfoCollector(Collections.emptyList());
         val storageVolumeInfo = collector.getStorageVolumeInfo(getTestConfig(), DEFAULT_PROJECT);
 
         Assert.assertEquals(-1L, storageVolumeInfo.getStorageQuotaSize());
@@ -433,9 +432,8 @@ public class ProjectStorageInfoCollectorTest extends NLocalFileMetadataTestCase
 
     @Test
     public void testGetStorageVolumeException() throws NoSuchFieldException, IllegalAccessException, IOException {
-        List<StorageInfoEnum> storageInfoEnumList = Lists.newArrayList();
         TotalStorageCollector totalStorageCollector = Mockito.spy(TotalStorageCollector.class);
-        ProjectStorageInfoCollector collector = new ProjectStorageInfoCollector(storageInfoEnumList);
+        ProjectStorageInfoCollector collector = new ProjectStorageInfoCollector(Collections.emptyList());
         Field field = collector.getClass().getDeclaredField("collectors");
         Unsafe.changeAccessibleObject(field, true);
         List<StorageInfoCollector> collectors = (List<StorageInfoCollector>) field.get(collector);


[kylin] 12/34: KYLIN-5450 check if shard by columns included in col orders

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 5b41bca2aebfefdc7563ef19110c6ef7ec8c2dc6
Author: Dorris Zhang <ru...@kyligence.io>
AuthorDate: Mon Dec 26 19:16:29 2022 +0800

    KYLIN-5450 check if shard by columns included in col orders
---
 .../common/exception/code/ErrorCodeServer.java      |  1 +
 .../resources/kylin_error_msg_conf_cn.properties    |  1 +
 .../resources/kylin_error_msg_conf_en.properties    |  1 +
 .../main/resources/kylin_errorcode_conf.properties  |  1 +
 .../kylin/rest/controller/NIndexPlanController.java | 10 ++++++++++
 .../rest/controller/IndexPlanControllerTest.java    | 21 ++++++++++++++++++---
 6 files changed, 32 insertions(+), 3 deletions(-)

diff --git a/src/core-common/src/main/java/org/apache/kylin/common/exception/code/ErrorCodeServer.java b/src/core-common/src/main/java/org/apache/kylin/common/exception/code/ErrorCodeServer.java
index b5eb0fd98c..54f81183e7 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/exception/code/ErrorCodeServer.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/exception/code/ErrorCodeServer.java
@@ -107,6 +107,7 @@ public enum ErrorCodeServer implements ErrorCodeProducer {
     RULE_BASED_INDEX_METADATA_INCONSISTENT("KE-010012201"),
     INDEX_DUPLICATE("KE-010012202"),
     INDEX_PARAMETER_INVALID("KE-010012203"),
+    SHARD_BY_COLUMN_NOT_IN_INDEX("KE-010012204"),
 
     // 10043XX parameter check
     REQUEST_PARAMETER_EMPTY_OR_VALUE_EMPTY("KE-010043201"),
diff --git a/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties b/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties
index be8bca221a..2768232b9d 100644
--- a/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties
+++ b/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties
@@ -109,6 +109,7 @@ KE-010010205=可计算列名和表达式不能为空。请检查后重试。
 KE-010012201=索引元数据不一致。请尝试刷新下列模型的所有 Segment:项目[%s],模型[%s]。
 KE-010012202=因为存在相同的索引,无法新建该索引。请修改。
 KE-010012203=参数 “%s” 仅支持 “%s”。
+KE-010012204=ShardBy 列不在索引包含的列中,请修改后重试。
 
 ## 10043XX parameter check
 KE-010043201=请求参数 “%s” 为空或值为空。请检查请求参数是否正确填写。
diff --git a/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties b/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties
index b44c0ef422..fc75aa0610 100644
--- a/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties
+++ b/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties
@@ -107,6 +107,7 @@ KE-010010205=Computed column names and expressions cannot be empty. Please check
 KE-010012201=Index metadata might be inconsistent. Please try refreshing all segments in the following model: Project [%s], Model [%s].
 KE-010012202=Can't add this index, as the same index already exists. Please modify.
 KE-010012203=The parameter "%s" only supports "%s".
+KE-010012204=The ShardBy column is not included in the index. Please fix and try again.
 
 ## 10043XX parameter check
 KE-010043201=Request parameter "%s" is empty or value is empty. Please check the request parameters.
diff --git a/src/core-common/src/main/resources/kylin_errorcode_conf.properties b/src/core-common/src/main/resources/kylin_errorcode_conf.properties
index 074fb6550e..976e342cc9 100644
--- a/src/core-common/src/main/resources/kylin_errorcode_conf.properties
+++ b/src/core-common/src/main/resources/kylin_errorcode_conf.properties
@@ -113,6 +113,7 @@ KE-010010205
 KE-010012201
 KE-010012202
 KE-010012203
+KE-010012204
 
 ## 10043XX parameter check
 KE-010043201
diff --git a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NIndexPlanController.java b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NIndexPlanController.java
index 24124071a5..3bca9f0ecf 100644
--- a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NIndexPlanController.java
+++ b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NIndexPlanController.java
@@ -21,6 +21,7 @@ package org.apache.kylin.rest.controller;
 import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_JSON;
 import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.LAYOUT_LIST_EMPTY;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.SHARD_BY_COLUMN_NOT_IN_INDEX;
 
 import java.util.List;
 import java.util.Set;
@@ -143,10 +144,19 @@ public class NIndexPlanController extends NBasicController {
         checkRequiredArg(MODEL_ID, request.getModelId());
         checkRequiredArg("id", request.getId());
         modelService.validateCCType(request.getModelId(), request.getProject());
+        List<String> shardByColumns = request.getShardByColumns();
+        List<String> colOrder = request.getColOrder();
+        checkShardbyCol(shardByColumns, colOrder);
         val response = fusionIndexService.updateTableIndex(request.getProject(), request);
         return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, response, "");
     }
 
+    private void checkShardbyCol(List<String> shardByColumns, List<String> colOrder) {
+        if (!colOrder.containsAll(shardByColumns)) {
+            throw new KylinException(SHARD_BY_COLUMN_NOT_IN_INDEX);
+        }
+    }
+
     @Deprecated
     @ApiOperation(value = "deleteTableIndex", tags = { "AI" }, notes = "Update URL: {project}, Update Param: project")
     @DeleteMapping(value = "/table_index/{id:.+}")
diff --git a/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/IndexPlanControllerTest.java b/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/IndexPlanControllerTest.java
index 1acfb539ea..bcb9ef7ec1 100644
--- a/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/IndexPlanControllerTest.java
+++ b/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/IndexPlanControllerTest.java
@@ -18,21 +18,25 @@
 package org.apache.kylin.rest.controller;
 
 import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_JSON;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.SHARD_BY_COLUMN_NOT_IN_INDEX;
 
+import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.util.JsonUtil;
-import org.apache.kylin.common.util.Pair;
-import org.apache.kylin.rest.constant.Constant;
-import org.apache.kylin.rest.response.DiffRuleBasedIndexResponse;
 import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
+import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.metadata.cube.model.IndexEntity;
 import org.apache.kylin.metadata.cube.model.IndexPlan;
+import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.request.CreateBaseIndexRequest;
+import org.apache.kylin.rest.request.CreateTableIndexRequest;
 import org.apache.kylin.rest.request.UpdateRuleBasedCuboidRequest;
 import org.apache.kylin.rest.response.BuildIndexResponse;
+import org.apache.kylin.rest.response.DiffRuleBasedIndexResponse;
 import org.apache.kylin.rest.service.FusionIndexService;
 import org.apache.kylin.rest.service.IndexPlanService;
 import org.apache.kylin.rest.service.ModelService;
 import org.junit.After;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.InjectMocks;
@@ -156,4 +160,15 @@ public class IndexPlanControllerTest extends NLocalFileMetadataTestCase {
                 .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
                 .andExpect(MockMvcResultMatchers.status().isOk());
     }
+
+    @Test
+    public void testUpdateTableIndex() {
+        CreateTableIndexRequest tableIndexRequest = CreateTableIndexRequest.builder().project("default")
+                .modelId("89af4ee2-2cdb-4b07-b39e-4c29856309aa").id(20000010000L)
+                .colOrder(Lists.newArrayList("1", "0", "2")).shardByColumns(Lists.newArrayList("4"))
+                .sortByColumns(Lists.newArrayList("0", "2")).build();
+        Assert.assertThrows(SHARD_BY_COLUMN_NOT_IN_INDEX.getMsg(), KylinException.class, () -> {
+            indexPlanController.updateTableIndex(tableIndexRequest);
+        });
+    }
 }


[kylin] 29/34: KYLIN-5460 fix upgrade in resource group

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit f27ee335285670a745d3d4032f313f436acaa878
Author: Junqing Cai <ca...@163.com>
AuthorDate: Wed Jan 4 22:15:19 2023 +0800

    KYLIN-5460 fix upgrade in resource group
---
 .../apache/kylin/metadata/epoch/EpochManager.java  | 12 ++-
 .../kylin/metadata/epoch/EpochManagerTest.java     | 88 ++++++++++++++++++++++
 2 files changed, 97 insertions(+), 3 deletions(-)

diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/epoch/EpochManager.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/epoch/EpochManager.java
index e8d3acc29b..a68d25eeec 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/epoch/EpochManager.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/epoch/EpochManager.java
@@ -691,8 +691,15 @@ public class EpochManager {
         if (force) {
             return true;
         }
+        return currentInstanceHasPermissionToOwn(epochTarget, AddressUtil.getLocalInstance());
+    }
+
+    private boolean currentInstanceHasPermissionToOwn(String epochTarget, String epochServer) {
+        if (isMaintenanceMode()) {
+            return true;
+        }
         ResourceGroupManager rgManager = ResourceGroupManager.getInstance(config);
-        return rgManager.instanceHasPermissionToOwnEpochTarget(epochTarget, AddressUtil.getLocalInstance());
+        return rgManager.instanceHasPermissionToOwnEpochTarget(epochTarget, epochServer);
     }
 
     private boolean isEpochLegal(Epoch epoch) {
@@ -709,9 +716,8 @@ public class EpochManager {
                 return false;
             }
 
-            ResourceGroupManager rgManager = ResourceGroupManager.getInstance(config);
             String epochServer = getHostAndPort(epoch.getCurrentEpochOwner());
-            if (!rgManager.instanceHasPermissionToOwnEpochTarget(epoch.getEpochTarget(), epochServer)) {
+            if (!currentInstanceHasPermissionToOwn(epoch.getEpochTarget(), epochServer)) {
                 logger.debug("Epoch {}'s owner is not in build request type resource group.", epoch);
                 return false;
             }
diff --git a/src/core-metadata/src/test/java/org/apache/kylin/metadata/epoch/EpochManagerTest.java b/src/core-metadata/src/test/java/org/apache/kylin/metadata/epoch/EpochManagerTest.java
index 425d81697d..fb0856c4cc 100644
--- a/src/core-metadata/src/test/java/org/apache/kylin/metadata/epoch/EpochManagerTest.java
+++ b/src/core-metadata/src/test/java/org/apache/kylin/metadata/epoch/EpochManagerTest.java
@@ -31,6 +31,7 @@ import java.util.concurrent.TimeUnit;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.metadata.Epoch;
 import org.apache.kylin.common.persistence.metadata.EpochStore;
+import org.apache.kylin.common.persistence.transaction.UnitOfWork;
 import org.apache.kylin.junit.annotation.MetadataInfo;
 import org.apache.kylin.junit.annotation.OverwriteProp;
 import org.apache.kylin.metadata.project.NProjectManager;
@@ -489,4 +490,91 @@ class EpochManagerTest {
         Assertions.assertFalse(epochManager.getOwnedEpochs().isEmpty());
     }
 
+    @Test
+    void testIsEpochLegal() {
+        EpochManager epochManager = EpochManager.getInstance();
+        {
+            Epoch epoch = null;
+            Boolean isEpochLegal = ReflectionTestUtils.invokeMethod(epochManager, "isEpochLegal", epoch);
+            Assertions.assertNotNull(isEpochLegal);
+            Assertions.assertFalse(isEpochLegal);
+        }
+
+        {
+            Epoch epoch = new Epoch();
+            epoch.setEpochTarget("test1");
+            epoch.setCurrentEpochOwner(null);
+            epoch.setEpochId(1);
+            epoch.setLastEpochRenewTime(System.currentTimeMillis());
+            Boolean isEpochLegal = ReflectionTestUtils.invokeMethod(epochManager, "isEpochLegal", epoch);
+            Assertions.assertNotNull(isEpochLegal);
+            Assertions.assertFalse(isEpochLegal);
+        }
+
+        {
+            Epoch epoch = new Epoch();
+            epoch.setEpochTarget("test1");
+            epoch.setCurrentEpochOwner("abc");
+            epoch.setEpochId(1);
+            epoch.setLastEpochRenewTime(System.currentTimeMillis() - TimeUnit.DAYS.toMillis(1));
+            Boolean isEpochLegal = ReflectionTestUtils.invokeMethod(epochManager, "isEpochLegal", epoch);
+            Assertions.assertNotNull(isEpochLegal);
+            Assertions.assertFalse(isEpochLegal);
+        }
+
+        {
+            Epoch epoch = new Epoch();
+            epoch.setEpochTarget("test1");
+            epoch.setCurrentEpochOwner("abc");
+            epoch.setEpochId(1);
+            epoch.setLastEpochRenewTime(System.currentTimeMillis());
+            Boolean isEpochLegal = ReflectionTestUtils.invokeMethod(epochManager, "isEpochLegal", epoch);
+            Assertions.assertNotNull(isEpochLegal);
+            Assertions.assertTrue(isEpochLegal);
+        }
+    }
+
+    @Test
+    @MetadataInfo
+    void testIsEpochLegal_WithResourceGroup() {
+        val manager = ResourceGroupManager.getInstance(getTestConfig());
+        manager.getResourceGroup();
+        manager.updateResourceGroup(copyForWrite -> copyForWrite.setResourceGroupEnabled(true));
+        val epochManager = EpochManager.getInstance();
+        Epoch epoch = new Epoch();
+        epoch.setEpochTarget("test1");
+        epoch.setCurrentEpochOwner("abc");
+        epoch.setEpochId(1);
+        epoch.setLastEpochRenewTime(System.currentTimeMillis());
+        Boolean isEpochLegal = ReflectionTestUtils.invokeMethod(epochManager, "isEpochLegal", epoch);
+        Assertions.assertNotNull(isEpochLegal);
+        Assertions.assertFalse(isEpochLegal);
+    }
+
+    @Test
+    @MetadataInfo
+    void testIsEpochLegal_WithResourceGroupInMaintMode() {
+        val manager = ResourceGroupManager.getInstance(getTestConfig());
+        manager.getResourceGroup();
+        manager.updateResourceGroup(copyForWrite -> copyForWrite.setResourceGroupEnabled(true));
+
+        val epochManager = EpochManager.getInstance();
+
+        Epoch epoch = new Epoch();
+        epoch.setEpochTarget(UnitOfWork.GLOBAL_UNIT);
+        epoch.setCurrentEpochOwner("testOwner");
+        epoch.setEpochId(1);
+        epoch.setLastEpochRenewTime(System.currentTimeMillis());
+        getEpochStore().insertBatch(Lists.newArrayList(epoch));
+
+        epochManager.setMaintenanceMode("test");
+
+        //test another target
+        epoch.setEpochTarget("test");
+
+        Boolean isEpochLegal = ReflectionTestUtils.invokeMethod(epochManager, "isEpochLegal", epoch);
+        Assertions.assertNotNull(isEpochLegal);
+        Assertions.assertTrue(isEpochLegal);
+    }
+
 }


[kylin] 07/34: KYLIN-5447 Support Logical View

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 90e908b69e4850f27f68054cbf8671d8b8f24468
Author: ChenLiang.Lu <31...@users.noreply.github.com>
AuthorDate: Tue Dec 20 11:03:39 2022 +0800

    KYLIN-5447 Support Logical View
    
    Logical View
---
 pom.xml                                            |   5 -
 .../rest/config/initialize/BroadcastListener.java  |   5 +
 .../kylin/rest/controller/NAdminController.java    |   4 +-
 .../org/apache/kylin/common/KylinConfigBase.java   |  26 ++-
 .../kylin/common/exception/ServerErrorCode.java    |   1 +
 .../org/apache/kylin/common/msg/CnMessage.java     |  29 ++-
 .../java/org/apache/kylin/common/msg/Message.java  |  33 ++-
 .../kylin/common/persistence/ResourceStore.java    |   1 +
 .../transaction/LogicalViewBroadcastNotifier.java} |  17 +-
 .../resources/kylin_errorcode_conf_en.properties   |   1 +
 .../resources/kylin_errorcode_conf_zh.properties   |   1 +
 .../common/util/NLocalFileMetadataTestCase.java    |  11 +
 .../org/apache/kylin/metadata/model/TableDesc.java |   4 +
 .../apache/kylin/metadata/view/LogicalView.java    |  71 ++++++
 .../kylin/metadata/view/LogicalViewManager.java    | 137 +++++++++++
 .../apache/kylin/rest/util/AclPermissionUtil.java  |   9 +-
 src/datasource-service/pom.xml                     |   4 -
 .../apache/kylin/rest}/ddl/SourceTableCheck.java   |  45 +++-
 .../apache/kylin/rest/request/ViewRequest.java}    |  46 ++--
 .../apache/kylin/rest/service/SparkDDLService.java | 105 +++++++--
 .../apache/kylin/rest/service/TableExtService.java |  44 +++-
 .../org/apache/kylin/rest/ddl/ViewCheck.scala      | 247 ++++++++++++++++++++
 .../apache/kylin/rest/service/SparkDDLTest.java    | 256 +++++++++++++++++----
 .../spark/sql/common/SparkDDLTestUtils.scala       |   3 +-
 .../org/apache/kylin/newten/LogicalViewTest.java   |  87 +++++++
 .../_global/logical_view/LOGICAL_VIEW_TABLE        |   9 +
 .../metadata/_global/project/logical_view.json     |   6 +
 .../451e127a-b684-1474-744b-c9afc14378af.json      |  18 ++
 .../451e127a-b684-1474-744b-c9afc14378af.json      |  63 +++++
 .../451e127a-b684-1474-744b-c9afc14378af.json      | 149 ++++++++++++
 .../KYLIN_LOGICAL_VIEW.LOGICAL_VIEW_TABLE.json     |  68 ++++++
 .../metadata/logical_view/table/SSB.CUSTOMER.json  |  68 ++++++
 .../kylin/rest/controller/SparkDDLController.java  |  41 +++-
 .../rest/controller/SparkDDLControllerTest.java    |  37 ++-
 .../engine/spark/application/SparkApplication.java |  81 +++++--
 .../engine/spark/job/NResourceDetectStep.java      |   4 +-
 .../kylin/engine/spark/job/NSparkCubingStep.java   |   6 +-
 .../kylin/engine/spark/job/NSparkExecutable.java   |  26 +++
 .../kylin/engine/spark/job/NSparkMergingStep.java  |   6 +-
 .../spark/job/NSparkSnapshotBuildingStep.java      |   1 +
 .../kylin/engine/spark/job/NTableSamplingJob.java  |   2 +-
 .../kylin/engine/spark/mockup/CsvSource.java       |  19 +-
 .../spark/source/NSparkMetadataExplorer.java       |  20 +-
 .../apache/kylin/engine/spark/job/SegmentJob.java  |   1 +
 .../engine/spark/NLocalWithSparkSessionTest.java   |   5 +-
 .../main/java/org/apache}/spark/ddl/DDLCheck.java  |  15 +-
 .../org/apache}/spark/ddl/DDLCheckContext.java     |  31 ++-
 .../java/org/apache/spark/ddl/DDLConstant.java}    |  24 +-
 .../org/apache/spark/sql/LogicalViewLoader.java    | 195 ++++++++++++++++
 .../scala/org/apache/spark/sql/KylinSession.scala  |  22 +-
 .../scala/org/apache/spark/sql/SparderEnv.scala    |  20 +-
 src/spark-project/spark-ddl-plugin/pom.xml         |  73 ------
 .../services/org.apache.kylin.spark.ddl.DDLCheck   |   2 -
 .../org/apache/kylin/spark/ddl/ViewCheck.scala     | 123 ----------
 54 files changed, 1910 insertions(+), 417 deletions(-)

diff --git a/pom.xml b/pom.xml
index 3f1fd958c0..975ff8d1ed 100644
--- a/pom.xml
+++ b/pom.xml
@@ -716,11 +716,6 @@
                 <artifactId>kylin-soft-affinity-cache</artifactId>
                 <version>${project.version}</version>
             </dependency>
-            <dependency>
-                <groupId>org.apache.kylin</groupId>
-                <artifactId>kylin-spark-ddl</artifactId>
-                <version>${project.version}</version>
-            </dependency>
 
             <dependency>
                 <groupId>io.dropwizard.metrics</groupId>
diff --git a/src/common-server/src/main/java/org/apache/kylin/rest/config/initialize/BroadcastListener.java b/src/common-server/src/main/java/org/apache/kylin/rest/config/initialize/BroadcastListener.java
index 4188ac6d17..dbfde604c2 100644
--- a/src/common-server/src/main/java/org/apache/kylin/rest/config/initialize/BroadcastListener.java
+++ b/src/common-server/src/main/java/org/apache/kylin/rest/config/initialize/BroadcastListener.java
@@ -32,6 +32,7 @@ import org.apache.kylin.common.persistence.transaction.AddS3CredentialToSparkBro
 import org.apache.kylin.common.persistence.transaction.AuditLogBroadcastEventNotifier;
 import org.apache.kylin.common.persistence.transaction.BroadcastEventReadyNotifier;
 import org.apache.kylin.common.persistence.transaction.EpochCheckBroadcastNotifier;
+import org.apache.kylin.common.persistence.transaction.LogicalViewBroadcastNotifier;
 import org.apache.kylin.common.persistence.transaction.StopQueryBroadcastEventNotifier;
 import org.apache.kylin.common.persistence.transaction.UpdateJobStatusEventNotifier;
 import org.apache.kylin.metadata.epoch.EpochManager;
@@ -45,6 +46,8 @@ import org.apache.kylin.rest.service.AuditLogService;
 import org.apache.kylin.rest.service.JobService;
 import org.apache.kylin.rest.service.QueryService;
 import org.apache.kylin.rest.service.UserAclService;
+
+import org.apache.spark.sql.LogicalViewLoader;
 import org.apache.spark.sql.SparderEnv;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -130,6 +133,8 @@ public class BroadcastListener implements BroadcastEventHandler {
             AdminUserSyncEventNotifier adminUserSyncEventNotifier = (AdminUserSyncEventNotifier) notifier;
             userAclService.syncAdminUserAcl(adminUserSyncEventNotifier.getAdminUserList(),
                     adminUserSyncEventNotifier.isUseEmptyPermission());
+        } else if(notifier instanceof LogicalViewBroadcastNotifier) {
+            LogicalViewLoader.syncViewAsync();
         }
     }
 
diff --git a/src/common-server/src/main/java/org/apache/kylin/rest/controller/NAdminController.java b/src/common-server/src/main/java/org/apache/kylin/rest/controller/NAdminController.java
index 42ece3badc..2d0440f52f 100644
--- a/src/common-server/src/main/java/org/apache/kylin/rest/controller/NAdminController.java
+++ b/src/common-server/src/main/java/org/apache/kylin/rest/controller/NAdminController.java
@@ -74,7 +74,9 @@ public class NAdminController extends NBasicController {
         propertyKeys.add("kylin.streaming.enabled");
         propertyKeys.add("kylin.model.measure-name-check-enabled");
         propertyKeys.add("kylin.security.remove-ldap-custom-security-limit-enabled");
-        propertyKeys.add("kylin.source.ddl.enabled");
+        propertyKeys.add("kylin.source.ddl.logical-view.enabled");
+        propertyKeys.add("kylin.source.ddl.hive.enabled");
+        propertyKeys.add("kylin.source.ddl.logical-view-database");
         propertyKeys.add("kylin.storage.check-quota-enabled");
 
         // add second storage
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 7fd00664a1..773fa0af0f 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -3726,10 +3726,6 @@ public abstract class KylinConfigBase implements Serializable {
         return Integer.parseInt(getOptional("kylin.second-storage.wait-lock-timeout", "180"));
     }
 
-    public boolean getDDLEnabled() {
-        return Boolean.parseBoolean(getOptional("kylin.source.ddl.enabled", FALSE));
-    }
-
     public boolean isBuildSegmentOverlapEnabled() {
         return Boolean.parseBoolean(getOptional("kylin.build.segment-overlap-enabled", FALSE));
     }
@@ -3745,11 +3741,31 @@ public abstract class KylinConfigBase implements Serializable {
     public boolean isStorageQuotaEnabled() {
         return Boolean.parseBoolean(getOptional("kylin.storage.check-quota-enabled", FALSE));
     }
-    
+
     public boolean skipShardPruningForInExpr() {
         return Boolean.parseBoolean(getOptional("kylin.query.skip-shard-pruning-for-in", FALSE));
     }
 
+    public boolean isDDLEnabled() {
+        return isDDLLogicalViewEnabled() || isDDLHiveEnabled();
+    }
+
+    public boolean isDDLLogicalViewEnabled() {
+        return Boolean.parseBoolean(getOptional("kylin.source.ddl.logical-view.enabled", FALSE));
+    }
+
+    public boolean isDDLHiveEnabled() {
+        return Boolean.parseBoolean(getOptional("kylin.source.ddl.hive.enabled", FALSE));
+    }
+
+    public String getDDLLogicalViewDB() {
+        return getOptional("kylin.source.ddl.logical-view.database", "KYLIN_LOGICAL_VIEW");
+    }
+
+    public int getDDLLogicalViewCatchupInterval() {
+        return Integer.parseInt(getOptional("kylin.source.ddl.logical-view-catchup-interval", "60"));
+    }
+
     // ============================================================================
     // Cost based index Planner
     // ============================================================================
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/exception/ServerErrorCode.java b/src/core-common/src/main/java/org/apache/kylin/common/exception/ServerErrorCode.java
index 1e6c9722bf..5c90c5db1d 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/exception/ServerErrorCode.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/exception/ServerErrorCode.java
@@ -95,6 +95,7 @@ public enum ServerErrorCode implements ErrorCodeSupplier {
     DUPLICATED_COLUMN_NAME("KE-010007007"), //
     ON_GOING_JOB_EXIST("KE-010007008"), //
     VIEW_PARTITION_DATE_FORMAT_DETECTION_FORBIDDEN("KE-010007009"), //
+    INVALID_LOGICAL_VIEW("KE-010007010"), //
 
     // 10008XXX database
     DATABASE_NOT_EXIST("KE-010008001"), //
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/msg/CnMessage.java b/src/core-common/src/main/java/org/apache/kylin/common/msg/CnMessage.java
index af50c20b53..6e099d683b 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/msg/CnMessage.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/msg/CnMessage.java
@@ -1765,6 +1765,11 @@ public class CnMessage extends Message {
         return "视图名需要以 KE_ 开头";
     }
 
+    @Override
+    public String getDDLViewNameDuplicateError() {
+        return "逻辑视图名称和已有视图重复";
+    }
+
     @Override
     public String getDDLDropError() {
         return "仅支持删除 view 类型表且 view 名称需要以 KE_ 开头";
@@ -1781,12 +1786,28 @@ public class CnMessage extends Message {
     }
 
     @Override
-    public String getDDLPermissionDenied() {
-        return "只有系统或者项目管理员可以进行 DDL 操作";
+    public String getDDLDatabaseAccessnDenied() {
+        return "用户没有视图所在数据库的权限";
     }
 
     @Override
-    public String getDDLDatabaseAccessnDenied() {
-        return "用户没有视图所在数据库的权限";
+    public String getDDLLogicalViewHasUsed(String table, String project) {
+        return String.format(Locale.ROOT, "表 %s 已经在项目 %s 中加载过,请先卸载后再删除该表.", table, project);
+    }
+
+    @Override
+    public String getDDLLogicalViewSourceTableError(String table) {
+        return String.format(Locale.ROOT, "来源表 %s 是 Logical View,不能在 SQL 中使用", table);
+    }
+
+    @Override
+    public String getDDLRestrictError(String syntax) {
+        return String.format(Locale.ROOT, "仅支持 %s 语法", syntax);
+    }
+
+    @Override
+    public String getLoadLogicalViewError(String tableName, String project) {
+        return String.format(Locale.ROOT,
+            "无法加载表: %s , 仅支持在项目 %s 中加载此表", tableName, project);
     }
 }
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/msg/Message.java b/src/core-common/src/main/java/org/apache/kylin/common/msg/Message.java
index 2048ce9bc1..8f349bd31f 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/msg/Message.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/msg/Message.java
@@ -63,12 +63,19 @@ public class Message {
     private static final String DDL_UNSUPPORTED = "Unsupported DDL syntax, only support single `create view`, `drop "
         + "view`,  `alter view`, `show create table`";
     private static final String DDL_VIEW_NAME_ERROR = "View names need to start with KE_";
+    private static final String DDL_VIEW_NAME_DUPLICATE_ERROR = "Logical View names is duplicate";
     private static final String DDL_DROP_ERROR = "Only support drop view";
     private static final String DDL_TABLE_NOT_LOADED = "Table '%s' is not loaded into the data source ";
     private static final String DDL_TABLE_NOT_SUPPORT = "Only support hive table, but '%s' is not hive table";
-    private static final String DDL_PERMISSION_DENIED = "Only Administrator or Project Administrator can do DDL operations";
     private static final String DDL_DATABASE_ACCESSN_DENIED = "The user does not have the database permission to "
         + "which the view belongs.";
+    private static final String DDL_LOGICAL_VIEW_HAS_USED = "Table %s has already been loaded in project %s, please "
+        + "unload it before deleting this table.";
+    private static final String DDL_LOGICAL_VIEW_SOURCETABLE_ERROR = "Source table %s is a logical view and is not "
+        + "allowed to be used in SQL";
+    private static final String DDL_RESTRICT = "Only support %s syntax";
+    private static final String LOAD_LOGICAL_VIEW_ERROR = "Can't load table %s, table can only be loaded in "
+        + "project %s";
 
     protected Message() {
 
@@ -1600,6 +1607,10 @@ public class Message {
         return DDL_VIEW_NAME_ERROR;
     }
 
+    public String getDDLViewNameDuplicateError() {
+        return DDL_VIEW_NAME_DUPLICATE_ERROR;
+    }
+
     public String getDDLDropError() {
         return DDL_DROP_ERROR;
     }
@@ -1612,11 +1623,23 @@ public class Message {
         return String.format(Locale.ROOT, DDL_TABLE_NOT_SUPPORT, table);
     }
 
-    public String getDDLPermissionDenied() {
-        return DDL_PERMISSION_DENIED;
-    }
-
     public String getDDLDatabaseAccessnDenied() {
         return DDL_DATABASE_ACCESSN_DENIED;
     }
+
+    public String getDDLLogicalViewHasUsed(String table, String project) {
+        return String.format(Locale.ROOT, DDL_LOGICAL_VIEW_HAS_USED, table, project);
+    }
+
+    public String getDDLLogicalViewSourceTableError(String table) {
+        return String.format(Locale.ROOT, DDL_LOGICAL_VIEW_SOURCETABLE_ERROR, table);
+    }
+
+    public String getDDLRestrictError(String syntax) {
+        return String.format(Locale.ROOT, DDL_RESTRICT, syntax);
+    }
+
+    public String getLoadLogicalViewError(String tableName, String project) {
+        return String.format(Locale.ROOT, LOAD_LOGICAL_VIEW_ERROR, tableName, project);
+    }
 }
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java b/src/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
index 8d4542d853..8fad8ea224 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/persistence/ResourceStore.java
@@ -77,6 +77,7 @@ public abstract class ResourceStore implements AutoCloseable {
     public static final String PROJECT_ROOT = GLOBAL_PROJECT + "/project";
     public static final String ACL_GLOBAL_ROOT = GLOBAL_PROJECT + "/sys_acl/user";
     public static final String UPGRADE = GLOBAL_PROJECT + "/upgrade";
+    public static final String VIEW_ROOT = GLOBAL_PROJECT + "/logical_view";
 
     public static final String DATA_MODEL_DESC_RESOURCE_ROOT = "/model_desc";
     public static final String FUSION_MODEL_RESOURCE_ROOT = "/fusion_model";
diff --git a/src/datasource-service/src/main/java/org/apache/kylin/rest/request/ViewDDLRequest.java b/src/core-common/src/main/java/org/apache/kylin/common/persistence/transaction/LogicalViewBroadcastNotifier.java
similarity index 75%
rename from src/datasource-service/src/main/java/org/apache/kylin/rest/request/ViewDDLRequest.java
rename to src/core-common/src/main/java/org/apache/kylin/common/persistence/transaction/LogicalViewBroadcastNotifier.java
index 37c29aa835..766dc4262b 100644
--- a/src/datasource-service/src/main/java/org/apache/kylin/rest/request/ViewDDLRequest.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/persistence/transaction/LogicalViewBroadcastNotifier.java
@@ -16,16 +16,17 @@
  * limitations under the License.
  */
 
-package org.apache.kylin.rest.request;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
+package org.apache.kylin.common.persistence.transaction;
 
 import lombok.Data;
+import lombok.EqualsAndHashCode;
 
 @Data
-public class ViewDDLRequest {
-  @JsonProperty("sql")
-  private String sql;
-  @JsonProperty("project")
-  private String project;
+@EqualsAndHashCode
+public class LogicalViewBroadcastNotifier extends BroadcastEventReadyNotifier {
+
+    @Override
+    public boolean needBroadcastSelf() {
+        return false;
+    }
 }
diff --git a/src/core-common/src/main/resources/kylin_errorcode_conf_en.properties b/src/core-common/src/main/resources/kylin_errorcode_conf_en.properties
index 40d5b154f5..8d649d7813 100644
--- a/src/core-common/src/main/resources/kylin_errorcode_conf_en.properties
+++ b/src/core-common/src/main/resources/kylin_errorcode_conf_en.properties
@@ -93,6 +93,7 @@ KE-010007006=Invalid Table Sampleing Range
 KE-010007007=Duplicated Column Name
 KE-010007008=Ongoing Jobs
 KE-010007009=View Partition Date Format Detection Forbidden
+KE-010007010=Illegal Logical View
 KE-010008001=Database Not Exist
 KE-010008002=Failed Import SSB Data
 KE-010008003=Unsupported Data Source Type
diff --git a/src/core-common/src/main/resources/kylin_errorcode_conf_zh.properties b/src/core-common/src/main/resources/kylin_errorcode_conf_zh.properties
index ef6f47a33e..1102001892 100644
--- a/src/core-common/src/main/resources/kylin_errorcode_conf_zh.properties
+++ b/src/core-common/src/main/resources/kylin_errorcode_conf_zh.properties
@@ -93,6 +93,7 @@ KE-010007006=非法的表采样范围
 KE-010007007=列名重复
 KE-010007008=存在运行中的任务
 KE-010007009=视图禁用探测时间分区列格式
+KE-010007010=不合法的 Logical View
 KE-010008001=数据库不存在
 KE-010008002=导入SSB数据集失败
 KE-010008003=不支持的数据源类型
diff --git a/src/core-common/src/test/java/org/apache/kylin/common/util/NLocalFileMetadataTestCase.java b/src/core-common/src/test/java/org/apache/kylin/common/util/NLocalFileMetadataTestCase.java
index db46ce81ff..f677ab9a49 100644
--- a/src/core-common/src/test/java/org/apache/kylin/common/util/NLocalFileMetadataTestCase.java
+++ b/src/core-common/src/test/java/org/apache/kylin/common/util/NLocalFileMetadataTestCase.java
@@ -266,6 +266,17 @@ public class NLocalFileMetadataTestCase extends AbstractTestCase {
         }
     }
 
+    public void assertRuntimeExeption(UserFunction f, String msg) {
+        try {
+            f.process();
+            Assert.fail();
+        } catch (Exception e) {
+            if (StringUtils.isNotEmpty(msg)) {
+                Assert.assertTrue(e.getMessage().contains(msg));
+            }
+        }
+    }
+
     public interface UserFunction {
         void process() throws Exception;
     }
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java
index a8bf64c8d4..983792b139 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/TableDesc.java
@@ -631,4 +631,8 @@ public class TableDesc extends RootPersistentEntity implements Serializable, ISo
         }
         return getIdentity();
     }
+
+    public boolean isLogicalView() {
+        return KylinConfig.getInstanceFromEnv().getDDLLogicalViewDB().equalsIgnoreCase(this.getDatabase());
+    }
 }
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/view/LogicalView.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/view/LogicalView.java
new file mode 100644
index 0000000000..168f501eb0
--- /dev/null
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/view/LogicalView.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.metadata.view;
+
+import java.io.Serializable;
+import java.util.Locale;
+
+import org.apache.kylin.common.persistence.ResourceStore;
+import org.apache.kylin.common.persistence.RootPersistentEntity;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import lombok.Data;
+
+/**
+ * Logical views which are only defined in Kylin
+ */
+@Data
+@JsonAutoDetect(fieldVisibility = Visibility.NONE, getterVisibility = Visibility.NONE,
+    isGetterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE)
+public class LogicalView extends RootPersistentEntity implements Serializable {
+
+  @JsonProperty("table_name")
+  private String tableName;
+
+  @JsonProperty("created_sql")
+  private String createdSql;
+
+  @JsonProperty("modified_user")
+  private String modifiedUser;
+
+  @JsonProperty("created_project")
+  private String createdProject;
+
+  public LogicalView() {}
+
+  public LogicalView(String tableName, String createdSql, String modifiedUser, String createdProject) {
+    this.tableName = tableName.toUpperCase(Locale.ROOT);
+    this.createdSql = createdSql;
+    this.modifiedUser = modifiedUser;
+    this.createdProject = createdProject;
+  }
+
+  @Override
+  public String resourceName() {
+    return tableName.toUpperCase(Locale.ROOT);
+  }
+
+  @Override
+  public String getResourcePath() {
+    return ResourceStore.VIEW_ROOT + "/" + resourceName();
+  }
+}
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/view/LogicalViewManager.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/view/LogicalViewManager.java
new file mode 100644
index 0000000000..3f139b2554
--- /dev/null
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/view/LogicalViewManager.java
@@ -0,0 +1,137 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.metadata.view;
+
+import static org.apache.kylin.common.persistence.ResourceStore.VIEW_ROOT;
+
+import java.util.List;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.persistence.ResourceStore;
+import org.apache.kylin.common.persistence.transaction.UnitOfWork;
+import org.apache.kylin.metadata.cachesync.CachedCrudAssist;
+import org.apache.kylin.metadata.cube.model.NDataflow;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
+import org.apache.kylin.metadata.model.NDataModel;
+import org.apache.kylin.metadata.model.NTableMetadataManager;
+import org.apache.kylin.metadata.model.TableDesc;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Lists;
+
+public class LogicalViewManager {
+
+  private static final Logger logger = LoggerFactory.getLogger(LogicalViewManager.class);
+
+  public static LogicalViewManager getInstance(KylinConfig config) {
+    return config.getManager(LogicalViewManager.class);
+  }
+
+  // called by reflection
+  static LogicalViewManager newInstance(KylinConfig config) {
+    return new LogicalViewManager(config);
+  }
+
+  // ============================================================================
+  private KylinConfig config;
+  private CachedCrudAssist<LogicalView> crud;
+
+  public LogicalViewManager(KylinConfig config) {
+    if (!UnitOfWork.isAlreadyInTransaction()) {
+      logger.info("Initializing LogicalView with KylinConfig Id: {}", System.identityHashCode(config));
+    }
+    this.config = config;
+    this.crud = new CachedCrudAssist<LogicalView>(getStore(), VIEW_ROOT, "", LogicalView.class) {
+      @Override
+      protected LogicalView initEntityAfterReload(LogicalView view, String resourceName) {
+        return view;
+      }
+    };
+  }
+
+  public LogicalView copyForWrite(LogicalView view) {
+    return crud.copyForWrite(view);
+  }
+
+  public KylinConfig getConfig() {
+    return config;
+  }
+
+  public ResourceStore getStore() {
+    return ResourceStore.getKylinMetaStore(this.config);
+  }
+
+  public LogicalView get(String name) {
+    return crud.get(name.toUpperCase());
+  }
+
+  public List<LogicalView> list() {
+    return crud.listAll();
+  }
+
+  public void update(LogicalView view) {
+    LogicalView exist = get(view.getTableName());
+    LogicalView copy = copyForWrite(view);
+    if (exist != null) {
+      copy.setLastModified(exist.getLastModified());
+      copy.setMvcc(exist.getMvcc());
+    }
+    crud.save(copy);
+  }
+
+  public void delete(String tableName) {
+    crud.delete(tableName.toUpperCase());
+  }
+
+  public boolean exists(String tableName) {
+    return get(tableName) != null;
+  }
+
+  public List<LogicalView> findLogicalViewsInModel(String project, String dataflowId) {
+    List<LogicalView> viewsInModel = Lists.newArrayList();
+    NDataflow df = NDataflowManager.getInstance(config, project).getDataflow(dataflowId);
+    if (df == null) {
+      return viewsInModel;
+    }
+    String logicalViewDB = KylinConfig.getInstanceFromEnv().getDDLLogicalViewDB();
+    NDataModel model = df.getModel();
+    model.getAllTableRefs().forEach(tableRef -> {
+      if (logicalViewDB.equalsIgnoreCase(tableRef.getTableDesc().getDatabase())
+          && get(tableRef.getTableName()) != null) {
+        viewsInModel.add(get(tableRef.getTableName()));
+      }
+    });
+    return viewsInModel;
+  }
+
+  public LogicalView findLogicalViewInProject(String project, String tableName) {
+    NTableMetadataManager tblMgr = NTableMetadataManager.getInstance(config, project);
+    TableDesc table = tblMgr.getTableDesc(tableName);
+    if (table == null || !table.isLogicalView()) {
+      return null;
+    }
+    LogicalView logicalView = get(table.getName());
+    if (logicalView != null && logicalView.getCreatedProject()
+        .equalsIgnoreCase(project)) {
+      return logicalView;
+    }
+    return null;
+  }
+}
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/rest/util/AclPermissionUtil.java b/src/core-metadata/src/main/java/org/apache/kylin/rest/util/AclPermissionUtil.java
index c948ab3e53..83bc0020ad 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/rest/util/AclPermissionUtil.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/rest/util/AclPermissionUtil.java
@@ -133,14 +133,17 @@ public class AclPermissionUtil {
     }
 
     public static boolean isAdminInProject(String project, Set<String> usergroups) {
+        return isSpecificPermissionInProject(project, usergroups, BasePermission.ADMINISTRATION);
+    }
+
+    public static boolean isSpecificPermissionInProject(String project, Set<String> userGroups, Permission permission) {
         Authentication auth = SecurityContextHolder.getContext().getAuthentication();
         if (Objects.isNull(auth)) {
             return false;
         }
-
         MutableAclRecord acl = getProjectAcl(project);
-        Set<String> groups = filterGroupsInProject(usergroups, acl);
-        return isSpecificPermissionInProject(auth.getName(), groups, BasePermission.ADMINISTRATION, acl);
+        Set<String> groups = filterGroupsInProject(userGroups, acl);
+        return isSpecificPermissionInProject(auth.getName(), groups, permission, acl);
     }
 
     public static boolean isSpecificPermissionInProject(String username, Set<String> userGroupsInProject,
diff --git a/src/datasource-service/pom.xml b/src/datasource-service/pom.xml
index 8766e49a50..f3e5969497 100644
--- a/src/datasource-service/pom.xml
+++ b/src/datasource-service/pom.xml
@@ -56,10 +56,6 @@
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-hdfs</artifactId>
         </dependency>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-spark-ddl</artifactId>
-        </dependency>
         <dependency>
             <groupId>org.apache.kafka</groupId>
             <artifactId>kafka-clients</artifactId>
diff --git a/src/spark-project/spark-ddl-plugin/src/main/java/org/apache/kylin/spark/ddl/SourceTableCheck.java b/src/datasource-service/src/main/java/org/apache/kylin/rest/ddl/SourceTableCheck.java
similarity index 64%
rename from src/spark-project/spark-ddl-plugin/src/main/java/org/apache/kylin/spark/ddl/SourceTableCheck.java
rename to src/datasource-service/src/main/java/org/apache/kylin/rest/ddl/SourceTableCheck.java
index beb157bddc..2ad1223719 100644
--- a/src/spark-project/spark-ddl-plugin/src/main/java/org/apache/kylin/spark/ddl/SourceTableCheck.java
+++ b/src/datasource-service/src/main/java/org/apache/kylin/rest/ddl/SourceTableCheck.java
@@ -15,7 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.kylin.spark.ddl;
+package org.apache.kylin.rest.ddl;
 
 import java.util.List;
 import java.util.stream.Collectors;
@@ -25,47 +25,57 @@ import org.apache.kylin.common.msg.MsgPicker;
 import org.apache.kylin.metadata.model.ISourceAware;
 import org.apache.kylin.metadata.model.NTableMetadataManager;
 import org.apache.kylin.metadata.model.TableDesc;
+import org.apache.kylin.rest.security.AclPermission;
 import org.apache.kylin.rest.util.AclPermissionUtil;
+
+import org.apache.spark.ddl.DDLCheck;
+import org.apache.spark.ddl.DDLCheckContext;
 import org.apache.spark.sql.SparderEnv;
 import org.apache.spark.sql.catalyst.analysis.UnresolvedRelation;
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan;
 
 import lombok.val;
+
 import scala.collection.Seq;
 
+import static org.apache.spark.ddl.DDLConstant.SOURCE_TABLE_RULE_PRIORITY;
+
 public class SourceTableCheck implements DDLCheck {
 
   @Override
-  public String[] description(String project) {
+  public String[] description(String project, String pageType) {
     return new String[] {
         "The source table used to define the view needs to be loaded into the data source already",
         "定义 view 用到的来源表需要已经加载到数据源"
     };
   }
 
+  @Override
+  public int priority() {
+    return SOURCE_TABLE_RULE_PRIORITY;
+  }
+
   @Override
   public void check(DDLCheckContext context) {
     val spark = SparderEnv.getSparkSession();
+    KylinConfig config = KylinConfig.getInstanceFromEnv();
     LogicalPlan logicalPlan = null;
+    checkACLPermission(context);
     try {
       logicalPlan = spark.sessionState().sqlParser().parsePlan(context.getSql());
     } catch (Throwable t) {
       throwException(t.getMessage());
     }
-    val tableManager = NTableMetadataManager.getInstance(
-        KylinConfig.getInstanceFromEnv(),
-        context.getProject());
-    if (!AclPermissionUtil.hasProjectAdminPermission(context.getProject(), context.getGroups())) {
-      throwException(MsgPicker.getMsg().getDDLPermissionDenied());
-    }
+    val tableManager = NTableMetadataManager.getInstance(config, context.getProject());
     Seq<LogicalPlan> relationLeaves = logicalPlan.collectLeaves();
     if (relationLeaves == null) {
       return;
     }
+    List<TableDesc> allTablesInProject = tableManager.listAllTables();
     for (LogicalPlan plan : scala.collection.JavaConverters.seqAsJavaListConverter(relationLeaves).asJava()) {
       if (plan instanceof UnresolvedRelation) {
         val tableName = ((UnresolvedRelation) plan).tableName();
-        List<TableDesc> loadTable = tableManager.listAllTables().stream()
+        List<TableDesc> loadTable = allTablesInProject.stream()
             .filter(table -> table.getTableAlias().equalsIgnoreCase(tableName))
             .collect(Collectors.toList());
         if (loadTable.isEmpty()) {
@@ -76,7 +86,24 @@ public class SourceTableCheck implements DDLCheck {
             && ISourceAware.ID_SPARK != table.getSourceType()) {
           throwException(MsgPicker.getMsg().getDDLTableNotSupport(tableName));
         }
+        if (context.isLogicalViewCommand() && table.getDatabase()
+            .equalsIgnoreCase(config.getDDLLogicalViewDB())) {
+          throwException(MsgPicker.getMsg().getDDLLogicalViewSourceTableError(tableName));
+        }
       }
     }
   }
+
+  private void checkACLPermission(DDLCheckContext context) {
+    if (context.isHiveCommand()
+        && !AclPermissionUtil.hasProjectAdminPermission(context.getProject(), context.getGroups())) {
+      throwException("Only project administrator can do Hive operations");
+    }
+    if (!context.isHiveCommand()
+        && (!AclPermissionUtil.hasProjectAdminPermission(context.getProject(), context.getGroups())
+        && !AclPermissionUtil.isSpecificPermissionInProject(context.getProject(), context.getGroups(),
+        AclPermission.MANAGEMENT))) {
+      throwException("Only project administrator or modeler can do Logical View operations");
+    }
+  }
 }
diff --git a/src/spark-project/spark-ddl-plugin/src/main/java/org/apache/kylin/spark/ddl/DDLCheckContext.java b/src/datasource-service/src/main/java/org/apache/kylin/rest/request/ViewRequest.java
similarity index 60%
copy from src/spark-project/spark-ddl-plugin/src/main/java/org/apache/kylin/spark/ddl/DDLCheckContext.java
copy to src/datasource-service/src/main/java/org/apache/kylin/rest/request/ViewRequest.java
index e5e6d1819f..cb97d1dc03 100644
--- a/src/spark-project/spark-ddl-plugin/src/main/java/org/apache/kylin/spark/ddl/DDLCheckContext.java
+++ b/src/datasource-service/src/main/java/org/apache/kylin/rest/request/ViewRequest.java
@@ -15,36 +15,28 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.kylin.spark.ddl;
 
-import java.util.Set;
+package org.apache.kylin.rest.request;
 
-public class DDLCheckContext {
-  private String sql;
-  private String project;
-  private String userName;
-  private Set<String> groups;
-
-  public DDLCheckContext(String sql, String project, String userName, Set<String> groups) {
-    this.sql = sql;
-    this.project = project;
-    this.userName = userName;
-    this.groups = groups;
-  }
-
-  public String getSql() {
-    return sql;
-  }
+import com.fasterxml.jackson.annotation.JsonProperty;
 
-  public String getProject() {
-    return project;
-  }
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import lombok.NoArgsConstructor;
 
-  public String getUserName() {
-    return userName;
-  }
+@Data
+@NoArgsConstructor
+@AllArgsConstructor
+public class ViewRequest {
+  @JsonProperty("ddl_project")
+  private String ddlProject;
+  @JsonProperty("sql")
+  private String sql;
+  @JsonProperty("restrict")
+  private String restrict;
 
-  public Set<String> getGroups() {
-    return groups;
+  public ViewRequest(String ddlProject, String sql) {
+    this.ddlProject = ddlProject;
+    this.sql = sql;
   }
-}
+}
\ No newline at end of file
diff --git a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java
index 7bce47ad22..a1c00fdcc9 100644
--- a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java
+++ b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java
@@ -18,18 +18,37 @@
 package org.apache.kylin.rest.service;
 
 import static org.apache.kylin.common.exception.ServerErrorCode.DDL_CHECK_ERROR;
+import static org.apache.spark.ddl.DDLConstant.CREATE_LOGICAL_VIEW;
+import static org.apache.spark.ddl.DDLConstant.DROP_LOGICAL_VIEW;
+import static org.apache.spark.ddl.DDLConstant.REPLACE_LOGICAL_VIEW;
 
+import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
-import java.util.ServiceLoader;
+import java.util.stream.Collectors;
 
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.exception.KylinException;
+import org.apache.kylin.common.persistence.transaction.LogicalViewBroadcastNotifier;
+import org.apache.kylin.common.persistence.transaction.UnitOfWork;
+import org.apache.kylin.common.scheduler.EventBusFactory;
+import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
+import org.apache.kylin.metadata.view.LogicalView;
+import org.apache.kylin.metadata.view.LogicalViewManager;
+import org.apache.kylin.rest.ddl.SourceTableCheck;
+import org.apache.kylin.rest.ddl.ViewCheck;
+import org.apache.kylin.rest.request.ViewRequest;
 import org.apache.kylin.rest.util.AclPermissionUtil;
-import org.apache.kylin.spark.ddl.DDLCheck;
-import org.apache.kylin.spark.ddl.DDLCheckContext;
+
+import org.apache.spark.ddl.DDLCheck;
+import org.apache.spark.ddl.DDLCheckContext;
+import org.apache.spark.sql.LogicalViewLoader;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.SparderEnv;
+
 import org.springframework.stereotype.Service;
 
 import com.google.common.collect.Lists;
@@ -41,35 +60,93 @@ import lombok.extern.slf4j.Slf4j;
 @Service
 public class SparkDDLService extends BasicService {
 
-  private final ServiceLoader<DDLCheck> ddlChecks = ServiceLoader.load(DDLCheck.class);
+  private final List<DDLCheck> ddlChecks = Lists.newArrayList(new SourceTableCheck(), new ViewCheck());
 
-  public String executeDDLSql(String project, String sql) {
-    if (!KylinConfig.getInstanceFromEnv().getDDLEnabled()) {
+  public String executeSQL(ViewRequest request) {
+    if (!KylinConfig.getInstanceFromEnv().isDDLEnabled()) {
       throw new KylinException(DDL_CHECK_ERROR, "DDL function has not been turned on.");
     }
+    LogicalViewLoader.checkConfigIfNeed();
     val groups = getCurrentUserGroups();
-    val context = new DDLCheckContext(sql, project, AclPermissionUtil.getCurrentUsername(),
-        groups);
-    for (DDLCheck checker : ddlChecks) {
+    val context = new DDLCheckContext(request.getSql(), request.getDdlProject(), request.getRestrict(),
+        AclPermissionUtil.getCurrentUsername(),
+        groups, UserGroupInformation.isSecurityEnabled());
+
+    ArrayList<DDLCheck> ddlCheckers = Lists.newArrayList(this.ddlChecks.iterator());
+    Collections.sort(ddlCheckers);
+    for (DDLCheck checker : ddlCheckers) {
       checker.check(context);
     }
     final StringBuilder result = new StringBuilder();
-    List<Row> rows = SparderEnv.getSparkSession().sql(sql).collectAsList();
-    rows.forEach(row -> result.append(row.get(0).toString() + "\n"));
+    List<Row> rows = SparderEnv.getSparkSession().sql(request.getSql()).collectAsList();
+    rows.forEach(row -> result.append(row.get(0).toString()).append("\n"));
+    if (context.isLogicalViewCommand()) {
+      /**
+       * Request MUST be handled by global owner node.
+       */
+      switch (context.getCommandType()) {
+        case REPLACE_LOGICAL_VIEW:
+        case CREATE_LOGICAL_VIEW:
+          saveLogicalView(context);
+          break;
+        case DROP_LOGICAL_VIEW:
+          dropLogicalView(context);
+          break;
+        default:
+          break;
+      }
+      EventBusFactory.getInstance().postAsync(new LogicalViewBroadcastNotifier());
+    }
     return result.toString();
   }
 
-  public List<List<String>> pluginsDescription(String project) {
-    if (!KylinConfig.getInstanceFromEnv().getDDLEnabled()) {
+  public List<List<String>> pluginsDescription(String project, String pageType) {
+    if (!KylinConfig.getInstanceFromEnv().isDDLEnabled()) {
       throw new KylinException(DDL_CHECK_ERROR, "DDL function has not been turned on.");
     }
+    LogicalViewLoader.checkConfigIfNeed();
     List<String> descriptionEN = Lists.newArrayList();
     List<String> descriptionCN = Lists.newArrayList();
     for (DDLCheck checker : ddlChecks) {
-      String[] description = checker.description(project);
+      String[] description = checker.description(project, pageType);
       descriptionEN.addAll(Arrays.asList(description[0].split("\n")));
       descriptionCN.addAll(Arrays.asList(description[1].split("\n")));
     }
     return Lists.newArrayList(descriptionEN, descriptionCN);
   }
+
+  private void saveLogicalView(DDLCheckContext context) {
+    EnhancedUnitOfWork.doInTransactionWithCheckAndRetry(() -> {
+      LogicalViewManager manager = LogicalViewManager.getInstance(KylinConfig.getInstanceFromEnv());
+      LogicalView logicalView = new LogicalView(context.getLogicalViewName(), context.getSql(), context.getUserName(),
+          context.getProject());
+      manager.update(logicalView);
+      return null;
+    }, UnitOfWork.GLOBAL_UNIT);
+    LogicalViewLoader.loadView(context.getLogicalViewName(), false, SparderEnv.getSparkSession());
+  }
+
+  private void dropLogicalView(DDLCheckContext context) {
+    EnhancedUnitOfWork.doInTransactionWithCheckAndRetry(() -> {
+      LogicalViewManager manager = LogicalViewManager.getInstance(KylinConfig.getInstanceFromEnv());
+      manager.delete(context.getLogicalViewName());
+      return null;
+    }, UnitOfWork.GLOBAL_UNIT);
+    LogicalViewLoader.unloadView(context.getLogicalViewName(), SparderEnv.getSparkSession());
+  }
+
+  public List<LogicalView> listAll(String project, String tableName) {
+    List<LogicalView> logicalViews = LogicalViewManager.getInstance(KylinConfig.getInstanceFromEnv()).list();
+    if (StringUtils.isNotBlank(tableName)) {
+      logicalViews = logicalViews.stream()
+          .filter(table -> table.getTableName().toLowerCase().contains(tableName.toLowerCase()))
+          .collect(Collectors.toList());
+    }
+    logicalViews.forEach(table -> {
+      if (!table.getCreatedProject().equalsIgnoreCase(project)) {
+        table.setCreatedSql("***");
+      }
+    });
+    return logicalViews;
+  }
 }
\ No newline at end of file
diff --git a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableExtService.java b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableExtService.java
index e64d75264b..595f5c0b6f 100644
--- a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableExtService.java
+++ b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableExtService.java
@@ -18,6 +18,7 @@
 
 package org.apache.kylin.rest.service;
 
+import static org.apache.kylin.common.exception.ServerErrorCode.INVALID_LOGICAL_VIEW;
 import static org.apache.kylin.common.exception.ServerErrorCode.INVALID_TABLE_NAME;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.EXCLUDED_TABLE_REQUEST_NOT_ALLOWED;
 
@@ -49,6 +50,8 @@ import org.apache.kylin.metadata.model.TableExtDesc;
 import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
 import org.apache.kylin.metadata.project.NProjectManager;
 import org.apache.kylin.metadata.project.ProjectInstance;
+import org.apache.kylin.metadata.view.LogicalView;
+import org.apache.kylin.metadata.view.LogicalViewManager;
 import org.apache.kylin.rest.aspect.Transaction;
 import org.apache.kylin.rest.request.S3TableExtInfo;
 import org.apache.kylin.rest.request.TableExclusionRequest;
@@ -69,6 +72,7 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Component;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
@@ -91,7 +95,7 @@ public class TableExtService extends BasicService {
         Map<String, Set<String>> dbTableMap = classifyDbTables(dbTables, isDb);
         Set<String> existDbs = Sets.newHashSet(tableService.getSourceDbNames(project));
         LoadTableResponse tableResponse = new LoadTableResponse();
-        List<Pair<TableDesc, TableExtDesc>> loadTables = Lists.newArrayList();
+        List<Pair<TableDesc, TableExtDesc>> canLoadTables = Lists.newArrayList();
         for (Map.Entry<String, Set<String>> entry : dbTableMap.entrySet()) {
             String db = entry.getKey();
             Set<String> tableSet = entry.getValue();
@@ -114,16 +118,46 @@ public class TableExtService extends BasicService {
             }
 
             String[] tables = existTables.stream().map(table -> db + "." + table).toArray(String[]::new);
-            if (tables.length > 0)
-                loadTables.addAll(extractTableMeta(tables, project, tableResponse));
+            if (tables.length > 0) {
+                filterAccessTables(tables, canLoadTables, tableResponse, project);
+            }
         }
-        if (!loadTables.isEmpty()) {
-            return innerLoadTables(project, tableResponse, loadTables);
+        if (!canLoadTables.isEmpty()) {
+            return innerLoadTables(project, tableResponse, canLoadTables);
         }
 
         return tableResponse;
     }
 
+    @VisibleForTesting
+    public void filterAccessTables(
+        String[] tables, List<Pair<TableDesc, TableExtDesc>> canLoadTables,
+        LoadTableResponse tableResponse, String project) throws Exception {
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
+        List<Pair<TableDesc, TableExtDesc>> toLoadTables = extractTableMeta(tables, project, tableResponse);
+        if (!config.isDDLLogicalViewEnabled()) {
+            canLoadTables.addAll(toLoadTables);
+            return;
+        }
+        LogicalViewManager viewManager = LogicalViewManager.getInstance(config);
+        toLoadTables.stream()
+            .filter(table -> !table.getFirst().isLogicalView())
+            .forEach(canLoadTables::add);
+        toLoadTables.stream()
+            .filter(table -> table.getFirst().isLogicalView())
+            .forEach(table -> {
+                String tableName = table.getFirst().getName();
+                LogicalView logicalTable = viewManager.get(tableName);
+                String viewProject = logicalTable != null ? logicalTable.getCreatedProject() : "unknown";
+                if (logicalTable != null && viewProject.equalsIgnoreCase(project)) {
+                    canLoadTables.add(table);
+                } else {
+                    throw new KylinException(INVALID_LOGICAL_VIEW, MsgPicker.getMsg()
+                        .getLoadLogicalViewError(tableName, viewProject));
+                }
+            });
+    }
+
     public LoadTableResponse loadAWSTablesCompatibleCrossAccount(List<S3TableExtInfo> s3TableExtInfoList,
             String project) throws Exception {
         aclEvaluate.checkProjectWritePermission(project);
diff --git a/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala b/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala
new file mode 100644
index 0000000000..82935a66e5
--- /dev/null
+++ b/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala
@@ -0,0 +1,247 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kylin.rest.ddl
+
+import java.security.PrivilegedExceptionAction
+
+import scala.collection.convert.ImplicitConversions.{`collection AsScalaIterable`, `map AsScala`}
+import scala.collection.mutable.ListBuffer
+
+import org.apache.commons.lang3.StringUtils
+import org.apache.hadoop.security.UserGroupInformation
+import org.apache.kylin.common.msg.MsgPicker
+import org.apache.kylin.common.KylinConfig
+import org.apache.kylin.engine.spark.source.NSparkMetadataExplorer
+import org.apache.kylin.metadata.model.NTableMetadataManager
+import org.apache.kylin.metadata.view.LogicalViewManager
+import org.apache.kylin.rest.security.KerberosLoginManager
+import org.slf4j.LoggerFactory
+
+import org.apache.spark.ddl.{DDLCheck, DDLCheckContext, DDLConstant}
+import org.apache.spark.sql.SparderEnv
+import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.execution.{CommandExecutionMode, CommandResultExec, SparkPlan}
+import org.apache.spark.sql.execution.command._
+
+/**
+ * DDL pre-check for view commands: Hive views and Kylin Logical Views.
+ * Parses the submitted SQL with Spark, then validates naming conventions,
+ * database access, project ownership and the request's "restrict" constraint
+ * before the statement is allowed to run.
+ *
+ * NOTE(review): throwException appears to come from the DDLCheck base and is
+ * assumed to always throw — confirm, since the code after each call relies on it.
+ */
+class ViewCheck extends DDLCheck {
+  private val LOGGER = LoggerFactory.getLogger(classOf[ViewCheck])
+  // Hive views created through this entry point must use this name prefix.
+  private val PREFIX = "KE_"
+  private val SOURCE = new NSparkMetadataExplorer
+  // Spark reports a logical view's viewType as GlobalTempView; matched in check().
+  private val LOGICAL_VIEW_TYPE = "GlobalTempView"
+
+  /**
+   * Builds the user-facing usage description for the given page type.
+   * Returns a two-element array: English text first, then Chinese.
+   */
+  override def description(project: String, pageType: String): Array[String] = {
+    val config = KylinConfig.getInstanceFromEnv
+    val cnDescription: StringBuilder = new StringBuilder
+    val enDescription: StringBuilder = new StringBuilder
+    val databasesHasAccess: StringBuilder = new StringBuilder
+    val syntaxSupport: StringBuilder = new StringBuilder
+    if ("hive".equalsIgnoreCase(pageType)) {
+      // Hive page: list the databases the project's UGI can actually reach.
+      databasesHasAccess.append(listAllDatabasesHasAccess(project))
+      syntaxSupport.append("`create view`,`alter view`,`drop view`,`show create table`")
+      cnDescription.append("Hive View 名称需要以 `KE_` 开头\n")
+      enDescription.append("Hive View name should start with `KE_`\n")
+      cnDescription
+        .append(s"仅支持 ${syntaxSupport} 语法\n")
+      enDescription
+        .append(s"Only supports ${syntaxSupport} syntax\n")
+      cnDescription.append(s"仅支持创建 Hive View 在如下数据库: ${databasesHasAccess}\n")
+      enDescription.append(s"Only supports creating Hive Views in ${databasesHasAccess}\n")
+    } else {
+      // Logical-view page: creation targets the configured logical-view DB implicitly.
+      cnDescription.append(s"创建不要加 database 名称,系统自动创建到 ${config.getDDLLogicalViewDB} 库中,"
+        + s"删除要加 ${config.getDDLLogicalViewDB} 库名称 \n")
+      enDescription.append(s"Creating does not require adding database, it is automatically created in"
+        + s" ${config.getDDLLogicalViewDB} ,\n deleting should add ${config.getDDLLogicalViewDB} database")
+      syntaxSupport.append(" `create logical view`, `drop logical view` ")
+      cnDescription
+        .append(s"仅支持 ${syntaxSupport} 语法\n")
+      enDescription
+        .append(s"Only supports ${syntaxSupport} syntax\n")
+    }
+
+
+    Array(
+      enDescription.toString(),
+      cnDescription.toString())
+  }
+
+  override def priority: Int = DDLConstant.VIEW_RULE_PRIORITY
+
+  /**
+   * Parses the SQL (execution is skipped via CommandExecutionMode.SKIP) and
+   * dispatches on the resulting command node to validate it. Also tags the
+   * context with the detected command type and logical-view name, enforces the
+   * feature toggles, and finally applies the request-level restrict.
+   */
+  override def check(context: DDLCheckContext): Unit = {
+    LOGGER.info("start checking DDL view name")
+    val sql = context.getSql
+    val project = context.getProject
+    val spark = SparderEnv.getSparkSession
+    val config = KylinConfig.getInstanceFromEnv
+    var plan: SparkPlan = null
+    try {
+      val logicalPlan = spark.sessionState.sqlParser.parsePlan(sql)
+      plan = stripRootCommandResult(spark.sessionState.executePlan(
+        logicalPlan, CommandExecutionMode.SKIP).executedPlan)
+    } catch {
+      // Any parse/analysis failure is surfaced as a DDL error; assumes
+      // throwException aborts, otherwise `plan` would be null below.
+      case e: Exception => throwException(e.getMessage)
+    }
+    plan match {
+      case ExecutedCommandExec(view: CreateViewCommand) =>
+        // GlobalTempView marks a Kylin Logical View; anything else is a Hive view.
+        if (view.viewType != null && LOGICAL_VIEW_TYPE.equalsIgnoreCase(view.viewType.toString())) {
+          val viewManager = LogicalViewManager.getInstance(config)
+          val originTable = viewManager.get(view.name.table)
+          if (view.replace) {
+            // REPLACE requires the view to exist and to belong to this project.
+            context.setCommandType(DDLConstant.REPLACE_LOGICAL_VIEW)
+            if (originTable == null) {
+              throwException("View name is not found.")
+            }
+            if (!originTable.getCreatedProject.equals(context.getProject)) {
+              throwException(s"View can only modified in Project ${originTable.getCreatedProject}")
+            }
+          } else {
+            // CREATE requires the name to be globally unused.
+            if (originTable != null) {
+              throwException(MsgPicker.getMsg.getDDLViewNameDuplicateError)
+            }
+            context.setCommandType(DDLConstant.CREATE_LOGICAL_VIEW)
+          }
+          context.setLogicalViewName(view.name.table)
+        } else {
+          checkHiveTableName(view.name, context)
+          checkHiveDatabaseAccess(view.name, project, context)
+        }
+      case ExecutedCommandExec(view: ShowCreateTableCommand) =>
+        checkHiveTableName(view.table, context)
+        checkHiveDatabaseAccess(view.table, project, context)
+      case ExecutedCommandExec(table: DropTableCommand) =>
+        // Only views may be dropped through this endpoint, never tables.
+        if (!table.isView) {
+          throwException(MsgPicker.getMsg.getDDLDropError)
+        }
+        val tableIdentifier = table.tableName
+        // A drop inside the configured logical-view DB is a logical-view drop.
+        if (config.isDDLLogicalViewEnabled && tableIdentifier.database.isDefined
+          && config.getDDLLogicalViewDB.equalsIgnoreCase(tableIdentifier.database.get)) {
+          context.setCommandType(DDLConstant.DROP_LOGICAL_VIEW)
+          context.setLogicalViewName(tableIdentifier.table)
+          checkLogicalViewNotUsed(tableIdentifier, context.getProject)
+        } else {
+          checkHiveTableName(table.tableName, context)
+          checkHiveDatabaseAccess(table.tableName, project, context)
+        }
+      case ExecutedCommandExec(table: AlterViewAsCommand) =>
+        checkHiveTableName(table.name, context)
+        checkHiveDatabaseAccess(table.name, project, context)
+      case _ => throwException(MsgPicker.getMsg.getDDLUnSupported)
+    }
+    // Feature toggles are enforced after the command type has been classified.
+    if (context.isLogicalViewCommand && !config.isDDLLogicalViewEnabled) {
+      throwException("Logical View operation is not supported, please turn on config.")
+    }
+    if (context.isHiveCommand && !config.isDDLHiveEnabled) {
+      throwException("Hive operation is not supported, please turn on config.")
+    }
+    checkCommandRestrict(context)
+  }
+
+  // Rejects Hive view names that do not start with the KE_ prefix.
+  // NOTE(review): the `context` parameter is currently unused here.
+  private def checkHiveTableName(identifier: TableIdentifier, context: DDLCheckContext): Unit = {
+    if (!identifier.table.toUpperCase().startsWith(PREFIX)) {
+      throwException(MsgPicker.getMsg.getDDLViewNameError)
+    }
+  }
+
+  /**
+   * Verifies the project's UGI can access the target Hive database.
+   * Skipped entirely outside Kerberos environments (except unit tests).
+   * Also forbids using the reserved logical-view database from Hive DDL.
+   */
+  def checkHiveDatabaseAccess(identifier: TableIdentifier, project: String, context: DDLCheckContext): Unit = {
+    if (!context.isKerberosEnv && !KylinConfig.getInstanceFromEnv.isUTEnv) {
+      return
+    }
+    if (identifier.database.isEmpty) {
+      throwException("Missing Databases name in sql.")
+    }
+    val database = identifier.database.get
+    if (database.equalsIgnoreCase(KylinConfig.getInstanceFromEnv.getDDLLogicalViewDB)) {
+      throwException("Shouldn't use logical view database.")
+    }
+    // Access check runs under the project's Kerberos identity.
+    val ugi = KerberosLoginManager.getInstance.getProjectUGI(project)
+    val hasDatabaseAccess = ugi.doAs(new PrivilegedExceptionAction[Boolean]() {
+      override def run(): Boolean = {
+        SOURCE.checkDatabaseHadoopAccessFast(database)
+      }
+    })
+    if (!hasDatabaseAccess) {
+      throwException(MsgPicker.getMsg.getDDLDatabaseAccessnDenied)
+    }
+  }
+
+  /**
+   * Before dropping a logical view: fails if the view is still loaded as a
+   * table in this project, or if it was created by a different project.
+   */
+  def checkLogicalViewNotUsed(tableIdentity: TableIdentifier, project: String): Unit = {
+    val config = KylinConfig.getInstanceFromEnv
+    val db = config.getDDLLogicalViewDB
+    val viewName = tableIdentity.table
+    val tableManager = NTableMetadataManager.getInstance(config, project)
+    tableManager.listTablesGroupBySchema.
+      filter(dbInfo => dbInfo._1.equalsIgnoreCase(db))
+      .foreach(dbInfo => {
+        val isExist = dbInfo._2.exists(loadTable => {
+          loadTable.getName.equalsIgnoreCase(viewName)
+        })
+        if (isExist) {
+          throwException(MsgPicker.getMsg.getDDLLogicalViewHasUsed(viewName, project))
+        }
+      })
+    val viewManager = LogicalViewManager.getInstance(config)
+    val originTable = viewManager.get(viewName)
+    if (originTable != null && !originTable.getCreatedProject.equalsIgnoreCase(project)) {
+      throwException(s"View can only modified in Project ${originTable.getCreatedProject}")
+    }
+  }
+
+  /**
+   * Lists all Hive databases the project's UGI can access, comma-joined.
+   * The per-database access probe is skipped when Kerberos is disabled.
+   */
+  def listAllDatabasesHasAccess(project: String): String = {
+    val shouldCheckKerberosAccess = UserGroupInformation.isSecurityEnabled
+    val ugi = KerberosLoginManager.getInstance.getProjectUGI(project)
+    val databasesHasAccess = ugi.doAs(new PrivilegedExceptionAction[List[String]]() {
+      override def run(): List[String] = {
+        val databases = SOURCE.listDatabases()
+        val databasesHasAccess = ListBuffer[String]()
+        databases.forEach(db => {
+          if (!shouldCheckKerberosAccess || SOURCE.checkDatabaseHadoopAccessFast(db)) {
+            databasesHasAccess.append(db)
+          }
+        })
+        databasesHasAccess.toList
+      }
+    })
+    databasesHasAccess.mkString(",")
+  }
+
+  /**
+   * Enforces the request's "restrict" field: the parsed command type must match
+   * the restricted family (logical view / replace logical view / hive view).
+   * A blank restrict means no constraint; an unknown value is rejected.
+   */
+  def checkCommandRestrict(context: DDLCheckContext): Unit = {
+    val restrict = context.getRestrict
+    val commandType = context.getCommandType
+    if (StringUtils.isBlank(restrict)) {
+      return
+    }
+    if (restrict.equalsIgnoreCase(DDLConstant.LOGICAL_VIEW) && !(commandType.equalsIgnoreCase(DDLConstant.CREATE_LOGICAL_VIEW)
+      || commandType.equalsIgnoreCase(DDLConstant.DROP_LOGICAL_VIEW))) {
+      throwException(
+        MsgPicker.getMsg.getDDLRestrictError("`create logical view`, `drop logical view`"))
+    } else if (restrict.equalsIgnoreCase(DDLConstant.REPLACE_LOGICAL_VIEW) && !restrict.equalsIgnoreCase(commandType)) {
+      throwException(
+        MsgPicker.getMsg.getDDLRestrictError("`replace logical view`"))
+    } else if (restrict.equalsIgnoreCase(DDLConstant.HIVE_VIEW) && !restrict.equalsIgnoreCase(commandType)) {
+      throwException(
+        MsgPicker.getMsg.getDDLRestrictError("`create view`,`alter view`,`drop view`,`show create table`"))
+    } else if (!(restrict.equalsIgnoreCase(DDLConstant.HIVE_VIEW) || restrict.equalsIgnoreCase(DDLConstant.LOGICAL_VIEW) ||
+      restrict.equalsIgnoreCase(DDLConstant.REPLACE_LOGICAL_VIEW))) {
+      throwException(s"illegal restrict: ${restrict}")
+    }
+  }
+
+  // Unwraps Spark's CommandResultExec wrapper so matching sees the command node.
+  private def stripRootCommandResult(executedPlan: SparkPlan) = executedPlan match {
+    case CommandResultExec(_, plan, _) => plan
+    case other => other
+  }
+}
diff --git a/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java b/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java
index d3a1ceba42..8e00df161f 100644
--- a/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java
+++ b/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java
@@ -17,17 +17,34 @@
  */
 package org.apache.kylin.rest.service;
 
+import static org.apache.spark.ddl.DDLConstant.HIVE_VIEW;
+import static org.apache.spark.ddl.DDLConstant.LOGICAL_VIEW;
+import static org.apache.spark.ddl.DDLConstant.REPLACE_LOGICAL_VIEW;
+import static org.apache.spark.sql.LogicalViewLoader.LOADED_LOGICAL_VIEWS;
+import static org.awaitility.Awaitility.await;
+
 import java.util.List;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
 
+import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.msg.MsgPicker;
+import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
-import org.apache.kylin.engine.spark.source.NSparkMetadataExplorer;
+import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.metadata.model.NTableMetadataManager;
+import org.apache.kylin.metadata.model.TableDesc;
+import org.apache.kylin.metadata.model.TableExtDesc;
+import org.apache.kylin.metadata.view.LogicalView;
+import org.apache.kylin.metadata.view.LogicalViewManager;
 import org.apache.kylin.rest.constant.Constant;
+import org.apache.kylin.rest.request.ViewRequest;
+import org.apache.kylin.rest.response.LoadTableResponse;
+
+import org.apache.spark.sql.LogicalViewLoader;
 import org.apache.spark.sql.SparderEnv;
 import org.apache.spark.sql.SparkSession;
 import org.apache.spark.sql.common.SparkDDLTestUtils;
-import org.apache.spark.sql.internal.SQLConf;
 
 import org.junit.After;
 import org.junit.AfterClass;
@@ -41,28 +58,61 @@ import org.springframework.security.core.Authentication;
 import org.springframework.security.core.context.SecurityContextHolder;
 import org.springframework.test.util.ReflectionTestUtils;
 
+import com.google.common.collect.Lists;
+
 public class SparkDDLTest extends NLocalFileMetadataTestCase {
   @Autowired
   private final SparkDDLService ddlService = Mockito.spy(new SparkDDLService());
   @Autowired
+  private final TableExtService tableExtService = Mockito.spy(new TableExtService());
+  @Autowired
+  private final TableService tableService = Mockito.spy(new TableService());
+  @Autowired
   private final IUserGroupService userGroupService = Mockito.spy(NUserGroupService.class);
+  private final Integer LOGICAL_VIEW_CATCHUP_INTERVAL = 3;
 
+  // Hive View
   private static final String CREATEVIEW_SQL1 =
       "CREATE VIEW `ssb`.`ke_order_view` as select LO_ORDERKEY, C_NAME from SSB.p_lineorder t1 left join "
           + "SSB. CUSTOMER t2 on t1. LO_CUSTKEY = t2. C_CUSTKEY";
   private static final String CREATEVIEW_SQL2 = "CREATE VIEW `ssb`.`order_view2` as select * from SSB.P_LINEORDER";
   private static final String CREATEVIEW_SQL3 = "CREATE VIEW `ssb`.`order_view2` as abc";
-  private static final String CREATEVIEW_SQL4 = "CREATE VIEW `ssb`.`order_view2` as select * from SSB.unload_table";
+  private static final String CREATEVIEW_SQL4 = "CREATE VIEW `ssb`.`ke_order_view2` as select * from SSB.unload_table";
   private static final String CREATEVIEW_SQL5 = "CREATE VIEW `ke_order_view2` as select * from SSB.P_LINEORDER";
   private static final String CREATEVIEW_SQL6 = "abc";
+  private static final String CREATEVIEW_SQL7 = "CREATE VIEW `ssb`.`ke_order_view3` as select * from SSB.P_LINEORDER";
   private static final String ALTERVIEW_SQL =
       "alter view `ssb`.`ke_order_view` as select lo_orderkey from SSB.P_LINEORDER";
   private static final String DROPVIEW_SQL1 = "drop view `ssb`.`ke_order_view`";
   private static final String DROPVIEW_SQL2 = "drop table `ssb`.`ke_table1`";
   private static final String DROPVIEW_SQL3 = "drop table `ssb`.`ke_order_view`";
   private static final String DROPVIEW_SQL4 = "drop table `ke_table2`";
+
   private static final String SHOWVIEW_SQL = "show create table ssb.ke_order_view";
 
+  // Logical View
+  private static final String CREATE_LOGICAL_VIEW_SQL1 = "CREATE LOGICAL VIEW  "
+      + "logical_view_table1  AS select * from SSB.P_LINEORDER";
+  private static final String CREATE_LOGICAL_VIEW_SQL2 = "CREATE LOGICAL VIEW  "
+      + "logical_view_table2  AS select * from SSB.P_LINEORDER";
+  private static final String CREATE_LOGICAL_VIEW_SQL3 = "CREATE LOGICAL VIEW  "
+      + "logical_view_table3  AS select * from SSB.P_LINEORDER";
+  private static final String CREATE_LOGICAL_VIEW_SQL4 = "CREATE LOGICAL VIEW  "
+      + "logical_view_table4  AS select * from SSB.P_LINEORDER";
+  private static final String CREATE_LOGICAL_VIEW_SQL5 = "CREATE LOGICAL VIEW  "
+      + "logical_view_table5  AS select * from SSB.P_LINEORDER";
+  private static final String REPLACE_LOGICAL_VIEW_SQL1 = "REPLACE LOGICAL VIEW  "
+      + "logical_view_no_exist  AS select * from SSB.Customer";
+  private static final String REPLACE_LOGICAL_VIEW_SQL2 = "REPLACE LOGICAL VIEW  "
+      + "logical_view_table2  AS select * from SSB.Customer";
+  private static final String DROP_LOGICAL_VIEW_SQL1 = "drop LOGICAL VIEW KYLIN_LOGICAL_VIEW.logical_view_table1";
+  private static final String DROP_LOGICAL_VIEW_SQL2 = "drop LOGICAL VIEW KYLIN_LOGICAL_VIEW.logical_view_table3";
+  private static final String SELECT_LOGICAL_VIEW_SQL = "select * from KYLIN_LOGICAL_VIEW.logical_view_table3";
+
+  // DDL Config
+  private static final String DDL_HIVE_CONFIG = "kylin.source.ddl.hive.enabled";
+  private static final String DDL_LOGICAL_VIEW_CONFIG = "kylin.source.ddl.logical-view.enabled";
+
   @AfterClass
   public static void tearDownResource() {
     staticCleanupTestMetadata();
@@ -71,9 +121,12 @@ public class SparkDDLTest extends NLocalFileMetadataTestCase {
   @Before
   public void setup() {
     createTestMetadata();
-    Authentication authentication = new TestingAuthenticationToken("ADMIN", "ADMIN", Constant.ROLE_ADMIN);
+    Authentication authentication = new TestingAuthenticationToken("ADMIN",
+        "ADMIN", Constant.ROLE_ADMIN);
     SecurityContextHolder.getContext().setAuthentication(authentication);
     ReflectionTestUtils.setField(ddlService, "userGroupService", userGroupService);
+    ReflectionTestUtils.setField(tableExtService, "tableService", tableService);
+    getTestConfig().setProperty(DDL_LOGICAL_VIEW_CONFIG, "true");
   }
 
   @After
@@ -84,63 +137,178 @@ public class SparkDDLTest extends NLocalFileMetadataTestCase {
   @Test
   public void testDDL() throws Exception {
     try {
-      assertKylinExeption(
-          () ->
-              ddlService.executeDDLSql("ssb", CREATEVIEW_SQL5),
-          "DDL function has not been turned on.");
-
-      getTestConfig().setProperty("kylin.source.ddl.enabled", "true");
-      NTableMetadataManager tableManager = NTableMetadataManager.getInstance(getTestConfig(), "ssb");
       SparkDDLTestUtils.prepare();
-      ddlService.executeDDLSql("ssb", CREATEVIEW_SQL5);
-      assertKylinExeption(
-          () ->
-              ddlService.executeDDLSql("ssb", CREATEVIEW_SQL2),
-          MsgPicker.getMsg().getDDLViewNameError());
-      assertKylinExeption(() ->
-          ddlService.executeDDLSql("ssb", CREATEVIEW_SQL3), "");
-      assertKylinExeption(() ->
-          ddlService.executeDDLSql("ssb", CREATEVIEW_SQL6), "");
-      assertKylinExeption(
-          () ->
-              ddlService.executeDDLSql("ssb", CREATEVIEW_SQL4),
-          MsgPicker.getMsg().getDDLTableNotLoad("SSB.unload_table"));
       assertKylinExeption(
           () ->
-              ddlService.executeDDLSql("ssb", DROPVIEW_SQL2),
-          MsgPicker.getMsg().getDDLDropError());
+              ddlService.executeSQL(new ViewRequest("ssb", CREATEVIEW_SQL1, HIVE_VIEW)),
+          "Hive operation is not supported, please turn on config.");
+      getTestConfig().setProperty(DDL_HIVE_CONFIG, "true");
+
+      getTestConfig().setProperty(DDL_LOGICAL_VIEW_CONFIG, "false");
       assertKylinExeption(
           () ->
-              ddlService.executeDDLSql("ssb", DROPVIEW_SQL3), "");
+              ddlService.executeSQL(new ViewRequest("ssb", CREATE_LOGICAL_VIEW_SQL1, LOGICAL_VIEW)),
+          "Logical View operation is not supported, please turn on config.");
+      getTestConfig().setProperty(DDL_LOGICAL_VIEW_CONFIG, "true");
+      getTestConfig().setProperty(
+          "kylin.source.ddl.logical-view-catchup-interval", LOGICAL_VIEW_CATCHUP_INTERVAL.toString());
 
-      ddlService.executeDDLSql("ssb", CREATEVIEW_SQL1);
-      ddlService.executeDDLSql("ssb", ALTERVIEW_SQL);
-      String createViewSQL = ddlService.executeDDLSql("ssb", SHOWVIEW_SQL);
-      Assert.assertTrue(createViewSQL.contains("ke_order_view"));
-      ddlService.executeDDLSql("ssb", DROPVIEW_SQL1);
+      testHiveDDL();
 
+      testLogicalView();
+
+      // User authentication
       Authentication authentication = new TestingAuthenticationToken("USER1",
           "", Constant.GROUP_ALL_USERS);
       SecurityContextHolder.getContext().setAuthentication(authentication);
       assertKylinExeption(
           () ->
-              ddlService.executeDDLSql("ssb", CREATEVIEW_SQL1),
-          MsgPicker.getMsg().getDDLPermissionDenied());
-
-      // ddl description
-      List<List<String>> description = ddlService.pluginsDescription("ssb");
-      Assert.assertTrue(description.size() > 0);
-
-
-      // read/write cluster
-      SparderEnv.getSparkSession().sessionState().conf()
-          .setConf(SQLConf.HIVE_SPECIFIC_FS_LOCATION(), "hdfs://read");
-      new NSparkMetadataExplorer().checkDatabaseHadoopAccessFast("SSB");
+              ddlService.executeSQL(new ViewRequest("ssb", CREATEVIEW_SQL1)),
+          "");
     } finally {
+      LogicalViewLoader.stopScheduler();
+      LOADED_LOGICAL_VIEWS.clear();
       SparkSession spark = SparderEnv.getSparkSession();
       if (spark != null && !spark.sparkContext().isStopped()) {
         spark.stop();
       }
     }
   }
+
+  private void testHiveDDL() throws Exception {
+    // Hive View DDL
+    ddlService.executeSQL(new ViewRequest("ssb", CREATEVIEW_SQL5, HIVE_VIEW));
+    assertKylinExeption(
+        () ->
+            ddlService.executeSQL(new ViewRequest("ssb", CREATEVIEW_SQL2, HIVE_VIEW)),
+        MsgPicker.getMsg().getDDLViewNameError());
+    assertKylinExeption(() ->
+        ddlService.executeSQL(new ViewRequest("ssb", CREATEVIEW_SQL3)), "");
+    assertKylinExeption(() ->
+        ddlService.executeSQL(new ViewRequest("ssb", CREATEVIEW_SQL6)), "");
+    assertKylinExeption(
+        () ->
+            ddlService.executeSQL(new ViewRequest("ssb", CREATEVIEW_SQL4)),
+        MsgPicker.getMsg().getDDLTableNotLoad("SSB.unload_table"));
+    assertKylinExeption(
+        () ->
+            ddlService.executeSQL(new ViewRequest("ssb", DROPVIEW_SQL2)),
+        MsgPicker.getMsg().getDDLDropError());
+    assertKylinExeption(
+        () ->
+            ddlService.executeSQL(new ViewRequest("ssb", DROPVIEW_SQL3)), "");
+    ddlService.executeSQL(new ViewRequest("ssb", CREATEVIEW_SQL1, HIVE_VIEW));
+    ddlService.executeSQL(new ViewRequest("ssb", ALTERVIEW_SQL, HIVE_VIEW));
+    String createViewSQL = ddlService.executeSQL(new ViewRequest("ssb", SHOWVIEW_SQL, HIVE_VIEW));
+    Assert.assertTrue(createViewSQL.contains("ke_order_view"));
+    ddlService.executeSQL(new ViewRequest("ssb", DROPVIEW_SQL1, HIVE_VIEW));
+  }
+
+  private void testLogicalView() throws Exception {
+    SparkSession spark = SparderEnv.getSparkSession();
+    // Logical View DDL
+    assertRuntimeExeption(() -> spark.sql(SELECT_LOGICAL_VIEW_SQL), "");
+    ddlService.executeSQL(new ViewRequest("ssb", CREATE_LOGICAL_VIEW_SQL1, LOGICAL_VIEW));
+    ddlService.executeSQL(new ViewRequest("ssb", CREATE_LOGICAL_VIEW_SQL2, LOGICAL_VIEW));
+    ddlService.executeSQL(new ViewRequest("demo", CREATE_LOGICAL_VIEW_SQL4, LOGICAL_VIEW));
+    ddlService.executeSQL(new ViewRequest("demo", CREATE_LOGICAL_VIEW_SQL5, LOGICAL_VIEW));
+    assertKylinExeption(
+        () ->
+            ddlService.executeSQL(new ViewRequest("ssb", CREATE_LOGICAL_VIEW_SQL2, LOGICAL_VIEW)),
+        MsgPicker.getMsg().getDDLViewNameDuplicateError());
+    ddlService.executeSQL(new ViewRequest("ssb", DROP_LOGICAL_VIEW_SQL1, LOGICAL_VIEW));
+    NTableMetadataManager tableMgr = NTableMetadataManager.getInstance(getTestConfig(), "ssb");
+    TableDesc tableDesc = tableMgr.getTableDesc("SSB.P_LINEORDER");
+    String s = JsonUtil.writeValueAsIndentString(tableDesc);
+    TableDesc newTable = JsonUtil.readValue(s, TableDesc.class);
+    newTable.setName("KYLIN_LOGICAL_VIEW.logical_view_table3");
+    newTable.setMvcc(-1);
+    tableMgr.saveSourceTable(newTable);
+    assertKylinExeption(
+        () ->
+            ddlService.executeSQL(new ViewRequest("ssb", DROP_LOGICAL_VIEW_SQL2, LOGICAL_VIEW)), "");
+    assertKylinExeption(
+        () ->
+            ddlService.executeSQL(new ViewRequest("ssb", REPLACE_LOGICAL_VIEW_SQL1)),
+        "View name is not found.");
+    ddlService.executeSQL(new ViewRequest("ssb", REPLACE_LOGICAL_VIEW_SQL2));
+    assertKylinExeption(
+        () ->
+            ddlService.executeSQL(new ViewRequest("demo", REPLACE_LOGICAL_VIEW_SQL2)),
+        "View can only modified in Project");
+
+    // Request Restrict
+    assertKylinExeption(
+        () ->
+            ddlService.executeSQL(new ViewRequest("ssb", CREATE_LOGICAL_VIEW_SQL3, HIVE_VIEW)),
+        "Only support");
+    assertKylinExeption(
+        () ->
+            ddlService.executeSQL(new ViewRequest("ssb", CREATEVIEW_SQL7, LOGICAL_VIEW)),
+        "Only support");
+    assertKylinExeption(
+        () ->
+            ddlService.executeSQL(new ViewRequest("ssb", CREATE_LOGICAL_VIEW_SQL3, REPLACE_LOGICAL_VIEW)),
+        "Only support");
+
+    // Logical View Loader
+    LogicalViewLoader.initScheduler();
+    LogicalViewManager manager = LogicalViewManager.getInstance(KylinConfig.getInstanceFromEnv());
+    LogicalView logicalView = new LogicalView("logical_view_table3", CREATE_LOGICAL_VIEW_SQL3,
+        "ADMIN", "SSB");
+    manager.update(logicalView);
+    await().atMost(LOGICAL_VIEW_CATCHUP_INTERVAL * 10, TimeUnit.SECONDS).until(() -> {
+      try {
+        if (!LOADED_LOGICAL_VIEWS.containsKey("LOGICAL_VIEW_TABLE5")) {
+          return false;
+        }
+        spark.sql(SELECT_LOGICAL_VIEW_SQL);
+      } catch (Exception e) {
+        return false;
+      }
+      return true;
+    });
+    manager.delete("logical_view_table5");
+    await().atMost(LOGICAL_VIEW_CATCHUP_INTERVAL * 5, TimeUnit.SECONDS).until(() -> {
+      if (LOADED_LOGICAL_VIEWS.containsKey("LOGICAL_VIEW_TABLE5")) {
+        return false;
+      }
+      return true;
+    });
+    // DDL description
+    List<List<String>> description = ddlService.pluginsDescription("ssb", "hive");
+    Assert.assertEquals(4, description.get(0).size());
+
+    description = ddlService.pluginsDescription("ssb", "logic");
+    Assert.assertEquals(3, description.get(0).size());
+
+    // view list in project
+    List<LogicalView> logicalViewsInProject = ddlService.listAll("ssb", "");
+    List<LogicalView> logicalViewsInProject2 = ddlService.listAll("ssb", "table2");
+    Assert.assertEquals(3, logicalViewsInProject.size());
+    Assert.assertEquals(1, logicalViewsInProject2.size());
+    LogicalView confidentialTable =
+        logicalViewsInProject.stream().filter(table -> table.getCreatedProject().equals("demo")).collect(
+            Collectors.toList()).get(0);
+    LogicalView noConfidentialTable =
+        logicalViewsInProject.stream().filter(table -> table.getCreatedProject().equals("ssb")).collect(
+            Collectors.toList()).get(0);
+    Assert.assertEquals("***", confidentialTable.getCreatedSql());
+    Assert.assertNotEquals("***", noConfidentialTable.getCreatedSql());
+
+    // load table list
+    String[] failedLoadTables = {"KYLIN_LOGICAL_VIEW.logical_view_table2",
+                                 "KYLIN_LOGICAL_VIEW.logical_view_table3",
+                                 "KYLIN_LOGICAL_VIEW.logical_view_table4"};
+    String[] successLoadTables = {"KYLIN_LOGICAL_VIEW.logical_view_table2",
+                                  "KYLIN_LOGICAL_VIEW.logical_view_table3"};
+    List<Pair<TableDesc, TableExtDesc>> canLoadTables = Lists.newArrayList();
+    LoadTableResponse tableResponse = new LoadTableResponse();
+    tableExtService.filterAccessTables(successLoadTables, canLoadTables, tableResponse, "ssb");
+    Assert.assertEquals(2, canLoadTables.size());
+    assertKylinExeption(
+        () ->
+            tableExtService.filterAccessTables(failedLoadTables, canLoadTables, tableResponse, "ssb"),
+        "Can't load table");
+  }
 }
diff --git a/src/datasource-service/src/test/scala/org/apache/spark/sql/common/SparkDDLTestUtils.scala b/src/datasource-service/src/test/scala/org/apache/spark/sql/common/SparkDDLTestUtils.scala
index 22af162c2a..ae46511dbb 100644
--- a/src/datasource-service/src/test/scala/org/apache/spark/sql/common/SparkDDLTestUtils.scala
+++ b/src/datasource-service/src/test/scala/org/apache/spark/sql/common/SparkDDLTestUtils.scala
@@ -23,7 +23,7 @@ import org.apache.commons.io.FileUtils
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars
 import org.apache.kylin.common.util.RandomUtil
 
-import org.apache.spark.sql.{SparderEnv, SparkSession}
+import org.apache.spark.sql.{KylinSession, SparderEnv, SparkSession}
 import org.apache.spark.SparkConf
 import org.apache.spark.sql.internal.StaticSQLConf
 import org.apache.spark.util.Utils
@@ -49,6 +49,7 @@ object SparkDDLTestUtils {
     }
     conf.set(ConfVars.SCRATCHDIR.varname, scratchDir.toString)
     conf.set("spark.hadoop.javax.jdo.option.ConnectionURL", "jdbc:derby:memory:db;create=true")
+    KylinSession.initLogicalViewConfig(conf)
     val sparkSession = SparkSession.builder
       .master("local[2]")
       .appName(getClass.getSimpleName)
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/LogicalViewTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/LogicalViewTest.java
new file mode 100644
index 0000000000..df98c689ab
--- /dev/null
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/LogicalViewTest.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.newten;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
+import org.apache.kylin.job.engine.JobEngineConfig;
+import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.metadata.cube.model.NDataflow;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
+import org.apache.kylin.metadata.model.SegmentRange;
+import org.apache.kylin.util.ExecAndComp;
+
+import org.apache.spark.sql.SparderEnv;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import com.google.common.collect.Sets;
+
+/**
+ * Integration test for querying through a logical view: builds two index
+ * layouts on a model over KYLIN_LOGICAL_VIEW.LOGICAL_VIEW_TABLE and compares
+ * an inner-join query's results against Spark (CompareLevel.NONE).
+ */
+public class LogicalViewTest extends NLocalWithSparkSessionTest {
+
+  private NDataflowManager dfMgr = null;
+
+  @Before
+  public void setup() throws Exception {
+    // Enable the logical-view DDL feature before the test metadata is loaded.
+    overwriteSystemProp("kylin.source.ddl.logical-view.enabled", "true");
+    this.createTestMetadata("src/test/resources/ut_meta/logical_view");
+    dfMgr = NDataflowManager.getInstance(getTestConfig(), getProject());
+    NDefaultScheduler scheduler = NDefaultScheduler.getInstance(getProject());
+    scheduler.init(new JobEngineConfig(KylinConfig.getInstanceFromEnv()));
+    // The index build below needs a running job scheduler; fail fast otherwise.
+    if (!scheduler.hasStarted()) {
+      throw new RuntimeException("scheduler has not been started");
+    }
+  }
+
+  @After
+  public void after() throws Exception {
+    NDefaultScheduler.destroyInstance();
+    cleanupTestMetadata();
+  }
+
+  @Override
+  public String getProject() {
+    return "logical_view";
+  }
+
+  @Test
+  public void testLogicalView() throws Exception {
+    // Dataflow id from the ut_meta/logical_view fixture metadata.
+    String dfID = "451e127a-b684-1474-744b-c9afc14378af";
+    NDataflow dataflow = dfMgr.getDataflow(dfID);
+    populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
+    // Build one table-index layout (20000000001) and one agg layout (1)
+    // over the full (infinite) segment range.
+    indexDataConstructor.buildIndex(dfID, SegmentRange.TimePartitionedSegmentRange.createInfinite(),
+        Sets.newHashSet(
+            dataflow.getIndexPlan().getLayoutEntity(20000000001L),
+            dataflow.getIndexPlan().getLayoutEntity(1L)), true);
+
+    List<Pair<String, String>> query = new ArrayList<>();
+    // Join the logical view with a regular Hive table to prove both resolve.
+    String sql1 = "select t1.C_CUSTKEY from KYLIN_LOGICAL_VIEW.LOGICAL_VIEW_TABLE t1"
+        + " INNER JOIN SSB.CUSTOMER t2 on t1.C_CUSTKEY = t2.C_CUSTKEY ";
+    query.add(Pair.newPair("logical_view", sql1));
+    ExecAndComp.execAndCompare(
+        query, getProject(), ExecAndComp.CompareLevel.NONE, "inner");
+  }
+}
diff --git a/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/_global/logical_view/LOGICAL_VIEW_TABLE b/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/_global/logical_view/LOGICAL_VIEW_TABLE
new file mode 100644
index 0000000000..f0815c6eb2
--- /dev/null
+++ b/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/_global/logical_view/LOGICAL_VIEW_TABLE
@@ -0,0 +1,9 @@
+{
+  "uuid" : "087dc284-caae-c99c-12f4-b7bfb562746d",
+  "last_modified" : 0,
+  "create_time" : 1670222960426,
+  "version" : "4.0.0.0",
+  "table_name" : "LOGICAL_VIEW_TABLE",
+  "created_sql" : "CREATE OR REPLACE GLOBAL TEMPORARY VIEW  logical_view_table  AS select * from CUSTOMER",
+  "created_user" : "ADMIN"
+}
\ No newline at end of file
diff --git a/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/_global/project/logical_view.json b/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/_global/project/logical_view.json
new file mode 100644
index 0000000000..8a990718fd
--- /dev/null
+++ b/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/_global/project/logical_view.json
@@ -0,0 +1,6 @@
+{
+  "uuid": "705bab00-9e78-c6e7-a78b-cb94150b7108",
+  "override_kylin_properties": {
+    "kylin.query.slowquery-detect-interval": "4"
+  }
+}
\ No newline at end of file
diff --git a/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/dataflow/451e127a-b684-1474-744b-c9afc14378af.json b/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/dataflow/451e127a-b684-1474-744b-c9afc14378af.json
new file mode 100644
index 0000000000..173c8f2f7f
--- /dev/null
+++ b/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/dataflow/451e127a-b684-1474-744b-c9afc14378af.json
@@ -0,0 +1,18 @@
+{
+  "uuid" : "451e127a-b684-1474-744b-c9afc14378af",
+  "last_modified" : 1670312998313,
+  "create_time" : 1670235129348,
+  "version" : "4.0.0.0",
+  "status" : "OFFLINE",
+  "last_status" : null,
+  "cost" : 50,
+  "query_hit_count" : 3,
+  "last_query_time" : 1670294540150,
+  "layout_query_hit_count" : {
+    "20000000001" : {
+      "1670169600000" : 1,
+      "1670256000000" : 2
+    }
+  },
+  "segments" : []
+}
\ No newline at end of file
diff --git a/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/index_plan/451e127a-b684-1474-744b-c9afc14378af.json b/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/index_plan/451e127a-b684-1474-744b-c9afc14378af.json
new file mode 100644
index 0000000000..af1645dfb8
--- /dev/null
+++ b/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/index_plan/451e127a-b684-1474-744b-c9afc14378af.json
@@ -0,0 +1,63 @@
+{
+  "uuid" : "451e127a-b684-1474-744b-c9afc14378af",
+  "last_modified" : 1670235329564,
+  "create_time" : 1670235129269,
+  "version" : "4.0.0.0",
+  "description" : null,
+  "rule_based_index" : null,
+  "indexes" : [ {
+    "id" : 0,
+    "dimensions" : [ 1, 3, 6 ],
+    "measures" : [ 100000 ],
+    "layouts" : [ {
+      "id" : 1,
+      "name" : null,
+      "owner" : null,
+      "col_order" : [ 1, 3, 6, 100000 ],
+      "shard_by_columns" : [ ],
+      "partition_by_columns" : [ ],
+      "sort_by_columns" : [ ],
+      "storage_type" : 20,
+      "update_time" : 1670235129280,
+      "manual" : false,
+      "auto" : false,
+      "base" : true,
+      "draft_version" : null,
+      "index_range" : null
+    } ],
+    "next_layout_offset" : 2
+  }, {
+    "id" : 20000000000,
+    "dimensions" : [ 1, 3, 6 ],
+    "measures" : [ ],
+    "layouts" : [ {
+      "id" : 20000000001,
+      "name" : null,
+      "owner" : null,
+      "col_order" : [ 1, 3, 6 ],
+      "shard_by_columns" : [ ],
+      "partition_by_columns" : [ ],
+      "sort_by_columns" : [ ],
+      "storage_type" : 20,
+      "update_time" : 1670235129281,
+      "manual" : false,
+      "auto" : false,
+      "base" : true,
+      "draft_version" : null,
+      "index_range" : null
+    } ],
+    "next_layout_offset" : 2
+  } ],
+  "override_properties" : { },
+  "to_be_deleted_indexes" : [ ],
+  "auto_merge_time_ranges" : null,
+  "retention_range" : 0,
+  "engine_type" : 80,
+  "next_aggregation_index_id" : 10000,
+  "next_table_index_id" : 20000010000,
+  "agg_shard_by_columns" : [ ],
+  "extend_partition_columns" : [ ],
+  "layout_bucket_num" : { },
+  "approved_additional_recs" : 0,
+  "approved_removal_recs" : 0
+}
\ No newline at end of file
diff --git a/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/model_desc/451e127a-b684-1474-744b-c9afc14378af.json b/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/model_desc/451e127a-b684-1474-744b-c9afc14378af.json
new file mode 100644
index 0000000000..ff12760c35
--- /dev/null
+++ b/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/model_desc/451e127a-b684-1474-744b-c9afc14378af.json
@@ -0,0 +1,149 @@
+{
+  "uuid" : "451e127a-b684-1474-744b-c9afc14378af",
+  "last_modified" : 1670312998311,
+  "create_time" : 1670235128486,
+  "version" : "4.0.0.0",
+  "alias" : "logical_view",
+  "owner" : "ADMIN",
+  "config_last_modifier" : null,
+  "config_last_modified" : 0,
+  "description" : "",
+  "fact_table" : "KYLIN_LOGICAL_VIEW.LOGICAL_VIEW_TABLE",
+  "fact_table_alias" : null,
+  "management_type" : "MODEL_BASED",
+  "join_tables" : [ {
+    "table" : "SSB.CUSTOMER",
+    "kind" : "LOOKUP",
+    "alias" : "CUSTOMER",
+    "join" : {
+      "type" : "INNER",
+      "primary_key" : [ "CUSTOMER.C_CUSTKEY" ],
+      "foreign_key" : [ "LOGICAL_VIEW_TABLE.C_CUSTKEY" ],
+      "non_equi_join_condition" : null,
+      "primary_table" : null,
+      "foreign_table" : null
+    },
+    "flattenable" : "flatten",
+    "join_relation_type" : "MANY_TO_ONE"
+  } ],
+  "filter_condition" : "",
+  "partition_desc" : null,
+  "capacity" : "MEDIUM",
+  "segment_config" : {
+    "auto_merge_enabled" : null,
+    "auto_merge_time_ranges" : null,
+    "volatile_range" : null,
+    "retention_range" : null,
+    "create_empty_segment_enabled" : false
+  },
+  "data_check_desc" : null,
+  "semantic_version" : 1,
+  "storage_type" : 0,
+  "model_type" : "BATCH",
+  "all_named_columns" : [ {
+    "id" : 0,
+    "name" : "C_REGION_LOGICAL_VIEW_TABLE",
+    "column" : "LOGICAL_VIEW_TABLE.C_REGION"
+  }, {
+    "id" : 1,
+    "name" : "C_NAME",
+    "column" : "LOGICAL_VIEW_TABLE.C_NAME",
+    "status" : "DIMENSION"
+  }, {
+    "id" : 2,
+    "name" : "C_NATION_LOGICAL_VIEW_TABLE",
+    "column" : "LOGICAL_VIEW_TABLE.C_NATION"
+  }, {
+    "id" : 3,
+    "name" : "C_CUSTKEY",
+    "column" : "LOGICAL_VIEW_TABLE.C_CUSTKEY",
+    "status" : "DIMENSION"
+  }, {
+    "id" : 4,
+    "name" : "C_CITY_LOGICAL_VIEW_TABLE",
+    "column" : "LOGICAL_VIEW_TABLE.C_CITY"
+  }, {
+    "id" : 5,
+    "name" : "C_MKTSEGMENT_LOGICAL_VIEW_TABLE",
+    "column" : "LOGICAL_VIEW_TABLE.C_MKTSEGMENT"
+  }, {
+    "id" : 6,
+    "name" : "C_ADDRESS",
+    "column" : "LOGICAL_VIEW_TABLE.C_ADDRESS",
+    "status" : "DIMENSION"
+  }, {
+    "id" : 7,
+    "name" : "C_PHONE_LOGICAL_VIEW_TABLE",
+    "column" : "LOGICAL_VIEW_TABLE.C_PHONE"
+  }, {
+    "id" : 8,
+    "name" : "C_ADDRESS",
+    "column" : "CUSTOMER.C_ADDRESS"
+  }, {
+    "id" : 9,
+    "name" : "C_NATION_CUSTOMER",
+    "column" : "CUSTOMER.C_NATION"
+  }, {
+    "id" : 10,
+    "name" : "C_CUSTKEY",
+    "column" : "CUSTOMER.C_CUSTKEY"
+  }, {
+    "id" : 11,
+    "name" : "C_MKTSEGMENT_CUSTOMER",
+    "column" : "CUSTOMER.C_MKTSEGMENT"
+  }, {
+    "id" : 12,
+    "name" : "C_NAME",
+    "column" : "CUSTOMER.C_NAME"
+  }, {
+    "id" : 13,
+    "name" : "C_CITY_CUSTOMER",
+    "column" : "CUSTOMER.C_CITY"
+  }, {
+    "id" : 14,
+    "name" : "C_REGION_CUSTOMER",
+    "column" : "CUSTOMER.C_REGION"
+  }, {
+    "id" : 15,
+    "name" : "C_PHONE_CUSTOMER",
+    "column" : "CUSTOMER.C_PHONE"
+  } ],
+  "all_measures" : [ {
+    "name" : "COUNT_ALL",
+    "function" : {
+      "expression" : "COUNT",
+      "parameters" : [ {
+        "type" : "constant",
+        "value" : "1"
+      } ],
+      "returntype" : "bigint"
+    },
+    "column" : null,
+    "comment" : null,
+    "id" : 100000,
+    "type" : "NORMAL",
+    "internal_ids" : [ ]
+  } ],
+  "recommendations_count" : 0,
+  "computed_columns" : [ ],
+  "canvas" : {
+    "coordinate" : {
+      "LOGICAL_VIEW_TABLE" : {
+        "x" : 472.88889567057294,
+        "y" : 203.50000169542102,
+        "width" : 220.0,
+        "height" : 200.0
+      },
+      "CUSTOMER" : {
+        "x" : 871.7777845594618,
+        "y" : 222.1111043294271,
+        "width" : 220.0,
+        "height" : 200.0
+      }
+    },
+    "zoom" : 9.0
+  },
+  "multi_partition_desc" : null,
+  "multi_partition_key_mapping" : null,
+  "fusion_id" : null
+}
\ No newline at end of file
diff --git a/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/table/KYLIN_LOGICAL_VIEW.LOGICAL_VIEW_TABLE.json b/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/table/KYLIN_LOGICAL_VIEW.LOGICAL_VIEW_TABLE.json
new file mode 100644
index 0000000000..22cf5c8cb2
--- /dev/null
+++ b/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/table/KYLIN_LOGICAL_VIEW.LOGICAL_VIEW_TABLE.json
@@ -0,0 +1,68 @@
+{
+  "uuid" : "b8eb24fb-e917-b575-279b-5c5e2d9192a2",
+  "last_modified" : 0,
+  "create_time" : 1670235049605,
+  "version" : "4.0.0.0",
+  "name" : "LOGICAL_VIEW_TABLE",
+  "columns" : [ {
+    "id" : "1",
+    "name" : "C_CUSTKEY",
+    "datatype" : "integer",
+    "case_sensitive_name" : "c_custkey"
+  }, {
+    "id" : "2",
+    "name" : "C_NAME",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "c_name"
+  }, {
+    "id" : "3",
+    "name" : "C_ADDRESS",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "c_address"
+  }, {
+    "id" : "4",
+    "name" : "C_CITY",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "c_city"
+  }, {
+    "id" : "5",
+    "name" : "C_NATION",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "c_nation"
+  }, {
+    "id" : "6",
+    "name" : "C_REGION",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "c_region"
+  }, {
+    "id" : "7",
+    "name" : "C_PHONE",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "c_phone"
+  }, {
+    "id" : "8",
+    "name" : "C_MKTSEGMENT",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "c_mktsegment"
+  } ],
+  "source_type" : 9,
+  "table_type" : "VIEW",
+  "top" : false,
+  "increment_loading" : false,
+  "last_snapshot_path" : null,
+  "last_snapshot_size" : 0,
+  "snapshot_last_modified" : 0,
+  "query_hit_count" : 0,
+  "partition_column" : null,
+  "snapshot_partitions" : { },
+  "snapshot_partitions_info" : { },
+  "snapshot_total_rows" : 0,
+  "snapshot_partition_col" : null,
+  "selected_snapshot_partition_col" : null,
+  "temp_snapshot_path" : null,
+  "snapshot_has_broken" : false,
+  "database" : "KYLIN_LOGICAL_VIEW",
+  "transactional" : false,
+  "rangePartition" : false,
+  "partition_desc" : null
+}
\ No newline at end of file
diff --git a/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/table/SSB.CUSTOMER.json b/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/table/SSB.CUSTOMER.json
new file mode 100644
index 0000000000..f91ad24976
--- /dev/null
+++ b/src/kylin-it/src/test/resources/ut_meta/logical_view/metadata/logical_view/table/SSB.CUSTOMER.json
@@ -0,0 +1,68 @@
+{
+  "uuid" : "8d62cc4a-a60f-8c43-1d12-5c16ff91a5b0",
+  "last_modified" : 0,
+  "create_time" : 1670222704146,
+  "version" : "4.0.0.0",
+  "name" : "CUSTOMER",
+  "columns" : [ {
+    "id" : "1",
+    "name" : "C_CUSTKEY",
+    "datatype" : "integer",
+    "case_sensitive_name" : "c_custkey"
+  }, {
+    "id" : "2",
+    "name" : "C_NAME",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "c_name"
+  }, {
+    "id" : "3",
+    "name" : "C_ADDRESS",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "c_address"
+  }, {
+    "id" : "4",
+    "name" : "C_CITY",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "c_city"
+  }, {
+    "id" : "5",
+    "name" : "C_NATION",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "c_nation"
+  }, {
+    "id" : "6",
+    "name" : "C_REGION",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "c_region"
+  }, {
+    "id" : "7",
+    "name" : "C_PHONE",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "c_phone"
+  }, {
+    "id" : "8",
+    "name" : "C_MKTSEGMENT",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "c_mktsegment"
+  } ],
+  "source_type" : 9,
+  "table_type" : "EXTERNAL",
+  "top" : false,
+  "increment_loading" : false,
+  "last_snapshot_path" : null,
+  "last_snapshot_size" : 0,
+  "snapshot_last_modified" : 0,
+  "query_hit_count" : 0,
+  "partition_column" : null,
+  "snapshot_partitions" : { },
+  "snapshot_partitions_info" : { },
+  "snapshot_total_rows" : 0,
+  "snapshot_partition_col" : null,
+  "selected_snapshot_partition_col" : null,
+  "temp_snapshot_path" : null,
+  "snapshot_has_broken" : false,
+  "database" : "SSB",
+  "transactional" : false,
+  "rangePartition" : false,
+  "partition_desc" : null
+}
\ No newline at end of file
diff --git a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/SparkDDLController.java b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/SparkDDLController.java
index 38c2e9d003..aa4ebbd9a6 100644
--- a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/SparkDDLController.java
+++ b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/SparkDDLController.java
@@ -23,9 +23,13 @@ import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLI
 import java.util.List;
 
 import org.apache.kylin.common.exception.KylinException;
-import org.apache.kylin.rest.request.ViewDDLRequest;
+import org.apache.kylin.metadata.view.LogicalView;
+import org.apache.kylin.rest.request.ViewRequest;
 import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.service.SparkDDLService;
+
+import org.apache.spark.sql.LogicalViewLoader;
+
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.web.bind.annotation.GetMapping;
 import org.springframework.web.bind.annotation.PostMapping;
@@ -50,19 +54,40 @@ public class SparkDDLController extends NBasicController {
   @ApiOperation(value = "ddl")
   @PostMapping(value = "/ddl")
   @ResponseBody
-  public EnvelopeResponse<String> executeSQL(@RequestBody ViewDDLRequest request)
-      throws Exception {
-    checkProjectName(request.getProject());
-    String result = sparkDDLService.executeDDLSql(request.getProject(), request.getSql());
+  public EnvelopeResponse<String> executeSQL(@RequestBody ViewRequest request) {
+    String project = checkProjectName(request.getDdlProject());
+    request.setDdlProject(project);
+    String result = sparkDDLService.executeSQL(request);
     return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, result, "");
   }
 
   @ApiOperation(value = "ddl_description")
   @GetMapping(value = "/ddl/description")
   @ResponseBody
-  public EnvelopeResponse<List<List<String>>> description(@RequestParam("project") String project) {
-    checkProjectName(project);
+  public EnvelopeResponse<List<List<String>>> description(
+      @RequestParam("project") String project,
+      @RequestParam("page_type") String pageType) {
+    project = checkProjectName(project);
     return new EnvelopeResponse<>(KylinException.CODE_SUCCESS,
-        sparkDDLService.pluginsDescription(project), "");
+        sparkDDLService.pluginsDescription(project, pageType), "");
+  }
+
+  @ApiOperation(value = "ddl_sync")
+  @GetMapping(value = "/ddl/sync")
+  @ResponseBody
+  public EnvelopeResponse<String> sync() {
+    LogicalViewLoader.syncViewFromDB();
+    return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, "", "");
+  }
+
+  @ApiOperation(value = "ddl_desc")
+  @GetMapping(value = "/ddl/view_list")
+  @ResponseBody
+  public EnvelopeResponse<List<LogicalView>> list(
+      @RequestParam("project") String project,
+      @RequestParam(value = "table", required = false, defaultValue = "") String tableName) {
+    project = checkProjectName(project);
+    List<LogicalView> logicalViews = sparkDDLService.listAll(project, tableName);
+    return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, logicalViews, "");
   }
 }
diff --git a/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/SparkDDLControllerTest.java b/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/SparkDDLControllerTest.java
index f7d9938989..be48e03b26 100644
--- a/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/SparkDDLControllerTest.java
+++ b/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/SparkDDLControllerTest.java
@@ -20,18 +20,27 @@ package org.apache.kylin.rest.controller;
 
 import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON;
 
+import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
 import org.apache.kylin.rest.constant.Constant;
-import org.apache.kylin.rest.request.ViewDDLRequest;
+import org.apache.kylin.rest.request.ViewRequest;
 import org.apache.kylin.rest.service.SparkDDLService;
+import org.apache.kylin.rest.util.SpringContext;
+
+import org.apache.spark.sql.LogicalViewLoader;
+
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
+import org.junit.runner.RunWith;
 import org.mockito.InjectMocks;
 import org.mockito.Mock;
 import org.mockito.Mockito;
 import org.mockito.MockitoAnnotations;
+import org.powermock.api.mockito.PowerMockito;
+import org.powermock.core.classloader.annotations.PrepareForTest;
+import org.powermock.modules.junit4.PowerMockRunner;
 import org.springframework.http.MediaType;
 import org.springframework.security.authentication.TestingAuthenticationToken;
 import org.springframework.security.core.Authentication;
@@ -41,6 +50,8 @@ import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
 import org.springframework.test.web.servlet.result.MockMvcResultMatchers;
 import org.springframework.test.web.servlet.setup.MockMvcBuilders;
 
+@RunWith(PowerMockRunner.class)
+@PrepareForTest({LogicalViewLoader.class, SpringContext.class, UserGroupInformation.class})
 public class SparkDDLControllerTest extends NLocalFileMetadataTestCase {
   private MockMvc mockMvc;
 
@@ -53,7 +64,11 @@ public class SparkDDLControllerTest extends NLocalFileMetadataTestCase {
   private final Authentication authentication = new TestingAuthenticationToken("ADMIN", "ADMIN", Constant.ROLE_ADMIN);
 
   @Before
-  public void setup() {
+  public void setup() throws Exception {
+    PowerMockito.mockStatic(UserGroupInformation.class);
+    UserGroupInformation userGroupInformation = Mockito.mock(UserGroupInformation.class);
+    PowerMockito.when(UserGroupInformation.getCurrentUser()).thenReturn(userGroupInformation);
+    PowerMockito.mockStatic(LogicalViewLoader.class);
     MockitoAnnotations.initMocks(this);
     mockMvc = MockMvcBuilders.standaloneSetup(ddlController)
         .defaultRequest(MockMvcRequestBuilders.get("/")).build();
@@ -68,20 +83,30 @@ public class SparkDDLControllerTest extends NLocalFileMetadataTestCase {
   }
 
   @Test
-  public void testExecuteSQL() throws Exception {
-    ViewDDLRequest request = new ViewDDLRequest();
-    request.setProject("ssb");
+  public void testDDL() throws Exception {
+    ViewRequest request = new ViewRequest();
+    request.setDdlProject("ssb");
 
     mockMvc.perform(MockMvcRequestBuilders.post("/api/spark_source/ddl")
         .contentType(MediaType.APPLICATION_JSON)
         .content(JsonUtil.writeValueAsString(request))
         .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON)))
         .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
+
+    mockMvc.perform(MockMvcRequestBuilders.get("/api/spark_source/ddl/sync")
+        .contentType(MediaType.APPLICATION_JSON)
+        .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON)))
+        .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
+
+    mockMvc.perform(MockMvcRequestBuilders.get("/api/spark_source/ddl/view_list?project=ssb")
+        .contentType(MediaType.APPLICATION_JSON)
+        .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON)))
+        .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
   }
 
   @Test
   public void testDescription() throws Exception {
-    mockMvc.perform(MockMvcRequestBuilders.get("/api/spark_source/ddl/description?project=ssb")
+    mockMvc.perform(MockMvcRequestBuilders.get("/api/spark_source/ddl/description?project=ssb&page_type=hive")
         .contentType(MediaType.APPLICATION_JSON)
         .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON)))
         .andExpect(MockMvcResultMatchers.status().isOk()).andReturn();
diff --git a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/application/SparkApplication.java b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/application/SparkApplication.java
index 5bc553d353..7239720986 100644
--- a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/application/SparkApplication.java
+++ b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/application/SparkApplication.java
@@ -18,9 +18,28 @@
 
 package org.apache.kylin.engine.spark.application;
 
-import com.google.common.collect.Maps;
-import org.apache.kylin.engine.spark.job.SegmentBuildJob;
-import lombok.val;
+import static org.apache.kylin.engine.spark.job.StageType.WAITE_FOR_RESOURCE;
+import static org.apache.kylin.engine.spark.utils.SparkConfHelper.COUNT_DISTICT;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.InetAddress;
+import java.net.URI;
+import java.net.UnknownHostException;
+import java.nio.charset.Charset;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicReference;
+
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
@@ -57,14 +76,18 @@ import org.apache.kylin.metadata.model.NDataModel;
 import org.apache.kylin.metadata.model.NDataModelManager;
 import org.apache.kylin.metadata.model.NTableMetadataManager;
 import org.apache.kylin.metadata.model.PartitionDesc;
+import org.apache.kylin.metadata.view.LogicalView;
+import org.apache.kylin.metadata.view.LogicalViewManager;
 import org.apache.kylin.query.pushdown.SparkSubmitter;
 import org.apache.kylin.query.util.PushDownUtil;
+
 import org.apache.spark.SparkConf;
 import org.apache.spark.SparkException;
 import org.apache.spark.application.NoRetryException;
 import org.apache.spark.launcher.SparkLauncher;
 import org.apache.spark.sql.KylinSession;
 import org.apache.spark.sql.KylinSession$;
+import org.apache.spark.sql.LogicalViewLoader;
 import org.apache.spark.sql.SparderEnv;
 import org.apache.spark.sql.SparkSession;
 import org.apache.spark.sql.SparkSessionExtensions;
@@ -74,32 +97,18 @@ import org.apache.spark.sql.catalyst.rules.Rule;
 import org.apache.spark.sql.execution.datasource.AlignmentTableStats;
 import org.apache.spark.sql.hive.utils.ResourceDetectUtils;
 import org.apache.spark.util.Utils;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-import scala.runtime.AbstractFunction1;
-import scala.runtime.BoxedUnit;
 
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.InetAddress;
-import java.net.URI;
-import java.net.UnknownHostException;
-import java.nio.charset.Charset;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Objects;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.concurrent.atomic.AtomicReference;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.Maps;
 
-import static org.apache.kylin.engine.spark.job.StageType.WAITE_FOR_RESOURCE;
-import static org.apache.kylin.engine.spark.utils.SparkConfHelper.COUNT_DISTICT;
+import org.apache.kylin.engine.spark.job.SegmentBuildJob;
+import lombok.val;
+
+import scala.runtime.AbstractFunction1;
+import scala.runtime.BoxedUnit;
 
 public abstract class SparkApplication implements Application {
     private static final Logger logger = LoggerFactory.getLogger(SparkApplication.class);
@@ -392,6 +401,7 @@ public abstract class SparkApplication implements Application {
     }
 
     protected void extraInit() {
+        loadLogicalView();
     }
 
     public void extraDestroy() {
@@ -645,4 +655,25 @@ public abstract class SparkApplication implements Application {
                 atomicUnreachableSparkMaster);
     }
 
+    @VisibleForTesting
+    public void loadLogicalView() {
+        if (!config.isDDLLogicalViewEnabled()) {
+            return;
+        }
+        String dataflowId = getParam(NBatchConstants.P_DATAFLOW_ID);
+        String tableName = getParam(NBatchConstants.P_TABLE_NAME);
+        LogicalViewManager viewManager = LogicalViewManager.getInstance(config);
+
+        if (StringUtils.isNotBlank(dataflowId)) {
+            viewManager
+                .findLogicalViewsInModel(project, dataflowId)
+                .forEach(view -> LogicalViewLoader.loadView(view.getTableName(), true, ss));
+        }
+        if (StringUtils.isNotBlank(tableName)) {
+            LogicalView view = viewManager.findLogicalViewInProject(getProject(), tableName);
+            if (view != null) {
+                LogicalViewLoader.loadView(view.getTableName(), true, ss);
+            }
+        }
+    }
 }
diff --git a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NResourceDetectStep.java b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NResourceDetectStep.java
index 03bdd919b1..9f92018aef 100644
--- a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NResourceDetectStep.java
+++ b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NResourceDetectStep.java
@@ -61,7 +61,9 @@ public class NResourceDetectStep extends NSparkExecutable {
     protected Set<String> getMetadataDumpList(KylinConfig config) {
         final AbstractExecutable parent = getParent();
         if (parent instanceof DefaultExecutable) {
-            return ((DefaultExecutable) parent).getMetadataDumpList(config);
+            Set<String> dumpList = ((DefaultExecutable) parent).getMetadataDumpList(config);
+            dumpList.addAll(getLogicalViewMetaDumpList(config));
+            return dumpList;
         }
         throw new IllegalStateException("Unsupported resource detect for non chained executable!");
     }
diff --git a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NSparkCubingStep.java b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NSparkCubingStep.java
index 5a5b8f57f2..62ea6ec1e7 100644
--- a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NSparkCubingStep.java
+++ b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NSparkCubingStep.java
@@ -33,6 +33,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.Arrays;
+import java.util.LinkedHashSet;
 import java.util.Set;
 
 @NoArgsConstructor
@@ -51,8 +52,11 @@ public class NSparkCubingStep extends NSparkExecutable {
 
     @Override
     protected Set<String> getMetadataDumpList(KylinConfig config) {
+        Set<String> dumpList = new LinkedHashSet<>();
         NDataflow df = NDataflowManager.getInstance(config, getProject()).getDataflow(getDataflowId());
-        return df.collectPrecalculationResource();
+        dumpList.addAll(df.collectPrecalculationResource());
+        dumpList.addAll(getLogicalViewMetaDumpList(config));
+        return dumpList;
     }
 
     @Override
diff --git a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
index 9b4f31004a..20a502ff9a 100644
--- a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
+++ b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NSparkExecutable.java
@@ -71,6 +71,8 @@ import org.apache.kylin.metadata.cube.model.NDataflowManager;
 import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
 import org.apache.kylin.metadata.project.NProjectManager;
 import org.apache.kylin.plugin.asyncprofiler.BuildAsyncProfilerSparkPlugin;
+import org.apache.kylin.metadata.view.LogicalView;
+import org.apache.kylin.metadata.view.LogicalViewManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -470,6 +472,30 @@ public class NSparkExecutable extends AbstractExecutable implements ChainedStage
         return Collections.emptySet();
     }
 
+    protected Set<String> getLogicalViewMetaDumpList(KylinConfig config) {
+        Set<String> dumpList = new LinkedHashSet<>();
+        if (!config.isDDLLogicalViewEnabled()) {
+            return dumpList;
+        }
+        String table = getParam(NBatchConstants.P_TABLE_NAME);
+        String dataflowId = getDataflowId();
+        LogicalViewManager viewManager = LogicalViewManager.getInstance(config);
+        if (StringUtils.isNotBlank(dataflowId)) {
+            Set<String> viewsMeta = viewManager
+                .findLogicalViewsInModel(getProject(), getDataflowId())
+                .stream().map(LogicalView::getResourcePath)
+                .collect(Collectors.toSet());
+            dumpList.addAll(viewsMeta);
+        }
+        if (StringUtils.isNotBlank(table)) {
+            LogicalView logicalView = viewManager.findLogicalViewInProject(getProject(), table);
+            if (logicalView != null) {
+                dumpList.add(logicalView.getResourcePath());
+            }
+        }
+        return dumpList;
+    }
+
     void attachMetadataAndKylinProps(KylinConfig config) throws IOException {
         attachMetadataAndKylinProps(config, false);
     }
diff --git a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NSparkMergingStep.java b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NSparkMergingStep.java
index 15e3e0edf8..e8ca27be7b 100644
--- a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NSparkMergingStep.java
+++ b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NSparkMergingStep.java
@@ -35,6 +35,7 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.util.Arrays;
+import java.util.LinkedHashSet;
 import java.util.Set;
 
 @NoArgsConstructor
@@ -52,8 +53,11 @@ public class NSparkMergingStep extends NSparkExecutable {
 
     @Override
     protected Set<String> getMetadataDumpList(KylinConfig config) {
+        Set<String> dumpList = new LinkedHashSet<>();
         NDataflow df = NDataflowManager.getInstance(config, getProject()).getDataflow(getDataflowId());
-        return df.collectPrecalculationResource();
+        dumpList.addAll(df.collectPrecalculationResource());
+        dumpList.addAll(getLogicalViewMetaDumpList(config));
+        return dumpList;
     }
 
     @Override
diff --git a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NSparkSnapshotBuildingStep.java b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NSparkSnapshotBuildingStep.java
index 8ef8b1699a..f20eca8a2f 100644
--- a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NSparkSnapshotBuildingStep.java
+++ b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NSparkSnapshotBuildingStep.java
@@ -73,6 +73,7 @@ public class NSparkSnapshotBuildingStep extends NSparkExecutable {
         }
         dumpList.add(tableDesc.getResourcePath());
         dumpList.add(projectInstance.getResourcePath());
+        dumpList.addAll(getLogicalViewMetaDumpList(config));
 
         return dumpList;
     }
diff --git a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NTableSamplingJob.java b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NTableSamplingJob.java
index 480c691a72..774b9aef86 100644
--- a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NTableSamplingJob.java
+++ b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/job/NTableSamplingJob.java
@@ -192,7 +192,7 @@ public class NTableSamplingJob extends DefaultExecutableOnTable {
             if (table != null) {
                 dumpList.add(table.getResourcePath());
             }
-
+            dumpList.addAll(getLogicalViewMetaDumpList(config));
             return dumpList;
         }
     }
diff --git a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/mockup/CsvSource.java b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/mockup/CsvSource.java
index 79e62702ca..c0be9c5f69 100644
--- a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/mockup/CsvSource.java
+++ b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/mockup/CsvSource.java
@@ -19,7 +19,6 @@
 package org.apache.kylin.engine.spark.mockup;
 
 import java.io.File;
-import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
@@ -32,6 +31,9 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.engine.spark.NSparkCubingEngine.NSparkCubingSource;
+import org.apache.kylin.engine.spark.source.NSparkCubingSourceInput;
+import org.apache.kylin.engine.spark.source.NSparkMetadataExplorer;
 import org.apache.kylin.metadata.model.ColumnDesc;
 import org.apache.kylin.metadata.model.IBuildable;
 import org.apache.kylin.metadata.model.SegmentRange;
@@ -41,7 +43,7 @@ import org.apache.kylin.source.IReadableTable;
 import org.apache.kylin.source.ISampleDataDeployer;
 import org.apache.kylin.source.ISource;
 import org.apache.kylin.source.ISourceMetadataExplorer;
-import org.apache.kylin.engine.spark.NSparkCubingEngine.NSparkCubingSource;
+
 import org.apache.spark.sql.Dataset;
 import org.apache.spark.sql.Row;
 import org.apache.spark.sql.SparkSession;
@@ -71,7 +73,10 @@ public class CsvSource implements ISource {
 
                 @Override
                 public Dataset<Row> getSourceData(TableDesc table, SparkSession ss, Map<String, String> parameters) {
-
+                    if (KylinConfig.getInstanceFromEnv().getDDLLogicalViewDB()
+                        .equalsIgnoreCase(table.getDatabase())) {
+                      return new NSparkCubingSourceInput().getSourceData(table, ss, parameters);
+                    }
                     String path = new File(getUtMetaDir(), "data/" + table.getIdentity() + ".csv").getAbsolutePath();
                     ColumnDesc[] columnDescs = table.getColumns();
                     List<ColumnDesc> tblColDescs = Lists.newArrayListWithCapacity(columnDescs.length);
@@ -165,8 +170,12 @@ public class CsvSource implements ISource {
 
         @Override
         public Pair<TableDesc, TableExtDesc> loadTableMetadata(String database, String table, String prj)
-                throws IOException {
-            String resPath = KylinConfig.getInstanceFromEnv().getMetadataUrl().getIdentifier();
+            throws Exception {
+            KylinConfig config = KylinConfig.getInstanceFromEnv();
+            if (config.getDDLLogicalViewDB().equalsIgnoreCase(database)) {
+                return new NSparkMetadataExplorer().loadTableMetadata(database, table, prj);
+            }
+            String resPath = config.getMetadataUrl().getIdentifier();
             String path = resPath + "/../data/tableDesc/" + database + "." + table + ".json";
             TableDesc tableDesc = JsonUtil.readValue(new File(path), TableDesc.class);
             for (ColumnDesc column : tableDesc.getColumns()) {
diff --git a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkMetadataExplorer.java b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkMetadataExplorer.java
index 64e65d7c73..91cc8670bf 100644
--- a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkMetadataExplorer.java
+++ b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkMetadataExplorer.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.kylin.common.KapConfig;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.util.HadoopUtil;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.common.util.RandomUtil;
@@ -60,6 +61,7 @@ import com.clearspring.analytics.util.Lists;
 import com.google.common.collect.Sets;
 
 import lombok.val;
+import static org.apache.kylin.common.exception.ServerErrorCode.DDL_CHECK_ERROR;
 
 public class NSparkMetadataExplorer implements ISourceMetadataExplorer, ISampleDataDeployer, Serializable {
 
@@ -97,7 +99,19 @@ public class NSparkMetadataExplorer implements ISourceMetadataExplorer, ISampleD
     @Override
     public List<String> listDatabases() throws Exception {
         Dataset<Row> dataset = SparderEnv.getSparkSession().sql("show databases").select("namespace");
-        return dataset.collectAsList().stream().map(row -> row.getString(0)).collect(Collectors.toList());
+        List<String> databases =
+            dataset.collectAsList().stream().map(row -> row.getString(0)).collect(Collectors.toList());
+        if (KylinConfig.getInstanceFromEnv().isDDLLogicalViewEnabled()) {
+            String logicalViewDB = KylinConfig.getInstanceFromEnv().getDDLLogicalViewDB();
+            databases.forEach(db -> {
+                if(db.equalsIgnoreCase(logicalViewDB)){
+                    throw new KylinException(DDL_CHECK_ERROR, "Logical view database should not be duplicated "
+                        + "with normal hive database!!!");
+                }
+            });
+            databases.add(logicalViewDB);
+        }
+        return databases;
     }
 
     @Override
@@ -297,6 +311,10 @@ public class NSparkMetadataExplorer implements ISourceMetadataExplorer, ISampleD
     @Override
     public boolean checkDatabaseAccess(String database) throws Exception {
         boolean hiveDBAccessFilterEnable = KapConfig.getInstanceFromEnv().getDBAccessFilterEnable();
+        String viewDB = KylinConfig.getInstanceFromEnv().getDDLLogicalViewDB();
+        if (KylinConfig.getInstanceFromEnv().isDDLLogicalViewEnabled() && viewDB.equalsIgnoreCase(database)) {
+            return true;
+        }
         if (hiveDBAccessFilterEnable) {
             logger.info("Check database {} access start.", database);
             try {
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/SegmentJob.java b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/SegmentJob.java
index d4dfddbee1..0b969ed923 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/SegmentJob.java
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/SegmentJob.java
@@ -149,6 +149,7 @@ public abstract class SegmentJob extends SparkApplication {
 
     @Override
     protected void extraInit() {
+        super.extraInit();
         partialBuild = Boolean.parseBoolean(getParam(NBatchConstants.P_PARTIAL_BUILD));
         Set<String> segmentIDs = Arrays.stream(getParam(NBatchConstants.P_SEGMENT_IDS).split(COMMA))
                 .collect(Collectors.toCollection(LinkedHashSet::new));
diff --git a/src/spark-project/engine-spark/src/test/java/org/apache/kylin/engine/spark/NLocalWithSparkSessionTest.java b/src/spark-project/engine-spark/src/test/java/org/apache/kylin/engine/spark/NLocalWithSparkSessionTest.java
index 22e98dc05b..e1bc429877 100644
--- a/src/spark-project/engine-spark/src/test/java/org/apache/kylin/engine/spark/NLocalWithSparkSessionTest.java
+++ b/src/spark-project/engine-spark/src/test/java/org/apache/kylin/engine/spark/NLocalWithSparkSessionTest.java
@@ -114,6 +114,7 @@ public class NLocalWithSparkSessionTest extends NLocalFileMetadataTestCase imple
         sparkConf.set("spark.sql.parquet.mergeSchema", "true");
         sparkConf.set("spark.sql.legacy.allowNegativeScaleOfDecimal", "true");
         sparkConf.set("spark.sql.broadcastTimeout", "900");
+        sparkConf.set("spark.sql.globalTempDatabase", "KYLIN_LOGICAL_VIEW");
 
         if (!sparkConf.getOption("spark.sql.extensions").isEmpty()) {
             sparkConf.set("spark.sql.extensions",
@@ -184,7 +185,8 @@ public class NLocalWithSparkSessionTest extends NLocalFileMetadataTestCase imple
         Preconditions.checkArgument(projectInstance != null);
         for (String table : projectInstance.getTables()) {
 
-            if ("DEFAULT.STREAMING_TABLE".equals(table) || "DEFAULT.TEST_SNAPSHOT_TABLE".equals(table)) {
+            if ("DEFAULT.STREAMING_TABLE".equals(table) || "DEFAULT.TEST_SNAPSHOT_TABLE".equals(table)
+             || table.contains(kylinConfig.getDDLLogicalViewDB())) {
                 continue;
             }
 
@@ -197,7 +199,6 @@ public class NLocalWithSparkSessionTest extends NLocalFileMetadataTestCase imple
             Dataset<Row> ret = sparkSession.read().schema(schema).csv(String.format(Locale.ROOT, CSV_TABLE_DIR, table));
             ret.createOrReplaceTempView(tableDesc.getName());
         }
-
     }
 
     private static DataType convertType(org.apache.kylin.metadata.datatype.DataType type) {
diff --git a/src/spark-project/spark-ddl-plugin/src/main/java/org/apache/kylin/spark/ddl/DDLCheck.java b/src/spark-project/sparder/src/main/java/org/apache/spark/ddl/DDLCheck.java
similarity index 77%
copy from src/spark-project/spark-ddl-plugin/src/main/java/org/apache/kylin/spark/ddl/DDLCheck.java
copy to src/spark-project/sparder/src/main/java/org/apache/spark/ddl/DDLCheck.java
index 10e3e3439c..912687b424 100644
--- a/src/spark-project/spark-ddl-plugin/src/main/java/org/apache/kylin/spark/ddl/DDLCheck.java
+++ b/src/spark-project/sparder/src/main/java/org/apache/spark/ddl/DDLCheck.java
@@ -15,15 +15,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.kylin.spark.ddl;
+package org.apache.spark.ddl;
 
 import static org.apache.kylin.common.exception.ServerErrorCode.DDL_CHECK_ERROR;
 
 import org.apache.kylin.common.exception.KylinException;
 
-public interface DDLCheck {
+public interface DDLCheck extends Comparable<DDLCheck> {
 
-  default String[] description(String project) {
+  default String[] description(String project, String pageType) {
     return new String[] {"", ""};
   }
 
@@ -32,4 +32,13 @@ public interface DDLCheck {
   default void throwException(String msg) {
     throw new KylinException(DDL_CHECK_ERROR, msg);
   }
+
+  default int priority() {
+    return Integer.MAX_VALUE;
+  }
+
+  @Override
+  default int compareTo(DDLCheck other) {
+    return Integer.compare(this.priority(), other.priority());
+  }
 }
diff --git a/src/spark-project/spark-ddl-plugin/src/main/java/org/apache/kylin/spark/ddl/DDLCheckContext.java b/src/spark-project/sparder/src/main/java/org/apache/spark/ddl/DDLCheckContext.java
similarity index 54%
rename from src/spark-project/spark-ddl-plugin/src/main/java/org/apache/kylin/spark/ddl/DDLCheckContext.java
rename to src/spark-project/sparder/src/main/java/org/apache/spark/ddl/DDLCheckContext.java
index e5e6d1819f..ea1d4e750f 100644
--- a/src/spark-project/spark-ddl-plugin/src/main/java/org/apache/kylin/spark/ddl/DDLCheckContext.java
+++ b/src/spark-project/sparder/src/main/java/org/apache/spark/ddl/DDLCheckContext.java
@@ -15,21 +15,40 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.kylin.spark.ddl;
+package org.apache.spark.ddl;
 
 import java.util.Set;
 
+import lombok.Data;
+import static org.apache.spark.ddl.DDLConstant.CREATE_LOGICAL_VIEW;
+import static org.apache.spark.ddl.DDLConstant.DROP_LOGICAL_VIEW;
+import static org.apache.spark.ddl.DDLConstant.HIVE_VIEW;
+import static org.apache.spark.ddl.DDLConstant.REPLACE_LOGICAL_VIEW;
+
+@Data
 public class DDLCheckContext {
+
+  public static final int LOGICAL_VIEW_CREATE_COMMAND = 2;
+  public static final int LOGICAL_VIEW_DROP_COMMAND = 3;
+  public static final int HIVE_COMMAND = 1;
+
   private String sql;
   private String project;
   private String userName;
   private Set<String> groups;
+  private boolean kerberosEnv;
+  private String commandType = HIVE_VIEW;
+  private String logicalViewName;
+  private String restrict;
 
-  public DDLCheckContext(String sql, String project, String userName, Set<String> groups) {
+  public DDLCheckContext(String sql, String project, String restrict, String userName, Set<String> groups,
+      boolean kerberosEnv) {
     this.sql = sql;
     this.project = project;
+    this.restrict = restrict;
     this.userName = userName;
     this.groups = groups;
+    this.kerberosEnv = kerberosEnv;
   }
 
   public String getSql() {
@@ -47,4 +66,12 @@ public class DDLCheckContext {
   public Set<String> getGroups() {
     return groups;
   }
+
+  public boolean isLogicalViewCommand() {
+    return commandType.equals(REPLACE_LOGICAL_VIEW) || commandType.equals(CREATE_LOGICAL_VIEW)
+        || commandType.equals(DROP_LOGICAL_VIEW);
+  }
+  public boolean isHiveCommand() {
+    return commandType.equals(HIVE_VIEW);
+  }
 }
diff --git a/src/spark-project/spark-ddl-plugin/src/main/java/org/apache/kylin/spark/ddl/DDLCheck.java b/src/spark-project/sparder/src/main/java/org/apache/spark/ddl/DDLConstant.java
similarity index 58%
rename from src/spark-project/spark-ddl-plugin/src/main/java/org/apache/kylin/spark/ddl/DDLCheck.java
rename to src/spark-project/sparder/src/main/java/org/apache/spark/ddl/DDLConstant.java
index 10e3e3439c..75e71f57c4 100644
--- a/src/spark-project/spark-ddl-plugin/src/main/java/org/apache/kylin/spark/ddl/DDLCheck.java
+++ b/src/spark-project/sparder/src/main/java/org/apache/spark/ddl/DDLConstant.java
@@ -15,21 +15,19 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.kylin.spark.ddl;
+package org.apache.spark.ddl;
 
-import static org.apache.kylin.common.exception.ServerErrorCode.DDL_CHECK_ERROR;
+public class DDLConstant {
+  public static final String LOGICAL_VIEW = "logic";
+  public static final String REPLACE_LOGICAL_VIEW = "replaceLogicalView";
+  public static final String CREATE_LOGICAL_VIEW = "createLogicalView";
+  public static final String DROP_LOGICAL_VIEW = "dropLogicalView";
+  public static final String HIVE_VIEW = "hive";
+  public static final String NO_RESTRICT = "noRestrict";
+  public static final Integer VIEW_RULE_PRIORITY = 1;
+  public static final Integer SOURCE_TABLE_RULE_PRIORITY = 2;
 
-import org.apache.kylin.common.exception.KylinException;
+  private DDLConstant() {
 
-public interface DDLCheck {
-
-  default String[] description(String project) {
-    return new String[] {"", ""};
-  }
-
-  void check(DDLCheckContext context);
-
-  default void throwException(String msg) {
-    throw new KylinException(DDL_CHECK_ERROR, msg);
   }
 }
diff --git a/src/spark-project/sparder/src/main/java/org/apache/spark/sql/LogicalViewLoader.java b/src/spark-project/sparder/src/main/java/org/apache/spark/sql/LogicalViewLoader.java
new file mode 100644
index 0000000000..c7c3ebea4b
--- /dev/null
+++ b/src/spark-project/sparder/src/main/java/org/apache/spark/sql/LogicalViewLoader.java
@@ -0,0 +1,195 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.spark.sql;
+
+import static org.apache.kylin.common.exception.ServerErrorCode.DDL_CHECK_ERROR;
+
+import java.util.Set;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.exception.KylinException;
+import org.apache.kylin.common.util.NamedThreadFactory;
+import org.apache.kylin.metadata.view.LogicalView;
+import org.apache.kylin.metadata.view.LogicalViewManager;
+import org.apache.kylin.source.SourceFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+
+public class LogicalViewLoader {
+  public static final Logger LOGGER = LoggerFactory.getLogger(LogicalViewLoader.class);
+
+  public static final ConcurrentMap<String, LogicalView> LOADED_LOGICAL_VIEWS = Maps.newConcurrentMap();
+  public static final ScheduledExecutorService executorService =
+      Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("logical_view"));
+  private static ScheduledFuture<?> syncViewScheduler;
+  private static volatile boolean hasChecked = false;
+  private static volatile boolean isLogicalViewConfigLegal = true;
+
+  public static void initScheduler() {
+    LOADED_LOGICAL_VIEWS.clear();
+    LOGGER.info("Start sync logical view...");
+    stopScheduler();
+    syncViewScheduler = executorService.scheduleAtFixedRate(() -> {
+      try {
+        syncViewFromDB();
+      } catch (Throwable e) {
+        LOGGER.error("Error when sync logical view", e);
+      }
+    }, 0, KylinConfig.getInstanceFromEnv().getDDLLogicalViewCatchupInterval(), TimeUnit.SECONDS);
+  }
+
+  public static void syncViewAsync() {
+    executorService.schedule(LogicalViewLoader::syncViewFromDB, 0, TimeUnit.SECONDS);
+  }
+
+  public static synchronized void loadView(String viewName, boolean loadBySpark, SparkSession spark) {
+    LOGGER.info("start load new logical view, view name is {}", viewName);
+    LogicalViewManager viewManager = LogicalViewManager.getInstance(KylinConfig.getInstanceFromEnv());
+    LogicalView toLoadView = viewManager.get(viewName);
+    try {
+      if (toLoadView == null) {
+        LOGGER.warn("failed to find logical view {} ", viewName);
+        return;
+      }
+      if (loadBySpark) {
+        dropLogicalViewIfExist(toLoadView.getTableName(), spark);
+        spark.sql(toLoadView.getCreatedSql());
+      }
+      LOADED_LOGICAL_VIEWS.put(toLoadView.getTableName().toUpperCase(), toLoadView);
+      LOGGER.info("The new table loaded successfully is {}", viewName);
+    } catch (Throwable e) {
+      LOGGER.error("Error when load new Logical View {}", viewName, e);
+    }
+  }
+
+  public static synchronized void unloadView(String viewName, SparkSession spark) {
+    LOADED_LOGICAL_VIEWS.remove(viewName.toUpperCase());
+    dropLogicalViewIfExist(viewName, spark);
+  }
+
+  public static synchronized void syncViewFromDB() {
+    checkConfigIfNeed();
+    long start = System.currentTimeMillis();
+    LogicalViewManager viewManager = LogicalViewManager.getInstance(KylinConfig.getInstanceFromEnv());
+    Set<LogicalView> toLoadViews = Sets.newHashSet();
+    Set<LogicalView> toReplaceViews = Sets.newHashSet();
+    Set<LogicalView> toRemoveViews = Sets.newHashSet();
+    Set<String> successLoadViews = Sets.newHashSet();
+    Set<String> successReplaceViews = Sets.newHashSet();
+    Set<String> successRemoveViews = Sets.newHashSet();
+
+    viewManager.list().forEach(view -> {
+      if (LOADED_LOGICAL_VIEWS.containsKey(view.getTableName())) {
+        LogicalView viewLoaded = LOADED_LOGICAL_VIEWS.get(view.getTableName());
+        if (viewLoaded.getLastModified() != view.getLastModified()) {
+          toReplaceViews.add(view);
+        }
+      } else {
+        toLoadViews.add(view);
+      }
+    });
+    LOADED_LOGICAL_VIEWS.keySet().forEach(table -> {
+      if (viewManager.get(table) == null) {
+        toRemoveViews.add(LOADED_LOGICAL_VIEWS.get(table));
+      }
+    });
+
+    SparkSession spark = SparderEnv.getSparkSession();
+    toLoadViews.forEach(view -> {
+      try {
+        dropLogicalViewIfExist(view.getTableName(), spark);
+        spark.sql(view.getCreatedSql());
+        LOADED_LOGICAL_VIEWS.put(view.getTableName(), view);
+        successLoadViews.add(view.getTableName());
+      } catch (Throwable e) {
+        LOGGER.error("Error when load new Logical View {}", view.getTableName(), e);
+      }
+    });
+    toReplaceViews.forEach(view -> {
+      try {
+        dropLogicalViewIfExist(view.getTableName(), spark);
+        spark.sql(view.getCreatedSql());
+        LOADED_LOGICAL_VIEWS.put(view.getTableName(), view);
+        successReplaceViews.add(view.getTableName());
+      } catch (Throwable e) {
+        LOGGER.error("Error when replace new Logical View {}", view.getTableName(), e);
+      }
+    });
+    toRemoveViews.forEach(view -> {
+      try {
+        dropLogicalViewIfExist(view.getTableName(), spark);
+        LOADED_LOGICAL_VIEWS.remove(view.getTableName());
+        successRemoveViews.add(view.getTableName());
+      } catch (Throwable e) {
+        LOGGER.error("Error when remove Logical View {}", view.getTableName(), e);
+      }
+    });
+    long costTime = (System.currentTimeMillis() - start) / 1000;
+    LOGGER.info("End sync logical view, cost time is {}, "
+            + "\tsuccess loaded views: {},"
+            + "\tsuccess replaced views: {},"
+            + "\tsuccess removed views: {}.", costTime,
+        successLoadViews, successReplaceViews, successRemoveViews);
+  }
+
+  private static void dropLogicalViewIfExist(String tableName, SparkSession spark) {
+    String logicalViewDatabase = KylinConfig.getInstanceFromEnv().getDDLLogicalViewDB();
+    spark.sql("DROP LOGICAL VIEW IF EXISTS " + logicalViewDatabase + "." + tableName);
+  }
+
+  public static void checkConfigIfNeed() {
+    if (!KylinConfig.getInstanceFromEnv().isDDLLogicalViewEnabled()) {
+      return;
+    }
+    if (!hasChecked) {
+      try {
+        // check if logical view database is duplicated with hive databases
+        SourceFactory.getSparkSource().getSourceMetadataExplorer().listDatabases();
+      } catch (Exception e) {
+        LOGGER.warn("Error when list databases....", e);
+        isLogicalViewConfigLegal = false;
+      } finally {
+        hasChecked = true;
+      }
+    }
+    if (!isLogicalViewConfigLegal) {
+      throw new KylinException(DDL_CHECK_ERROR, "Logical view database should not be duplicated with normal "
+          + "hive database!!!");
+    }
+  }
+
+  public static void stopScheduler() {
+    try {
+      if (null != syncViewScheduler && !syncViewScheduler.isCancelled()) {
+        syncViewScheduler.cancel(true);
+      }
+    } catch (Exception e) {
+      LOGGER.error("Error when cancel syncViewScheduler", e);
+    }
+  }
+
+  private LogicalViewLoader() {}
+}
diff --git a/src/spark-project/sparder/src/main/scala/org/apache/spark/sql/KylinSession.scala b/src/spark-project/sparder/src/main/scala/org/apache/spark/sql/KylinSession.scala
index ba6d98fc3e..1f6ad0ebba 100644
--- a/src/spark-project/sparder/src/main/scala/org/apache/spark/sql/KylinSession.scala
+++ b/src/spark-project/sparder/src/main/scala/org/apache/spark/sql/KylinSession.scala
@@ -22,23 +22,24 @@ import java.io._
 import java.net.URI
 import java.nio.file.Paths
 
+import scala.collection.JavaConverters._
+
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.security.UserGroupInformation
-import org.apache.kylin.common.util.{HadoopUtil, Unsafe}
 import org.apache.kylin.common.{KapConfig, KylinConfig}
+import org.apache.kylin.common.util.{HadoopUtil, Unsafe}
 import org.apache.kylin.metadata.query.BigQueryThresholdUpdater
 import org.apache.kylin.query.util.ExtractFactory
+import org.springframework.expression.common.TemplateParserContext
+import org.springframework.expression.spel.standard.SpelExpressionParser
+
+import org.apache.spark.{SparkConf, SparkContext}
 import org.apache.spark.internal.Logging
 import org.apache.spark.scheduler.{SparkListener, SparkListenerApplicationEnd}
 import org.apache.spark.sql.SparkSession.Builder
-import org.apache.spark.sql.internal.{SQLConf, SessionState, SharedState, StaticSQLConf}
+import org.apache.spark.sql.internal.{SessionState, SharedState, SQLConf, StaticSQLConf}
 import org.apache.spark.sql.udf.UdfManager
 import org.apache.spark.util.{KylinReflectUtils, Utils}
-import org.apache.spark.{SparkConf, SparkContext}
-import org.springframework.expression.common.TemplateParserContext
-import org.springframework.expression.spel.standard.SpelExpressionParser
-
-import scala.collection.JavaConverters._
 
 class KylinSession(
                     @transient val sc: SparkContext,
@@ -174,6 +175,7 @@ object KylinSession extends Logging {
           } else {
             conf
           }
+          initLogicalViewConfig(conf)
           val sc = SparkContext.getOrCreate(sparkConf)
           // maybe this is an existing SparkContext, update its SparkConf which maybe used
           // by SparkSession
@@ -434,4 +436,10 @@ object KylinSession extends Logging {
         false
     }
   }
+
+  def initLogicalViewConfig(sparkConf: SparkConf): Unit = {
+    if (KylinConfig.getInstanceFromEnv.isDDLLogicalViewEnabled) {
+      sparkConf.set("spark.sql.globalTempDatabase", KylinConfig.getInstanceFromEnv.getDDLLogicalViewDB)
+    }
+  }
 }
diff --git a/src/spark-project/sparder/src/main/scala/org/apache/spark/sql/SparderEnv.scala b/src/spark-project/sparder/src/main/scala/org/apache/spark/sql/SparderEnv.scala
index f376b32180..2da5620517 100644
--- a/src/spark-project/sparder/src/main/scala/org/apache/spark/sql/SparderEnv.scala
+++ b/src/spark-project/sparder/src/main/scala/org/apache/spark/sql/SparderEnv.scala
@@ -18,15 +18,23 @@
 
 package org.apache.spark.sql
 
+import java.lang.{Boolean => JBoolean, String => JString}
+import java.security.PrivilegedAction
+import java.util.Map
+import java.util.concurrent.{Callable, ExecutorService}
+import java.util.concurrent.locks.ReentrantLock
+
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.security.UserGroupInformation
+import org.apache.kylin.common.{KylinConfig, QueryContext}
 import org.apache.kylin.common.exception.{KylinException, KylinTimeoutException, ServerErrorCode}
 import org.apache.kylin.common.msg.MsgPicker
 import org.apache.kylin.common.util.{DefaultHostInfoFetcher, HadoopUtil, S3AUtil}
-import org.apache.kylin.common.{KylinConfig, QueryContext}
 import org.apache.kylin.metadata.model.{NTableMetadataManager, TableExtDesc}
 import org.apache.kylin.metadata.project.NProjectManager
 import org.apache.kylin.query.runtime.plan.QueryToExecutionIDCache
+
+import org.apache.spark.{SparkConf, SparkContext}
 import org.apache.spark.internal.Logging
 import org.apache.spark.scheduler.{SparkListener, SparkListenerEvent, SparkListenerLogRollUp}
 import org.apache.spark.sql.KylinSession._
@@ -38,13 +46,6 @@ import org.apache.spark.sql.execution.ui.PostQueryExecutionForKylin
 import org.apache.spark.sql.hive.ReplaceLocationRule
 import org.apache.spark.sql.udf.UdfManager
 import org.apache.spark.util.{ThreadUtils, Utils}
-import org.apache.spark.{SparkConf, SparkContext}
-
-import java.lang.{Boolean => JBoolean, String => JString}
-import java.security.PrivilegedAction
-import java.util.Map
-import java.util.concurrent.locks.ReentrantLock
-import java.util.concurrent.{Callable, ExecutorService}
 
 // scalastyle:off
 object SparderEnv extends Logging {
@@ -265,6 +266,9 @@ object SparderEnv extends Logging {
           tableMetadataManager.listAllTables().forEach(tableDesc => SparderEnv.addS3Credential(tableMetadataManager.getOrCreateTableExt(tableDesc).getS3RoleCredentialInfo, spark))
         })
       }
+      if (KylinConfig.getInstanceFromEnv.isDDLLogicalViewEnabled) {
+        LogicalViewLoader.initScheduler()
+      }
     } catch {
       case throwable: Throwable =>
         logError("Error for initializing spark ", throwable)
diff --git a/src/spark-project/spark-ddl-plugin/pom.xml b/src/spark-project/spark-ddl-plugin/pom.xml
index e4f880d5bb..e69de29bb2 100644
--- a/src/spark-project/spark-ddl-plugin/pom.xml
+++ b/src/spark-project/spark-ddl-plugin/pom.xml
@@ -1,73 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <artifactId>kylin</artifactId>
-        <groupId>org.apache.kylin</groupId>
-        <version>5.0.0-alpha-SNAPSHOT</version>
-        <relativePath>../../../pom.xml</relativePath>
-    </parent>
-    <modelVersion>4.0.0</modelVersion>
-    <name>Kylin - Spark DDL</name>
-    <url>http://kylin.apache.org</url>
-    <description>Kylin DDL - SPARK</description>
-
-    <artifactId>kylin-spark-ddl</artifactId>
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-core-metadata</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-sparder</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.spark</groupId>
-            <artifactId>spark-sql_2.12</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.junit.vintage</groupId>
-            <artifactId>junit-vintage-engine</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-engine-spark</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-        </dependency>
-
-    </dependencies>
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-compiler-plugin</artifactId>
-            </plugin>
-            <plugin>
-                <groupId>net.alchim31.maven</groupId>
-                <artifactId>scala-maven-plugin</artifactId>
-            </plugin>
-        </plugins>
-    </build>
-</project>
\ No newline at end of file
diff --git a/src/spark-project/spark-ddl-plugin/src/main/resources/META-INF/services/org.apache.kylin.spark.ddl.DDLCheck b/src/spark-project/spark-ddl-plugin/src/main/resources/META-INF/services/org.apache.kylin.spark.ddl.DDLCheck
deleted file mode 100644
index 4f3a7527a8..0000000000
--- a/src/spark-project/spark-ddl-plugin/src/main/resources/META-INF/services/org.apache.kylin.spark.ddl.DDLCheck
+++ /dev/null
@@ -1,2 +0,0 @@
-org.apache.kylin.spark.ddl.SourceTableCheck
-org.apache.kylin.spark.ddl.ViewCheck
\ No newline at end of file
diff --git a/src/spark-project/spark-ddl-plugin/src/main/scala/org/apache/kylin/spark/ddl/ViewCheck.scala b/src/spark-project/spark-ddl-plugin/src/main/scala/org/apache/kylin/spark/ddl/ViewCheck.scala
deleted file mode 100644
index 4fc451bdf2..0000000000
--- a/src/spark-project/spark-ddl-plugin/src/main/scala/org/apache/kylin/spark/ddl/ViewCheck.scala
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.kylin.spark.ddl
-
-import java.security.PrivilegedExceptionAction
-
-import scala.collection.mutable.ListBuffer
-
-import org.apache.kylin.common.msg.MsgPicker
-import org.apache.kylin.engine.spark.source.NSparkMetadataExplorer
-import org.apache.kylin.rest.security.KerberosLoginManager
-import org.slf4j.LoggerFactory
-
-import org.apache.spark.sql.SparderEnv
-import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.execution.{CommandExecutionMode, CommandResultExec, SparkPlan}
-import org.apache.spark.sql.execution.command._
-
-class ViewCheck extends DDLCheck {
-  private val log = LoggerFactory.getLogger(classOf[ViewCheck])
-  private val PREFIX = "KE_"
-  private val source = new NSparkMetadataExplorer
-
-  override def description(project: String): Array[String] = {
-    val databasesHasAccess = listAllDatabasesHasAccess(project)
-    Array(
-      "View name should start with `KE_`\n"
-        + "Only support `create view`,`alter view`,`drop view`,`show create table` syntax\n"
-        + s"Only supports creating views in ${databasesHasAccess}",
-      "View 名称需要以`KE_`开头\n"
-        + "仅支持 `create view`, `drop view`, `alter view`, `show create table` 语法\n"
-        + s"仅支持在 ${databasesHasAccess} 上述 database 中创建 view")
-  }
-
-  override def check(context: DDLCheckContext): Unit = {
-    log.info("start checking DDL view name")
-    val sql = context.getSql
-    val project = context.getProject
-    val spark = SparderEnv.getSparkSession
-    var plan: SparkPlan = null
-    try {
-      val logicalPlan = spark.sessionState.sqlParser.parsePlan(sql)
-      plan = stripRootCommandResult(spark.sessionState.executePlan(
-        logicalPlan, CommandExecutionMode.SKIP).executedPlan)
-    } catch {
-      case e: Exception => throwException(e.getMessage)
-    }
-    plan match {
-      case ExecutedCommandExec(view: CreateViewCommand) =>
-        checkTableName(view.name)
-        checkAccess(view.name, project)
-      case ExecutedCommandExec(view: ShowCreateTableCommand) =>
-        checkTableName(view.table)
-        checkAccess(view.table, project)
-      case ExecutedCommandExec(table: DropTableCommand) =>
-        checkTableName(table.tableName)
-        checkAccess(table.tableName, project)
-        if (!table.isView) {
-          throwException(MsgPicker.getMsg.getDDLDropError)
-        }
-      case ExecutedCommandExec(table: AlterViewAsCommand) =>
-        checkTableName(table.name)
-        checkAccess(table.name, project)
-      case _ => throwException(MsgPicker.getMsg.getDDLUnSupported)
-    }
-  }
-
-  private def checkTableName(identifier: TableIdentifier): Unit = {
-    if (!identifier.table.toUpperCase().startsWith(PREFIX)) {
-      throwException(MsgPicker.getMsg.getDDLViewNameError)
-    }
-  }
-
-  def checkAccess(identifier: TableIdentifier, project: String): Unit = {
-    val database = identifier.database.get
-    val ugi = KerberosLoginManager.getInstance.getProjectUGI(project)
-    val hasDatabaseAccess = ugi.doAs(new PrivilegedExceptionAction[Boolean]() {
-      override def run(): Boolean = {
-        source.checkDatabaseHadoopAccessFast(database)
-      }
-    })
-    if (!hasDatabaseAccess) {
-      throwException(MsgPicker.getMsg.getDDLDatabaseAccessnDenied)
-    }
-  }
-
-  def listAllDatabasesHasAccess(project: String): String = {
-    val ugi = KerberosLoginManager.getInstance.getProjectUGI(project)
-    val databasesHasAccess = ugi.doAs(new PrivilegedExceptionAction[List[String]]() {
-      override def run(): List[String] = {
-        val databases = source.listDatabases()
-        val databasesHasAccess = ListBuffer[String]()
-        databases.forEach(db => {
-          if (source.checkDatabaseHadoopAccessFast(db)) {
-            databasesHasAccess.append(db)
-          }
-        })
-        databasesHasAccess.toList
-      }
-    })
-    databasesHasAccess.mkString(",")
-  }
-
-  private def stripRootCommandResult(executedPlan: SparkPlan) = executedPlan match {
-    case CommandResultExec(_, plan, _) => plan
-    case other => other
-  }
-}


[kylin] 25/34: KYLIN-5459 Partial Log Governance

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 858d0ae6da67e5ba1d418df4043af91f28611190
Author: Guoliang Sun <gu...@kyligence.io>
AuthorDate: Tue Jan 3 13:39:27 2023 +0800

    KYLIN-5459 Partial Log Governance
---
 build/conf/kylin-server-log4j.xml                  |  48 +++++++--
 .../kylin/rest/aspect/SchedulerEnhancer.java       |   2 +-
 .../apache/kylin/rest/broadcaster/Broadcaster.java |   8 +-
 .../apache/kylin/rest/service/AuditLogService.java |  10 +-
 .../service/task/QueryHistoryTaskScheduler.java    |   8 +-
 .../apache/kylin/common/constant/LogConstant.java} |  18 ++--
 .../transaction/AbstractAuditLogReplayWorker.java  |  10 +-
 .../transaction/AuditLogReplayWorker.java          |  18 +++-
 .../common/persistence/transaction/UnitOfWork.java |  52 ++++++----
 .../kylin/common/scheduler/EventBusFactory.java    |  16 ++-
 .../kylin/job/execution/DefaultExecutable.java     |   2 +
 .../cube/storage/TotalStorageCollector.java        |   2 +-
 .../apache/kylin/metadata/epoch/EpochManager.java  | 108 ++++++++++++++-------
 .../metadata/recommendation/ref/OptRecV2.java      |   4 +-
 .../kylin/metadata/epoch/EpochManagerTest.java     |  22 +++++
 .../common/metrics/MetricsInfluxdbReporter.java    |   4 +-
 .../rest/scheduler/AutoRefreshSnapshotRunner.java  |   8 +-
 .../org/apache/kylin/rest/service/JobService.java  |  30 ++++--
 .../apache/kylin/rest/service/JobServiceTest.java  |  12 ++-
 .../kylin/rest/broadcaster/BroadcasterTest.java    |  35 +++++++
 .../kylin/rest/service/QueryCacheManager.java      |   8 +-
 .../apache/kylin/rest/service/QueryService.java    |   8 +-
 .../spark/source/NSparkMetadataExplorer.java       |   2 +-
 .../engine/spark/builder/SegmentFlatTable.scala    |  22 +++--
 .../engine/spark/job/RDSegmentBuildExec.scala      |   5 +-
 .../kylin/engine/spark/job/SegmentBuildJob.java    |   2 +
 .../kylin/engine/spark/job/exec/BuildExec.scala    |   8 +-
 .../kylin/engine/spark/job/exec/MergeExec.scala    |   2 +
 .../kylin/engine/spark/job/exec/SnapshotExec.scala |   2 +
 .../engine/spark/job/exec/TableAnalyzerExec.scala  |   2 +
 .../kylin/engine/spark/job/stage/StageExec.scala   |   2 +
 .../engine/spark/job/stage/WaiteForResource.scala  |   2 +
 .../engine/spark/job/stage/build/BuildDict.scala   |   2 +
 .../engine/spark/job/stage/build/BuildLayer.scala  |   2 +
 .../spark/job/stage/build/CostBasedPlanner.scala   |   2 +
 .../job/stage/build/FlatTableAndDictBase.scala     |   5 +
 .../job/stage/build/GatherFlatTableStats.scala     |   2 +
 .../spark/job/stage/build/GenerateFlatTable.scala  |   2 +
 .../stage/build/MaterializedFactTableView.scala    |   2 +
 .../spark/job/stage/build/RefreshColumnBytes.scala |   2 +
 .../spark/job/stage/build/RefreshSnapshots.scala   |   2 +
 .../stage/build/partition/PartitionBuildDict.scala |   2 +
 .../build/partition/PartitionBuildLayer.scala      |   2 +
 .../partition/PartitionCostBasedPlanner.scala      |   2 +
 .../partition/PartitionGatherFlatTableStats.scala  |   1 +
 .../partition/PartitionGenerateFlatTable.scala     |   2 +
 .../PartitionMaterializedFactTableView.scala       |   2 +
 .../partition/PartitionRefreshColumnBytes.scala    |   2 +
 .../spark/job/stage/merge/MergeColumnBytes.scala   |   2 +
 .../spark/job/stage/merge/MergeFlatTable.scala     |   2 +
 .../spark/job/stage/merge/MergeIndices.scala       |   2 +
 .../partition/PartitionMergeColumnBytes.scala      |   2 +
 .../merge/partition/PartitionMergeFlatTable.scala  |   2 +
 .../merge/partition/PartitionMergeIndices.scala    |   2 +
 .../spark/job/stage/snapshots/SnapshotsBuild.scala |   2 +
 .../job/stage/tablesampling/AnalyzerTable.scala    |   2 +
 .../spark/job/stage/WaiteForResourceTest.scala}    |  20 ++--
 .../job/stage/build/RefreshColumnBytesTest.scala}  |  37 +++----
 .../job/stage/build/RefreshSnapshotsTest.scala}    |  18 ++--
 .../PartitionRefreshColumnBytesTest.scala}         |  30 +++---
 .../job/stage/merge/MergeColumnBytesTest.scala}    |  18 +++-
 .../spark/job/stage/merge/MergeStageTest.scala     |   2 +
 .../partition/PartitionMergeColumnBytesTest.scala} |  18 +++-
 .../org/apache/spark/utils/TestResourceUtils.scala |   6 ++
 .../common/asyncprofiler/AsyncProfilerUtils.java   |   2 +-
 .../org/apache/kylin/common/CustomUtils.scala}     |  19 ++--
 .../java/org/apache/kylin/tool/KylinLogTool.java   |   2 +-
 67 files changed, 499 insertions(+), 205 deletions(-)

diff --git a/build/conf/kylin-server-log4j.xml b/build/conf/kylin-server-log4j.xml
index 701e8071fc..93e0b51299 100644
--- a/build/conf/kylin-server-log4j.xml
+++ b/build/conf/kylin-server-log4j.xml
@@ -26,7 +26,7 @@
             <PatternLayout pattern="%d{ISO8601} %-5p %X{request.project}[%t] %c{2} : %mask{%m}%n"/>
         </RollingRandomAccessFile>
         <Routing name="routing">
-            <Routes pattern="${ctx:logCategory}">
+            <Routes pattern="$${ctx:logCategory}">
                 <Route>
                     <RollingFile name="rolling-${ctx:logCategory}"
                                  fileName="${env:KYLIN_HOME}/logs/kylin.${ctx:logCategory}.log"
@@ -39,7 +39,7 @@
                     </RollingFile>
                 </Route>
 
-                <Route ref="server" key="${ctx:logCategory}"/>
+                <Route ref="server" key="$${ctx:logCategory}"/>
             </Routes>
         </Routing>
         <RollingFile name="query-log-spark" fileName="${env:KYLIN_HOME}/logs/kylin.query.log" append="true"
@@ -50,20 +50,48 @@
             <DefaultRolloverStrategy max="10"/>
             <PatternLayout pattern="%d{ISO8601} %-5p %X{request.project}[%t] %c{2} : %mask{%m}%n"/>
         </RollingFile>
+        <RollingFile name="spark-history-server" fileName="${env:KYLIN_HOME}/logs/kylin.history_server.log"
+                     append="true"
+                     filePattern="${env:KYLIN_HOME}/logs/kylin.history_server.log.%i">
+            <Policies>
+                <SizeBasedTriggeringPolicy size="268435456"/>
+            </Policies>
+            <DefaultRolloverStrategy max="10"/>
+            <PatternLayout pattern="%d{ISO8601} %-5p %X{request.project}[%t] %c{2} : %mask{%m}%n"/>
+        </RollingFile>
+        <RollingFile name="build-log-spark" fileName="${env:KYLIN_HOME}/logs/kylin.build.log" append="true"
+                     filePattern="${env:KYLIN_HOME}/logs/kylin.build.log.%i">
+            <Policies>
+                <SizeBasedTriggeringPolicy size="268435456"/>
+            </Policies>
+            <DefaultRolloverStrategy max="10"/>
+            <PatternLayout pattern="%d{ISO8601} %-5p %X{request.project}[%t] %c{2} : %mask{%m}%n"/>
+        </RollingFile>
+        <RollingFile name="metadata-log-spark" fileName="${env:KYLIN_HOME}/logs/kylin.metadata.log" append="true"
+                     filePattern="${env:KYLIN_HOME}/logs/kylin.metadata.log.%i">
+            <Policies>
+                <SizeBasedTriggeringPolicy size="268435456"/>
+            </Policies>
+            <DefaultRolloverStrategy max="10"/>
+            <PatternLayout pattern="%d{ISO8601} %-5p %X{request.project}[%t] %c{2} : %mask{%m}%n"/>
+        </RollingFile>
     </Appenders>
     <Loggers>
         <Root level="INFO">
             <AppenderRef ref="routing"/>
         </Root>
-        <Logger name="org.apache.spark.scheduler.TaskSetManager" level="INFO" additivity="false">
+        <Logger name="org.apache.spark.scheduler.TaskSetManager" level="WARN" additivity="false">
             <AppenderRef ref="query-log-spark"/>
         </Logger>
-        <Logger name="org.apache.spark.scheduler.DAGScheduler" level="INFO" additivity="false">
+        <Logger name="org.apache.spark.scheduler.DAGScheduler" level="WARN" additivity="false">
             <AppenderRef ref="query-log-spark"/>
         </Logger>
-        <Logger name="org.apache.spark.scheduler.YarnScheduler" level="INFO" additivity="false">
+        <Logger name="org.apache.spark.scheduler.YarnScheduler" level="WARN" additivity="false">
             <AppenderRef ref="query-log-spark"/>
         </Logger>
+        <Logger name="org.apache.spark.deploy.history" level="INFO" additivity="false">
+            <AppenderRef ref="spark-history-server"/>
+        </Logger>
         <Logger name="io.kyligence" level="DEBUG"/>
         <Logger name="org.springframework" level="WARN"/>
         <Logger name="org.apache.kylin" level="DEBUG"/>
@@ -74,12 +102,20 @@
         <Logger name="org.apache.kylin.ext" level="INFO"/>
         <!--  Query log  -->
         <Logger name="org.apache.kylin.query" level="INFO"/>
-        <Logger name="org.apache.kylin.query" level="INFO"/>
         <Logger name="NDataflowCapabilityChecker" level="INFO" />
         <Logger name="org.apache.kylin.common.util.CheckUtil" level="INFO" />
         <Logger name="NQueryLayoutChooser" level="INFO" />
         <Logger name="org.apache.kylin.query.runtime.plan.ResultPlan" level="INFO" />
         <Logger name="org.apache.spark.sql.kylin.external.LogEx" level="INFO" />
         <Logger name="org.apache.kylin.engine.spark.utils.LogEx" level="INFO" />
+        <Logger name="org.apache.kylin.rest.service.QueryCacheManager" level="INFO"/>
+        <!-- Kerberos log -->
+        <Logger name="io.kyligence.kap.tool.kerberos" level="INFO"/>
+        <!-- Other log -->
+        <Logger name="org.apache.kylin.metadata.cube.storage.TotalStorageCollector" level="INFO" />
+        <Logger name="org.apache.kylin.common.metrics.MetricsInfluxdbReporter" level="INFO" />
+        <Logger name="io.kyligence.kap.metadata.recommendation.ref.OptRecV2" level="INFO" />
+        <Logger name="org.apache.kylin.rest.security.LdapAuthenticationProvider" level="INFO" />
+        <Logger name="org.apache.kylin.rest.aspect.SchedulerEnhancer" level="INFO" />
     </Loggers>
 </Configuration>
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/aspect/SchedulerEnhancer.java b/src/common-service/src/main/java/org/apache/kylin/rest/aspect/SchedulerEnhancer.java
index b086f379d2..d39066c42c 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/aspect/SchedulerEnhancer.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/aspect/SchedulerEnhancer.java
@@ -35,7 +35,7 @@ public class SchedulerEnhancer {
     public void aroundScheduled(ProceedingJoinPoint pjp) throws Throwable {
         val config = KylinConfig.getInstanceFromEnv();
         if (!"query".equals(config.getServerMode())) {
-            log.info("schedule at job leader");
+            log.debug("schedule at job leader");
             pjp.proceed();
         }
     }
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/broadcaster/Broadcaster.java b/src/common-service/src/main/java/org/apache/kylin/rest/broadcaster/Broadcaster.java
index c7b36369c1..92b9686557 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/broadcaster/Broadcaster.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/broadcaster/Broadcaster.java
@@ -39,6 +39,8 @@ import java.util.stream.Stream;
 
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang.ArrayUtils;
+import org.apache.kylin.common.constant.LogConstant;
+import org.apache.kylin.common.logging.SetLogCategory;
 import org.apache.kylin.common.persistence.transaction.BroadcastEventReadyNotifier;
 import org.apache.kylin.common.util.AddressUtil;
 import org.apache.kylin.common.util.DaemonThreadFactory;
@@ -91,7 +93,9 @@ public class Broadcaster implements Closeable {
 
     public void announce(BroadcastEventReadyNotifier event) {
         if (eventQueue.contains(event)) {
-            logger.debug("broadcast event queue has contain this event: {}", event);
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.SCHEDULE_CATEGORY)) {
+                logger.debug("broadcast event queue has contain this event: {}", event);
+            }
             return;
         }
         if (!eventQueue.offer(event)) {
@@ -100,7 +104,7 @@ public class Broadcaster implements Closeable {
     }
 
     public void consumeEvent() {
-        try {
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.SCHEDULE_CATEGORY)) {
             while (isRunning) {
                 BroadcastEventReadyNotifier notifier = eventQueue.take();
                 handleEvent(notifier);
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/AuditLogService.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/AuditLogService.java
index 1d79c2734f..1f60bfdd83 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/AuditLogService.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/AuditLogService.java
@@ -19,6 +19,8 @@
 package org.apache.kylin.rest.service;
 
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.constant.LogConstant;
+import org.apache.kylin.common.logging.SetLogCategory;
 import org.apache.kylin.common.persistence.ResourceStore;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -31,8 +33,10 @@ public class AuditLogService {
 
     public void notifyCatchUp() {
         ResourceStore store = ResourceStore.getKylinMetaStore(KylinConfig.getInstanceFromEnv());
-        logger.info("Start to catchup manually");
-        store.getAuditLogStore().catchup();
-        logger.info("End to catchup manually");
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+            logger.info("Start to catchup manually");
+            store.getAuditLogStore().catchup();
+            logger.info("End to catchup manually");
+        }
     }
 }
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/task/QueryHistoryTaskScheduler.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/task/QueryHistoryTaskScheduler.java
index eae1ed351b..07ceafe06c 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/task/QueryHistoryTaskScheduler.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/task/QueryHistoryTaskScheduler.java
@@ -33,6 +33,8 @@ import java.util.stream.Collectors;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.constant.LogConstant;
+import org.apache.kylin.common.logging.SetLogCategory;
 import org.apache.kylin.common.util.ExecutorServiceUtil;
 import org.apache.kylin.common.util.NamedThreadFactory;
 import org.apache.kylin.common.util.Pair;
@@ -98,7 +100,9 @@ public class QueryHistoryTaskScheduler {
         if (querySmartSupporter == null && SpringContext.getApplicationContext() != null) {
             querySmartSupporter = SpringContext.getBean(QuerySmartSupporter.class);
         }
-        log.debug("New QueryHistoryAccelerateScheduler created by project {}", project);
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.SCHEDULE_CATEGORY)) {
+            log.debug("New QueryHistoryAccelerateScheduler created by project {}", project);
+        }
     }
 
     public static QueryHistoryTaskScheduler getInstance(String project) {
@@ -484,7 +488,7 @@ public class QueryHistoryTaskScheduler {
 
         @Override
         public void run() {
-            try {
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.SCHEDULE_CATEGORY)) {
                 work();
             } catch (Exception e) {
                 log.warn("QueryHistory {}  process failed of project({})", name(), project, e);
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytes.scala b/src/core-common/src/main/java/org/apache/kylin/common/constant/LogConstant.java
similarity index 69%
copy from src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytes.scala
copy to src/core-common/src/main/java/org/apache/kylin/common/constant/LogConstant.java
index 3091e92e0a..4a5293dffa 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytes.scala
+++ b/src/core-common/src/main/java/org/apache/kylin/common/constant/LogConstant.java
@@ -16,17 +16,15 @@
  * limitations under the License.
  */
 
-package org.apache.kylin.engine.spark.job.stage.merge
+package org.apache.kylin.common.constant;
 
-import org.apache.kylin.engine.spark.job.SegmentJob
-import org.apache.kylin.metadata.cube.model.NDataSegment
+public class LogConstant {
 
-class MergeColumnBytes(jobContext: SegmentJob, dataSegment: NDataSegment)
-  extends MergeStage(jobContext, dataSegment) {
+    private LogConstant() {
+    }
 
-  override def execute(): Unit = {
-    mergeColumnBytes()
-
-    cleanup()
-  }
+    public static final String SCHEDULE_CATEGORY = "schedule";
+    public static final String METADATA_CATEGORY = "metadata";
+    public static final String QUERY_CATEGORY = "query";
+    public static final String BUILD_CATEGORY = "build";
 }
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/persistence/transaction/AbstractAuditLogReplayWorker.java b/src/core-common/src/main/java/org/apache/kylin/common/persistence/transaction/AbstractAuditLogReplayWorker.java
index afa66f885c..9bc177891a 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/persistence/transaction/AbstractAuditLogReplayWorker.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/persistence/transaction/AbstractAuditLogReplayWorker.java
@@ -30,7 +30,9 @@ import java.util.function.Predicate;
 
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.constant.LogConstant;
 import org.apache.kylin.common.exception.KylinException;
+import org.apache.kylin.common.logging.SetLogCategory;
 import org.apache.kylin.common.persistence.AuditLog;
 import org.apache.kylin.common.persistence.UnitMessages;
 import org.apache.kylin.common.persistence.event.Event;
@@ -116,9 +118,11 @@ public abstract class AbstractAuditLogReplayWorker {
             }
         }
 
-        for (UnitMessages message : messagesMap.values()) {
-            log.debug("replay {} event for project:{}", message.getMessages().size(), message.getKey());
-            replayer.replay(message);
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+            for (UnitMessages message : messagesMap.values()) {
+                log.debug("replay {} event for project:{}", message.getMessages().size(), message.getKey());
+                replayer.replay(message);
+            }
         }
     }
 
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/persistence/transaction/AuditLogReplayWorker.java b/src/core-common/src/main/java/org/apache/kylin/common/persistence/transaction/AuditLogReplayWorker.java
index 2ecb5041e1..63299c0aa4 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/persistence/transaction/AuditLogReplayWorker.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/persistence/transaction/AuditLogReplayWorker.java
@@ -33,6 +33,8 @@ import java.util.stream.LongStream;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.constant.LogConstant;
+import org.apache.kylin.common.logging.SetLogCategory;
 import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.common.persistence.VersionConflictException;
 import org.apache.kylin.common.persistence.AuditLog;
@@ -103,7 +105,7 @@ public class AuditLogReplayWorker extends AbstractAuditLogReplayWorker {
             log.info("Catchup Already stopped");
             return;
         }
-        try {
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
             catchupToMaxId(logOffset);
         } catch (TransactionException | DatabaseNotAvailableException e) {
             log.warn("cannot create transaction or auditlog database connect error, ignore it", e);
@@ -148,7 +150,9 @@ public class AuditLogReplayWorker extends AbstractAuditLogReplayWorker {
         }
 
         if (CollectionUtils.isEmpty(needReplayedIdList)) {
-            log.debug("needReplayedIdList is empty");
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                log.debug("needReplayedIdList is empty");
+            }
             return Lists.newArrayList();
         }
 
@@ -160,7 +164,7 @@ public class AuditLogReplayWorker extends AbstractAuditLogReplayWorker {
             return;
         }
 
-        try {
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
             val fetchAuditLog = auditLogStore.fetch(needReplayedIdList);
             if (CollectionUtils.isEmpty(fetchAuditLog)) {
                 return;
@@ -200,7 +204,9 @@ public class AuditLogReplayWorker extends AbstractAuditLogReplayWorker {
                 return -1L;
             }
 
-            log.debug("start restore from {}", currentWindow);
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                log.debug("start restore from {}", currentWindow);
+            }
             val stepWin = new SlideWindow(currentWindow);
 
             while (stepWin.forwardRightStep(STEP)) {
@@ -211,7 +217,9 @@ public class AuditLogReplayWorker extends AbstractAuditLogReplayWorker {
                 }
                 stepWin.syncRightStep();
             }
-            log.debug("end restore from {}, delay queue:{}", currentWindow, delayIdQueue.size());
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                log.debug("end restore from {}, delay queue:{}", currentWindow, delayIdQueue.size());
+            }
             return currentWindow.getEnd();
         });
 
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/persistence/transaction/UnitOfWork.java b/src/core-common/src/main/java/org/apache/kylin/common/persistence/transaction/UnitOfWork.java
index 4e576d4e4f..b256db1fdd 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/persistence/transaction/UnitOfWork.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/persistence/transaction/UnitOfWork.java
@@ -23,8 +23,10 @@ import java.util.function.Consumer;
 import java.util.stream.Collectors;
 
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.constant.LogConstant;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.exception.code.ErrorCodeSystem;
+import org.apache.kylin.common.logging.SetLogCategory;
 import org.apache.kylin.common.persistence.InMemResourceStore;
 import org.apache.kylin.common.persistence.RawResource;
 import org.apache.kylin.common.persistence.ResourceStore;
@@ -111,11 +113,13 @@ public class UnitOfWork {
         try {
             T ret;
 
-            if (retry != 1) {
-                log.debug("UnitOfWork {} in project {} is retrying for {}th time", traceId, params.getUnitName(),
-                        retry);
-            } else {
-                log.debug("UnitOfWork {} started on project {}", traceId, params.getUnitName());
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                if (retry != 1) {
+                    log.debug("UnitOfWork {} in project {} is retrying for {}th time", traceId, params.getUnitName(),
+                            retry);
+                } else {
+                    log.debug("UnitOfWork {} started on project {}", traceId, params.getUnitName());
+                }
             }
 
             long startTime = System.currentTimeMillis();
@@ -124,7 +128,9 @@ public class UnitOfWork {
             long startTransactionTime = System.currentTimeMillis();
             val waitForLockTime = startTransactionTime - startTime;
             if (waitForLockTime > 3000) {
-                log.warn("UnitOfWork {} takes too long time {}ms to start", traceId, waitForLockTime);
+                try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                    log.warn("UnitOfWork {} takes too long time {}ms to start", traceId, waitForLockTime);
+                }
             }
 
             ret = params.getProcessor().process();
@@ -137,7 +143,7 @@ public class UnitOfWork {
             handleError(throwable, params, retry, traceId);
         } finally {
             if (isAlreadyInTransaction()) {
-                try {
+                try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
                     val unitOfWork = UnitOfWork.get();
                     unitOfWork.getCurrentLock().unlock();
                     unitOfWork.cleanResource();
@@ -161,13 +167,15 @@ public class UnitOfWork {
     }
 
     private static void logIfLongTransaction(long duration, String traceId) {
-        if (duration > 3000) {
-            log.warn("UnitOfWork {} takes too long time {}ms to complete", traceId, duration);
-            if (duration > 10000) {
-                log.warn("current stack: ", new Throwable());
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+            if (duration > 3000) {
+                log.warn("UnitOfWork {} takes too long time {}ms to complete", traceId, duration);
+                if (duration > 10000) {
+                    log.warn("current stack: ", new Throwable());
+                }
+            } else {
+                log.debug("UnitOfWork {} takes {}ms to complete", traceId, duration);
             }
-        } else {
-            log.debug("UnitOfWork {} takes {}ms to complete", traceId, duration);
         }
     }
 
@@ -178,7 +186,9 @@ public class UnitOfWork {
         val lock = params.getTempLockName() == null ? TransactionLock.getLock(project, readonly)
                 : TransactionLock.getLock(params.getTempLockName(), readonly);
 
-        log.trace("get lock for project {}, lock is held by current thread: {}", project, lock.isHeldByCurrentThread());
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+            log.trace("get lock for project {}, lock is held by current thread: {}", project, lock.isHeldByCurrentThread());
+        }
         //re-entry is not encouraged (because it indicates complex handling logic, bad smell), let's abandon it first
         Preconditions.checkState(!lock.isHeldByCurrentThread());
         lock.lock();
@@ -202,7 +212,9 @@ public class UnitOfWork {
         ResourceStore.setRS(configCopy, rs);
         unitOfWork.setLocalConfig(KylinConfig.setAndUnsetThreadLocalConfig(configCopy));
 
-        log.trace("sandbox RS {} now takes place for main RS {}", rs, underlying);
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+            log.trace("sandbox RS {} now takes place for main RS {}", rs, underlying);
+        }
 
         return unitOfWork;
     }
@@ -248,14 +260,16 @@ public class UnitOfWork {
         val unitMessages = packageEvents(eventList, get().getProject(), traceId, writeInterceptor);
         long entitiesSize = unitMessages.getMessages().stream().filter(event -> event instanceof ResourceRelatedEvent)
                 .count();
-        log.debug("transaction {} updates {} metadata items", traceId, entitiesSize);
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+            log.debug("transaction {} updates {} metadata items", traceId, entitiesSize);
+        }
         checkEpoch(params);
         val unitName = params.getUnitName();
         metadataStore.batchUpdate(unitMessages, get().getParams().isSkipAuditLog(), unitName, params.getEpochId());
         if (entitiesSize != 0 && !params.isReadonly() && !params.isSkipAuditLog() && !config.isUTEnv()) {
             factory.postAsync(new AuditLogBroadcastEventNotifier());
         }
-        try {
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
             // call replayInTransaction in the leader before releasing the lock
             val replayer = MessageSynchronization.getInstance(originConfig);
             replayer.replayInTransaction(unitMessages);
@@ -284,7 +298,9 @@ public class UnitOfWork {
         }
 
         if (retry == 1) {
-            log.warn("transaction failed at first time, traceId:" + traceId, throwable);
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                log.warn("transaction failed at first time, traceId:" + traceId, throwable);
+            }
         }
     }
 
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/scheduler/EventBusFactory.java b/src/core-common/src/main/java/org/apache/kylin/common/scheduler/EventBusFactory.java
index ca476ed474..92da48642f 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/scheduler/EventBusFactory.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/scheduler/EventBusFactory.java
@@ -27,6 +27,8 @@ import java.util.concurrent.TimeUnit;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.Singletons;
+import org.apache.kylin.common.constant.LogConstant;
+import org.apache.kylin.common.logging.SetLogCategory;
 import org.apache.kylin.common.util.ExecutorServiceUtil;
 import org.apache.kylin.common.util.NamedThreadFactory;
 import org.apache.kylin.common.persistence.transaction.BroadcastEventReadyNotifier;
@@ -126,7 +128,9 @@ public class EventBusFactory {
     }
 
     public void postAsync(SchedulerEventNotifier event) {
-        log.debug("Post event {} async", event);
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.SCHEDULE_CATEGORY)) {
+            log.debug("Post event {} async", event);
+        }
         if (event instanceof BroadcastEventReadyNotifier) {
             broadcastEventBus.post(event);
         } else {
@@ -135,12 +139,16 @@ public class EventBusFactory {
     }
 
     public void postSync(Object event) {
-        log.debug("Post event {} sync", event);
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.SCHEDULE_CATEGORY)) {
+            log.debug("Post event {} sync", event);
+        }
         syncEventBus.post(event);
     }
 
     public void callService(Object event) {
-        log.debug("Post Service event {} sync", event);
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.SCHEDULE_CATEGORY)) {
+            log.debug("Post Service event {} sync", event);
+        }
         serviceEventBus.post(event);
     }
 
@@ -153,7 +161,7 @@ public class EventBusFactory {
 
     private void stopThreadPool(ExecutorService executor) {
         executor.shutdown();
-        try {
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.SCHEDULE_CATEGORY)) {
             if (!executor.awaitTermination(6000, TimeUnit.SECONDS)) {
                 ExecutorServiceUtil.forceShutdown(executor);
             }
diff --git a/src/core-job/src/main/java/org/apache/kylin/job/execution/DefaultExecutable.java b/src/core-job/src/main/java/org/apache/kylin/job/execution/DefaultExecutable.java
index 3f22a1c502..5a4e92e58c 100644
--- a/src/core-job/src/main/java/org/apache/kylin/job/execution/DefaultExecutable.java
+++ b/src/core-job/src/main/java/org/apache/kylin/job/execution/DefaultExecutable.java
@@ -71,10 +71,12 @@ public class DefaultExecutable extends AbstractExecutable implements ChainedExec
         List<Executable> executables = getTasks().stream().map(Executable.class::cast).collect(Collectors.toList());
         switch (getJobSchedulerMode()) {
         case DAG:
+            logger.info("Execute in DAG mode.");
             dagSchedule(executables, context);
             break;
         case CHAIN:
         default:
+            logger.info("Execute in CHAIN mode.");
             chainedSchedule(executables, context);
             break;
         }
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/TotalStorageCollector.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/TotalStorageCollector.java
index f6b93383ea..74c30ab86a 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/TotalStorageCollector.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/storage/TotalStorageCollector.java
@@ -36,7 +36,7 @@ public class TotalStorageCollector implements StorageInfoCollector {
     public void doCollect(KylinConfig config, String project, StorageVolumeInfo storageVolumeInfo) throws IOException {
         long totalStorageSize = hdfsCapacityMetrics.getHdfsCapacityByProject(project);
         if (totalStorageSize != -1L) {
-            log.info("Reuse workingDirCapacity by project {}, storageSize: {}", project, totalStorageSize);
+            log.debug("Reuse workingDirCapacity by project {}, storageSize: {}", project, totalStorageSize);
             storageVolumeInfo.setTotalStorageSize(totalStorageSize);
             return;
         }
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/epoch/EpochManager.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/epoch/EpochManager.java
index cbe1ba9590..e8d3acc29b 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/epoch/EpochManager.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/epoch/EpochManager.java
@@ -45,6 +45,8 @@ import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.Singletons;
+import org.apache.kylin.common.constant.LogConstant;
+import org.apache.kylin.common.logging.SetLogCategory;
 import org.apache.kylin.common.persistence.metadata.Epoch;
 import org.apache.kylin.common.persistence.metadata.EpochStore;
 import org.apache.kylin.common.persistence.transaction.UnitOfWork;
@@ -72,7 +74,7 @@ import lombok.Synchronized;
 import lombok.val;
 
 public class EpochManager {
-    private static final Logger logger = LoggerFactory.getLogger(EpochManager.class);
+    private static final Logger logger = LoggerFactory.getLogger(LogConstant.METADATA_CATEGORY);
 
     public static EpochManager getInstance() {
         return Singletons.getInstance(EpochManager.class, clz -> {
@@ -174,13 +176,17 @@ public class EpochManager {
             if (CollectionUtils.isNotEmpty(outdatedProjects)) {
                 outdatedProjects.forEach(EpochManager.this::deleteEpoch);
                 notifierEscapedProject(outdatedProjects);
-                logger.warn("remove outdated epoch list :{}", String.join(",", outdatedProjects));
+                try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                    logger.warn("remove outdated epoch list :{}", String.join(",", outdatedProjects));
+                }
             }
         }
 
         @Synchronized("renewLock")
         public void tryRenewOwnedEpochs() {
-            logger.debug("Start renew owned epoch.........");
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                logger.debug("Start renew owned epoch.........");
+            }
             long startTime = System.currentTimeMillis();
 
             //1.check and get project
@@ -197,7 +203,9 @@ public class EpochManager {
             }
 
             if (CollectionUtils.isEmpty(oriOwnedEpochSet)) {
-                logger.info("current node own none project, end renew...");
+                try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                    logger.info("current node own none project, end renew...");
+                }
                 return;
             }
 
@@ -208,8 +216,10 @@ public class EpochManager {
             notifierAfterUpdatedEpoch("renew", lastRenewEpochSet, afterRenewEpochSets);
             lastRenewEpochSet.clear();
             lastRenewEpochSet.addAll(afterRenewEpochSets);
-            logger.debug("End renew owned epoch,cost:{}.........",
-                    TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - startTime));
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                logger.debug("End renew owned epoch,cost:{}.........",
+                        TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - startTime));
+            }
         }
 
         private Set<String> innerRenewEpochWithRetry(Set<Epoch> oriEpochs) {
@@ -251,7 +261,7 @@ public class EpochManager {
             totalTask.forEach(taskEpochList -> {
                 val epochTargetList = taskEpochList.stream().map(Epoch::getEpochTarget).collect(Collectors.toList());
                 renewExecutor.submit(() -> {
-                    try {
+                    try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
                         if (CollectionUtils.isNotEmpty(epochTargetList)) {
                             batchRenewEpoch(taskEpochList);
                             newRenewEpochSets.addAll(epochTargetList);
@@ -264,7 +274,7 @@ public class EpochManager {
                 });
             });
 
-            try {
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
                 if (!countDownLatch.await(epochRenewTimeout, TimeUnit.SECONDS)) {
                     logger.error("renew not finished,{}/{}...", newRenewEpochSets.size(), oriEpochs.size());
                 }
@@ -276,7 +286,9 @@ public class EpochManager {
 
         @Synchronized("updateLock")
         public void tryUpdateAllEpochs() {
-            logger.debug("Start update Epochs.........");
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                logger.debug("Start update Epochs.........");
+            }
             long startTime = System.currentTimeMillis();
 
             //1.check and get project
@@ -294,7 +306,9 @@ public class EpochManager {
             }
 
             if (CollectionUtils.isEmpty(projects)) {
-                logger.debug("don't have more new project, end update...");
+                try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                    logger.debug("don't have more new project, end update...");
+                }
                 return;
             }
 
@@ -303,8 +317,10 @@ public class EpochManager {
 
             notifierAfterUpdatedEpoch("update", Collections.emptySet(), updatedMewEpochs);
 
-            logger.debug("End update Epochs,cost:{}:.........",
-                    TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - startTime));
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                logger.debug("End update Epochs,cost:{}:.........",
+                        TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - startTime));
+            }
         }
 
         private Set<String> tryUpdateEpochByProjects(final List<String> projects) {
@@ -345,18 +361,24 @@ public class EpochManager {
                 eventBusFactory.postAsync(new ProjectEscapedNotifier(project));
             }
 
-            logger.warn("notifier escaped project:{}", String.join(",", escapedProjects));
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                logger.warn("notifier escaped project:{}", String.join(",", escapedProjects));
+            }
         }
 
         private void notifierAfterUpdatedEpoch(String updateTypeName, Set<String> oriEpochs, Set<String> newEpochs) {
-            logger.debug("after {} new epoch size:{}, Project {} owned by {}", updateTypeName, newEpochs.size(),
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                logger.debug("after {} new epoch size:{}, Project {} owned by {}", updateTypeName, newEpochs.size(),
                     String.join(",", newEpochs), identity);
+            }
 
             if (CollectionUtils.isNotEmpty(newEpochs)) {
                 Collection<String> newControlledProjects = new HashSet<>(Sets.difference(newEpochs, oriEpochs));
                 if (CollectionUtils.isNotEmpty(newControlledProjects)) {
-                    logger.debug("after {} controlled projects: {}", updateTypeName,
+                    try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                        logger.debug("after {} controlled projects: {}", updateTypeName,
                             String.join(",", newControlledProjects));
+                    }
                     newControlledProjects.forEach(p -> eventBusFactory.postAsync(new ProjectControlledNotifier(p)));
                 }
             }
@@ -364,7 +386,9 @@ public class EpochManager {
             if (CollectionUtils.isNotEmpty(oriEpochs)) {
                 Collection<String> escapedProjects = new HashSet<>(Sets.difference(oriEpochs, newEpochs));
                 if (CollectionUtils.isNotEmpty(escapedProjects)) {
-                    logger.debug("after {} escaped projects: {}", updateTypeName, String.join(",", escapedProjects));
+                    try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                        logger.debug("after {} escaped projects: {}", updateTypeName, String.join(",", escapedProjects));
+                    }
                     notifierEscapedProject(escapedProjects);
                 }
             }
@@ -531,7 +555,7 @@ public class EpochManager {
             return false;
         }
         return EpochUpdateLockManager.executeEpochWithLock(epochTarget, () -> {
-            try {
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
                 Epoch epoch = epochStore.getEpoch(epochTarget);
                 Pair<Epoch, Epoch> oldNewEpochPair = oldEpoch2NewEpoch(epoch, epochTarget, force, null);
 
@@ -672,23 +696,25 @@ public class EpochManager {
     }
 
     private boolean isEpochLegal(Epoch epoch) {
-        if (epoch == null) {
-            logger.debug("Get null epoch");
-            return false;
-        } else if (StringUtils.isEmpty(epoch.getCurrentEpochOwner())) {
-            logger.debug("Epoch {}'s owner is empty", epoch);
-            return false;
-        } else if (System.currentTimeMillis() - epoch.getLastEpochRenewTime() > epochExpiredTime * 1000) {
-            logger.warn("Epoch {}'s last renew time is expired. Current time is {}, expiredTime is {}", epoch,
-                    System.currentTimeMillis(), epochExpiredTime);
-            return false;
-        }
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+            if (epoch == null) {
+                logger.debug("Get null epoch");
+                return false;
+            } else if (StringUtils.isEmpty(epoch.getCurrentEpochOwner())) {
+                logger.debug("Epoch {}'s owner is empty", epoch);
+                return false;
+            } else if (System.currentTimeMillis() - epoch.getLastEpochRenewTime() > epochExpiredTime * 1000) {
+                logger.warn("Epoch {}'s last renew time is expired. Current time is {}, expiredTime is {}", epoch,
+                        System.currentTimeMillis(), epochExpiredTime);
+                return false;
+            }
 
-        ResourceGroupManager rgManager = ResourceGroupManager.getInstance(config);
-        String epochServer = getHostAndPort(epoch.getCurrentEpochOwner());
-        if (!rgManager.instanceHasPermissionToOwnEpochTarget(epoch.getEpochTarget(), epochServer)) {
-            logger.debug("Epoch {}'s owner is not in build request type resource group.", epoch);
-            return false;
+            ResourceGroupManager rgManager = ResourceGroupManager.getInstance(config);
+            String epochServer = getHostAndPort(epoch.getCurrentEpochOwner());
+            if (!rgManager.instanceHasPermissionToOwnEpochTarget(epoch.getEpochTarget(), epochServer)) {
+                logger.debug("Epoch {}'s owner is not in build request type resource group.", epoch);
+                return false;
+            }
         }
         return true;
     }
@@ -713,7 +739,9 @@ public class EpochManager {
         if (!isGlobalProject(epochTargetTemp)) {
             val targetProjectInstance = NProjectManager.getInstance(config).getProject(epochTargetTemp);
             if (Objects.isNull(targetProjectInstance)) {
-                logger.warn("get epoch failed, because the project:{} dose not exist", epochTargetTemp);
+                try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                    logger.warn("get epoch failed, because the project:{} dose not exist", epochTargetTemp);
+                }
                 return null;
             }
 
@@ -762,7 +790,9 @@ public class EpochManager {
     public void deleteEpoch(String epochTarget) {
         EpochUpdateLockManager.executeEpochWithLock(epochTarget, () -> {
             epochStore.delete(epochTarget);
-            logger.debug("delete epoch:{}", epochTarget);
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                logger.debug("delete epoch:{}", epochTarget);
+            }
             return null;
         });
     }
@@ -790,7 +820,9 @@ public class EpochManager {
 
     private boolean checkInMaintenanceMode() {
         if (isMaintenanceMode()) {
-            logger.debug("System is currently undergoing maintenance. Abort updating Epochs");
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+                logger.debug("System is currently undergoing maintenance. Abort updating Epochs");
+            }
             return true;
         }
         return false;
@@ -802,7 +834,9 @@ public class EpochManager {
 
  // invoked on shutdown, or when the metadata is inconsistent
     public void releaseOwnedEpochs() {
-        logger.info("Release owned epochs");
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.METADATA_CATEGORY)) {
+            logger.info("Release owned epochs");
+        }
         epochStore.executeWithTransaction(() -> {
             val epochs = epochStore.list().stream().filter(this::checkEpochOwnerOnly).collect(Collectors.toList());
             epochs.forEach(epoch -> {
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/recommendation/ref/OptRecV2.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/recommendation/ref/OptRecV2.java
index 41c9d6100f..44da2716cb 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/recommendation/ref/OptRecV2.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/recommendation/ref/OptRecV2.java
@@ -114,7 +114,7 @@ public class OptRecV2 {
     }
 
     public void initRecommendation() {
-        log.info("Start to initialize recommendation({}/{}}", project, getUuid());
+        log.debug("Start to initialize recommendation({}/{}}", project, getUuid());
 
         NDataModel dataModel = getModel();
         if (dataModel.isBroken()) {
@@ -133,7 +133,7 @@ public class OptRecV2 {
     }
 
     public List<RawRecItem> filterExcludedRecPatterns(List<RawRecItem> rawRecItems) {
-        log.info("Start to initialize recommendation patterns({}/{}}", project, getUuid());
+        log.debug("Start to initialize recommendation patterns({}/{}}", project, getUuid());
         NDataModel dataModel = getModel();
         if (dataModel.isBroken()) {
             log.warn("Discard all related recommendations for model({}/{}) is broken.", project, uuid);
diff --git a/src/core-metadata/src/test/java/org/apache/kylin/metadata/epoch/EpochManagerTest.java b/src/core-metadata/src/test/java/org/apache/kylin/metadata/epoch/EpochManagerTest.java
index d488dfe67b..425d81697d 100644
--- a/src/core-metadata/src/test/java/org/apache/kylin/metadata/epoch/EpochManagerTest.java
+++ b/src/core-metadata/src/test/java/org/apache/kylin/metadata/epoch/EpochManagerTest.java
@@ -467,4 +467,26 @@ class EpochManagerTest {
         }
     }
 
+    @Test
+    void testUpdateAllEpochsSuccess() {
+        Epoch e1 = new Epoch();
+        e1.setEpochTarget("test1");
+        e1.setCurrentEpochOwner("owner1");
+        e1.setEpochId(1);
+        e1.setLastEpochRenewTime(System.currentTimeMillis());
+
+        Epoch e2 = new Epoch();
+        e2.setEpochTarget("test2");
+        e2.setCurrentEpochOwner("owner2");
+        e2.setEpochId(1);
+        e2.setLastEpochRenewTime(System.currentTimeMillis());
+
+        getEpochStore().insertBatch(Arrays.asList(e1, e2));
+
+        EpochManager epochManager = EpochManager.getInstance();
+        epochManager.tryUpdateEpoch(EpochManager.GLOBAL, false);
+        epochManager.updateAllEpochs();
+        Assertions.assertFalse(epochManager.getOwnedEpochs().isEmpty());
+    }
+
 }
diff --git a/src/core-metrics/src/main/java/org/apache/kylin/common/metrics/MetricsInfluxdbReporter.java b/src/core-metrics/src/main/java/org/apache/kylin/common/metrics/MetricsInfluxdbReporter.java
index e33e4aafee..087555f533 100644
--- a/src/core-metrics/src/main/java/org/apache/kylin/common/metrics/MetricsInfluxdbReporter.java
+++ b/src/core-metrics/src/main/java/org/apache/kylin/common/metrics/MetricsInfluxdbReporter.java
@@ -114,7 +114,7 @@ public class MetricsInfluxdbReporter implements MetricsReporter {
         dailyInstance.init();
         Executors.newSingleThreadScheduledExecutor().scheduleWithFixedDelay(() -> {
             try {
-                logger.info("Start to aggregate daily metrics ...");
+                logger.debug("Start to aggregate daily metrics ...");
                 long now = System.currentTimeMillis();
                 long todayStart = TimeUtil.getDayStart(now);
 
@@ -141,7 +141,7 @@ public class MetricsInfluxdbReporter implements MetricsReporter {
                 updateDailyMetrics(todayStart, config);
 
                 retry.set(0);
-                logger.info("Aggregate daily metrics success ...");
+                logger.debug("Aggregate daily metrics success ...");
             } catch (Exception e) {
                 retry.incrementAndGet();
                 logger.error("Failed to aggregate daily metrics, retry: {}", retry.get(), e);
diff --git a/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotRunner.java b/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotRunner.java
index 294b6ddb75..6bcbe1cfbd 100644
--- a/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotRunner.java
+++ b/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotRunner.java
@@ -42,6 +42,7 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.Path;
 import org.apache.http.HttpStatus;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.constant.LogConstant;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.exception.KylinRuntimeException;
 import org.apache.kylin.common.logging.SetLogCategory;
@@ -73,7 +74,6 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 public class AutoRefreshSnapshotRunner implements Runnable {
     private static final String SNAPSHOT_VIEW_MAPPING_ERROR_MESSAGE = "Project[%s] Save View Mapping Failed";
-    private static final String SCHEDULE_LOG_CATEGORY = "schedule";
 
     private static final Map<String, AutoRefreshSnapshotRunner> INSTANCE_MAP = Maps.newConcurrentMap();
     @Setter
@@ -131,7 +131,7 @@ public class AutoRefreshSnapshotRunner implements Runnable {
     }
 
     public void doRun() {
-        try (SetLogCategory ignored = new SetLogCategory(SCHEDULE_LOG_CATEGORY)) {
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.SCHEDULE_CATEGORY)) {
             log.info("Project[{}] start check and refresh snapshot", project);
             if (log.isDebugEnabled()) {
                 val poolExecutor = (ThreadPoolExecutor) jobPool;
@@ -346,7 +346,7 @@ public class AutoRefreshSnapshotRunner implements Runnable {
 
     @Override
     public void run() {
-        try (SetLogCategory ignored = new SetLogCategory(SCHEDULE_LOG_CATEGORY)) {
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.SCHEDULE_CATEGORY)) {
             saveMarkFile();
             doRun();
         } catch (Exception e) {
@@ -357,7 +357,7 @@ public class AutoRefreshSnapshotRunner implements Runnable {
     }
 
     public void runWhenSchedulerInit() {
-        try (SetLogCategory ignored = new SetLogCategory(SCHEDULE_LOG_CATEGORY)) {
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.SCHEDULE_CATEGORY)) {
             doRun();
         } catch (Exception e) {
             log.error(e.getMessage(), e);
diff --git a/src/data-loading-service/src/main/java/org/apache/kylin/rest/service/JobService.java b/src/data-loading-service/src/main/java/org/apache/kylin/rest/service/JobService.java
index c9305850a9..b3b902b4b6 100644
--- a/src/data-loading-service/src/main/java/org/apache/kylin/rest/service/JobService.java
+++ b/src/data-loading-service/src/main/java/org/apache/kylin/rest/service/JobService.java
@@ -54,6 +54,7 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.cluster.ClusterManagerFactory;
 import org.apache.kylin.cluster.IClusterManager;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.constant.LogConstant;
 import org.apache.kylin.common.exception.ErrorCode;
 import org.apache.kylin.common.exception.ExceptionReason;
 import org.apache.kylin.common.exception.ExceptionResolve;
@@ -61,6 +62,7 @@ import org.apache.kylin.common.exception.JobErrorCode;
 import org.apache.kylin.common.exception.JobExceptionReason;
 import org.apache.kylin.common.exception.JobExceptionResolve;
 import org.apache.kylin.common.exception.KylinException;
+import org.apache.kylin.common.logging.SetLogCategory;
 import org.apache.kylin.common.metrics.MetricsCategory;
 import org.apache.kylin.common.metrics.MetricsGroup;
 import org.apache.kylin.common.metrics.MetricsName;
@@ -156,7 +158,7 @@ public class JobService extends BasicService implements JobSupporter, ISmartAppl
     @Autowired
     private ModelService modelService;
 
-    private static final Logger logger = LoggerFactory.getLogger("schedule");
+    private static final Logger logger = LoggerFactory.getLogger(LogConstant.BUILD_CATEGORY);
 
     private static final Map<String, String> jobTypeMap = Maps.newHashMap();
     private static final String LAST_MODIFIED = "last_modified";
@@ -615,7 +617,7 @@ public class JobService extends BasicService implements JobSupporter, ISmartAppl
         // wait time recorded in the job output
         Map<String, String> waiteTimeMap;
         val output = executable.getOutput();
-        try {
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.BUILD_CATEGORY)) {
             waiteTimeMap = JsonUtil.readValueAsMap(output.getExtra().getOrDefault(NBatchConstants.P_WAITE_TIME, "{}"));
         } catch (IOException e) {
             logger.error(e.getMessage(), e);
@@ -710,7 +712,7 @@ public class JobService extends BasicService implements JobSupporter, ISmartAppl
     }
 
     public void setExceptionResolveAndCodeAndReason(Output output, ExecutableStepResponse executableStepResponse) {
-        try {
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.BUILD_CATEGORY)) {
             val exceptionCode = getExceptionCode(output);
             executableStepResponse.setFailedResolve(ExceptionResolve.getResolve(exceptionCode));
             executableStepResponse.setFailedCode(ErrorCode.getLocalizedString(exceptionCode));
@@ -734,7 +736,7 @@ public class JobService extends BasicService implements JobSupporter, ISmartAppl
     }
 
     public String getExceptionCode(Output output) {
-        try {
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.BUILD_CATEGORY)) {
             var exceptionOrExceptionMessage = output.getFailedReason();
 
             if (StringUtils.isBlank(exceptionOrExceptionMessage)) {
@@ -889,7 +891,9 @@ public class JobService extends BasicService implements JobSupporter, ISmartAppl
         result.setSequenceID(stageBase.getStepId());
 
         if (stageOutput == null) {
-            logger.warn("Cannot found output for task: id={}", stageBase.getId());
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.BUILD_CATEGORY)) {
+                logger.warn("Cannot found output for task: id={}", stageBase.getId());
+            }
             return result;
         }
         for (Map.Entry<String, String> entry : stageOutput.getExtra().entrySet()) {
@@ -925,7 +929,9 @@ public class JobService extends BasicService implements JobSupporter, ISmartAppl
         result.setSequenceID(task.getStepId());
 
         if (stepOutput == null) {
-            logger.warn("Cannot found output for task: id={}", task.getId());
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.BUILD_CATEGORY)) {
+                logger.warn("Cannot found output for task: id={}", task.getId());
+            }
             return result;
         }
 
@@ -1018,7 +1024,9 @@ public class JobService extends BasicService implements JobSupporter, ISmartAppl
     }
 
     public void batchUpdateGlobalJobStatus(List<String> jobIds, String action, List<String> filterStatuses) {
-        logger.info("Owned projects is {}", projectService.getOwnedProjects());
+        try (SetLogCategory ignored = new SetLogCategory(LogConstant.BUILD_CATEGORY)) {
+            logger.info("Owned projects is {}", projectService.getOwnedProjects());
+        }
         for (String project : projectService.getOwnedProjects()) {
             aclEvaluate.checkProjectOperationPermission(project);
             batchUpdateJobStatus0(jobIds, project, action, filterStatuses);
@@ -1252,7 +1260,9 @@ public class JobService extends BasicService implements JobSupporter, ISmartAppl
                 project);
         FusionModel fusionModel = fusionModelManager.getFusionModel(modelId);
         if (!model.isFusionModel() || Objects.isNull(fusionModel)) {
-            logger.warn("model is not fusion model or fusion model is null, {}", modelId);
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.BUILD_CATEGORY)) {
+                logger.warn("model is not fusion model or fusion model is null, {}", modelId);
+            }
             return;
         }
 
@@ -1370,7 +1380,9 @@ public class JobService extends BasicService implements JobSupporter, ISmartAppl
     @Override
     public void onApplicationEvent(ApplicationEvent event) {
         if (event instanceof ContextClosedEvent) {
-            logger.info("Stop kyligence node, kill job on yarn for yarn cluster mode");
+            try (SetLogCategory ignored = new SetLogCategory(LogConstant.BUILD_CATEGORY)) {
+                logger.info("Stop kyligence node, kill job on yarn for yarn cluster mode");
+            }
             EpochManager epochManager = EpochManager.getInstance();
             KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
             List<Epoch> ownedEpochs = epochManager.getOwnedEpochs();
diff --git a/src/data-loading-service/src/test/java/org/apache/kylin/rest/service/JobServiceTest.java b/src/data-loading-service/src/test/java/org/apache/kylin/rest/service/JobServiceTest.java
index 0de349d774..c51fc1c35e 100644
--- a/src/data-loading-service/src/test/java/org/apache/kylin/rest/service/JobServiceTest.java
+++ b/src/data-loading-service/src/test/java/org/apache/kylin/rest/service/JobServiceTest.java
@@ -21,6 +21,7 @@ package org.apache.kylin.rest.service;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.JOB_ACTION_ILLEGAL;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.JOB_NOT_EXIST;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.JOB_STATUS_ILLEGAL;
+import static org.apache.kylin.job.constant.JobStatusEnum.PENDING;
 import static org.apache.kylin.job.constant.JobStatusEnum.SKIP;
 import static org.awaitility.Awaitility.await;
 import static org.junit.Assert.assertEquals;
@@ -917,7 +918,7 @@ public class JobServiceTest extends NLocalFileMetadataTestCase {
         List<ExecutableStepResponse> stages2 = subStages.get(segmentId2).getStage();
         assertEquals(1, stages2.size());
         ExecutableStepResponse logicStepResponse2 = stages2.get(0);
-        checkResponse(logicStepResponse2, logicStep.getId(), JobStatusEnum.PENDING);
+        checkResponse(logicStepResponse2, logicStep.getId(), PENDING);
         assertEquals(0, logicStepResponse2.getExecStartTime());
         assertTrue(logicStepResponse2.getExecStartTime() < System.currentTimeMillis());
 
@@ -2034,4 +2035,13 @@ public class JobServiceTest extends NLocalFileMetadataTestCase {
                     .get(0).getOutput().getStatus(), jobStatus);
         }
     }
+
+    @Test
+    public void testParseToExecutableStepWithStepOutputNull() {
+        AbstractExecutable task = new FiveSecondSucceedTestExecutable();
+        task.setProject("default");
+        ExecutableState jobState = ExecutableState.RUNNING;
+        ExecutableStepResponse result = jobService.parseToExecutableStep(task, null, new HashMap<>(), jobState);
+        Assert.assertSame(PENDING, result.getStatus());
+    }
 }
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/rest/broadcaster/BroadcasterTest.java b/src/kylin-it/src/test/java/org/apache/kylin/rest/broadcaster/BroadcasterTest.java
index 1f267173ae..61fb0132b3 100644
--- a/src/kylin-it/src/test/java/org/apache/kylin/rest/broadcaster/BroadcasterTest.java
+++ b/src/kylin-it/src/test/java/org/apache/kylin/rest/broadcaster/BroadcasterTest.java
@@ -18,10 +18,12 @@
 
 package org.apache.kylin.rest.broadcaster;
 
+import java.io.IOException;
 import java.util.Arrays;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.transaction.AddS3CredentialToSparkBroadcastEventNotifier;
+import org.apache.kylin.common.persistence.transaction.AuditLogBroadcastEventNotifier;
 import org.apache.kylin.common.persistence.transaction.BroadcastEventReadyNotifier;
 import org.apache.kylin.junit.annotation.MetadataInfo;
 import org.apache.kylin.rest.cluster.ClusterManager;
@@ -30,6 +32,7 @@ import org.apache.kylin.rest.config.initialize.BroadcastListener;
 import org.apache.kylin.rest.security.AclPermission;
 import org.apache.kylin.rest.security.AdminUserSyncEventNotifier;
 import org.apache.kylin.rest.security.UserAclManager;
+import org.apache.kylin.rest.service.AuditLogService;
 import org.apache.kylin.rest.service.UserAclService;
 import org.apache.kylin.rest.service.UserService;
 import org.apache.kylin.rest.util.SpringContext;
@@ -51,6 +54,8 @@ import org.apache.kylin.metadata.epoch.EpochManager;
 import lombok.val;
 import lombok.extern.slf4j.Slf4j;
 
+import static org.apache.kylin.common.persistence.transaction.BroadcastEventReadyNotifier.BroadcastScopeEnum.WHOLE_NODES;
+
 @Slf4j
 @MetadataInfo(onlyProps = true)
 class BroadcasterTest {
@@ -77,6 +82,22 @@ class BroadcasterTest {
         }
     }
 
+    @Test
+    void testBroadcastWithAnnounceContains() {
+        try (ConfigurableApplicationContext context = this.application.run("--kylin.server.mode=all")) {
+            SpringContext springContext = context.getBean(SpringContext.class);
+            ReflectionTestUtils.setField(springContext, "applicationContext", context);
+            Broadcaster broadcaster = context.getBean(Broadcaster.class);
+
+            BroadcastEventReadyNotifier eventReadyNotifier = new BroadcastEventReadyNotifier();
+            broadcaster.announce(eventReadyNotifier);
+            // announce twice
+            broadcaster.announce(eventReadyNotifier);
+
+            Assertions.assertSame(WHOLE_NODES, eventReadyNotifier.getBroadcastScope());
+        }
+    }
+
     @Test
     void testBroadcastSyncAdminUserAcl() throws Exception {
         EpochManager epochManager = EpochManager.getInstance();
@@ -97,6 +118,20 @@ class BroadcasterTest {
         assert SparderEnv.getSparkSession().conf().contains(String.format("fs.s3a.bucket.%s.assumed.role.arn", "aa"));
     }
 
+    @Test
+    void testBroadcastWithAuditLog() {
+        BroadcastListener broadcastListener = new BroadcastListener();
+        val auditLogService = Mockito.spy(AuditLogService.class);
+        ReflectionTestUtils.setField(broadcastListener, "auditLogService", auditLogService);
+        String errorMsg = "";
+        try {
+            broadcastListener.handle(new AuditLogBroadcastEventNotifier());
+        } catch (IOException e) {
+            errorMsg = e.getMessage();
+        }
+        Assertions.assertTrue(errorMsg.isEmpty());
+    }
+
     @Configuration
     static class Config {
         @Bean
diff --git a/src/query-service/src/main/java/org/apache/kylin/rest/service/QueryCacheManager.java b/src/query-service/src/main/java/org/apache/kylin/rest/service/QueryCacheManager.java
index bdbce8d893..5f68795339 100644
--- a/src/query-service/src/main/java/org/apache/kylin/rest/service/QueryCacheManager.java
+++ b/src/query-service/src/main/java/org/apache/kylin/rest/service/QueryCacheManager.java
@@ -162,7 +162,7 @@ public class QueryCacheManager implements CommonQueryCacheSupporter {
 
     public SQLResponse doSearchQuery(QueryCacheManager.Type type, SQLRequest sqlRequest) {
         Object response = kylinCache.get(type.rootCacheName, sqlRequest.getProject(), sqlRequest.getCacheKey());
-        logger.info("[query cache log] The cache key is: {}", sqlRequest.getCacheKey());
+        logger.debug("[query cache log] The cache key is: {}", sqlRequest.getCacheKey());
         if (response == null) {
             return null;
         }
@@ -178,7 +178,7 @@ public class QueryCacheManager implements CommonQueryCacheSupporter {
 
         // check signature for success query resp in case the datasource is changed
         if (QueryCacheSignatureUtil.checkCacheExpired(cached, sqlRequest.getProject())) {
-            logger.info("[query cache log] cache has expired, cache key is {}", sqlRequest.getCacheKey());
+            logger.debug("[query cache log] cache has expired, cache key is {}", sqlRequest.getCacheKey());
             clearQueryCache(sqlRequest);
             return null;
         }
@@ -315,10 +315,10 @@ public class QueryCacheManager implements CommonQueryCacheSupporter {
 
     public void clearProjectCache(String project) {
         if (project == null) {
-            logger.debug("[query cache log] clear query cache for all projects.");
+            logger.info("[query cache log] clear query cache for all projects.");
             kylinCache.clearAll();
         } else {
-            logger.debug("[query cache log] clear query cache for {}", project);
+            logger.info("[query cache log] clear query cache for {}", project);
             kylinCache.clearByType(Type.SUCCESS_QUERY_CACHE.rootCacheName, project);
             kylinCache.clearByType(Type.EXCEPTION_QUERY_CACHE.rootCacheName, project);
             kylinCache.clearByType(Type.SCHEMA_CACHE.rootCacheName, project);
diff --git a/src/query-service/src/main/java/org/apache/kylin/rest/service/QueryService.java b/src/query-service/src/main/java/org/apache/kylin/rest/service/QueryService.java
index dcc26d6a51..04dad61a83 100644
--- a/src/query-service/src/main/java/org/apache/kylin/rest/service/QueryService.java
+++ b/src/query-service/src/main/java/org/apache/kylin/rest/service/QueryService.java
@@ -62,6 +62,7 @@ import org.apache.kylin.common.KapConfig;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.QueryContext;
 import org.apache.kylin.common.QueryTrace;
+import org.apache.kylin.common.constant.LogConstant;
 import org.apache.kylin.common.debug.BackdoorToggles;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.exception.KylinTimeoutException;
@@ -183,7 +184,7 @@ public class QueryService extends BasicService implements CacheSignatureQuerySup
 
     public static final String QUERY_STORE_PATH_PREFIX = "/query/";
     private static final String JDBC_METADATA_SCHEMA = "metadata";
-    private static final Logger logger = LoggerFactory.getLogger("query");
+    private static final Logger logger = LoggerFactory.getLogger(LogConstant.QUERY_CATEGORY);
     final SlowQueryDetector slowQueryDetector = new SlowQueryDetector();
 
     @Autowired
@@ -298,6 +299,7 @@ public class QueryService extends BasicService implements CacheSignatureQuerySup
 
             if (QueryContext.current().getQueryTagInfo().isAsyncQuery()
                     && NProjectManager.getProjectConfig(sqlRequest.getProject()).isUniqueAsyncQueryYarnQueue()) {
+                logger.info("This query is an async query in project: {}", sqlRequest.getProject());
                 if (StringUtils.isNotEmpty(sqlRequest.getSparkQueue())) {
                     queryParams.setSparkQueue(sqlRequest.getSparkQueue());
                 }
@@ -479,7 +481,8 @@ public class QueryService extends BasicService implements CacheSignatureQuerySup
             queryContext.setQueryId(UUID.fromString(sqlRequest.getQueryId()).toString());
         }
         try (SetThreadName ignored = new SetThreadName("Query %s", queryContext.getQueryId());
-                SetLogCategory ignored2 = new SetLogCategory("query")) {
+                SetLogCategory ignored2 = new SetLogCategory(LogConstant.QUERY_CATEGORY)) {
+            logger.info("Start query in project: {}", sqlRequest.getProject());
             if (sqlRequest.getExecuteAs() != null)
                 sqlRequest.setUsername(sqlRequest.getExecuteAs());
             else
@@ -606,6 +609,7 @@ public class QueryService extends BasicService implements CacheSignatureQuerySup
             QueryUtils.updateQueryContextSQLMetrics(rawSql.getStatementString());
             QueryContext.currentTrace().amendLast(QueryTrace.PREPARE_AND_SUBMIT_JOB, System.currentTimeMillis());
             QueryContext.currentTrace().endLastSpan();
+            QueryContext.current().record("update_metrics_time");
             QueryContext.currentMetrics().setQueryEndTime(System.currentTimeMillis());
 
             sqlResponse.setServer(clusterManager.getLocalServer());
diff --git a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkMetadataExplorer.java b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkMetadataExplorer.java
index 91cc8670bf..c9543e11e0 100644
--- a/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkMetadataExplorer.java
+++ b/src/spark-project/engine-spark/src/main/java/org/apache/kylin/engine/spark/source/NSparkMetadataExplorer.java
@@ -321,7 +321,7 @@ public class NSparkMetadataExplorer implements ISourceMetadataExplorer, ISampleD
                 Database db = SparderEnv.getSparkSession().catalog().getDatabase(database);
             } catch (AnalysisException e) {
                 UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
-                logger.info("The current user: {} does not have permission to access database {}", ugi.getUserName(),
+                logger.error("The current user: {} does not have permission to access database {}", ugi.getUserName(),
                         database);
                 return false;
             }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/SegmentFlatTable.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/SegmentFlatTable.scala
index 29d38f641b..931570c6c9 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/SegmentFlatTable.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/builder/SegmentFlatTable.scala
@@ -18,12 +18,10 @@
 
 package org.apache.kylin.engine.spark.builder
 
-import java.util.concurrent.{CountDownLatch, TimeUnit}
-import java.util.{Locale, Objects, Timer, TimerTask}
-
+import com.google.common.collect.Sets
 import org.apache.commons.lang3.StringUtils
 import org.apache.kylin.common.util.HadoopUtil
-import org.apache.kylin.common.{KapConfig, KylinConfig}
+import org.apache.kylin.common.{CustomUtils, KapConfig, KylinConfig}
 import org.apache.kylin.engine.spark.builder.DFBuilderHelper._
 import org.apache.kylin.engine.spark.job.NSparkCubingUtil._
 import org.apache.kylin.engine.spark.job.{FiltersUtil, TableMetaManager}
@@ -38,6 +36,12 @@ import org.apache.spark.sql.functions.{col, expr}
 import org.apache.spark.sql.types.StructField
 import org.apache.spark.sql.util.SparderTypeUtil
 import org.apache.spark.utils.ProxyThreadUtils
+import java.util.concurrent.{CountDownLatch, TimeUnit}
+import java.util.{Locale, Objects, Timer, TimerTask}
+
+import org.apache.kylin.common.constant.LogConstant
+import org.apache.kylin.common.logging.SetLogCategory
+import org.apache.spark.util.Utils
 
 import scala.collection.JavaConverters._
 import scala.collection.mutable
@@ -46,8 +50,6 @@ import scala.concurrent.duration.{Duration, MILLISECONDS}
 import scala.concurrent.forkjoin.ForkJoinPool
 import scala.util.{Failure, Success, Try}
 
-import com.google.common.collect.Sets
-
 class SegmentFlatTable(private val sparkSession: SparkSession, //
                        private val tableDesc: SegmentFlatTableDesc) extends LogEx {
 
@@ -524,7 +526,9 @@ object SegmentFlatTable extends LogEx {
     val newFields = originDS.schema.fields.map(f =>
       convertFromDot("`" + alias + "`" + "." + "`" + f.name + "`")).toSeq
     val newDS = originDS.toDF(newFields: _*)
-    logInfo(s"Wrap ALIAS ${originDS.schema.treeString} TO ${newDS.schema.treeString}")
+    CustomUtils.tryWithResourceIgnore(new SetLogCategory(LogConstant.BUILD_CATEGORY)) {
+      _ => logInfo(s"Wrap ALIAS ${originDS.schema.treeString} TO ${newDS.schema.treeString}")
+    }
     newDS
   }
 
@@ -557,7 +561,9 @@ object SegmentFlatTable extends LogEx {
       val equiConditionColPairs = fk.zip(pk).map(joinKey =>
         col(convertFromDot(joinKey._1.getBackTickIdentity))
           .equalTo(col(convertFromDot(joinKey._2.getBackTickIdentity))))
-      logInfo(s"Lookup table schema ${lookupDataset.schema.treeString}")
+      CustomUtils.tryWithResourceIgnore(new SetLogCategory(LogConstant.BUILD_CATEGORY)) {
+        _ => logInfo(s"Lookup table schema ${lookupDataset.schema.treeString}")
+      }
 
       if (join.getNonEquiJoinCondition != null) {
         var condition = NonEquiJoinConditionBuilder.convert(join.getNonEquiJoinCondition)
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/RDSegmentBuildExec.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/RDSegmentBuildExec.scala
index 269065599b..a2c272dec6 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/RDSegmentBuildExec.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/RDSegmentBuildExec.scala
@@ -80,11 +80,10 @@ class RDSegmentBuildExec(private val jobContext: RDSegmentBuildJob, //
       val paths = ResourceDetectUtils.getPaths(execution.sparkPlan).map(_.toString).asJava
       logInfo(s"Detected source: $sourceName $leaves ${paths.asScala.mkString(",")}")
       val startTime = System.currentTimeMillis()
-      logInfo(s"Detect source size start time is $startTime")
-      val resourceSize = ResourceDetectUtils.getResourceSize(SparderEnv.getHadoopConfiguration(),config.isConcurrencyFetchDataSourceSize,
+      val resourceSize = ResourceDetectUtils.getResourceSize(SparderEnv.getHadoopConfiguration(), config.isConcurrencyFetchDataSourceSize,
         paths.asScala.map(path => new Path(path)): _*)
       val endTime = System.currentTimeMillis()
-      logInfo(s"Detect source size end time is $endTime")
+      logInfo(s"Detect source size cost time is ${endTime - startTime} ms.")
 
       logInfo(s"Detect source size $resourceSize")
       sourceSize.put(sourceName, resourceSize)
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/SegmentBuildJob.java b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/SegmentBuildJob.java
index b9663c8d26..6bbf5217c7 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/SegmentBuildJob.java
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/SegmentBuildJob.java
@@ -95,7 +95,9 @@ public class SegmentBuildJob extends SegmentJob {
     @Override
     protected final void doExecute() throws Exception {
 
+        log.info("Start sub stage {}", REFRESH_SNAPSHOTS.name());
         REFRESH_SNAPSHOTS.create(this, null, null).toWork();
+        log.info("End sub stage {}", REFRESH_SNAPSHOTS.name());
 
         buildContext = new BuildContext(getSparkSession().sparkContext(), config);
         buildContext.appStatusTracker().startMonitorBuildResourceState();
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/exec/BuildExec.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/exec/BuildExec.scala
index 70a01b35e4..91b2850c83 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/exec/BuildExec.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/exec/BuildExec.scala
@@ -19,13 +19,15 @@
 package org.apache.kylin.engine.spark.job.exec
 
 import org.apache.kylin.engine.spark.job.stage.StageExec
-
 import java.io.IOException
 import java.util
 import java.util.Locale
+
+import org.apache.spark.internal.Logging
+
 import scala.collection.JavaConverters._
 
-class BuildExec(var id: String) {
+class BuildExec(var id: String) extends Logging {
   protected var subStages = new util.ArrayList[StageExec]
 
   def getId: String = {
@@ -35,7 +37,9 @@ class BuildExec(var id: String) {
   @throws(classOf[IOException])
   def buildSegment(): Unit = {
     for (stage <- subStages.asScala) {
+      logInfo(s"Start sub stage ${stage.getStageName}")
       stage.toWork()
+      logInfo(s"End sub stage ${stage.getStageName}")
     }
   }
 
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/exec/MergeExec.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/exec/MergeExec.scala
index 498ef117d4..2b4bd57c94 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/exec/MergeExec.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/exec/MergeExec.scala
@@ -29,7 +29,9 @@ class MergeExec(id: String) extends BuildExec(id) {
   @throws(classOf[IOException])
   def mergeSegment(): Unit = {
     for (stage <- subStages.asScala) {
+      logInfo(s"Start sub stage ${stage.getStageName}")
       stage.toWork()
+      logInfo(s"End sub stage ${stage.getStageName}")
     }
   }
 
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/exec/SnapshotExec.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/exec/SnapshotExec.scala
index 4394a8615d..86dd500178 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/exec/SnapshotExec.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/exec/SnapshotExec.scala
@@ -29,7 +29,9 @@ class SnapshotExec(id: String) extends BuildExec(id) {
   @throws(classOf[IOException])
   def buildSnapshot(): Unit = {
     for (stage <- subStages.asScala) {
+      logInfo(s"Start sub stage ${stage.getStageName}")
       stage.toWorkWithoutFinally()
+      logInfo(s"End sub stage ${stage.getStageName}")
     }
   }
 
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/exec/TableAnalyzerExec.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/exec/TableAnalyzerExec.scala
index d95607398d..aa0bb0d780 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/exec/TableAnalyzerExec.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/exec/TableAnalyzerExec.scala
@@ -27,7 +27,9 @@ class TableAnalyzerExec(id: String) extends BuildExec(id) {
 
   def analyzerTable(): Unit = {
     for (stage <- subStages.asScala) {
+      logInfo(s"Start sub stage ${stage.getStageName}")
       stage.toWorkWithoutFinally()
+      logInfo(s"End sub stage ${stage.getStageName}")
     }
   }
 
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/StageExec.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/StageExec.scala
index 50de84001b..7f99feff77 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/StageExec.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/StageExec.scala
@@ -32,6 +32,8 @@ import java.util
 trait StageExec extends Logging {
   protected var id: String = _
 
+  def getStageName: String
+
   def getJobContext: SparkApplication
 
   def getDataSegment: NDataSegment
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/WaiteForResource.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/WaiteForResource.scala
index c7111bd852..fd05f59b8a 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/WaiteForResource.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/WaiteForResource.scala
@@ -80,4 +80,6 @@ class WaiteForResource(jobContext: SparkApplication) extends StageExec {
       KylinBuildEnv.get().buildJobInfos.endWait()
     }
   }
+
+  override def getStageName: String = "WaiteForResource"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/BuildDict.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/BuildDict.scala
index 84f8454bae..ca4aaa2998 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/BuildDict.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/BuildDict.scala
@@ -31,4 +31,6 @@ class BuildDict(jobContext: SegmentJob, dataSegment: NDataSegment, buildParam: B
     val dict: Dataset[Row] = buildDictIfNeed()
     buildParam.setDict(dict)
   }
+
+  override def getStageName: String = "BuildDict"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/BuildLayer.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/BuildLayer.scala
index 734a4361e2..a8cf9dc237 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/BuildLayer.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/BuildLayer.scala
@@ -31,4 +31,6 @@ class BuildLayer(jobContext: SegmentJob, dataSegment: NDataSegment, buildParam:
     // Drain results immediately after building.
     drain()
   }
+
+  override def getStageName: String = "BuildLayer"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/CostBasedPlanner.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/CostBasedPlanner.scala
index 08ba41e46d..cef881284c 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/CostBasedPlanner.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/CostBasedPlanner.scala
@@ -50,4 +50,6 @@ class CostBasedPlanner(jobContext: SegmentJob, dataSegment: NDataSegment, buildP
       buildParam.setFlatTableDesc(flatTableDesc)
     }
   }
+
+  override def getStageName: String = "CostBasedPlanner"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/FlatTableAndDictBase.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/FlatTableAndDictBase.scala
index 38a191d461..24391d28a7 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/FlatTableAndDictBase.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/FlatTableAndDictBase.scala
@@ -48,6 +48,10 @@ import org.apache.spark.utils.ProxyThreadUtils
 import java.math.BigInteger
 import java.util.concurrent.{CountDownLatch, TimeUnit}
 import java.util.{Locale, Objects, Timer, TimerTask}
+
+import org.apache.kylin.common.constant.LogConstant
+import org.apache.kylin.common.logging.SetLogCategory
+
 import scala.collection.JavaConverters._
 import scala.collection.mutable
 import scala.collection.parallel.ForkJoinTaskSupport
@@ -607,6 +611,7 @@ abstract class FlatTableAndDictBase(private val jobContext: SegmentJob,
 
   private def buildDict(ds: Dataset[Row], dictCols: Set[TblColRef]): Unit = {
     if (config.isV2DictEnable) {
+      logInfo("Build v2 dict default.")
       var matchedCols = selectColumnsInTable(ds, dictCols)
       if (dataSegment.getIndexPlan.isSkipEncodeIntegerFamilyEnabled) {
         matchedCols = matchedCols.filterNot(_.getType.isIntegerFamily)
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/GatherFlatTableStats.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/GatherFlatTableStats.scala
index 920e1b7bf6..6668593a42 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/GatherFlatTableStats.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/GatherFlatTableStats.scala
@@ -48,4 +48,6 @@ class GatherFlatTableStats(jobContext: SegmentJob, dataSegment: NDataSegment, bu
     // Cleanup previous potentially left temp layout data.
     cleanupLayoutTempData(dataSegment, readOnlyLayouts.asScala.toSeq)
   }
+
+  override def getStageName: String = "GatherFlatTableStats"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/GenerateFlatTable.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/GenerateFlatTable.scala
index f3b304a57d..ba62f0bfd9 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/GenerateFlatTable.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/GenerateFlatTable.scala
@@ -38,4 +38,6 @@ class GenerateFlatTable(jobContext: SegmentJob, dataSegment: NDataSegment, build
       onStageSkipped()
     }
   }
+
+  override def getStageName: String = "GenerateFlatTable"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/MaterializedFactTableView.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/MaterializedFactTableView.scala
index f467058559..632ae0cd1c 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/MaterializedFactTableView.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/MaterializedFactTableView.scala
@@ -54,4 +54,6 @@ class MaterializedFactTableView(jobContext: SegmentJob, dataSegment: NDataSegmen
       onStageSkipped()
     }
   }
+
+  override def getStageName: String = "MaterializedFactTableView"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/RefreshColumnBytes.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/RefreshColumnBytes.scala
index 328097df3e..48b63f122b 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/RefreshColumnBytes.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/RefreshColumnBytes.scala
@@ -35,4 +35,6 @@ class RefreshColumnBytes(jobContext: SegmentJob, dataSegment: NDataSegment, buil
     cleanup()
     logInfo(s"Finished SEGMENT $segmentId")
   }
+
+  override def getStageName: String = "RefreshColumnBytes"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/RefreshSnapshots.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/RefreshSnapshots.scala
index 4cfc8711b2..ef6518546b 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/RefreshSnapshots.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/RefreshSnapshots.scala
@@ -44,4 +44,6 @@ class RefreshSnapshots(jobContext: SegmentJob) extends StageExec {
       case _ =>
     }
   }
+
+  override def getStageName: String = "RefreshSnapshots"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionBuildDict.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionBuildDict.scala
index ab0d070b52..23e834f085 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionBuildDict.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionBuildDict.scala
@@ -30,4 +30,6 @@ class PartitionBuildDict(jobContext: SegmentJob, dataSegment: NDataSegment, buil
     val dict: Dataset[Row] = buildDictIfNeed()
     buildParam.setDict(dict)
   }
+
+  override def getStageName: String = "PartitionBuildDict"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionBuildLayer.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionBuildLayer.scala
index 7a43766be0..995bb588cc 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionBuildLayer.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionBuildLayer.scala
@@ -30,4 +30,6 @@ class PartitionBuildLayer(jobContext: SegmentJob, dataSegment: NDataSegment, bui
     // Drain results immediately after building.
     drain()
   }
+
+  override def getStageName: String = "PartitionBuildLayer"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionCostBasedPlanner.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionCostBasedPlanner.scala
index 3aac410594..7b3c1bf586 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionCostBasedPlanner.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionCostBasedPlanner.scala
@@ -55,4 +55,6 @@ class PartitionCostBasedPlanner(jobContext: SegmentJob, dataSegment: NDataSegmen
       buildParam.setTableDesc(tableDesc)
     }
   }
+
+  override def getStageName: String = "PartitionCostBasedPlanner"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionGatherFlatTableStats.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionGatherFlatTableStats.scala
index 43afbb100e..103c02c32b 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionGatherFlatTableStats.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionGatherFlatTableStats.scala
@@ -75,4 +75,5 @@ class PartitionGatherFlatTableStats(jobContext: SegmentJob, dataSegment: NDataSe
     }
   }
 
+  override def getStageName: String = "PartitionGatherFlatTableStats"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionGenerateFlatTable.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionGenerateFlatTable.scala
index b42b2a3956..bf406ac72e 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionGenerateFlatTable.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionGenerateFlatTable.scala
@@ -37,4 +37,6 @@ class PartitionGenerateFlatTable(jobContext: SegmentJob, dataSegment: NDataSegme
       onStageSkipped()
     }
   }
+
+  override def getStageName: String = "PartitionGenerateFlatTable"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionMaterializedFactTableView.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionMaterializedFactTableView.scala
index e8f2fca99f..deb1c604bb 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionMaterializedFactTableView.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionMaterializedFactTableView.scala
@@ -61,4 +61,6 @@ class PartitionMaterializedFactTableView(jobContext: SegmentJob, dataSegment: ND
       onStageSkipped()
     }
   }
+
+  override def getStageName: String = "PartitionMaterializedFactTableView"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionRefreshColumnBytes.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionRefreshColumnBytes.scala
index c8e26660e2..86ef005167 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionRefreshColumnBytes.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionRefreshColumnBytes.scala
@@ -34,4 +34,6 @@ class PartitionRefreshColumnBytes(jobContext: SegmentJob, dataSegment: NDataSegm
     cleanup()
     logInfo(s"Finished SEGMENT $segmentId")
   }
+
+  override def getStageName: String = "PartitionRefreshColumnBytes"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytes.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytes.scala
index 3091e92e0a..1aa160e8f7 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytes.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytes.scala
@@ -29,4 +29,6 @@ class MergeColumnBytes(jobContext: SegmentJob, dataSegment: NDataSegment)
 
     cleanup()
   }
+
+  override def getStageName: String = "MergeColumnBytes"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeFlatTable.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeFlatTable.scala
index 58c5edbcbf..f525cfec82 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeFlatTable.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeFlatTable.scala
@@ -29,4 +29,6 @@ class MergeFlatTable(jobContext: SegmentJob, dataSegment: NDataSegment)
 
     mergeFlatTable()
   }
+
+  override def getStageName: String = "MergeFlatTable"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeIndices.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeIndices.scala
index 66956fafe6..9812131e51 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeIndices.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeIndices.scala
@@ -29,4 +29,6 @@ class MergeIndices(jobContext: SegmentJob, dataSegment: NDataSegment)
     // Drain results immediately after merging.
     drain()
   }
+
+  override def getStageName: String = "MergeIndices"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeColumnBytes.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeColumnBytes.scala
index 4f52f70781..2de190be1d 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeColumnBytes.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeColumnBytes.scala
@@ -29,4 +29,6 @@ class PartitionMergeColumnBytes(jobContext: SegmentJob, dataSegment: NDataSegmen
 
     cleanup()
   }
+
+  override def getStageName: String = "PartitionMergeColumnBytes"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeFlatTable.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeFlatTable.scala
index 8fb98af061..5ca128dceb 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeFlatTable.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeFlatTable.scala
@@ -29,4 +29,6 @@ class PartitionMergeFlatTable(jobContext: SegmentJob, dataSegment: NDataSegment)
 
     mergeFlatTable()
   }
+
+  override def getStageName: String = "PartitionMergeFlatTable"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeIndices.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeIndices.scala
index f18f264aed..d51ff5c7ac 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeIndices.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeIndices.scala
@@ -29,4 +29,6 @@ class PartitionMergeIndices(jobContext: SegmentJob, dataSegment: NDataSegment)
     // Drain results immediately after merging.
     drain()
   }
+
+  override def getStageName: String = "PartitionMergeIndices"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/snapshots/SnapshotsBuild.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/snapshots/SnapshotsBuild.scala
index 77f8bb5bb9..de0d8dd1cc 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/snapshots/SnapshotsBuild.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/snapshots/SnapshotsBuild.scala
@@ -33,4 +33,6 @@ class SnapshotsBuild(jobContext: SnapshotBuildJob) extends StageExec {
   override def execute(): Unit = {
     jobContext.buildSnapshot()
   }
+
+  override def getStageName: String = "SnapshotsBuild"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/tablesampling/AnalyzerTable.scala b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/tablesampling/AnalyzerTable.scala
index e12e44344a..f215383531 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/tablesampling/AnalyzerTable.scala
+++ b/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/tablesampling/AnalyzerTable.scala
@@ -33,4 +33,6 @@ class AnalyzerTable(jobContext: TableAnalyzerJob) extends StageExec {
   override def execute(): Unit = {
     jobContext.analyzerTable()
   }
+
+  override def getStageName: String = "AnalyzerTable"
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/snapshots/SnapshotsBuild.scala b/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/WaiteForResourceTest.scala
similarity index 62%
copy from src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/snapshots/SnapshotsBuild.scala
copy to src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/WaiteForResourceTest.scala
index 77f8bb5bb9..7d06e5dfc0 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/snapshots/SnapshotsBuild.scala
+++ b/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/WaiteForResourceTest.scala
@@ -16,21 +16,19 @@
  * limitations under the License.
  */
 
-package org.apache.kylin.engine.spark.job.stage.snapshots
+package org.apache.kylin.engine.spark.job.stage
 
 import org.apache.kylin.engine.spark.application.SparkApplication
-import org.apache.kylin.engine.spark.job.SnapshotBuildJob
-import org.apache.kylin.engine.spark.job.stage.StageExec
-import org.apache.kylin.metadata.cube.model.NDataSegment
+import org.junit.Assert
+import org.mockito.Mockito
+import org.scalatest.funsuite.AnyFunSuite
 
-class SnapshotsBuild(jobContext: SnapshotBuildJob) extends StageExec {
-  override def getJobContext: SparkApplication = jobContext
+class WaiteForResourceTest extends AnyFunSuite{
 
-  override def getDataSegment: NDataSegment = null
+  test("test WaiteForResource getStageName") {
+    val sparkApplication = Mockito.mock(classOf[SparkApplication])
 
-  override def getSegmentId: String = jobContext.getJobId
-
-  override def execute(): Unit = {
-    jobContext.buildSnapshot()
+    val waiteForResource = new WaiteForResource(sparkApplication)
+    Assert.assertEquals("WaiteForResource", waiteForResource.getStageName)
   }
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/GatherFlatTableStats.scala b/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/build/RefreshColumnBytesTest.scala
similarity index 52%
copy from src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/GatherFlatTableStats.scala
copy to src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/build/RefreshColumnBytesTest.scala
index 920e1b7bf6..d6f0b87c1a 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/GatherFlatTableStats.scala
+++ b/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/build/RefreshColumnBytesTest.scala
@@ -21,31 +21,26 @@ package org.apache.kylin.engine.spark.job.stage.build
 import org.apache.kylin.engine.spark.job.SegmentJob
 import org.apache.kylin.engine.spark.job.stage.BuildParam
 import org.apache.kylin.metadata.cube.model.NDataSegment
+import org.apache.kylin.metadata.model.NDataModel
+import org.junit.Assert
+import org.mockito.Mockito
+import org.scalatest.funsuite.AnyFunSuite
 
-import scala.collection.JavaConverters._
+import com.google.common.collect.ImmutableBiMap
 
-class GatherFlatTableStats(jobContext: SegmentJob, dataSegment: NDataSegment, buildParam: BuildParam)
-  extends BuildStage(jobContext, dataSegment, buildParam) {
+class RefreshColumnBytesTest extends AnyFunSuite {
 
-  override def execute(): Unit = {
-    scheduleCheckpoint()
+  test("test RefreshColumnBytes getStageName") {
+    val segmentJob = Mockito.mock(classOf[SegmentJob])
+    val dataSegment = Mockito.mock(classOf[NDataSegment])
+    val buildParam = Mockito.mock(classOf[BuildParam])
 
-    // Build flat table?
-    if (buildParam.getSpanningTree.fromFlatTable()) {
-      // Collect statistics for flat table.
-      val statistics = buildStatistics()
-      buildParam.setFlatTableStatistics(statistics)
+    val dataModel = Mockito.mock(classOf[NDataModel])
+    Mockito.when(dataSegment.getModel).thenReturn(dataModel)
+    val builder: ImmutableBiMap.Builder[Integer, NDataModel.Measure] = ImmutableBiMap.builder();
+    Mockito.when(dataSegment.getModel.getEffectiveMeasures).thenReturn(builder.build())
 
-      // Build inferior flat table.
-      if (config.isInferiorFlatTableEnabled) {
-        buildInferior()
-      }
-    }
-
-    // Build root node's layout sanity cache.
-    buildSanityCache()
-
-    // Cleanup previous potentially left temp layout data.
-    cleanupLayoutTempData(dataSegment, readOnlyLayouts.asScala.toSeq)
+    val mergeColumnBytes = new RefreshColumnBytes(segmentJob, dataSegment, buildParam)
+    Assert.assertEquals("RefreshColumnBytes", mergeColumnBytes.getStageName)
   }
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/BuildLayer.scala b/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/build/RefreshSnapshotsTest.scala
similarity index 69%
copy from src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/BuildLayer.scala
copy to src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/build/RefreshSnapshotsTest.scala
index 734a4361e2..82931d40ca 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/BuildLayer.scala
+++ b/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/build/RefreshSnapshotsTest.scala
@@ -19,16 +19,16 @@
 package org.apache.kylin.engine.spark.job.stage.build
 
 import org.apache.kylin.engine.spark.job.SegmentJob
-import org.apache.kylin.engine.spark.job.stage.BuildParam
-import org.apache.kylin.metadata.cube.model.NDataSegment
+import org.junit.Assert
+import org.mockito.Mockito
+import org.scalatest.funsuite.AnyFunSuite
 
-class BuildLayer(jobContext: SegmentJob, dataSegment: NDataSegment, buildParam: BuildParam)
-  extends BuildStage(jobContext, dataSegment, buildParam) {
+class RefreshSnapshotsTest extends AnyFunSuite {
 
-  override def execute(): Unit = {
-    // Build layers.
-    buildLayouts()
-    // Drain results immediately after building.
-    drain()
+  test("test RefreshSnapshots getStageName") {
+    val segmentJob = Mockito.mock(classOf[SegmentJob])
+
+    val refreshSnapshots = new RefreshSnapshots(segmentJob)
+    Assert.assertEquals("RefreshSnapshots", refreshSnapshots.getStageName)
   }
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionGenerateFlatTable.scala b/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionRefreshColumnBytesTest.scala
similarity index 51%
copy from src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionGenerateFlatTable.scala
copy to src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionRefreshColumnBytesTest.scala
index b42b2a3956..ee95311a9d 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionGenerateFlatTable.scala
+++ b/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/build/partition/PartitionRefreshColumnBytesTest.scala
@@ -21,20 +21,26 @@ package org.apache.kylin.engine.spark.job.stage.build.partition
 import org.apache.kylin.engine.spark.job.SegmentJob
 import org.apache.kylin.engine.spark.job.stage.BuildParam
 import org.apache.kylin.metadata.cube.model.NDataSegment
-import org.apache.spark.sql.{Dataset, Row}
+import org.apache.kylin.metadata.model.NDataModel
+import org.junit.Assert
+import org.mockito.Mockito
+import org.scalatest.funsuite.AnyFunSuite
 
-class PartitionGenerateFlatTable(jobContext: SegmentJob, dataSegment: NDataSegment, buildParam: BuildParam)
-  extends PartitionFlatTableAndDictBase(jobContext, dataSegment, buildParam) {
-  override def execute(): Unit = {
-    val flatTable: Dataset[Row] = generateFlatTable()
-    buildParam.setFlatTable(flatTable)
-    val flatTablePart: Dataset[Row] = generateFlatTablePart()
-    buildParam.setFlatTablePart(flatTablePart)
+import com.google.common.collect.ImmutableBiMap
 
-    buildParam.setPartitionFlatTable(this)
+class PartitionRefreshColumnBytesTest extends AnyFunSuite {
 
-    if (buildParam.isSkipGenerateFlatTable) {
-      onStageSkipped()
-    }
+  test("test PartitionRefreshColumnBytes getStageName") {
+    val segmentJob = Mockito.mock(classOf[SegmentJob])
+    val dataSegment = Mockito.mock(classOf[NDataSegment])
+    val buildParam = Mockito.mock(classOf[BuildParam])
+
+    val dataModel = Mockito.mock(classOf[NDataModel])
+    Mockito.when(dataSegment.getModel).thenReturn(dataModel)
+    val builder: ImmutableBiMap.Builder[Integer, NDataModel.Measure] = ImmutableBiMap.builder();
+    Mockito.when(dataSegment.getModel.getEffectiveMeasures).thenReturn(builder.build())
+
+    val partitionRefreshColumnBytes = new PartitionRefreshColumnBytes(segmentJob, dataSegment, buildParam)
+    Assert.assertEquals("PartitionRefreshColumnBytes", partitionRefreshColumnBytes.getStageName)
   }
 }
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytes.scala b/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytesTest.scala
similarity index 61%
copy from src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytes.scala
copy to src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytesTest.scala
index 3091e92e0a..28c0b0ccba 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytes.scala
+++ b/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytesTest.scala
@@ -20,13 +20,21 @@ package org.apache.kylin.engine.spark.job.stage.merge
 
 import org.apache.kylin.engine.spark.job.SegmentJob
 import org.apache.kylin.metadata.cube.model.NDataSegment
+import org.apache.kylin.metadata.model.NDataModel
+import org.junit.Assert
+import org.mockito.Mockito
+import org.scalatest.funsuite.AnyFunSuite
 
-class MergeColumnBytes(jobContext: SegmentJob, dataSegment: NDataSegment)
-  extends MergeStage(jobContext, dataSegment) {
+class MergeColumnBytesTest extends AnyFunSuite {
 
-  override def execute(): Unit = {
-    mergeColumnBytes()
+  test("test MergeColumnBytes getStageName") {
+    val segmentJob = Mockito.mock(classOf[SegmentJob])
+    val dataSegment = Mockito.mock(classOf[NDataSegment])
+    val dataModel = Mockito.mock(classOf[NDataModel])
 
-    cleanup()
+    Mockito.when(dataSegment.getModel).thenReturn(dataModel)
+
+    val mergeColumnBytes = new MergeColumnBytes(segmentJob, dataSegment)
+    Assert.assertEquals("MergeColumnBytes", mergeColumnBytes.getStageName)
   }
 }
diff --git a/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeStageTest.scala b/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeStageTest.scala
index 2e23a6f8c7..88b15ec3ee 100644
--- a/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeStageTest.scala
+++ b/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeStageTest.scala
@@ -51,6 +51,8 @@ class MergeStageTest extends AnyFunSuite with LocalMetadata {
     override def execute(): Unit = {}
 
     override def getUnmergedFTPaths: Seq[Path] = super.getUnmergedFTPaths
+
+    override def getStageName: String = "MergeStageMock"
   }
 
   def testGetUnmergedFTPaths(config: KylinConfig): Unit = {
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeColumnBytes.scala b/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeColumnBytesTest.scala
similarity index 59%
copy from src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeColumnBytes.scala
copy to src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeColumnBytesTest.scala
index 4f52f70781..cfaf90a59e 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeColumnBytes.scala
+++ b/src/spark-project/engine-spark/src/test/scala/org/apache/kylin/engine/spark/job/stage/merge/partition/PartitionMergeColumnBytesTest.scala
@@ -20,13 +20,21 @@ package org.apache.kylin.engine.spark.job.stage.merge.partition
 
 import org.apache.kylin.engine.spark.job.SegmentJob
 import org.apache.kylin.metadata.cube.model.NDataSegment
+import org.apache.kylin.metadata.model.NDataModel
+import org.junit.Assert
+import org.mockito.Mockito
+import org.scalatest.funsuite.AnyFunSuite
 
-class PartitionMergeColumnBytes(jobContext: SegmentJob, dataSegment: NDataSegment)
-  extends PartitionMergeStage(jobContext, dataSegment) {
+class PartitionMergeColumnBytesTest extends AnyFunSuite {
 
-  override def execute(): Unit = {
-    mergeColumnBytes()
+  test("test PartitionMergeColumnBytes getStageName") {
+    val segmentJob = Mockito.mock(classOf[SegmentJob])
+    val dataSegment = Mockito.mock(classOf[NDataSegment])
+    val dataModel = Mockito.mock(classOf[NDataModel])
 
-    cleanup()
+    Mockito.when(dataSegment.getModel).thenReturn(dataModel)
+
+    val partitionMergeColumnBytes = new PartitionMergeColumnBytes(segmentJob, dataSegment)
+    Assert.assertEquals("PartitionMergeColumnBytes", partitionMergeColumnBytes.getStageName)
   }
 }
diff --git a/src/spark-project/engine-spark/src/test/scala/org/apache/spark/utils/TestResourceUtils.scala b/src/spark-project/engine-spark/src/test/scala/org/apache/spark/utils/TestResourceUtils.scala
index 59bf9108b2..641f9ca70c 100644
--- a/src/spark-project/engine-spark/src/test/scala/org/apache/spark/utils/TestResourceUtils.scala
+++ b/src/spark-project/engine-spark/src/test/scala/org/apache/spark/utils/TestResourceUtils.scala
@@ -56,6 +56,12 @@ class TestResourceUtils extends SparderBaseFunSuite with BeforeAndAfterEach {
 
   // test case: available(10, 10)  executor(20, 10) driver(1, 1)
   test("checkResource return false when available memory does not meet acquirement") {
+    // Without this may cause NPE
+    KylinBuildEnv.clean()
+    val config: KylinConfig = Mockito.mock(classOf[KylinConfig])
+    Mockito.when(config.getMaxAllocationResourceProportion).thenReturn(0.9)
+    KylinBuildEnv.getOrCreate(config)
+    Mockito.when(fetcher.fetchMaximumResourceAllocation).thenReturn(ResourceInfo(Integer.MAX_VALUE, Integer.MAX_VALUE))
     val conf = new SparkConf()
     conf.set(EXECUTOR_INSTANCES, "5")
     conf.set(EXECUTOR_MEMORY, "2MB")
diff --git a/src/spark-project/spark-common/src/main/java/org/apache/kylin/common/asyncprofiler/AsyncProfilerUtils.java b/src/spark-project/spark-common/src/main/java/org/apache/kylin/common/asyncprofiler/AsyncProfilerUtils.java
index cd74a7cd33..490000d745 100644
--- a/src/spark-project/spark-common/src/main/java/org/apache/kylin/common/asyncprofiler/AsyncProfilerUtils.java
+++ b/src/spark-project/spark-common/src/main/java/org/apache/kylin/common/asyncprofiler/AsyncProfilerUtils.java
@@ -66,7 +66,7 @@ public class AsyncProfilerUtils {
         if (!cachedResult.await(resultCollectionTimeout, TimeUnit.MILLISECONDS)) {
             logger.warn("timeout while waiting for profile result");
         }
-        logger.debug("profiler stopped and result dumped to $localCacheDir");
+        logger.debug("profiler stopped and result dumped to {}", localCacheDir);
         ZipFileUtils.compressZipFile(localCacheDir.getAbsolutePath(), outStream);
     }
 
diff --git a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytes.scala b/src/spark-project/spark-common/src/main/scala/org/apache/kylin/common/CustomUtils.scala
similarity index 69%
copy from src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytes.scala
copy to src/spark-project/spark-common/src/main/scala/org/apache/kylin/common/CustomUtils.scala
index 3091e92e0a..b60fca9912 100644
--- a/src/spark-project/engine-spark/src/main/scala/org/apache/kylin/engine/spark/job/stage/merge/MergeColumnBytes.scala
+++ b/src/spark-project/spark-common/src/main/scala/org/apache/kylin/common/CustomUtils.scala
@@ -16,17 +16,16 @@
  * limitations under the License.
  */
 
-package org.apache.kylin.engine.spark.job.stage.merge
+package org.apache.kylin.common
 
-import org.apache.kylin.engine.spark.job.SegmentJob
-import org.apache.kylin.metadata.cube.model.NDataSegment
+object CustomUtils {
 
-class MergeColumnBytes(jobContext: SegmentJob, dataSegment: NDataSegment)
-  extends MergeStage(jobContext, dataSegment) {
-
-  override def execute(): Unit = {
-    mergeColumnBytes()
-
-    cleanup()
+  // Somehow equivalent to Java's try with resource, handle cannot be null
+  def tryWithResourceIgnore[T <: AutoCloseable](handle: T)(func: T => Any): Unit = {
+    try {
+      func(handle)
+    } finally {
+      handle.close()
+    }
   }
 }
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/KylinLogTool.java b/src/tool/src/main/java/org/apache/kylin/tool/KylinLogTool.java
index 54736982ed..612e6dc7ce 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/KylinLogTool.java
+++ b/src/tool/src/main/java/org/apache/kylin/tool/KylinLogTool.java
@@ -93,7 +93,7 @@ public class KylinLogTool {
     private static final String SYSTEM_PROPERTIES = "System Properties";
 
     private static final Set<String> kylinLogPrefix = Sets.newHashSet("kylin.log", "kylin.schedule.log",
-            "kylin.query.log", "kylin.smart.log", "kylin.build.log", "kylin.security.log");
+            "kylin.query.log", "kylin.smart.log", "kylin.build.log", "kylin.security.log", "kylin.metadata.log");
 
     private static final Set<String> queryDiagExcludedLogs = Sets.newHashSet("kylin.log", "kylin.schedule.log",
             "kylin.smart.log", "kylin.build.log", "kylin.security.log");


[kylin] 03/34: KYLIN-5445 minor fix sonar

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 665400c2bc24c3f8109ff878e2b67ac245f738be
Author: Jiawei Li <10...@qq.com>
AuthorDate: Fri Jan 6 14:14:50 2023 +0800

    KYLIN-5445 minor fix sonar
---
 .../common/persistence/metadata/jdbc/JdbcUtil.java | 33 +++++++++-------------
 1 file changed, 14 insertions(+), 19 deletions(-)

diff --git a/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java b/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java
index 0fa58ed773..5c24313fb2 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java
@@ -123,27 +123,22 @@ public class JdbcUtil {
     }
 
     private static boolean isPrimaryKeyExists(Connection conn, String... tables) throws SQLException {
-        try {
-            for (String table : tables) {
-                try {
-                    val resultSet = conn.getMetaData().getPrimaryKeys(conn.getCatalog(), conn.getSchema(), table);
-                    if (resultSet.next()) {
-                        return true;
-                    }
-                } catch (Exception e) {
-                    log.warn("get primary key from table {} failed", table, e);
-                }
-            }
 
-            return false;
-        } catch (Exception e) {
-            logger.error("Fail to know if table {} primary key exists", tables, e);
-        } finally {
-            if (!conn.isClosed()) {
-                conn.close();
+        for (String table : tables) {
+            try {
+                val resultSet = conn.getMetaData().getPrimaryKeys(conn.getCatalog(), conn.getSchema(), table);
+                if (resultSet.next()) {
+                    return true;
+                }
+            } catch (Exception e) {
+                log.warn("get primary key from table {} failed", table, e);
             }
         }
-        return true;
+        if (!conn.isClosed()) {
+            conn.close();
+        }
+        return false;
+
     }
 
     public static boolean isIndexExists(Connection conn, String table, String index) throws SQLException {
@@ -161,7 +156,7 @@ public class JdbcUtil {
                     }
                 }
             }
-        } catch (SQLException e) {
+        } catch (Exception e) {
             logger.error("Fail to know if table {} index {} exists", tables, index, e);
         } finally {
             if (!conn.isClosed())


[kylin] 21/34: KYLIN-5456 fix duplicate key when exporting tds

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit eeaa487d31f11b628be01846be54ede2cac21aa3
Author: Dorris Zhang <ru...@kyligence.io>
AuthorDate: Tue Dec 13 13:44:04 2022 +0800

    KYLIN-5456 fix duplicate key when exporting tds
---
 .../apache/kylin/rest/service/ModelTdsService.java |  38 +--
 .../apache/kylin/tool/bisync/SyncModelBuilder.java |  12 +-
 .../apache/kylin/tool/bisync/model/ColumnDef.java  |   6 +-
 .../service/ModelTdsServiceColumnNameTest.java     | 128 ++++++++++
 .../kylin/rest/service/ModelTdsServiceTest.java    |   6 +-
 .../metadata/_global/project/test_tds_export.json  |  35 +++
 .../8b6fa01d-1607-9459-81aa-115b9419b830.json      |  93 ++++++++
 .../8b6fa01d-1607-9459-81aa-115b9419b830.json      |  63 +++++
 .../8b6fa01d-1607-9459-81aa-115b9419b830.json      | 262 +++++++++++++++++++++
 .../test_tds_export/table/SSB.LINEORDER.json       | 113 +++++++++
 .../test_tds_export/table/SSB.P_LINEORDER.json     | 118 ++++++++++
 11 files changed, 847 insertions(+), 27 deletions(-)

diff --git a/src/modeling-service/src/main/java/org/apache/kylin/rest/service/ModelTdsService.java b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/ModelTdsService.java
index df277831e5..b28fe7a019 100644
--- a/src/modeling-service/src/main/java/org/apache/kylin/rest/service/ModelTdsService.java
+++ b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/ModelTdsService.java
@@ -57,7 +57,6 @@ import org.apache.kylin.metadata.model.TableRef;
 import org.apache.kylin.metadata.model.TblColRef;
 import org.apache.kylin.metadata.model.util.ComputedColumnUtil;
 import org.apache.kylin.metadata.project.NProjectManager;
-import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
 import org.apache.kylin.rest.security.MutableAclRecord;
 import org.apache.kylin.rest.util.AclPermissionUtil;
@@ -106,24 +105,27 @@ public class ModelTdsService extends AbstractModelService {
     }
 
     public boolean preCheckNameConflict(SyncModel syncModel) {
-        ProjectInstance prjInstance = getManager(NProjectManager.class).getProject(syncModel.getProject());
-        boolean skipCheckTds = prjInstance.getConfig().skipCheckTds();
-        Set<String> measureNames = syncModel.getMetrics().stream().filter(measureDef -> !measureDef.isHidden())
-                .map(measureDef -> measureDef.getMeasure().getName()).collect(Collectors.toSet());
-        Map<String, ColumnDef> nameOfColDefMap = syncModel.getColumnDefMap().values().stream()
-                .collect(Collectors.toMap(ColumnDef::getColumnName, Function.identity()));
-        Sets.SetView<String> intersection = Sets.intersection(nameOfColDefMap.keySet(), measureNames);
-        if (skipCheckTds || CollectionUtils.isEmpty(intersection)) {
-            return true;
-        }
-
-        String name = intersection.iterator().next();
-        ColumnDef columnDef = nameOfColDefMap.get(name);
-        if (columnDef.isDimension()) {
-            throw new KylinException(MODEL_TDS_EXPORT_DIM_COL_AND_MEASURE_NAME_CONFLICT, name, name);
-        } else {
-            throw new KylinException(MODEL_TDS_EXPORT_COLUMN_AND_MEASURE_NAME_CONFLICT, name, name);
+        boolean skipCheckTds = NProjectManager.getProjectConfig(syncModel.getProject()).skipCheckTds();
+
+        if (!skipCheckTds) {
+            Set<String> measureNames = syncModel.getMetrics().stream().filter(measureDef -> !measureDef.isHidden())
+                    .map(measureDef -> measureDef.getMeasure().getName()).collect(Collectors.toSet());
+            Map<String, ColumnDef> nameOfColDefMap = syncModel.getColumnDefMap().values().stream()
+                    .filter(columnDef -> !columnDef.isHidden())
+                    .collect(Collectors.toMap(ColumnDef::getAliasDotColumn, Function.identity()));
+
+            nameOfColDefMap.forEach((aliasColName, columnDef) -> {
+                String name = aliasColName.split("\\.").length > 1 ? aliasColName.split("\\.")[1] : "";
+                if (measureNames.contains(name)) {
+                    if (columnDef.isDimension()) {
+                        throw new KylinException(MODEL_TDS_EXPORT_DIM_COL_AND_MEASURE_NAME_CONFLICT, name, name);
+                    } else {
+                        throw new KylinException(MODEL_TDS_EXPORT_COLUMN_AND_MEASURE_NAME_CONFLICT, name, name);
+                    }
+                }
+            });
         }
+        return true;
     }
 
     public SyncModel exportModel(SyncContext syncContext) {
diff --git a/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/SyncModelBuilder.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/SyncModelBuilder.java
index 74b402bc29..fa14d623cd 100644
--- a/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/SyncModelBuilder.java
+++ b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/SyncModelBuilder.java
@@ -209,7 +209,9 @@ public class SyncModelBuilder {
         default:
             break;
         }
-        showDimsAndMeasures(columnDefMap, measureDefs, colsToShow, measuresToShow);
+        Set<String> dimensionSet = indexPlan.getModel().getEffectiveDimensions().values().stream()
+                .map(TblColRef::getAliasDotName).collect(Collectors.toSet());
+        showDimsAndMeasures(columnDefMap, measureDefs, colsToShow, measuresToShow, dimensionSet);
     }
 
     private boolean testAuthorizedCols(Set<String> authorizedCols, TblColRef colRef) {
@@ -227,9 +229,13 @@ public class SyncModelBuilder {
     }
 
     private void showDimsAndMeasures(Map<String, ColumnDef> columnDefMap, List<MeasureDef> measureDefs,
-            Set<String> colsToShow, Set<String> measuresToShow) {
+            Set<String> colsToShow, Set<String> measuresToShow, Set<String> dimensionSet) {
         for (String colToShow : colsToShow) {
-            columnDefMap.get(colToShow).setHidden(false);
+            ColumnDef colToShowDef = columnDefMap.get(colToShow);
+            colToShowDef.setHidden(false);
+            if (dimensionSet.contains(colToShow)) {
+                colToShowDef.setDimension(true);
+            }
         }
         for (MeasureDef measureDef : measureDefs) {
             if (measuresToShow.contains(measureDef.getMeasure().getName())) {
diff --git a/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/model/ColumnDef.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/model/ColumnDef.java
index c67f56a432..89aa966aee 100644
--- a/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/model/ColumnDef.java
+++ b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/model/ColumnDef.java
@@ -44,7 +44,9 @@ public class ColumnDef {
 
     private boolean isComputedColumn;
 
-    public boolean isDimension() {
-        return columnType.equalsIgnoreCase("nominal");
+    private boolean isDimension;
+
+    public String getAliasDotColumn() {
+        return this.getTableAlias() + "." + this.getColumnName();
     }
 }
diff --git a/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelTdsServiceColumnNameTest.java b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelTdsServiceColumnNameTest.java
new file mode 100644
index 0000000000..6a381fa564
--- /dev/null
+++ b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelTdsServiceColumnNameTest.java
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.rest.service;
+
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kylin.common.scheduler.EventBusFactory;
+import org.apache.kylin.engine.spark.ExecutableUtils;
+import org.apache.kylin.junit.rule.TransactionExceptedException;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
+import org.apache.kylin.metadata.recommendation.candidate.JdbcRawRecStore;
+import org.apache.kylin.rest.constant.Constant;
+import org.apache.kylin.rest.util.AclEvaluate;
+import org.apache.kylin.rest.util.AclUtil;
+import org.apache.kylin.tool.bisync.SyncContext;
+import org.apache.kylin.tool.bisync.model.SyncModel;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.springframework.security.authentication.TestingAuthenticationToken;
+import org.springframework.security.core.Authentication;
+import org.springframework.security.core.context.SecurityContextHolder;
+import org.springframework.test.util.ReflectionTestUtils;
+
+@Slf4j
+public class ModelTdsServiceColumnNameTest extends SourceTestCase {
+
+    @InjectMocks
+    private final ModelService modelService = Mockito.spy(new ModelService());
+
+    @InjectMocks
+    private final ModelTdsService tdsService = Mockito.spy(new ModelTdsService());
+
+    @InjectMocks
+    private final ModelSemanticHelper semanticService = Mockito.spy(new ModelSemanticHelper());
+
+    @InjectMocks
+    private final IndexPlanService indexPlanService = Mockito.spy(new IndexPlanService());
+
+    @Mock
+    private final AclUtil aclUtil = Mockito.spy(AclUtil.class);
+
+    @Mock
+    private final AclEvaluate aclEvaluate = Mockito.spy(AclEvaluate.class);
+
+    @Mock
+    protected IUserGroupService userGroupService = Mockito.spy(NUserGroupService.class);
+
+    @Mock
+    private final AccessService accessService = Mockito.spy(AccessService.class);
+
+    @Rule
+    public TransactionExceptedException thrown = TransactionExceptedException.none();
+
+    protected String getProject() {
+        return "test_tds_export";
+    }
+
+    @Before
+    public void setup() {
+        ExecutableUtils.initJobFactory();
+        String localMetaDir = "src/test/resources/ut_meta/tds_export_test";
+        createTestMetadata(localMetaDir);
+        Authentication authentication = new TestingAuthenticationToken("ADMIN", "ADMIN", Constant.ROLE_ADMIN);
+        SecurityContextHolder.getContext().setAuthentication(authentication);
+
+        overwriteSystemProp("HADOOP_USER_NAME", "root");
+        ReflectionTestUtils.setField(aclEvaluate, "aclUtil", aclUtil);
+        ReflectionTestUtils.setField(modelService, "aclEvaluate", aclEvaluate);
+        ReflectionTestUtils.setField(modelService, "accessService", accessService);
+        ReflectionTestUtils.setField(modelService, "userGroupService", userGroupService);
+        ReflectionTestUtils.setField(modelService, "userGroupService", userGroupService);
+
+        ReflectionTestUtils.setField(tdsService, "accessService", accessService);
+        ReflectionTestUtils.setField(tdsService, "userGroupService", userGroupService);
+        ReflectionTestUtils.setField(tdsService, "aclEvaluate", aclEvaluate);
+
+        modelService.setSemanticUpdater(semanticService);
+        modelService.setIndexPlanService(indexPlanService);
+
+        try {
+            new JdbcRawRecStore(getTestConfig());
+        } catch (Exception e) {
+            //
+        }
+    }
+
+    @After
+    public void tearDown() {
+        getTestConfig().setProperty("kylin.metadata.semi-automatic-mode", "false");
+        EventBusFactory.getInstance().restart();
+        cleanupTestMetadata();
+    }
+
+    @Test
+    public void testDifferentTableSameColNameExportTds() {
+        String modelId = "8b6fa01d-1607-9459-81aa-115b9419b830";
+        SyncContext syncContext = new SyncContext();
+        syncContext.setProjectName(getProject());
+        syncContext.setModelId(modelId);
+        syncContext.setModelElement(SyncContext.ModelElement.AGG_INDEX_COL);
+        syncContext.setAdmin(true);
+        syncContext.setDataflow(NDataflowManager.getInstance(getTestConfig(), getProject()).getDataflow(modelId));
+        syncContext.setKylinConfig(getTestConfig());
+        SyncModel syncModel = tdsService.exportModel(syncContext);
+        Assert.assertTrue(tdsService.preCheckNameConflict(syncModel));
+    }
+}
diff --git a/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelTdsServiceTest.java b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelTdsServiceTest.java
index f1db49147f..b60753ea7b 100644
--- a/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelTdsServiceTest.java
+++ b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelTdsServiceTest.java
@@ -192,7 +192,7 @@ public class ModelTdsServiceTest extends SourceTestCase {
     }
 
     @Test
-    public void testExportTDSWithDupMeasureColumnNames() throws IOException {
+    public void testExportTDSWithDupMeasureColumnNamesOutOfScope() throws IOException {
         String projectName = "default";
         String modelId = "2ed3bf12-ad40-e8a0-73da-8dc3b4c798bb";
         val modelRequest = JsonUtil.readValue(
@@ -213,9 +213,7 @@ public class ModelTdsServiceTest extends SourceTestCase {
         syncContext.setKylinConfig(getTestConfig());
         syncContext.setAdmin(true);
         SyncModel syncModel = tdsService.exportModel(syncContext);
-        Assert.assertThrows(
-                "There are duplicated names among model column LO_LINENUMBER and measure name LO_LINENUMBER. Cannot export a valid TDS file. Please correct the duplicated names and try again.",
-                KylinException.class, () -> tdsService.preCheckNameConflict(syncModel));
+        Assert.assertTrue(tdsService.preCheckNameConflict(syncModel));
     }
 
     @Test
diff --git a/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/_global/project/test_tds_export.json b/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/_global/project/test_tds_export.json
new file mode 100644
index 0000000000..22d5d8bc18
--- /dev/null
+++ b/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/_global/project/test_tds_export.json
@@ -0,0 +1,35 @@
+{
+  "uuid" : "68c1c378-9bc0-e3a2-bb44-3d5bd23fc5ea",
+  "last_modified" : 1670814885403,
+  "create_time" : 1670814885385,
+  "version" : "4.0.0.0",
+  "name" : "test_tds_export",
+  "owner" : "ADMIN",
+  "status" : "ENABLED",
+  "create_time_utc" : 1670814885385,
+  "default_database" : "DEFAULT",
+  "description" : "",
+  "principal" : null,
+  "keytab" : null,
+  "maintain_model_type" : "MANUAL_MAINTAIN",
+  "override_kylin_properties" : {
+    "kylin.metadata.semi-automatic-mode" : "false",
+    "kylin.query.metadata.expose-computed-column" : "true",
+    "kylin.source.default" : "9"
+  },
+  "segment_config" : {
+    "auto_merge_enabled" : false,
+    "auto_merge_time_ranges" : [ "WEEK", "MONTH", "QUARTER", "YEAR" ],
+    "volatile_range" : {
+      "volatile_range_number" : 0,
+      "volatile_range_enabled" : false,
+      "volatile_range_type" : "DAY"
+    },
+    "retention_range" : {
+      "retention_range_number" : 1,
+      "retention_range_enabled" : false,
+      "retention_range_type" : "MONTH"
+    },
+    "create_empty_segment_enabled" : false
+  }
+}
diff --git a/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/dataflow/8b6fa01d-1607-9459-81aa-115b9419b830.json b/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/dataflow/8b6fa01d-1607-9459-81aa-115b9419b830.json
new file mode 100644
index 0000000000..f441cd702a
--- /dev/null
+++ b/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/dataflow/8b6fa01d-1607-9459-81aa-115b9419b830.json
@@ -0,0 +1,93 @@
+{
+  "uuid" : "8b6fa01d-1607-9459-81aa-115b9419b830",
+  "last_modified" : 1670816215330,
+  "create_time" : 1670816130299,
+  "version" : "4.0.0.0",
+  "status" : "ONLINE",
+  "last_status" : null,
+  "cost" : 50,
+  "query_hit_count" : 0,
+  "last_query_time" : 0,
+  "layout_query_hit_count" : { },
+  "segments" : [ {
+    "id" : "7592350d-aed6-2ece-c99b-b0bf8002837b",
+    "name" : "FULL_BUILD",
+    "create_time_utc" : 1670816137816,
+    "status" : "READY",
+    "segRange" : {
+      "@class" : "org.apache.kylin.metadata.model.SegmentRange$TimePartitionedSegmentRange",
+      "date_range_start" : 0,
+      "date_range_end" : 9223372036854775807
+    },
+    "timeRange" : null,
+    "dimension_range_info_map" : {
+      "0" : {
+        "min" : "1",
+        "max" : "60000"
+      },
+      "1" : {
+        "min" : "1",
+        "max" : "2000"
+      },
+      "23" : {
+        "min" : "1",
+        "max" : "2000"
+      },
+      "24" : {
+        "min" : "1",
+        "max" : "60000"
+      },
+      "25" : {
+        "min" : "1",
+        "max" : "299"
+      },
+      "15" : {
+        "min" : "1",
+        "max" : "7"
+      },
+      "16" : {
+        "min" : "1",
+        "max" : "299"
+      },
+      "7" : {
+        "min" : "1",
+        "max" : "20"
+      },
+      "18" : {
+        "min" : "1",
+        "max" : "7"
+      },
+      "21" : {
+        "min" : "1",
+        "max" : "20"
+      }
+    },
+    "parameters" : null,
+    "dictionaries" : null,
+    "snapshots" : null,
+    "last_build_time" : 1670815943859,
+    "source_count" : 301389,
+    "source_bytes_size" : 11319910,
+    "column_source_bytes" : {
+      "SSB.P_LINEORDER.LO_ORDERKEY" : 140689,
+      "SSB.LINEORDER.LO_LINENUMBER" : 301389,
+      "SSB.P_LINEORDER.LO_CUSTKEY" : 149895,
+      "SSB.LINEORDER.LO_ORDERKEY" : 704647,
+      "SSB.LINEORDER.LO_SUPPKEY" : 475290,
+      "SSB.LINEORDER.LO_PARTKEY" : 1019900,
+      "SSB.P_LINEORDER.LO_PARTKEY" : 204053,
+      "SSB.LINEORDER.LO_CUSTKEY" : 750759,
+      "SSB.P_LINEORDER.LO_LINENUMBER" : 60175,
+      "SSB.P_LINEORDER.LO_SUPPKEY" : 94835
+    },
+    "ori_snapshot_size" : { },
+    "additionalInfo" : { },
+    "is_realtime_segment" : false,
+    "is_snapshot_ready" : false,
+    "is_dict_ready" : true,
+    "is_flat_table_ready" : true,
+    "is_fact_view_ready" : false,
+    "multi_partitions" : [ ],
+    "max_bucket_id" : -1
+  } ]
+}
diff --git a/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/index_plan/8b6fa01d-1607-9459-81aa-115b9419b830.json b/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/index_plan/8b6fa01d-1607-9459-81aa-115b9419b830.json
new file mode 100644
index 0000000000..ab4d8dae82
--- /dev/null
+++ b/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/index_plan/8b6fa01d-1607-9459-81aa-115b9419b830.json
@@ -0,0 +1,63 @@
+{
+  "uuid" : "8b6fa01d-1607-9459-81aa-115b9419b830",
+  "last_modified" : 1670816130298,
+  "create_time" : 1670816130232,
+  "version" : "4.0.0.0",
+  "description" : null,
+  "rule_based_index" : null,
+  "indexes" : [ {
+    "id" : 0,
+    "dimensions" : [ 0, 1, 7, 15, 16, 18, 21, 23, 24, 25 ],
+    "measures" : [ 100000, 100001, 100002 ],
+    "layouts" : [ {
+      "id" : 1,
+      "name" : null,
+      "owner" : null,
+      "col_order" : [ 0, 1, 7, 15, 16, 18, 21, 23, 24, 25, 100000, 100001, 100002 ],
+      "shard_by_columns" : [ ],
+      "partition_by_columns" : [ ],
+      "sort_by_columns" : [ ],
+      "storage_type" : 20,
+      "update_time" : 1670816130247,
+      "manual" : false,
+      "auto" : false,
+      "base" : true,
+      "draft_version" : null,
+      "index_range" : null
+    } ],
+    "next_layout_offset" : 2
+  }, {
+    "id" : 20000000000,
+    "dimensions" : [ 0, 1, 7, 15, 16, 18, 21, 23, 24, 25 ],
+    "measures" : [ ],
+    "layouts" : [ {
+      "id" : 20000000001,
+      "name" : null,
+      "owner" : null,
+      "col_order" : [ 0, 1, 7, 15, 16, 18, 21, 23, 24, 25 ],
+      "shard_by_columns" : [ ],
+      "partition_by_columns" : [ ],
+      "sort_by_columns" : [ ],
+      "storage_type" : 20,
+      "update_time" : 1670816130249,
+      "manual" : false,
+      "auto" : false,
+      "base" : true,
+      "draft_version" : null,
+      "index_range" : null
+    } ],
+    "next_layout_offset" : 2
+  } ],
+  "override_properties" : { },
+  "to_be_deleted_indexes" : [ ],
+  "auto_merge_time_ranges" : null,
+  "retention_range" : 0,
+  "engine_type" : 80,
+  "next_aggregation_index_id" : 10000,
+  "next_table_index_id" : 20000010000,
+  "agg_shard_by_columns" : [ ],
+  "extend_partition_columns" : [ ],
+  "layout_bucket_num" : { },
+  "approved_additional_recs" : 0,
+  "approved_removal_recs" : 0
+}
diff --git a/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/model_desc/8b6fa01d-1607-9459-81aa-115b9419b830.json b/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/model_desc/8b6fa01d-1607-9459-81aa-115b9419b830.json
new file mode 100644
index 0000000000..45e14db8c3
--- /dev/null
+++ b/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/model_desc/8b6fa01d-1607-9459-81aa-115b9419b830.json
@@ -0,0 +1,262 @@
+{
+  "uuid" : "8b6fa01d-1607-9459-81aa-115b9419b830",
+  "last_modified" : 1670816130232,
+  "create_time" : 1670816130091,
+  "version" : "4.0.0.0",
+  "alias" : "model1",
+  "owner" : "ADMIN",
+  "config_last_modifier" : null,
+  "config_last_modified" : 0,
+  "description" : null,
+  "fact_table" : "SSB.LINEORDER",
+  "fact_table_alias" : null,
+  "management_type" : "MODEL_BASED",
+  "join_tables" : [ {
+    "table" : "SSB.P_LINEORDER",
+    "kind" : "LOOKUP",
+    "alias" : "P_LINEORDER",
+    "join" : {
+      "type" : "INNER",
+      "primary_key" : [ "P_LINEORDER.LO_ORDERKEY" ],
+      "foreign_key" : [ "LINEORDER.LO_ORDERKEY" ],
+      "non_equi_join_condition" : null,
+      "primary_table" : null,
+      "foreign_table" : null
+    },
+    "flattenable" : "flatten",
+    "join_relation_type" : "MANY_TO_ONE"
+  } ],
+  "filter_condition" : "",
+  "partition_desc" : null,
+  "capacity" : "MEDIUM",
+  "segment_config" : {
+    "auto_merge_enabled" : null,
+    "auto_merge_time_ranges" : null,
+    "volatile_range" : null,
+    "retention_range" : null,
+    "create_empty_segment_enabled" : false
+  },
+  "data_check_desc" : null,
+  "semantic_version" : 0,
+  "storage_type" : 0,
+  "model_type" : "BATCH",
+  "all_named_columns" : [ {
+    "id" : 0,
+    "name" : "LO_ORDERKEY_LINEORDER",
+    "column" : "LINEORDER.LO_ORDERKEY",
+    "status" : "DIMENSION"
+  }, {
+    "id" : 1,
+    "name" : "LO_PARTKEY_LINEORDER",
+    "column" : "LINEORDER.LO_PARTKEY",
+    "status" : "DIMENSION"
+  }, {
+    "id" : 2,
+    "name" : "LO_DISCOUNT_LINEORDER_1",
+    "column" : "LINEORDER.LO_DISCOUNT"
+  }, {
+    "id" : 3,
+    "name" : "LO_SUPPLYCOST_LINEORDER_1",
+    "column" : "LINEORDER.LO_SUPPLYCOST"
+  }, {
+    "id" : 4,
+    "name" : "LO_COMMITDATE_LINEORDER_1",
+    "column" : "LINEORDER.LO_COMMITDATE"
+  }, {
+    "id" : 5,
+    "name" : "LO_EXTENDEDPRICE_LINEORDER_1",
+    "column" : "LINEORDER.LO_EXTENDEDPRICE"
+  }, {
+    "id" : 6,
+    "name" : "LO_TAX_LINEORDER_1",
+    "column" : "LINEORDER.LO_TAX"
+  }, {
+    "id" : 7,
+    "name" : "LO_SUPPKEY_LINEORDER_1",
+    "column" : "LINEORDER.LO_SUPPKEY",
+    "status" : "DIMENSION"
+  }, {
+    "id" : 8,
+    "name" : "LO_ORDTOTALPRICE_LINEORDER_1",
+    "column" : "LINEORDER.LO_ORDTOTALPRICE"
+  }, {
+    "id" : 9,
+    "name" : "LO_REVENUE_LINEORDER_1",
+    "column" : "LINEORDER.LO_REVENUE"
+  }, {
+    "id" : 10,
+    "name" : "LO_ORDERDATE_LINEORDER_1",
+    "column" : "LINEORDER.LO_ORDERDATE"
+  }, {
+    "id" : 11,
+    "name" : "LO_ORDERPRIOTITY_LINEORDER_1",
+    "column" : "LINEORDER.LO_ORDERPRIOTITY"
+  }, {
+    "id" : 12,
+    "name" : "LO_SHIPPRIOTITY_LINEORDER_1",
+    "column" : "LINEORDER.LO_SHIPPRIOTITY"
+  }, {
+    "id" : 13,
+    "name" : "LO_QUANTITY_LINEORDER_1",
+    "column" : "LINEORDER.LO_QUANTITY"
+  }, {
+    "id" : 14,
+    "name" : "LO_SHIPMODE_LINEORDER_1",
+    "column" : "LINEORDER.LO_SHIPMODE"
+  }, {
+    "id" : 15,
+    "name" : "LO_LINENUMBER_LINEORDER",
+    "column" : "LINEORDER.LO_LINENUMBER",
+    "status" : "DIMENSION"
+  }, {
+    "id" : 16,
+    "name" : "LO_CUSTKEY_LINEORDER",
+    "column" : "LINEORDER.LO_CUSTKEY",
+    "status" : "DIMENSION"
+  }, {
+    "id" : 17,
+    "name" : "LO_SHIPMODE_P_LINEORDER_1",
+    "column" : "P_LINEORDER.LO_SHIPMODE"
+  }, {
+    "id" : 18,
+    "name" : "LO_LINENUMBER_P_LINEORDER_1",
+    "column" : "P_LINEORDER.LO_LINENUMBER",
+    "status" : "DIMENSION"
+  }, {
+    "id" : 19,
+    "name" : "LO_ORDTOTALPRICE_P_LINEORDER_1",
+    "column" : "P_LINEORDER.LO_ORDTOTALPRICE"
+  }, {
+    "id" : 20,
+    "name" : "LO_SUPPLYCOST_P_LINEORDER_1",
+    "column" : "P_LINEORDER.LO_SUPPLYCOST"
+  }, {
+    "id" : 21,
+    "name" : "LO_SUPPKEY_P_LINEORDER_1",
+    "column" : "P_LINEORDER.LO_SUPPKEY",
+    "status" : "DIMENSION"
+  }, {
+    "id" : 22,
+    "name" : "LO_QUANTITY_P_LINEORDER_1",
+    "column" : "P_LINEORDER.LO_QUANTITY"
+  }, {
+    "id" : 23,
+    "name" : "LO_PARTKEY_P_LINEORDER_1",
+    "column" : "P_LINEORDER.LO_PARTKEY",
+    "status" : "DIMENSION"
+  }, {
+    "id" : 24,
+    "name" : "LO_ORDERKEY_P_LINEORDER_1",
+    "column" : "P_LINEORDER.LO_ORDERKEY",
+    "status" : "DIMENSION"
+  }, {
+    "id" : 25,
+    "name" : "LO_CUSTKEY_P_LINEORDER_1",
+    "column" : "P_LINEORDER.LO_CUSTKEY",
+    "status" : "DIMENSION"
+  }, {
+    "id" : 26,
+    "name" : "LO_SHIPPRIOTITY_P_LINEORDER_1",
+    "column" : "P_LINEORDER.LO_SHIPPRIOTITY"
+  }, {
+    "id" : 27,
+    "name" : "LO_DISCOUNT_P_LINEORDER_1",
+    "column" : "P_LINEORDER.LO_DISCOUNT"
+  }, {
+    "id" : 28,
+    "name" : "LO_ORDERPRIOTITY_P_LINEORDER_1",
+    "column" : "P_LINEORDER.LO_ORDERPRIOTITY"
+  }, {
+    "id" : 29,
+    "name" : "LO_ORDERDATE_P_LINEORDER_1",
+    "column" : "P_LINEORDER.LO_ORDERDATE"
+  }, {
+    "id" : 30,
+    "name" : "LO_REVENUE_P_LINEORDER_1",
+    "column" : "P_LINEORDER.LO_REVENUE"
+  }, {
+    "id" : 31,
+    "name" : "V_REVENUE",
+    "column" : "P_LINEORDER.V_REVENUE"
+  }, {
+    "id" : 32,
+    "name" : "LO_COMMITDATE_P_LINEORDER_1",
+    "column" : "P_LINEORDER.LO_COMMITDATE"
+  }, {
+    "id" : 33,
+    "name" : "LO_EXTENDEDPRICE_P_LINEORDER_1",
+    "column" : "P_LINEORDER.LO_EXTENDEDPRICE"
+  }, {
+    "id" : 34,
+    "name" : "LO_TAX_P_LINEORDER_1",
+    "column" : "P_LINEORDER.LO_TAX"
+  } ],
+  "all_measures" : [ {
+    "name" : "COUNT_ALL",
+    "function" : {
+      "expression" : "COUNT",
+      "parameters" : [ {
+        "type" : "constant",
+        "value" : "1"
+      } ],
+      "returntype" : "bigint"
+    },
+    "column" : null,
+    "comment" : null,
+    "id" : 100000,
+    "type" : "NORMAL",
+    "internal_ids" : [ ]
+  }, {
+    "name" : "sum11",
+    "function" : {
+      "expression" : "SUM",
+      "parameters" : [ {
+        "type" : "column",
+        "value" : "LINEORDER.LO_LINENUMBER"
+      } ],
+      "returntype" : "bigint"
+    },
+    "column" : null,
+    "comment" : "",
+    "id" : 100001,
+    "type" : "NORMAL",
+    "internal_ids" : [ ]
+  }, {
+    "name" : "sum2",
+    "function" : {
+      "expression" : "SUM",
+      "parameters" : [ {
+        "type" : "column",
+        "value" : "P_LINEORDER.LO_LINENUMBER"
+      } ],
+      "returntype" : "bigint"
+    },
+    "column" : null,
+    "comment" : "",
+    "id" : 100002,
+    "type" : "NORMAL",
+    "internal_ids" : [ ]
+  } ],
+  "recommendations_count" : 0,
+  "computed_columns" : [ ],
+  "canvas" : {
+    "coordinate" : {
+      "LINEORDER" : {
+        "x" : 739.9444580078124,
+        "y" : 82.94443766276042,
+        "width" : 200.0,
+        "height" : 230.0
+      },
+      "P_LINEORDER" : {
+        "x" : 346.61112467447913,
+        "y" : 220.72221544053812,
+        "width" : 200.0,
+        "height" : 230.0
+      }
+    },
+    "zoom" : 9.0
+  },
+  "multi_partition_desc" : null,
+  "multi_partition_key_mapping" : null,
+  "fusion_id" : null
+}
diff --git a/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/table/SSB.LINEORDER.json b/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/table/SSB.LINEORDER.json
new file mode 100644
index 0000000000..9b5626b915
--- /dev/null
+++ b/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/table/SSB.LINEORDER.json
@@ -0,0 +1,113 @@
+{
+  "uuid" : "2639d836-95e1-a1dc-850b-089a89825193",
+  "last_modified" : 0,
+  "create_time" : 1670815325493,
+  "version" : "4.0.0.0",
+  "name" : "LINEORDER",
+  "columns" : [ {
+    "id" : "1",
+    "name" : "LO_ORDERKEY",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_orderkey"
+  }, {
+    "id" : "2",
+    "name" : "LO_LINENUMBER",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_linenumber"
+  }, {
+    "id" : "3",
+    "name" : "LO_CUSTKEY",
+    "datatype" : "integer",
+    "case_sensitive_name" : "lo_custkey"
+  }, {
+    "id" : "4",
+    "name" : "LO_PARTKEY",
+    "datatype" : "integer",
+    "case_sensitive_name" : "lo_partkey"
+  }, {
+    "id" : "5",
+    "name" : "LO_SUPPKEY",
+    "datatype" : "integer",
+    "case_sensitive_name" : "lo_suppkey"
+  }, {
+    "id" : "6",
+    "name" : "LO_ORDERDATE",
+    "datatype" : "date",
+    "case_sensitive_name" : "lo_orderdate"
+  }, {
+    "id" : "7",
+    "name" : "LO_ORDERPRIOTITY",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "lo_orderpriotity"
+  }, {
+    "id" : "8",
+    "name" : "LO_SHIPPRIOTITY",
+    "datatype" : "integer",
+    "case_sensitive_name" : "lo_shippriotity"
+  }, {
+    "id" : "9",
+    "name" : "LO_QUANTITY",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_quantity"
+  }, {
+    "id" : "10",
+    "name" : "LO_EXTENDEDPRICE",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_extendedprice"
+  }, {
+    "id" : "11",
+    "name" : "LO_ORDTOTALPRICE",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_ordtotalprice"
+  }, {
+    "id" : "12",
+    "name" : "LO_DISCOUNT",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_discount"
+  }, {
+    "id" : "13",
+    "name" : "LO_REVENUE",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_revenue"
+  }, {
+    "id" : "14",
+    "name" : "LO_SUPPLYCOST",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_supplycost"
+  }, {
+    "id" : "15",
+    "name" : "LO_TAX",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_tax"
+  }, {
+    "id" : "16",
+    "name" : "LO_COMMITDATE",
+    "datatype" : "date",
+    "case_sensitive_name" : "lo_commitdate"
+  }, {
+    "id" : "17",
+    "name" : "LO_SHIPMODE",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "lo_shipmode"
+  } ],
+  "source_type" : 9,
+  "table_type" : "EXTERNAL",
+  "top" : false,
+  "increment_loading" : false,
+  "last_snapshot_path" : null,
+  "last_snapshot_size" : 0,
+  "snapshot_last_modified" : 0,
+  "query_hit_count" : 0,
+  "partition_column" : null,
+  "snapshot_partitions" : { },
+  "snapshot_partitions_info" : { },
+  "snapshot_total_rows" : 0,
+  "snapshot_partition_col" : null,
+  "selected_snapshot_partition_col" : null,
+  "temp_snapshot_path" : null,
+  "snapshot_has_broken" : false,
+  "database" : "SSB",
+  "transactional" : false,
+  "rangePartition" : false,
+  "partition_desc" : null
+}
diff --git a/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/table/SSB.P_LINEORDER.json b/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/table/SSB.P_LINEORDER.json
new file mode 100644
index 0000000000..3f05465e97
--- /dev/null
+++ b/src/modeling-service/src/test/resources/ut_meta/tds_export_test/metadata/test_tds_export/table/SSB.P_LINEORDER.json
@@ -0,0 +1,118 @@
+{
+  "uuid" : "e0463929-d9c0-55ba-3c63-6cb0703053df",
+  "last_modified" : 1670836576389,
+  "create_time" : 1670815325386,
+  "version" : "4.0.0.0",
+  "name" : "P_LINEORDER",
+  "columns" : [ {
+    "id" : "1",
+    "name" : "LO_ORDERKEY",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_orderkey"
+  }, {
+    "id" : "2",
+    "name" : "LO_LINENUMBER",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_linenumber"
+  }, {
+    "id" : "3",
+    "name" : "LO_CUSTKEY",
+    "datatype" : "integer",
+    "case_sensitive_name" : "lo_custkey"
+  }, {
+    "id" : "4",
+    "name" : "LO_PARTKEY",
+    "datatype" : "integer",
+    "case_sensitive_name" : "lo_partkey"
+  }, {
+    "id" : "5",
+    "name" : "LO_SUPPKEY",
+    "datatype" : "integer",
+    "case_sensitive_name" : "lo_suppkey"
+  }, {
+    "id" : "6",
+    "name" : "LO_ORDERDATE",
+    "datatype" : "date",
+    "case_sensitive_name" : "lo_orderdate"
+  }, {
+    "id" : "7",
+    "name" : "LO_ORDERPRIOTITY",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "lo_orderpriotity"
+  }, {
+    "id" : "8",
+    "name" : "LO_SHIPPRIOTITY",
+    "datatype" : "integer",
+    "case_sensitive_name" : "lo_shippriotity"
+  }, {
+    "id" : "9",
+    "name" : "LO_QUANTITY",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_quantity"
+  }, {
+    "id" : "10",
+    "name" : "LO_EXTENDEDPRICE",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_extendedprice"
+  }, {
+    "id" : "11",
+    "name" : "LO_ORDTOTALPRICE",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_ordtotalprice"
+  }, {
+    "id" : "12",
+    "name" : "LO_DISCOUNT",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_discount"
+  }, {
+    "id" : "13",
+    "name" : "LO_REVENUE",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_revenue"
+  }, {
+    "id" : "14",
+    "name" : "LO_SUPPLYCOST",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_supplycost"
+  }, {
+    "id" : "15",
+    "name" : "LO_TAX",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "lo_tax"
+  }, {
+    "id" : "16",
+    "name" : "LO_COMMITDATE",
+    "datatype" : "date",
+    "case_sensitive_name" : "lo_commitdate"
+  }, {
+    "id" : "17",
+    "name" : "LO_SHIPMODE",
+    "datatype" : "varchar(4096)",
+    "case_sensitive_name" : "lo_shipmode"
+  }, {
+    "id" : "18",
+    "name" : "V_REVENUE",
+    "datatype" : "bigint",
+    "case_sensitive_name" : "v_revenue"
+  } ],
+  "source_type" : 9,
+  "table_type" : "VIEW",
+  "top" : false,
+  "increment_loading" : false,
+  "last_snapshot_path" : "test_tds_export/table_snapshot/SSB.P_LINEORDER/162b8fce-4867-4322-9b5c-e1149784d478",
+  "last_snapshot_size" : 1867437,
+  "snapshot_last_modified" : 1670902544401,
+  "query_hit_count" : 0,
+  "partition_column" : null,
+  "snapshot_partitions" : { },
+  "snapshot_partitions_info" : { },
+  "snapshot_total_rows" : 0,
+  "snapshot_partition_col" : null,
+  "selected_snapshot_partition_col" : null,
+  "temp_snapshot_path" : null,
+  "snapshot_has_broken" : false,
+  "database" : "SSB",
+  "transactional" : false,
+  "rangePartition" : false,
+  "partition_desc" : null
+}


[kylin] 28/34: KYLIN-5457 fix user group delete

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit beabe662fd745466f0e8b0ece9f2e96b0eaa9fea
Author: Jiale He <35...@users.noreply.github.com>
AuthorDate: Wed Jan 4 21:33:17 2023 +0800

    KYLIN-5457 fix user group delete
---
 .../kylin/rest/service/NUserGroupService.java      | 33 ++++++++++------------
 .../kylin/rest/service/NUserGroupServiceTest.java  | 21 ++++++++++++++
 .../metadata/usergroup/NUserGroupManager.java      |  2 +-
 .../metadata/usergroup/NUserGroupManagerTest.java  |  2 +-
 4 files changed, 38 insertions(+), 20 deletions(-)

diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/NUserGroupService.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/NUserGroupService.java
index c8a50fdbb4..7983384e18 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/NUserGroupService.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/NUserGroupService.java
@@ -106,16 +106,15 @@ public class NUserGroupService implements IUserGroupService {
         aclEvaluate.checkIsGlobalAdmin();
         checkGroupCanBeDeleted(name);
         // remove retained user group in all users
-        List<ManagedUser> managedUsers = userService.listUsers();
-        for (ManagedUser managedUser : managedUsers) {
-            if (managedUser.getAuthorities().contains(new SimpleGrantedAuthority(name))) {
-                managedUser.removeAuthorities(name);
-                userService.updateUser(managedUser);
-            }
-        }
+        SimpleGrantedAuthority simpleAuthority = new SimpleGrantedAuthority(name);
+        userService.listUsers(false).stream().filter(
+                user -> user.getAuthorities().parallelStream().anyMatch(authority -> authority.equals(simpleAuthority)))
+                .forEach(user -> {
+                    user.removeAuthorities(name);
+                    userService.updateUser(user);
+                });
         //delete group's project ACL
         accessService.revokeProjectPermission(name, MetadataConstants.TYPE_GROUP);
-
         getUserGroupManager().delete(name);
     }
 
@@ -126,10 +125,8 @@ public class NUserGroupService implements IUserGroupService {
         aclEvaluate.checkIsGlobalAdmin();
         checkGroupNameExist(groupName);
 
-        List<String> groupUsers = new ArrayList<>();
-        for (ManagedUser user : getGroupMembersByName(groupName)) {
-            groupUsers.add(user.getUsername());
-        }
+        List<String> groupUsers = getGroupMembersByName(groupName).stream().map(ManagedUser::getUsername)
+                .collect(Collectors.toList());
         List<String> moveInUsers = Lists.newArrayList(users);
         List<String> moveOutUsers = Lists.newArrayList(groupUsers);
         moveInUsers.removeAll(groupUsers);
@@ -139,7 +136,7 @@ public class NUserGroupService implements IUserGroupService {
 
         String currentUser = aclEvaluate.getCurrentUserName();
 
-        List<String> moveList = new ArrayList<String>();
+        List<String> moveList = Lists.newArrayList();
         moveList.addAll(moveInUsers);
         moveList.addAll(moveOutUsers);
         val superAdminList = userService.listSuperAdminUsers();
@@ -193,7 +190,7 @@ public class NUserGroupService implements IUserGroupService {
         if (StringUtils.isEmpty(userGroupName)) {
             return listUserGroups();
         }
-        return getUserGroupManager().getAllUsers(path -> {
+        return getUserGroupManager().getAllGroups(path -> {
             val pathPair = StringUtils.split(path, "/");
             String groupName = pathPair[pathPair.length - 1];
             return StringUtils.containsIgnoreCase(groupName, userGroupName);
@@ -218,13 +215,13 @@ public class NUserGroupService implements IUserGroupService {
             throw new KylinException(USERGROUP_NOT_EXIST,
                     String.format(Locale.ROOT, MsgPicker.getMsg().getUserGroupNotExist(), groupName));
         }
-        List<UserGroup> userGroups = getUserGroupManager()
-                .getAllUsers(path -> StringUtils.endsWithIgnoreCase(path, groupName));
-        if (userGroups.isEmpty()) {
+        val optional = getUserGroupManager().getAllGroups(path -> StringUtils.endsWithIgnoreCase(path, groupName))
+                .stream().filter(group -> StringUtils.equalsIgnoreCase(group.getGroupName(), groupName)).findFirst();
+        if (!optional.isPresent()) {
             throw new KylinException(USERGROUP_NOT_EXIST,
                     String.format(Locale.ROOT, MsgPicker.getMsg().getUserGroupNotExist(), groupName));
         }
-        return userGroups.get(0).getUuid();
+        return optional.get().getUuid();
     }
 
     public boolean exists(String name) {
diff --git a/src/common-service/src/test/java/org/apache/kylin/rest/service/NUserGroupServiceTest.java b/src/common-service/src/test/java/org/apache/kylin/rest/service/NUserGroupServiceTest.java
index cda761931c..93de1a097b 100644
--- a/src/common-service/src/test/java/org/apache/kylin/rest/service/NUserGroupServiceTest.java
+++ b/src/common-service/src/test/java/org/apache/kylin/rest/service/NUserGroupServiceTest.java
@@ -32,6 +32,7 @@ import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.persistence.transaction.TransactionException;
+import org.apache.kylin.metadata.usergroup.NUserGroupManager;
 import org.apache.kylin.metadata.user.ManagedUser;
 import org.apache.kylin.metadata.usergroup.UserGroup;
 import org.apache.kylin.rest.response.UserGroupResponseKI;
@@ -217,6 +218,26 @@ public class NUserGroupServiceTest extends ServiceTestBase {
         Assert.assertEquals(Lists.newArrayList("g1", "g2", "g3"), userGroupService.getAllUserGroups());
     }
 
+    @Test
+    public void testDeleteAdminNameGroup() throws IOException {
+        String adminGroupName = "admin";
+        NUserGroupManager manager = NUserGroupManager.getInstance(getTestConfig());
+        Assert.assertFalse(userGroupService.exists(adminGroupName));
+        Assert.assertFalse(manager.exists(adminGroupName));
+        // add 'admin' group
+        userGroupService.addGroup(adminGroupName);
+        Assert.assertTrue(userGroupService.exists(adminGroupName));
+        Assert.assertTrue(manager.exists(adminGroupName));
+        // check 'admin' group uuid
+        String adminUUID = userGroupService.getUuidByGroupName(adminGroupName);
+        manager.getAllGroups().stream().filter(group -> group.getGroupName().equalsIgnoreCase(adminGroupName))
+                .findFirst().ifPresent(userGroup -> Assert.assertEquals(userGroup.getUuid(), adminUUID));
+        // delete 'admin' group
+        userGroupService.deleteGroup(adminGroupName);
+        Assert.assertFalse(userGroupService.exists(adminGroupName));
+        Assert.assertFalse(manager.exists(adminGroupName));
+    }
+
     private void checkDelUserGroupWithException(String groupName) {
         try {
             userGroupService.deleteGroup(groupName);
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/usergroup/NUserGroupManager.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/usergroup/NUserGroupManager.java
index f9c5115f66..32fd77f381 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/usergroup/NUserGroupManager.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/usergroup/NUserGroupManager.java
@@ -91,7 +91,7 @@ public class NUserGroupManager {
         return ImmutableList.copyOf(crud.listAll());
     }
 
-    public List<UserGroup> getAllUsers(Predicate<String> predicate) {
+    public List<UserGroup> getAllGroups(Predicate<String> predicate) {
         return ImmutableList.copyOf(crud.listPartial(predicate));
     }
 
diff --git a/src/core-metadata/src/test/java/org/apache/kylin/metadata/usergroup/NUserGroupManagerTest.java b/src/core-metadata/src/test/java/org/apache/kylin/metadata/usergroup/NUserGroupManagerTest.java
index 96647be4f0..58bf84d092 100644
--- a/src/core-metadata/src/test/java/org/apache/kylin/metadata/usergroup/NUserGroupManagerTest.java
+++ b/src/core-metadata/src/test/java/org/apache/kylin/metadata/usergroup/NUserGroupManagerTest.java
@@ -52,7 +52,7 @@ public class NUserGroupManagerTest extends NLocalFileMetadataTestCase {
         Assert.assertTrue(group.exists("g1"));
         Assert.assertFalse(group.exists("g4"));
         Assert.assertEquals(Lists.newArrayList("g1", "g2", "g3"), group.getAllGroupNames());
-        Assert.assertEquals("g1", group.getAllUsers(path -> path.endsWith("g1")).get(0).getGroupName());
+        Assert.assertEquals("g1", group.getAllGroups(path -> path.endsWith("g1")).get(0).getGroupName());
 
         Assert.assertThrows(String.format(Locale.ROOT, MsgPicker.getMsg().getUserGroupExist(), "g1"),
                 KylinException.class, () -> group.add("g1"));


[kylin] 23/34: KYLIN-5447 wrap logical view response

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 3aff4aa461340b709694a8ac610c24188b954ac9
Author: ChenLiang.Lu <31...@users.noreply.github.com>
AuthorDate: Fri Dec 30 16:58:26 2022 +0800

    KYLIN-5447 wrap logical view response
    
    KYLIN-5447 Wrap Logical View list
---
 .../kylin/rest/controller/NAdminController.java    |  2 +-
 .../kylin/rest/response/LogicalViewResponse.java   | 54 ++++++++++++++++++++++
 .../apache/kylin/rest/service/SparkDDLService.java |  9 ++--
 .../apache/kylin/rest/service/TableExtService.java |  4 +-
 .../org/apache/kylin/rest/ddl/ViewCheck.scala      |  2 +-
 .../apache/kylin/rest/service/SparkDDLTest.java    | 21 +++++----
 .../org/apache/kylin/newten/LogicalViewTest.java   |  4 ++
 .../kylin/rest/controller/SparkDDLController.java  |  6 +--
 .../kylin/rest/service/MetaStoreService.java       | 13 +++++-
 9 files changed, 95 insertions(+), 20 deletions(-)

diff --git a/src/common-server/src/main/java/org/apache/kylin/rest/controller/NAdminController.java b/src/common-server/src/main/java/org/apache/kylin/rest/controller/NAdminController.java
index 2d0440f52f..c2f26bd8aa 100644
--- a/src/common-server/src/main/java/org/apache/kylin/rest/controller/NAdminController.java
+++ b/src/common-server/src/main/java/org/apache/kylin/rest/controller/NAdminController.java
@@ -76,7 +76,7 @@ public class NAdminController extends NBasicController {
         propertyKeys.add("kylin.security.remove-ldap-custom-security-limit-enabled");
         propertyKeys.add("kylin.source.ddl.logical-view.enabled");
         propertyKeys.add("kylin.source.ddl.hive.enabled");
-        propertyKeys.add("kylin.source.ddl.logical-view-database");
+        propertyKeys.add("kylin.source.ddl.logical-view.database");
         propertyKeys.add("kylin.storage.check-quota-enabled");
 
         // add second storage
diff --git a/src/datasource-service/src/main/java/org/apache/kylin/rest/response/LogicalViewResponse.java b/src/datasource-service/src/main/java/org/apache/kylin/rest/response/LogicalViewResponse.java
new file mode 100644
index 0000000000..2343207aa3
--- /dev/null
+++ b/src/datasource-service/src/main/java/org/apache/kylin/rest/response/LogicalViewResponse.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.rest.response;
+
+import org.apache.kylin.metadata.view.LogicalView;
+
+import com.fasterxml.jackson.annotation.JsonAutoDetect;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import lombok.AllArgsConstructor;
+import lombok.Data;
+import lombok.NoArgsConstructor;
+
+@Data
+@AllArgsConstructor
+@NoArgsConstructor
+@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE,
+    isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
+public class LogicalViewResponse {
+  @JsonProperty("table_name")
+  private String tableName;
+
+  @JsonProperty("created_sql")
+  private String createdSql;
+
+  @JsonProperty("modified_user")
+  private String modifiedUser;
+
+  @JsonProperty("created_project")
+  private String createdProject;
+
+  public LogicalViewResponse(LogicalView view) {
+    this.tableName = view.getTableName();
+    this.createdSql = view.getCreatedSql();
+    this.modifiedUser = view.getModifiedUser();
+    this.createdProject = view.getCreatedProject();
+  }
+}
diff --git a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java
index a1c00fdcc9..ef76e21e55 100644
--- a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java
+++ b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/SparkDDLService.java
@@ -41,6 +41,7 @@ import org.apache.kylin.metadata.view.LogicalViewManager;
 import org.apache.kylin.rest.ddl.SourceTableCheck;
 import org.apache.kylin.rest.ddl.ViewCheck;
 import org.apache.kylin.rest.request.ViewRequest;
+import org.apache.kylin.rest.response.LogicalViewResponse;
 import org.apache.kylin.rest.util.AclPermissionUtil;
 
 import org.apache.spark.ddl.DDLCheck;
@@ -135,18 +136,20 @@ public class SparkDDLService extends BasicService {
     LogicalViewLoader.unloadView(context.getLogicalViewName(), SparderEnv.getSparkSession());
   }
 
-  public List<LogicalView> listAll(String project, String tableName) {
+  public List<LogicalViewResponse> listAll(String project, String tableName) {
     List<LogicalView> logicalViews = LogicalViewManager.getInstance(KylinConfig.getInstanceFromEnv()).list();
     if (StringUtils.isNotBlank(tableName)) {
       logicalViews = logicalViews.stream()
           .filter(table -> table.getTableName().toLowerCase().contains(tableName.toLowerCase()))
           .collect(Collectors.toList());
     }
-    logicalViews.forEach(table -> {
+    List<LogicalViewResponse> viewResponses =
+        logicalViews.stream().map(LogicalViewResponse::new).collect(Collectors.toList());
+    viewResponses.forEach(table -> {
       if (!table.getCreatedProject().equalsIgnoreCase(project)) {
         table.setCreatedSql("***");
       }
     });
-    return logicalViews;
+    return viewResponses;
   }
 }
\ No newline at end of file
diff --git a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableExtService.java b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableExtService.java
index 595f5c0b6f..aabb60b613 100644
--- a/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableExtService.java
+++ b/src/datasource-service/src/main/java/org/apache/kylin/rest/service/TableExtService.java
@@ -18,7 +18,6 @@
 
 package org.apache.kylin.rest.service;
 
-import static org.apache.kylin.common.exception.ServerErrorCode.INVALID_LOGICAL_VIEW;
 import static org.apache.kylin.common.exception.ServerErrorCode.INVALID_TABLE_NAME;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.EXCLUDED_TABLE_REQUEST_NOT_ALLOWED;
 
@@ -152,8 +151,7 @@ public class TableExtService extends BasicService {
                 if (logicalTable != null && viewProject.equalsIgnoreCase(project)) {
                     canLoadTables.add(table);
                 } else {
-                    throw new KylinException(INVALID_LOGICAL_VIEW, MsgPicker.getMsg()
-                        .getLoadLogicalViewError(tableName, viewProject));
+                    tableResponse.getFailed().add(tableName);
                 }
             });
     }
diff --git a/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala b/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala
index 82935a66e5..879283b427 100644
--- a/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala
+++ b/src/datasource-service/src/main/scala/org/apache/kylin/rest/ddl/ViewCheck.scala
@@ -65,7 +65,7 @@ class ViewCheck extends DDLCheck {
       cnDescription.append(s"创建不要加 database 名称,系统自动创建到 ${config.getDDLLogicalViewDB} 库中,"
         + s"删除要加 ${config.getDDLLogicalViewDB} 库名称 \n")
       enDescription.append(s"Creating does not require adding database, it is automatically created in"
-        + s" ${config.getDDLLogicalViewDB} ,\n deleting should add ${config.getDDLLogicalViewDB} database")
+        + s" ${config.getDDLLogicalViewDB} , deleting should add ${config.getDDLLogicalViewDB} database\n")
       syntaxSupport.append(" `create logical view`, `drop logical view` ")
       cnDescription
         .append(s"仅支持 ${syntaxSupport} 语法\n")
diff --git a/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java b/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java
index 8e00df161f..80e9a5f114 100644
--- a/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java
+++ b/src/datasource-service/src/test/java/org/apache/kylin/rest/service/SparkDDLTest.java
@@ -40,6 +40,7 @@ import org.apache.kylin.metadata.view.LogicalViewManager;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.request.ViewRequest;
 import org.apache.kylin.rest.response.LoadTableResponse;
+import org.apache.kylin.rest.response.LogicalViewResponse;
 
 import org.apache.spark.sql.LogicalViewLoader;
 import org.apache.spark.sql.SparderEnv;
@@ -283,18 +284,23 @@ public class SparkDDLTest extends NLocalFileMetadataTestCase {
     Assert.assertEquals(3, description.get(0).size());
 
     // view list in project
-    List<LogicalView> logicalViewsInProject = ddlService.listAll("ssb", "");
-    List<LogicalView> logicalViewsInProject2 = ddlService.listAll("ssb", "table2");
+    List<LogicalViewResponse> logicalViewsInProject = ddlService.listAll("ssb", "");
+    List<LogicalViewResponse> logicalViewsInProject2 = ddlService.listAll("ssb", "table2");
+    List<LogicalViewResponse> logicalViewsInProject3 = ddlService.listAll("demo", "");
     Assert.assertEquals(3, logicalViewsInProject.size());
     Assert.assertEquals(1, logicalViewsInProject2.size());
-    LogicalView confidentialTable =
+    LogicalViewResponse confidentialTable =
         logicalViewsInProject.stream().filter(table -> table.getCreatedProject().equals("demo")).collect(
             Collectors.toList()).get(0);
-    LogicalView noConfidentialTable =
+    LogicalViewResponse noConfidentialTable =
         logicalViewsInProject.stream().filter(table -> table.getCreatedProject().equals("ssb")).collect(
             Collectors.toList()).get(0);
+    LogicalViewResponse noConfidentialTable2 =
+        logicalViewsInProject3.stream().filter(table -> table.getCreatedProject().equals("demo")).collect(
+            Collectors.toList()).get(0);
     Assert.assertEquals("***", confidentialTable.getCreatedSql());
     Assert.assertNotEquals("***", noConfidentialTable.getCreatedSql());
+    Assert.assertNotEquals("***", noConfidentialTable2.getCreatedSql());
 
     // load table list
     String[] failedLoadTables = {"KYLIN_LOGICAL_VIEW.logical_view_table2",
@@ -306,9 +312,8 @@ public class SparkDDLTest extends NLocalFileMetadataTestCase {
     LoadTableResponse tableResponse = new LoadTableResponse();
     tableExtService.filterAccessTables(successLoadTables, canLoadTables, tableResponse, "ssb");
     Assert.assertEquals(2, canLoadTables.size());
-    assertKylinExeption(
-        () ->
-            tableExtService.filterAccessTables(failedLoadTables, canLoadTables, tableResponse, "ssb"),
-        "Can't load table");
+    canLoadTables.clear();
+    tableExtService.filterAccessTables(failedLoadTables, canLoadTables, tableResponse, "ssb");
+    Assert.assertEquals(2, canLoadTables.size());
   }
 }
diff --git a/src/kylin-it/src/test/java/org/apache/kylin/newten/LogicalViewTest.java b/src/kylin-it/src/test/java/org/apache/kylin/newten/LogicalViewTest.java
index df98c689ab..58d1109a55 100644
--- a/src/kylin-it/src/test/java/org/apache/kylin/newten/LogicalViewTest.java
+++ b/src/kylin-it/src/test/java/org/apache/kylin/newten/LogicalViewTest.java
@@ -26,6 +26,7 @@ import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.engine.spark.NLocalWithSparkSessionTest;
 import org.apache.kylin.job.engine.JobEngineConfig;
 import org.apache.kylin.job.impl.threadpool.NDefaultScheduler;
+import org.apache.kylin.metadata.cube.model.LayoutEntity;
 import org.apache.kylin.metadata.cube.model.NDataflow;
 import org.apache.kylin.metadata.cube.model.NDataflowManager;
 import org.apache.kylin.metadata.model.SegmentRange;
@@ -34,6 +35,7 @@ import org.apache.kylin.util.ExecAndComp;
 import org.apache.spark.sql.SparderEnv;
 
 import org.junit.After;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -71,6 +73,8 @@ public class LogicalViewTest extends NLocalWithSparkSessionTest {
   public void testLogicalView() throws Exception {
     String dfID = "451e127a-b684-1474-744b-c9afc14378af";
     NDataflow dataflow = dfMgr.getDataflow(dfID);
+    LayoutEntity layout = dataflow.getIndexPlan().getLayoutEntity(20000000001L);
+    Assert.assertNotNull(layout);
     populateSSWithCSVData(getTestConfig(), getProject(), SparderEnv.getSparkSession());
     indexDataConstructor.buildIndex(dfID, SegmentRange.TimePartitionedSegmentRange.createInfinite(),
         Sets.newHashSet(
diff --git a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/SparkDDLController.java b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/SparkDDLController.java
index aa4ebbd9a6..4395f8c4c2 100644
--- a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/SparkDDLController.java
+++ b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/SparkDDLController.java
@@ -23,9 +23,9 @@ import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLI
 import java.util.List;
 
 import org.apache.kylin.common.exception.KylinException;
-import org.apache.kylin.metadata.view.LogicalView;
 import org.apache.kylin.rest.request.ViewRequest;
 import org.apache.kylin.rest.response.EnvelopeResponse;
+import org.apache.kylin.rest.response.LogicalViewResponse;
 import org.apache.kylin.rest.service.SparkDDLService;
 
 import org.apache.spark.sql.LogicalViewLoader;
@@ -83,11 +83,11 @@ public class SparkDDLController extends NBasicController {
   @ApiOperation(value = "ddl_desc")
   @GetMapping(value = "/ddl/view_list")
   @ResponseBody
-  public EnvelopeResponse<List<LogicalView>> list(
+  public EnvelopeResponse<List<LogicalViewResponse>> list(
       @RequestParam("project") String project,
       @RequestParam(value = "table", required = false, defaultValue = "") String tableName) {
     project = checkProjectName(project);
-    List<LogicalView> logicalViews = sparkDDLService.listAll(project, tableName);
+    List<LogicalViewResponse> logicalViews = sparkDDLService.listAll(project, tableName);
     return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, logicalViews, "");
   }
 }
diff --git a/src/modeling-service/src/main/java/org/apache/kylin/rest/service/MetaStoreService.java b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/MetaStoreService.java
index 3680b15cc4..7d6599b389 100644
--- a/src/modeling-service/src/main/java/org/apache/kylin/rest/service/MetaStoreService.java
+++ b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/MetaStoreService.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.rest.service;
 
 import static org.apache.kylin.common.constant.Constants.KE_VERSION;
+import static org.apache.kylin.common.exception.ServerErrorCode.FAILED_CREATE_MODEL;
 import static org.apache.kylin.common.exception.ServerErrorCode.MODEL_EXPORT_ERROR;
 import static org.apache.kylin.common.exception.ServerErrorCode.MODEL_IMPORT_ERROR;
 import static org.apache.kylin.common.exception.ServerErrorCode.MODEL_METADATA_FILE_ERROR;
@@ -97,6 +98,8 @@ import org.apache.kylin.metadata.project.NProjectManager;
 import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.metadata.query.util.QueryHisStoreUtil;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
+import org.apache.kylin.metadata.view.LogicalView;
+import org.apache.kylin.metadata.view.LogicalViewManager;
 import org.apache.kylin.rest.aspect.Transaction;
 import org.apache.kylin.rest.constant.ModelStatusToDisplayEnum;
 import org.apache.kylin.rest.request.ModelImportRequest;
@@ -440,7 +443,7 @@ public class MetaStoreService extends BasicService {
         ProjectInstance projectInstance = NProjectManager.getInstance(KylinConfig.getInstanceFromEnv())
                 .getProject(targetProject);
         ISourceMetadataExplorer explorer = SourceFactory.getSource(projectInstance).getSourceMetadataExplorer();
-
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
         List<TableDesc> existTableSet = Lists.newArrayList();
         for (TableDesc missTableDesc : missTableList) {
             try {
@@ -454,6 +457,14 @@ public class MetaStoreService extends BasicService {
             } catch (Exception e) {
                 logger.warn("try load table: {} failed.", missTableDesc.getIdentity(), e);
             }
+            if (config.isDDLLogicalViewEnabled() && missTableDesc.isLogicalView()) {
+                LogicalView logicalView = LogicalViewManager.getInstance(config).get(missTableDesc.getName());
+                if (logicalView != null && !targetProject.equalsIgnoreCase(logicalView.getCreatedProject())) {
+                    throw new KylinException(FAILED_CREATE_MODEL, String.format(Locale.ROOT,
+                        " Logical View %s can only add in project %s",
+                        missTableDesc.getName(), logicalView.getCreatedProject()));
+                }
+            }
         }
         return existTableSet;
     }


[kylin] 16/34: KYLIN-5448 update spring-boot-admin to 2.6.10

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 7501e54df78e33d9eb478c857faec17c82d1e3d8
Author: Yaguang Jia <ji...@foxmail.com>
AuthorDate: Thu Dec 29 14:31:23 2022 +0800

    KYLIN-5448 update spring-boot-admin to 2.6.10
---
 pom.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index ce5dfcb4a6..611087bf48 100644
--- a/pom.xml
+++ b/pom.xml
@@ -205,7 +205,7 @@
 
         <spring.cloud.version>2021.0.0</spring.cloud.version>
         <spring.boot.version>2.6.14</spring.boot.version>
-        <spring-boot-admin.version>2.6.6</spring-boot-admin.version>
+        <spring-boot-admin.version>2.6.10</spring-boot-admin.version>
         <spring-session.version>2.6.1-kylin-r4</spring-session.version>
         <spring.framework.security.extensions.version>1.0.10.RELEASE</spring.framework.security.extensions.version>
         <opensaml.version>2.6.6</opensaml.version>


[kylin] 01/34: KYLIN-5445 set epoch_target as primary key of epoch table

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit ca4f1d6000220ca8fa63ac4b252881aa517e4429
Author: Jiawei Li <10...@qq.com>
AuthorDate: Tue Jan 3 19:01:23 2023 +0800

    KYLIN-5445  set epoch_target as primary key of epoch table
    
    * KYLIN-5445  set epoch_target as primary key of epoch table
    
    * KYLIN-5445  minor fix default value
    
    * KYLIN-5445  minor add ut
    
    * KYLIN-5445  minor fix sonar
---
 .../persistence/metadata/JdbcEpochStore.java       | 32 ++++++++++++++++++++--
 .../common/persistence/metadata/jdbc/JdbcUtil.java | 24 ++++++++++++++++
 .../resources/metadata-jdbc-default.properties     |  4 +--
 .../src/main/resources/metadata-jdbc-h2.properties |  4 +--
 .../main/resources/metadata-jdbc-mysql.properties  |  4 +--
 .../resources/metadata-jdbc-postgresql.properties  |  4 +--
 .../metadata/epochstore/JdbcEpochStoreTest.java    | 15 ++++++++++
 7 files changed, 76 insertions(+), 11 deletions(-)

diff --git a/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/JdbcEpochStore.java b/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/JdbcEpochStore.java
index bb444822ac..7faa05f117 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/JdbcEpochStore.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/JdbcEpochStore.java
@@ -19,11 +19,13 @@ package org.apache.kylin.common.persistence.metadata;
 
 import static org.apache.kylin.common.exception.CommonErrorCode.FAILED_UPDATE_METADATA;
 import static org.apache.kylin.common.persistence.metadata.jdbc.JdbcUtil.datasourceParameters;
+import static org.apache.kylin.common.persistence.metadata.jdbc.JdbcUtil.isPrimaryKeyExists;
 import static org.apache.kylin.common.persistence.metadata.jdbc.JdbcUtil.isTableExists;
 import static org.apache.kylin.common.persistence.metadata.jdbc.JdbcUtil.withTransaction;
 import static org.apache.kylin.common.persistence.metadata.jdbc.JdbcUtil.withTransactionTimeout;
 
 import java.io.InputStream;
+import java.sql.Connection;
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
 import java.util.Arrays;
@@ -32,6 +34,8 @@ import java.util.Locale;
 import java.util.Objects;
 import java.util.Properties;
 
+import javax.sql.DataSource;
+
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.dbcp2.BasicDataSource;
 import org.apache.kylin.common.KylinConfig;
@@ -62,6 +66,8 @@ public class JdbcEpochStore extends EpochStore {
     static final String MAINTENANCE_MODE_REASON = "maintenance_mode_reason";
     static final String MVCC = "mvcc";
 
+    static final String ADD_PRIMARY_KEY_SQL = "alter table %s ADD PRIMARY KEY(" + EPOCH_TARGET + ")";
+
     static final String INSERT_SQL = "insert into %s (" + Joiner.on(",").join(EPOCH_ID, EPOCH_TARGET,
             CURRENT_EPOCH_OWNER, LAST_EPOCH_RENEW_TIME, SERVER_MODE, MAINTENANCE_MODE_REASON, MVCC)
             + ") values (?, ?, ?, ?, ?, ?, ?)";
@@ -101,12 +107,32 @@ public class JdbcEpochStore extends EpochStore {
 
     public static String getEpochSql(String sql, String tableName) {
         return String.format(Locale.ROOT, sql, tableName, EPOCH_ID, EPOCH_TARGET, CURRENT_EPOCH_OWNER,
-                LAST_EPOCH_RENEW_TIME, SERVER_MODE, MAINTENANCE_MODE_REASON, MVCC, tableName, EPOCH_TARGET,
-                EPOCH_TARGET);
+                LAST_EPOCH_RENEW_TIME, SERVER_MODE, MAINTENANCE_MODE_REASON, MVCC, EPOCH_TARGET);
+    }
+
+    public static String getAddPrimarykeySql(String tableName) {
+        return String.format(Locale.ROOT, ADD_PRIMARY_KEY_SQL, tableName);
+
     }
 
+    private Connection getConnection(JdbcTemplate jdbcTemplate) throws SQLException {
+        DataSource dataSource = jdbcTemplate.getDataSource();
+        if (dataSource == null) {
+            return null;
+        }
+        return dataSource.getConnection();
+    }
+
+    @Override
     public void createIfNotExist() throws Exception {
-        if (isTableExists(jdbcTemplate.getDataSource().getConnection(), table)) {
+        if (isTableExists(getConnection(jdbcTemplate), table)) {
+            if (!isPrimaryKeyExists(getConnection(jdbcTemplate), table)) {
+                withTransaction(transactionManager, () -> {
+                    jdbcTemplate.execute(getAddPrimarykeySql(table));
+                    return 1;
+                });
+                log.info("Succeed to add table primary key: {}", table);
+            }
             return;
         }
         String fileName = "metadata-jdbc-default.properties";
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java b/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java
index 8d4d54873c..a4bc8e4221 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java
@@ -118,6 +118,30 @@ public class JdbcUtil {
         return false;
     }
 
+    public static boolean isPrimaryKeyExists(Connection conn, String table) throws SQLException {
+        return isPrimaryKeyExists(conn, table, table.toUpperCase(Locale.ROOT), table.toLowerCase(Locale.ROOT));
+    }
+
+    private static boolean isPrimaryKeyExists(Connection conn, String... tables) throws SQLException {
+        try {
+            for (String table : tables) {
+                val resultSet = conn.getMetaData().getPrimaryKeys(conn.getCatalog(), conn.getSchema(), table);
+                if (resultSet.next()) {
+                    return true;
+                }
+            }
+            
+            return false;
+        } catch (Exception e) {
+            logger.error("Fail to know if table {} primary key exists", tables, e);
+        } finally {
+            if (!conn.isClosed()) {
+                conn.close();
+            }
+        }
+        return true;
+    }
+
     public static boolean isIndexExists(Connection conn, String table, String index) throws SQLException {
         return isIndexExists(conn, index, table, table.toUpperCase(Locale.ROOT), table.toLowerCase(Locale.ROOT));
     }
diff --git a/src/core-common/src/main/resources/metadata-jdbc-default.properties b/src/core-common/src/main/resources/metadata-jdbc-default.properties
index 35879f9e34..09f0bfea78 100644
--- a/src/core-common/src/main/resources/metadata-jdbc-default.properties
+++ b/src/core-common/src/main/resources/metadata-jdbc-default.properties
@@ -36,7 +36,7 @@ create.auditlog.store.table=create table if not exists %s ( \
 
 create.epoch.store.table=create table if not exists %s ( \
     %s int null, \
-    %s varchar(255) null, \
+    %s varchar(255), \
     %s varchar(2000) null, \
     %s bigint null, \
     %s varchar(10) null, \
@@ -45,5 +45,5 @@ create.epoch.store.table=create table if not exists %s ( \
     `reserved_field_1` VARCHAR(50), \
     `reserved_field_2` longblob, \
     `reserved_field_3` longblob, \
-    constraint %s_%s_uindex unique (%s) \
+    primary key(%s) \
 );
\ No newline at end of file
diff --git a/src/core-common/src/main/resources/metadata-jdbc-h2.properties b/src/core-common/src/main/resources/metadata-jdbc-h2.properties
index 45ad513c27..77328950c1 100644
--- a/src/core-common/src/main/resources/metadata-jdbc-h2.properties
+++ b/src/core-common/src/main/resources/metadata-jdbc-h2.properties
@@ -102,7 +102,7 @@ create.rawrecommendation.store.index=
 
 create.epoch.store.table=create table if not exists %s ( \
     %s int null, \
-    %s varchar(255) null, \
+    %s varchar(255), \
     %s varchar(2000) null, \
     %s bigint null, \
     %s varchar(10) null, \
@@ -111,7 +111,7 @@ create.epoch.store.table=create table if not exists %s ( \
     `reserved_field_1` VARCHAR(50), \
     `reserved_field_2` longblob, \
     `reserved_field_3` longblob, \
-    constraint %s_%s_uindex unique (%s) \
+    primary key(%s) \
 );
 
 #### JDBC STREAMING JOB STATS STORE
diff --git a/src/core-common/src/main/resources/metadata-jdbc-mysql.properties b/src/core-common/src/main/resources/metadata-jdbc-mysql.properties
index 2f855a8eb6..6a2df4c3b1 100644
--- a/src/core-common/src/main/resources/metadata-jdbc-mysql.properties
+++ b/src/core-common/src/main/resources/metadata-jdbc-mysql.properties
@@ -174,7 +174,7 @@ create.rawrecommendation.store.index=ALTER TABLE %s ADD UNIQUE %s_idx (project,
 
 create.epoch.store.table=create table if not exists %s ( \
     %s int null, \
-    %s varchar(255) null, \
+    %s varchar(255), \
     %s varchar(2000) null, \
     %s bigint null, \
     %s varchar(10) null, \
@@ -183,7 +183,7 @@ create.epoch.store.table=create table if not exists %s ( \
     `reserved_field_1` VARCHAR(50), \
     `reserved_field_2` longblob, \
     `reserved_field_3` longblob, \
-    constraint %s_%s_uindex unique (%s) \
+    primary key(%s) \
 ) ENGINE=INNODB DEFAULT CHARSET=utf8;
 
 ### jdbc distributed lock
diff --git a/src/core-common/src/main/resources/metadata-jdbc-postgresql.properties b/src/core-common/src/main/resources/metadata-jdbc-postgresql.properties
index a8b8839fc3..a26f852134 100644
--- a/src/core-common/src/main/resources/metadata-jdbc-postgresql.properties
+++ b/src/core-common/src/main/resources/metadata-jdbc-postgresql.properties
@@ -173,7 +173,7 @@ create.rawrecommendation.store.index=CREATE UNIQUE INDEX %s_idx ON %s using btre
 
 create.epoch.store.table=create table if not exists %s ( \
     %s int null, \
-    %s varchar(255) null, \
+    %s varchar(255), \
     %s varchar(2000) null, \
     %s bigint null, \
     %s varchar(10) null, \
@@ -182,7 +182,7 @@ create.epoch.store.table=create table if not exists %s ( \
     reserved_field_1 VARCHAR(50), \
     reserved_field_2 bytea, \
     reserved_field_3 bytea, \
-    constraint %s_%s_uindex unique (%s) \
+    primary key(%s) \
 );
 
 ### jdbc distributed lock
diff --git a/src/core-common/src/test/java/org/apache/kylin/common/persistence/metadata/epochstore/JdbcEpochStoreTest.java b/src/core-common/src/test/java/org/apache/kylin/common/persistence/metadata/epochstore/JdbcEpochStoreTest.java
index d51c15ec66..5b30affd7a 100644
--- a/src/core-common/src/test/java/org/apache/kylin/common/persistence/metadata/epochstore/JdbcEpochStoreTest.java
+++ b/src/core-common/src/test/java/org/apache/kylin/common/persistence/metadata/epochstore/JdbcEpochStoreTest.java
@@ -18,9 +18,12 @@
 package org.apache.kylin.common.persistence.metadata.epochstore;
 
 import static org.apache.kylin.common.persistence.metadata.jdbc.JdbcUtil.datasourceParameters;
+import static org.apache.kylin.common.persistence.metadata.jdbc.JdbcUtil.isPrimaryKeyExists;
 import static org.apache.kylin.common.util.TestUtils.getTestConfig;
 import static org.awaitility.Awaitility.await;
 
+import java.sql.Connection;
+import java.util.Locale;
 import java.util.concurrent.TimeUnit;
 
 import org.apache.commons.dbcp2.BasicDataSourceFactory;
@@ -59,6 +62,18 @@ public final class JdbcEpochStoreTest extends AbstractEpochStoreTest {
         return new JdbcTemplate(dataSource);
     }
 
+    @Test
+    void testAddPrimaryKey() throws Exception {
+        val jdbcTemplate = getJdbcTemplate();
+        String table = getTestConfig().getMetadataUrl().getIdentifier() + "_epoch";
+        jdbcTemplate.execute(String.format(Locale.ROOT, "alter table %s  drop primary key", table));
+        Connection conn = jdbcTemplate.getDataSource().getConnection();
+        assert !isPrimaryKeyExists(conn, table);
+        epochStore = getEpochStore();
+        conn = getJdbcTemplate().getDataSource().getConnection();
+        assert isPrimaryKeyExists(conn, table);
+    }
+
     @Test
     void testExecuteWithTransaction_RollBack() {
 


[kylin] 15/34: KYLIN-5448 fix snyk vulnerabilities, upgrade netty-codec and netty-codec-haproxy from 4.1.85.Final to 4.1.86.Final

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit f044cdd593d74b9b2bc7643a68bddc93efb390a0
Author: huangsheng <hu...@163.com>
AuthorDate: Thu Dec 29 10:55:29 2022 +0800

    KYLIN-5448 fix snyk vulnerabilities, upgrade netty-codec and netty-codec-haproxy from 4.1.85.Final to 4.1.86.Final
    
    * KYLIN-5448 fix snyk vulnerabilities, upgrade netty-codec and netty-codec-haproxy from 4.1.85.Final to 4.1.86.Final
---
 pom.xml | 19 ++++++++++++++++++-
 1 file changed, 18 insertions(+), 1 deletion(-)

diff --git a/pom.xml b/pom.xml
index bb4fc4f907..ce5dfcb4a6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -352,6 +352,7 @@
         <ant.version>1.9.16</ant.version>
         <netty-all.version>4.1.73.Final</netty-all.version>
         <netty.version>3.10.6.Final</netty.version>
+        <netty-codec.version>4.1.86.Final</netty-codec.version>
         <servo-core.version>0.13.2</servo-core.version>
         <apache-api.version>1.0.3</apache-api.version>
         <saas.nacos.discovery.starter.version>2.6.2</saas.nacos.discovery.starter.version>
@@ -714,7 +715,11 @@
                 <artifactId>kylin-soft-affinity-cache</artifactId>
                 <version>${project.version}</version>
             </dependency>
-
+            <dependency>
+                <groupId>io.netty</groupId>
+                <artifactId>netty-codec-haproxy</artifactId>
+                <version>${netty-codec.version}</version>
+            </dependency>
             <dependency>
                 <groupId>io.dropwizard.metrics</groupId>
                 <artifactId>metrics-core</artifactId>
@@ -889,6 +894,10 @@
                 <groupId>org.apache.hadoop</groupId>
                 <artifactId>hadoop-hdfs</artifactId>
                 <exclusions>
+                    <exclusion>
+                        <groupId>io.netty</groupId>
+                        <artifactId>netty-codec-haproxy</artifactId>
+                    </exclusion>
                     <exclusion>
                         <groupId>com.sun.jersey</groupId>
                         <artifactId>*</artifactId>
@@ -2488,6 +2497,10 @@
                 <groupId>org.apache.spark</groupId>
                 <artifactId>spark-core_2.12</artifactId>
                 <exclusions>
+                    <exclusion>
+                        <groupId>io.netty</groupId>
+                        <artifactId>netty-codec-haproxy</artifactId>
+                    </exclusion>
                     <exclusion>
                         <artifactId>jetty-plus</artifactId>
                         <groupId>org.eclipse.jetty</groupId>
@@ -2565,6 +2578,10 @@
                 <type>test-jar</type>
                 <scope>test</scope>
                 <exclusions>
+                    <exclusion>
+                        <groupId>io.netty</groupId>
+                        <artifactId>netty-codec-haproxy</artifactId>
+                    </exclusion>
                     <exclusion>
                         <artifactId>jetty-plus</artifactId>
                         <groupId>org.eclipse.jetty</groupId>


[kylin] 05/34: KYLIN-5445 minor fix log

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 306af607b27c7c996efd6af7202c3971b035c8d8
Author: jiawei.li <10...@qq.com>
AuthorDate: Sun Jan 8 10:53:47 2023 +0800

    KYLIN-5445 minor fix log
---
 .../org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java     | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java b/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java
index 1bd3883af8..afe9e37174 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/persistence/metadata/jdbc/JdbcUtil.java
@@ -131,7 +131,7 @@ public class JdbcUtil {
                         return true;
                     }
                 } catch (Exception e) {
-                    log.warn("get primary key from table {} failed", table, e);
+                    logger.warn("get primary key from table {} failed", table, e);
                 }
             }
         } finally {


[kylin] 10/34: KYLIN-5449 refactor common-service kylin-tool

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit c9769b0ca2bfaf2b51d1291327e0ab7681f3a6c1
Author: qianhao.zhou <z....@gmail.com>
AuthorDate: Sun Dec 25 12:41:04 2022 +0800

    KYLIN-5449 refactor common-service kylin-tool
    
    * refactor common-service kylin-tool
    
    * fix UT
    
    * fix UT
    
    * fix UT
    
    * fix code smell
    
    * fix code smell
    
    * fix code smell
    
    * remove systools pom file
    
    Co-authored-by: qhzhou <qi...@kyligence.io>
---
 .../kylin/rest/controller/NSystemController.java   |  10 +-
 src/common-service/pom.xml                         |  11 +-
 .../org/apache/kylin/helper/HelperConstants.java}  |  18 +-
 .../apache/kylin/helper/MetadataToolHelper.java}   | 568 +++++++++------------
 .../org/apache/kylin/helper/RoutineToolHelper.java | 116 +++++
 .../kylin/helper/UpdateUserAclToolHelper.java      |  80 +++
 .../kylin/rest/security/AdminUserAspect.java       |  16 +-
 .../kylin/rest/service/MetadataBackupService.java  |  49 +-
 .../apache/kylin/rest/service/ProjectService.java  |   2 +-
 .../apache/kylin/rest/service/SystemService.java   |  36 +-
 .../apache/kylin/rest/service/UserAclService.java  |   8 +-
 .../org/apache/kylin/tool/HDFSMetadataTool.java    |   4 +
 .../apache/kylin/tool/constant/DiagTypeEnum.java   |   0
 .../org/apache/kylin/tool/constant/StageEnum.java  |   0
 .../org/apache/kylin/tool/daemon/CheckResult.java  |   0
 .../apache/kylin/tool/daemon/CheckStateEnum.java   |   0
 .../apache/kylin/tool/daemon/HealthChecker.java    |   0
 .../kylin/tool/daemon/KapGuardianHATask.java       |   0
 .../kylin/tool/daemon/ServiceOpLevelEnum.java      |   0
 .../java/org/apache/kylin/tool/daemon/Worker.java  |   4 +-
 .../tool/daemon/checker/AbstractHealthChecker.java |   0
 .../tool/daemon/checker/FullGCDurationChecker.java |   0
 .../tool/daemon/checker/KEProcessChecker.java      |   0
 .../kylin/tool/daemon/checker/KEStatusChecker.java |  10 +-
 .../kylin/tool/garbage/ExecutableCleaner.java      |   5 +-
 .../apache/kylin/tool/garbage/GarbageCleaner.java  |   4 -
 .../apache/kylin/tool/garbage/IndexCleaner.java    |   0
 .../apache/kylin/tool/garbage/MetadataCleaner.java |   4 +-
 .../apache/kylin/tool/garbage/SnapshotCleaner.java |   5 +-
 .../kylin/tool/garbage/SourceUsageCleaner.java     |   0
 .../apache/kylin/tool/garbage/StorageCleaner.java  |  88 ++--
 .../tool/kerberos/DelegationTokenManager.java      |   0
 .../kylin/tool/kerberos/KerberosLoginUtil.java     | 164 ++----
 .../java/org/apache/kylin/tool/util/LdapUtils.java |   0
 .../util/ProjectTemporaryTableCleanerHelper.java   |   0
 .../java/org/apache/kylin/tool/util/ToolUtil.java  |  25 +-
 .../kylin/rest/service/LdapUserServiceTest.java    |  11 +-
 .../kylin/rest/service/OpenUserServiceTest.java    |   7 +-
 .../ProjectTemporaryTableCleanerHelperTest.java    |   0
 .../org/apache/kylin/tool/util/ToolUtilTest.java   |   0
 .../java/org/apache/kylin/common/util/Pair.java    |   2 +-
 .../org/apache/kylin/tool/util/HashFunction.java   |   0
 .../apache/kylin/tool/util/HashFunctionTest.java   |   0
 .../kylin/job/execution/NExecutableManager.java    |   2 -
 src/core-metadata/pom.xml                          |   4 +
 .../kylin/metrics/HdfsCapacityMetricsTest.java     |   3 +
 .../apache/kylin/rest/service/ScheduleService.java |  20 +-
 src/kylin-it/pom.xml                               |   4 +
 .../rest/controller/NMetaStoreController.java      |   3 +-
 .../kylin/rest/service/MetaStoreService.java       |  18 +-
 .../org/apache/kylin/tool/bisync/BISyncModel.java  |   0
 .../kylin/tool/bisync/BISyncModelConverter.java    |   0
 .../org/apache/kylin/tool/bisync/BISyncTool.java   |   0
 .../org/apache/kylin/tool/bisync/SyncContext.java  |   0
 .../apache/kylin/tool/bisync/SyncModelBuilder.java |   0
 .../apache/kylin/tool/bisync/model/ColumnDef.java  |   0
 .../kylin/tool/bisync/model/JoinTreeNode.java      |   3 +-
 .../apache/kylin/tool/bisync/model/MeasureDef.java |   0
 .../apache/kylin/tool/bisync/model/SyncModel.java  |   0
 .../bisync/tableau/TableauDataSourceConverter.java |   2 +-
 .../bisync/tableau/TableauDatasourceModel.java     |   4 +-
 .../tool/bisync/tableau/datasource/Aliases.java    |   0
 .../tool/bisync/tableau/datasource/DrillPath.java  |  13 -
 .../tool/bisync/tableau/datasource/DrillPaths.java |   0
 .../tool/bisync/tableau/datasource/Layout.java     |   0
 .../bisync/tableau/datasource/SemanticValue.java   |   0
 .../tableau/datasource/SemanticValueList.java      |   0
 .../tableau/datasource/TableauConnection.java      |   0
 .../tableau/datasource/TableauDatasource.java      |   0
 .../tableau/datasource/column/Calculation.java     |   0
 .../bisync/tableau/datasource/column/Column.java   |   0
 .../bisync/tableau/datasource/connection/Col.java  |   0
 .../bisync/tableau/datasource/connection/Cols.java |   0
 .../tableau/datasource/connection/Connection.java  |   0
 .../connection/ConnectionCustomization.java        |   0
 .../datasource/connection/NamedConnection.java     |   0
 .../datasource/connection/NamedConnectionList.java |   0
 .../connection/customization/Customization.java    |   0
 .../customization/CustomizationList.java           |   0
 .../connection/customization/Driver.java           |   0
 .../connection/customization/Vendor.java           |   0
 .../datasource/connection/metadata/Attribute.java  |   0
 .../connection/metadata/AttributeList.java         |   0
 .../datasource/connection/metadata/Collation.java  |   0
 .../connection/metadata/MetadataRecord.java        |   0
 .../connection/metadata/MetadataRecordList.java    |   0
 .../datasource/connection/relation/Clause.java     |   0
 .../datasource/connection/relation/Expression.java |   0
 .../datasource/connection/relation/Relation.java   |   0
 .../bisync/tableau/mapping/FunctionMapping.java    |   0
 .../tool/bisync/tableau/mapping/Mappings.java      |   0
 .../tool/bisync/tableau/mapping/TypeMapping.java   |   0
 .../bisync/tds/tableau.connector.template.xml      |   0
 .../main/resources/bisync/tds/tableau.mappings.xml |   0
 .../main/resources/bisync/tds/tableau.template.xml |   0
 .../kylin/rest/service/ModelTdsServiceTest.java    |   6 +-
 .../kylin/tool/bisync/SyncModelBuilderTest.java    |   0
 .../kylin/tool/bisync/SyncModelTestUtil.java       |   0
 .../tool/bisync/tableau/TableauDatasourceTest.java |   0
 .../bisync_tableau/nmodel_basic_all_cols.tds       |   0
 .../bisync_tableau/nmodel_basic_inner_all_cols.tds |   0
 .../nmodel_full_measure_test.connector.tds         |   2 +-
 .../nmodel_full_measure_test.connector_cc.tds      |   0
 ...nmodel_full_measure_test.connector_cc_admin.tds |   0
 ...del_full_measure_test.connector_hierarchies.tds |   0
 ..._full_measure_test.connector_no_hierarchies.tds |   0
 ...odel_full_measure_test.connector_permission.tds |   0
 ...ure_test.connector_permission_agg_index_col.tds |   0
 ...l_measure_test.connector_permission_all_col.tds |   0
 ...easure_test.connector_permission_no_measure.tds |   0
 ...del_full_measure_test.table_index_connector.tds |   0
 .../bisync_tableau/nmodel_full_measure_test.tds    |   0
 .../org/apache/kylin/rest/HAConfigurationTest.java |   6 +-
 src/systools/pom.xml                               | 113 ----
 src/tool/pom.xml                                   |  10 +
 .../kylin/tool/AbstractInfoExtractorTool.java      |   2 +-
 .../java/org/apache/kylin/tool/MetadataTool.java   | 454 ++--------------
 .../java/org/apache/kylin/tool/RollbackTool.java   |  28 +-
 .../daemon/handler/AbstractCheckStateHandler.java  |   4 +-
 .../apache/kylin/tool/routine/FastRoutineTool.java |  15 +-
 .../org/apache/kylin/tool/routine/RoutineTool.java | 118 +----
 .../kylin/tool/upgrade/UpdateUserAclTool.java      |  51 +-
 .../org/apache/kylin/tool/util/MetadataUtil.java   |   9 +-
 .../org/apache/kylin/tool/MetadataToolTest.java    |  53 +-
 .../tool/security/KylinPasswordResetCLITest.java   |   1 +
 .../kylin/tool/upgrade/UpdateUserAclToolTest.java  |   5 +-
 .../nmodel_full_measure_test.connector.tds         | 125 -----
 127 files changed, 797 insertions(+), 1528 deletions(-)

diff --git a/src/common-server/src/main/java/org/apache/kylin/rest/controller/NSystemController.java b/src/common-server/src/main/java/org/apache/kylin/rest/controller/NSystemController.java
index ebf502c28b..26c02418f6 100644
--- a/src/common-server/src/main/java/org/apache/kylin/rest/controller/NSystemController.java
+++ b/src/common-server/src/main/java/org/apache/kylin/rest/controller/NSystemController.java
@@ -41,6 +41,7 @@ import org.apache.kylin.common.persistence.transaction.UnitOfWork;
 import org.apache.kylin.common.persistence.transaction.UnitOfWorkParams;
 import org.apache.kylin.common.scheduler.EventBusFactory;
 import org.apache.kylin.common.util.AddressUtil;
+import org.apache.kylin.helper.MetadataToolHelper;
 import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
 import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.rest.cluster.ClusterManager;
@@ -58,6 +59,7 @@ import org.apache.kylin.rest.service.MetadataBackupService;
 import org.apache.kylin.rest.service.ProjectService;
 import org.apache.kylin.rest.service.SystemService;
 import org.apache.kylin.rest.util.AclEvaluate;
+import org.apache.kylin.tool.HDFSMetadataTool;
 import org.apache.kylin.tool.util.ToolUtil;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -103,6 +105,7 @@ public class NSystemController extends NBasicController {
     @Autowired
     @Qualifier("projectService")
     private ProjectService projectService;
+    private MetadataToolHelper metadataToolHelper = new MetadataToolHelper();
 
     @VisibleForTesting
     public void setAclEvaluate(AclEvaluate aclEvaluate) {
@@ -118,8 +121,11 @@ public class NSystemController extends NBasicController {
     @GetMapping(value = "/metadata/dump")
     @ResponseBody
     public EnvelopeResponse<String> dumpMetadata(@RequestParam(value = "dump_path") String dumpPath) throws Exception {
-        String[] args = new String[] { "-backup", "-compress", "-dir", dumpPath };
-        metadataBackupService.backup(args);
+        val kylinConfig = KylinConfig.getInstanceFromEnv();
+        HDFSMetadataTool.cleanBeforeBackup(kylinConfig);
+        val backupConfig = kylinConfig.getMetadataBackupFromSystem() ? kylinConfig
+                : KylinConfig.createKylinConfig(kylinConfig);
+        metadataToolHelper.backup(backupConfig, null, dumpPath, null, true, false);
         return new EnvelopeResponse<>(CODE_SUCCESS, "", "");
     }
 
diff --git a/src/common-service/pom.xml b/src/common-service/pom.xml
index adaff3899b..db687b8f80 100644
--- a/src/common-service/pom.xml
+++ b/src/common-service/pom.xml
@@ -26,13 +26,16 @@
     </parent>
     <modelVersion>4.0.0</modelVersion>
     <name>Kylin - Common Service</name>
-    <groupId>org.apache.kylin</groupId>
     <artifactId>kylin-common-service</artifactId>
 
     <dependencies>
         <dependency>
             <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-tool</artifactId>
+            <artifactId>kylin-core-metadata</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.kylin</groupId>
+            <artifactId>kylin-streaming</artifactId>
         </dependency>
         <dependency>
             <groupId>org.springframework</groupId>
@@ -42,6 +45,10 @@
             <groupId>org.springframework.security</groupId>
             <artifactId>spring-security-web</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.springframework.security</groupId>
+            <artifactId>spring-security-ldap</artifactId>
+        </dependency>
         <dependency>
             <groupId>commons-fileupload</groupId>
             <artifactId>commons-fileupload</artifactId>
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/SemanticValueList.java b/src/common-service/src/main/java/org/apache/kylin/helper/HelperConstants.java
similarity index 65%
copy from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/SemanticValueList.java
copy to src/common-service/src/main/java/org/apache/kylin/helper/HelperConstants.java
index 4f89d1bf1d..9133d37013 100644
--- a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/SemanticValueList.java
+++ b/src/common-service/src/main/java/org/apache/kylin/helper/HelperConstants.java
@@ -15,17 +15,19 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.kylin.tool.bisync.tableau.datasource;
 
-import java.util.List;
+package org.apache.kylin.helper;
 
-import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlElementWrapper;
-import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty;
+import java.time.format.DateTimeFormatter;
+import java.util.Locale;
 
-public class SemanticValueList {
+/*
+ * this class is only for removing dependency of kylin-tool module, and should be refactor later
+ */
+class HelperConstants {
 
-    @JacksonXmlProperty(localName = "semantic-value")
-    @JacksonXmlElementWrapper(useWrapping = false)
-    private List<SemanticValue> semanticValueList;
+    private HelperConstants() {}
 
+    static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd-HH-mm-ss",
+            Locale.getDefault(Locale.Category.FORMAT));
 }
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/MetadataTool.java b/src/common-service/src/main/java/org/apache/kylin/helper/MetadataToolHelper.java
similarity index 59%
copy from src/tool/src/main/java/org/apache/kylin/tool/MetadataTool.java
copy to src/common-service/src/main/java/org/apache/kylin/helper/MetadataToolHelper.java
index daa8aed307..2804bdc4c3 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/MetadataTool.java
+++ b/src/common-service/src/main/java/org/apache/kylin/helper/MetadataToolHelper.java
@@ -16,171 +16,274 @@
  * limitations under the License.
  */
 
-package org.apache.kylin.tool;
+package org.apache.kylin.helper;
 
 import static org.apache.kylin.common.exception.code.ErrorCodeTool.FILE_ALREADY_EXISTS;
-import static org.apache.kylin.common.exception.code.ErrorCodeTool.PARAMETER_NOT_SPECIFY;
 
 import java.io.File;
 import java.io.IOException;
 import java.net.URI;
+import java.nio.file.FileSystems;
 import java.nio.file.Paths;
 import java.time.Clock;
 import java.time.LocalDateTime;
 import java.time.format.DateTimeFormatter;
 import java.util.Collections;
+import java.util.List;
 import java.util.Locale;
 import java.util.NavigableSet;
 import java.util.Objects;
 import java.util.Set;
 import java.util.stream.Collectors;
 
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionGroup;
-import org.apache.commons.cli.Options;
+import javax.sql.DataSource;
+
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.KylinConfigBase;
 import org.apache.kylin.common.exception.KylinException;
-import org.apache.kylin.common.persistence.ResourceStore;
-import org.apache.kylin.common.util.ExecutableApplication;
-import org.apache.kylin.common.util.HadoopUtil;
-import org.apache.kylin.common.util.JsonUtil;
-import org.apache.kylin.common.util.OptionsHelper;
-import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.common.metrics.MetricsCategory;
 import org.apache.kylin.common.metrics.MetricsGroup;
 import org.apache.kylin.common.metrics.MetricsName;
 import org.apache.kylin.common.persistence.ImageDesc;
+import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.common.persistence.metadata.AuditLogStore;
+import org.apache.kylin.common.persistence.metadata.JdbcDataSource;
+import org.apache.kylin.common.persistence.metadata.jdbc.JdbcUtil;
 import org.apache.kylin.common.persistence.transaction.UnitOfWork;
 import org.apache.kylin.common.persistence.transaction.UnitOfWorkParams;
-import org.apache.kylin.common.util.AddressUtil;
+import org.apache.kylin.common.util.HadoopUtil;
+import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.MetadataChecker;
-import org.apache.kylin.common.util.OptionBuilder;
-import org.apache.kylin.common.util.Unsafe;
-import org.apache.kylin.tool.util.ScreenPrintUtil;
-import org.apache.kylin.tool.util.ToolMainWrapper;
+import org.apache.kylin.metadata.project.ProjectInstance;
+import org.apache.kylin.tool.HDFSMetadataTool;
+import org.apache.kylin.tool.garbage.StorageCleaner;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Preconditions;
 import com.google.common.collect.Sets;
 
 import io.kyligence.kap.guava20.shaded.common.io.ByteSource;
-import lombok.Getter;
 import lombok.val;
 import lombok.var;
 
-public class MetadataTool extends ExecutableApplication {
-    public static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd-HH-mm-ss",
-            Locale.getDefault(Locale.Category.FORMAT));
-    private static final Logger logger = LoggerFactory.getLogger("diag");
-    private static final String HDFS_METADATA_URL_FORMATTER = "kylin_metadata@hdfs,path=%s";
+/*
+ * this class is only for removing the dependency on the kylin-tool module, and should be refactored later
+ */
+public class MetadataToolHelper {
 
+    public static final DateTimeFormatter DATE_TIME_FORMATTER = HelperConstants.DATE_TIME_FORMATTER;
     private static final String GLOBAL = "global";
+    private static final String HDFS_METADATA_URL_FORMATTER = "kylin_metadata@hdfs,path=%s";
 
-    @SuppressWarnings("static-access")
-    private static final Option OPERATE_BACKUP = OptionBuilder.getInstance()
-            .withDescription("Backup metadata to local path or HDFS path").isRequired(false).create("backup");
-
-    private static final Option OPERATE_COMPRESS = OptionBuilder.getInstance()
-            .withDescription("Backup compressed metadata to HDFS path").isRequired(false).create("compress");
-
-    private static final Option OPERATE_FETCH = OptionBuilder.getInstance()
-            .withDescription("Fetch part of metadata to local path").isRequired(false).create("fetch");
-
-    private static final Option OPERATE_LIST = OptionBuilder.getInstance()
-            .withDescription("List children of target folder").isRequired(false).create("list");
-
-    private static final Option OPERATE_RESTORE = OptionBuilder.getInstance()
-            .withDescription("Restore metadata from local path or HDFS path").isRequired(false).create("restore");
-
-    private static final Option OPTION_AFTER_TRUNCATE = OptionBuilder.getInstance()
-            .withDescription("Restore overwrite metadata from local path or HDFS path (optional)").isRequired(false)
-            .withLongOpt("after-truncate").hasArg(false).create("d");
-
-    private static final Option OPTION_DIR = OptionBuilder.getInstance().hasArg().withArgName("DIRECTORY_PATH")
-            .withDescription("Specify the target directory for backup and restore").isRequired(false).create("dir");
+    private static final Logger logger = LoggerFactory.getLogger(MetadataToolHelper.class);
 
-    private static final Option OPTION_PROJECT = OptionBuilder.getInstance().hasArg().withArgName("PROJECT_NAME")
-            .withDescription("Specify project level backup and restore (optional)").isRequired(false).create("project");
+    public void rotateAuditLog() {
+        val resourceStore = ResourceStore.getKylinMetaStore(KylinConfig.getInstanceFromEnv());
+        val auditLogStore = resourceStore.getAuditLogStore();
+        auditLogStore.rotate();
+    }
 
-    private static final Option OPTION_TARGET = OptionBuilder.getInstance().hasArg().withArgName("TARGET_FILE")
-            .withDescription("Specify part of metadata for fetch to local path").isRequired(false).create("target");
+    public void backup(KylinConfig kylinConfig) throws Exception {
+        HDFSMetadataTool.cleanBeforeBackup(kylinConfig);
+        new MetadataToolHelper().backup(kylinConfig, null, HadoopUtil.getBackupFolder(kylinConfig), null, true, false);
+    }
 
-    private static final Option FOLDER_NAME = OptionBuilder.getInstance().hasArg().withArgName("FOLDER_NAME")
-            .withDescription("Specify the folder name for backup").isRequired(false).create("folder");
+    public void backup(KylinConfig kylinConfig, String dir, String folder) throws Exception {
+        HDFSMetadataTool.cleanBeforeBackup(kylinConfig);
+        new MetadataToolHelper().backup(kylinConfig, null, dir, folder, true, false);
+    }
 
-    private static final Option OPTION_EXCLUDE_TABLE_EXD = OptionBuilder.getInstance()
-            .withDescription("Exclude metadata {project}/table_exd directory").isRequired(false)
-            .create("excludeTableExd");
+    public void backup(KylinConfig kylinConfig, String project, String path, String folder, boolean compress,
+            boolean excludeTableExd) throws Exception {
+        boolean isGlobal = null == project;
+        long startAt = System.currentTimeMillis();
+        try {
+            doBackup(kylinConfig, project, path, folder, compress, excludeTableExd);
+        } catch (Exception be) {
+            if (isGlobal) {
+                MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP_FAILED, MetricsCategory.GLOBAL, GLOBAL);
+            } else {
+                MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP_FAILED, MetricsCategory.PROJECT, project);
+            }
+            throw be;
+        } finally {
+            if (isGlobal) {
+                MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP, MetricsCategory.GLOBAL, GLOBAL);
+                MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP_DURATION, MetricsCategory.GLOBAL, GLOBAL,
+                        System.currentTimeMillis() - startAt);
+            } else {
+                MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP, MetricsCategory.PROJECT, project);
+                MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP_DURATION, MetricsCategory.PROJECT, project,
+                        System.currentTimeMillis() - startAt);
+            }
+        }
+    }
 
-    private final Options options;
+    void doBackup(KylinConfig kylinConfig, String project, String path, String folder, boolean compress,
+            boolean excludeTableExd) throws Exception {
+        ResourceStore resourceStore = ResourceStore.getKylinMetaStore(kylinConfig);
+        boolean isUTEnv = kylinConfig.isUTEnv();
 
-    private final KylinConfig kylinConfig;
+        if (StringUtils.isBlank(path)) {
+            path = KylinConfigBase.getKylinHome() + File.separator + "meta_backups";
+        }
+        if (StringUtils.isEmpty(folder)) {
+            folder = LocalDateTime.now(Clock.systemDefaultZone()).format(MetadataToolHelper.DATE_TIME_FORMATTER)
+                    + "_backup";
+        }
+        String backupPath = StringUtils.appendIfMissing(path, "/") + folder;
+        logger.info("The metadata backup path is {}", backupPath);
+        val backupMetadataUrl = getMetadataUrl(backupPath, compress, kylinConfig);
+        val backupConfig = KylinConfig.createKylinConfig(kylinConfig);
+        backupConfig.setMetadataUrl(backupMetadataUrl);
+        abortIfAlreadyExists(backupPath);
+        logger.info("The backup metadataUrl is {} and backup path is {}", backupMetadataUrl, backupPath);
+        try (val backupResourceStore = ResourceStore.getKylinMetaStore(backupConfig)) {
+            val backupMetadataStore = backupResourceStore.getMetadataStore();
+            if (StringUtils.isBlank(project)) {
+                logger.info("start to copy all projects from ResourceStore.");
+                long finalOffset = getOffset(isUTEnv, resourceStore);
+                backupResourceStore.putResourceWithoutCheck(ResourceStore.METASTORE_IMAGE,
+                        ByteSource.wrap(JsonUtil.writeValueAsBytes(new ImageDesc(finalOffset))),
+                        System.currentTimeMillis(), -1);
+                var projectFolders = resourceStore.listResources("/");
+                if (projectFolders == null) {
+                    return;
+                }
+                UnitOfWork.doInTransactionWithRetry(() -> {
+                    backupProjects(projectFolders, resourceStore, backupResourceStore, excludeTableExd);
+                    return null;
+                }, UnitOfWork.GLOBAL_UNIT);
 
-    private ResourceStore resourceStore;
+                val uuid = resourceStore.getResource(ResourceStore.METASTORE_UUID_TAG);
+                if (uuid != null) {
+                    backupResourceStore.putResourceWithoutCheck(uuid.getResPath(), uuid.getByteSource(),
+                            uuid.getTimestamp(), -1);
+                }
+                logger.info("start to backup all projects");
 
-    @Getter
-    private String backupPath;
+            } else {
+                logger.info("start to copy project {} from ResourceStore.", project);
+                UnitOfWork.doInTransactionWithRetry(
+                        UnitOfWorkParams.builder().readonly(true).unitName(project).processor(() -> {
+                            copyResourceStore("/" + project, resourceStore, backupResourceStore, true, excludeTableExd);
+                            val uuid = resourceStore.getResource(ResourceStore.METASTORE_UUID_TAG);
+                            backupResourceStore.putResourceWithoutCheck(uuid.getResPath(), uuid.getByteSource(),
+                                    uuid.getTimestamp(), -1);
+                            return null;
+                        }).build());
+                if (Thread.currentThread().isInterrupted()) {
+                    throw new InterruptedException("metadata task is interrupt");
+                }
+                logger.info("start to backup project {}", project);
+            }
+            backupResourceStore.deleteResource(ResourceStore.METASTORE_TRASH_RECORD);
+            backupMetadataStore.dump(backupResourceStore);
+            logger.info("backup successfully at {}", backupPath);
+        }
+    }
 
-    @Getter
-    private String fetchPath;
+    public String getMetadataUrl(String rootPath, boolean compressed, KylinConfig kylinConfig) {
+        if (HadoopUtil.isHdfsCompatibleSchema(rootPath, kylinConfig)) {
+            val url = String.format(Locale.ROOT, HDFS_METADATA_URL_FORMATTER,
+                    Path.getPathWithoutSchemeAndAuthority(new Path(rootPath)).toString() + "/");
+            return compressed ? url + ",zip=1" : url;
+        } else if (rootPath.startsWith("file://")) {
+            rootPath = rootPath.replace("file://", "");
+            return StringUtils.appendIfMissing(rootPath, "/");
 
-    MetadataTool() {
-        kylinConfig = KylinConfig.getInstanceFromEnv();
-        this.options = new Options();
-        initOptions();
+        } else {
+            return StringUtils.appendIfMissing(rootPath, "/");
+        }
     }
 
-    public MetadataTool(KylinConfig kylinConfig) {
-        this.kylinConfig = kylinConfig;
-        this.options = new Options();
-        initOptions();
+    private void backupProjects(NavigableSet<String> projectFolders, ResourceStore resourceStore,
+            ResourceStore backupResourceStore, boolean excludeTableExd) throws InterruptedException {
+        for (String projectPath : projectFolders) {
+            if (projectPath.equals(ResourceStore.METASTORE_UUID_TAG)
+                    || projectPath.equals(ResourceStore.METASTORE_IMAGE)) {
+                continue;
+            }
+            // The "_global" directory is already included in the full backup
+            copyResourceStore(projectPath, resourceStore, backupResourceStore, false, excludeTableExd);
+            if (Thread.currentThread().isInterrupted()) {
+                throw new InterruptedException("metadata task is interrupt");
+            }
+        }
     }
 
-    public static void backup(KylinConfig kylinConfig) throws IOException {
-        HDFSMetadataTool.cleanBeforeBackup(kylinConfig);
-        String[] args = new String[] { "-backup", "-compress", "-dir", HadoopUtil.getBackupFolder(kylinConfig) };
-        val backupTool = new MetadataTool(kylinConfig);
-        backupTool.execute(args);
+    private void copyResourceStore(String projectPath, ResourceStore srcResourceStore,
+            ResourceStore destResourceStore, boolean isProjectLevel, boolean excludeTableExd) {
+        if (excludeTableExd) {
+            String tableExdPath = projectPath + ResourceStore.TABLE_EXD_RESOURCE_ROOT;
+            var projectItems = srcResourceStore.listResources(projectPath);
+            for (String item : projectItems) {
+                if (item.equals(tableExdPath)) {
+                    continue;
+                }
+                srcResourceStore.copy(item, destResourceStore);
+            }
+        } else {
+            srcResourceStore.copy(projectPath, destResourceStore);
+        }
+        if (isProjectLevel) {
+            // The project-level backup needs to contain "/_global/project/*.json"
+            val projectName = Paths.get(projectPath).getFileName().toString();
+            srcResourceStore.copy(ProjectInstance.concatResourcePath(projectName), destResourceStore);
+        }
     }
 
-    public static void backup(KylinConfig kylinConfig, String dir, String folder) throws IOException {
-        HDFSMetadataTool.cleanBeforeBackup(kylinConfig);
-        String[] args = new String[] { "-backup", "-compress", "-dir", dir, "-folder", folder };
-        val backupTool = new MetadataTool(kylinConfig);
-        backupTool.execute(args);
+    private long getOffset(boolean isUTEnv, ResourceStore resourceStore) {
+        AuditLogStore auditLogStore = resourceStore.getAuditLogStore();
+        if (isUTEnv) {
+            return auditLogStore.getMaxId();
+        } else {
+            return auditLogStore.getLogOffset() == 0 ? resourceStore.getOffset() : auditLogStore.getLogOffset();
+        }
     }
 
-    public static void restore(KylinConfig kylinConfig, String folder) throws IOException {
-        val tool = new MetadataTool(kylinConfig);
-        tool.execute(new String[] { "-restore", "-dir", folder, "--after-truncate" });
+    private void abortIfAlreadyExists(String path) throws IOException {
+        URI uri = HadoopUtil.makeURI(path);
+        if (!uri.isAbsolute()) {
+            logger.info("no scheme specified for {}, try local file system file://", path);
+            File localFile = new File(path);
+            if (localFile.exists()) {
+                logger.error("[UNEXPECTED_THINGS_HAPPENED] local file {} already exists ", path);
+                throw new KylinException(FILE_ALREADY_EXISTS, path);
+            }
+            return;
+        }
+        val fs = HadoopUtil.getWorkingFileSystem();
+        if (fs.exists(new Path(path))) {
+            logger.error("[UNEXPECTED_THINGS_HAPPENED] specified file {} already exists ", path);
+            throw new KylinException(FILE_ALREADY_EXISTS, path);
+        }
     }
 
-    public static void main(String[] args) {
-        ToolMainWrapper.wrap(args, () -> {
-            val config = KylinConfig.getInstanceFromEnv();
-            val tool = new MetadataTool(config);
-            val optionsHelper = new OptionsHelper();
-            optionsHelper.parseOptions(tool.getOptions(), args);
-            boolean isBackup = optionsHelper.hasOption(OPERATE_BACKUP);
-            boolean isFetch = optionsHelper.hasOption(OPERATE_FETCH);
-            if ((isBackup || isFetch) && ScreenPrintUtil.isMainThread()) {
-                config.setProperty("kylin.env.metadata.only-for-read", "true");
-            }
-            val resourceStore = ResourceStore.getKylinMetaStore(config);
-            resourceStore.getAuditLogStore().setInstance(AddressUtil.getMockPortAddress());
-            tool.execute(args);
-            if (isBackup && StringUtils.isNotEmpty(tool.getBackupPath())) {
-                System.out.printf(Locale.ROOT, "The metadata backup path is %s.%n", tool.getBackupPath());
-            }
-        });
-        Unsafe.systemExit(0);
+    public void restore(KylinConfig kylinConfig, String project, String path, boolean delete) throws Exception {
+        logger.info("Restore metadata with delete : {}", delete);
+        ResourceStore resourceStore = ResourceStore.getKylinMetaStore(kylinConfig);
+        val restoreMetadataUrl = getMetadataUrl(path, false, kylinConfig);
+        val restoreConfig = KylinConfig.createKylinConfig(kylinConfig);
+        restoreConfig.setMetadataUrl(restoreMetadataUrl);
+        logger.info("The restore metadataUrl is {} and restore path is {} ", restoreMetadataUrl, path);
+
+        val restoreResourceStore = ResourceStore.getKylinMetaStore(restoreConfig);
+        val restoreMetadataStore = restoreResourceStore.getMetadataStore();
+        MetadataChecker metadataChecker = new MetadataChecker(restoreMetadataStore);
+
+        val verifyResult = metadataChecker.verify();
+        Preconditions.checkState(verifyResult.isQualified(),
+                verifyResult.getResultMessage() + "\n the metadata dir is not qualified");
+        restore(resourceStore, restoreResourceStore, project, delete);
+        backup(kylinConfig);
+
     }
 
-    public static void restore(ResourceStore currentResourceStore, ResourceStore restoreResourceStore, String project,
+    public void restore(ResourceStore currentResourceStore, ResourceStore restoreResourceStore, String project,
             boolean delete) {
         if (StringUtils.isBlank(project)) {
             logger.info("start to restore all projects");
@@ -222,7 +325,7 @@ public class MetadataTool extends ExecutableApplication {
             UnitOfWork.doInTransactionWithRetry(() -> doRestore(currentResourceStore, restoreResourceStore,
                     finalGlobalDestResources, globalSrcResources, delete), UnitOfWork.GLOBAL_UNIT, 1);
 
-            val projectPath = "/" + project;
+            val projectPath = FileSystems.getDefault().getSeparator() + project;
             val destResources = currentResourceStore.listResourcesRecursively(projectPath);
             val srcResources = restoreResourceStore.listResourcesRecursively(projectPath);
 
@@ -234,7 +337,7 @@ public class MetadataTool extends ExecutableApplication {
         logger.info("restore successfully");
     }
 
-    private static int doRestore(ResourceStore currentResourceStore, ResourceStore restoreResourceStore,
+    private int doRestore(ResourceStore currentResourceStore, ResourceStore restoreResourceStore,
             Set<String> destResources, Set<String> srcResources, boolean delete) throws IOException {
         val threadViewRS = ResourceStore.getKylinMetaStore(KylinConfig.getInstanceFromEnv());
 
@@ -267,94 +370,29 @@ public class MetadataTool extends ExecutableApplication {
         return 0;
     }
 
-    private void initOptions() {
-        final OptionGroup optionGroup = new OptionGroup();
-        optionGroup.setRequired(true);
-        optionGroup.addOption(OPERATE_BACKUP);
-        optionGroup.addOption(OPERATE_FETCH);
-        optionGroup.addOption(OPERATE_LIST);
-        optionGroup.addOption(OPERATE_RESTORE);
-
-        options.addOptionGroup(optionGroup);
-        options.addOption(OPTION_DIR);
-        options.addOption(OPTION_PROJECT);
-        options.addOption(FOLDER_NAME);
-        options.addOption(OPTION_TARGET);
-        options.addOption(OPERATE_COMPRESS);
-        options.addOption(OPTION_EXCLUDE_TABLE_EXD);
-        options.addOption(OPTION_AFTER_TRUNCATE);
-    }
-
-    @Override
-    protected Options getOptions() {
-        return options;
-    }
-
-    @Override
-    protected void execute(OptionsHelper optionsHelper) throws Exception {
-        logger.info("start to init ResourceStore");
-        resourceStore = ResourceStore.getKylinMetaStore(kylinConfig);
-        if (optionsHelper.hasOption(OPERATE_BACKUP)) {
-            boolean isGlobal = null == optionsHelper.getOptionValue(OPTION_PROJECT);
-            long startAt = System.currentTimeMillis();
-
-            try {
-                backup(optionsHelper);
-            } catch (Exception be) {
-                if (isGlobal) {
-                    MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP_FAILED, MetricsCategory.GLOBAL, GLOBAL);
-                } else {
-                    MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP_FAILED, MetricsCategory.PROJECT,
-                            optionsHelper.getOptionValue(OPTION_PROJECT));
-                }
-                throw be;
-            } finally {
-                if (isGlobal) {
-                    MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP, MetricsCategory.GLOBAL, GLOBAL);
-                    MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP_DURATION, MetricsCategory.GLOBAL, GLOBAL,
-                            System.currentTimeMillis() - startAt);
-                } else {
-                    MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP, MetricsCategory.PROJECT,
-                            optionsHelper.getOptionValue(OPTION_PROJECT));
-                    MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP_DURATION, MetricsCategory.PROJECT,
-                            optionsHelper.getOptionValue(OPTION_PROJECT), System.currentTimeMillis() - startAt);
-                }
-            }
-
-        } else if (optionsHelper.hasOption(OPERATE_FETCH)) {
-            fetch(optionsHelper);
-        } else if (optionsHelper.hasOption(OPERATE_LIST)) {
-            list(optionsHelper);
-        } else if (optionsHelper.hasOption(OPERATE_RESTORE)) {
-            restore(optionsHelper, optionsHelper.hasOption(OPTION_AFTER_TRUNCATE));
-        } else {
-            throw new KylinException(PARAMETER_NOT_SPECIFY, "-restore");
+    public void cleanStorage(boolean storageCleanup, List<String> projects, double requestFSRate,
+            int retryTimes) {
+        try {
+            StorageCleaner storageCleaner = new StorageCleaner(storageCleanup, projects, requestFSRate, retryTimes);
+            System.out.println("Start to cleanup HDFS");
+            storageCleaner.execute();
+            System.out.println("cleanup HDFS finished");
+        } catch (Exception e) {
+            logger.error("cleanup HDFS failed", e);
+            System.out.println(StorageCleaner.ANSI_RED
+                    + "cleanup HDFS failed. Detailed Message is at ${KYLIN_HOME}/logs/shell.stderr"
+                    + StorageCleaner.ANSI_RESET);
         }
     }
 
-    private void abortIfAlreadyExists(String path) throws IOException {
-        URI uri = HadoopUtil.makeURI(path);
-        if (!uri.isAbsolute()) {
-            logger.info("no scheme specified for {}, try local file system file://", path);
-            File localFile = new File(path);
-            if (localFile.exists()) {
-                logger.error("[UNEXPECTED_THINGS_HAPPENED] local file {} already exists ", path);
-                throw new KylinException(FILE_ALREADY_EXISTS, path);
-            }
-            return;
-        }
-        val fs = HadoopUtil.getWorkingFileSystem();
-        if (fs.exists(new Path(path))) {
-            logger.error("[UNEXPECTED_THINGS_HAPPENED] specified file {} already exists ", path);
-            throw new KylinException(FILE_ALREADY_EXISTS, path);
-        }
+    public DataSource getDataSource(KylinConfig kylinConfig) throws Exception {
+        val url = kylinConfig.getMetadataUrl();
+        val props = JdbcUtil.datasourceParameters(url);
+        return JdbcDataSource.getDataSource(props);
     }
 
-    private void fetch(OptionsHelper optionsHelper) throws Exception {
-        var path = optionsHelper.getOptionValue(OPTION_DIR);
-        var folder = optionsHelper.getOptionValue(FOLDER_NAME);
-        val excludeTableExd = optionsHelper.hasOption(OPTION_EXCLUDE_TABLE_EXD);
-        val target = optionsHelper.getOptionValue(OPTION_TARGET);
+    public void fetch(KylinConfig kylinConfig, String path, String folder, String target, boolean excludeTableExd) throws Exception {
+        ResourceStore resourceStore = ResourceStore.getKylinMetaStore(kylinConfig);
         if (StringUtils.isBlank(path)) {
             path = KylinConfigBase.getKylinHome() + File.separator + "meta_fetch";
         }
@@ -364,9 +402,9 @@ public class MetadataTool extends ExecutableApplication {
         if (target == null) {
             System.out.println("target file must be set with fetch mode");
         } else {
-            fetchPath = StringUtils.appendIfMissing(path, "/") + folder;
+            val fetchPath = StringUtils.appendIfMissing(path, "/") + folder;
             // currently do not support compress with fetch
-            val fetchMetadataUrl = getMetadataUrl(fetchPath, false);
+            val fetchMetadataUrl = getMetadataUrl(fetchPath, false, kylinConfig);
             val fetchConfig = KylinConfig.createKylinConfig(kylinConfig);
             fetchConfig.setMetadataUrl(fetchMetadataUrl);
             abortIfAlreadyExists(fetchPath);
@@ -401,8 +439,8 @@ public class MetadataTool extends ExecutableApplication {
         }
     }
 
-    private NavigableSet<String> list(OptionsHelper optionsHelper) throws Exception {
-        val target = optionsHelper.getOptionValue(OPTION_TARGET);
+    public NavigableSet<String> list(KylinConfig kylinConfig, String target) throws Exception {
+        ResourceStore resourceStore = ResourceStore.getKylinMetaStore(kylinConfig);
         var res = resourceStore.listResources(target);
         if (res == null) {
             System.out.printf("%s is not exist%n", target);
@@ -412,154 +450,4 @@ public class MetadataTool extends ExecutableApplication {
         return res;
     }
 
-    private void backup(OptionsHelper optionsHelper) throws Exception {
-        val project = optionsHelper.getOptionValue(OPTION_PROJECT);
-        var path = optionsHelper.getOptionValue(OPTION_DIR);
-        var folder = optionsHelper.getOptionValue(FOLDER_NAME);
-        var compress = optionsHelper.hasOption(OPERATE_COMPRESS);
-        val excludeTableExd = optionsHelper.hasOption(OPTION_EXCLUDE_TABLE_EXD);
-        if (StringUtils.isBlank(path)) {
-            path = KylinConfigBase.getKylinHome() + File.separator + "meta_backups";
-        }
-        if (StringUtils.isEmpty(folder)) {
-            folder = LocalDateTime.now(Clock.systemDefaultZone()).format(DATE_TIME_FORMATTER) + "_backup";
-        }
-        backupPath = StringUtils.appendIfMissing(path, "/") + folder;
-        val backupMetadataUrl = getMetadataUrl(backupPath, compress);
-        val backupConfig = KylinConfig.createKylinConfig(kylinConfig);
-        backupConfig.setMetadataUrl(backupMetadataUrl);
-        abortIfAlreadyExists(backupPath);
-        logger.info("The backup metadataUrl is {} and backup path is {}", backupMetadataUrl, backupPath);
-
-        try (val backupResourceStore = ResourceStore.getKylinMetaStore(backupConfig)) {
-
-            val backupMetadataStore = backupResourceStore.getMetadataStore();
-
-            if (StringUtils.isBlank(project)) {
-                logger.info("start to copy all projects from ResourceStore.");
-                val auditLogStore = resourceStore.getAuditLogStore();
-                long finalOffset = getOffset(auditLogStore);
-                backupResourceStore.putResourceWithoutCheck(ResourceStore.METASTORE_IMAGE,
-                        ByteSource.wrap(JsonUtil.writeValueAsBytes(new ImageDesc(finalOffset))),
-                        System.currentTimeMillis(), -1);
-                var projectFolders = resourceStore.listResources("/");
-                if (projectFolders == null) {
-                    return;
-                }
-                UnitOfWork.doInTransactionWithRetry(() -> {
-                    backupProjects(projectFolders, backupResourceStore, excludeTableExd);
-                    return null;
-                }, UnitOfWork.GLOBAL_UNIT);
-
-                val uuid = resourceStore.getResource(ResourceStore.METASTORE_UUID_TAG);
-                if (uuid != null) {
-                    backupResourceStore.putResourceWithoutCheck(uuid.getResPath(), uuid.getByteSource(),
-                            uuid.getTimestamp(), -1);
-                }
-                logger.info("start to backup all projects");
-
-            } else {
-                logger.info("start to copy project {} from ResourceStore.", project);
-                UnitOfWork.doInTransactionWithRetry(
-                        UnitOfWorkParams.builder().readonly(true).unitName(project).processor(() -> {
-                            copyResourceStore("/" + project, resourceStore, backupResourceStore, true, excludeTableExd);
-                            val uuid = resourceStore.getResource(ResourceStore.METASTORE_UUID_TAG);
-                            backupResourceStore.putResourceWithoutCheck(uuid.getResPath(), uuid.getByteSource(),
-                                    uuid.getTimestamp(), -1);
-                            return null;
-                        }).build());
-                if (Thread.currentThread().isInterrupted()) {
-                    throw new InterruptedException("metadata task is interrupt");
-                }
-                logger.info("start to backup project {}", project);
-            }
-            backupResourceStore.deleteResource(ResourceStore.METASTORE_TRASH_RECORD);
-            backupMetadataStore.dump(backupResourceStore);
-            logger.info("backup successfully at {}", backupPath);
-        }
-    }
-
-    private long getOffset(AuditLogStore auditLogStore) {
-        long offset = 0;
-        if (kylinConfig.isUTEnv())
-            offset = auditLogStore.getMaxId();
-        else
-            offset = auditLogStore.getLogOffset() == 0 ? resourceStore.getOffset() : auditLogStore.getLogOffset();
-        return offset;
-    }
-
-    private void backupProjects(NavigableSet<String> projectFolders, ResourceStore backupResourceStore,
-            boolean excludeTableExd) throws InterruptedException {
-        for (String projectPath : projectFolders) {
-            if (projectPath.equals(ResourceStore.METASTORE_UUID_TAG)
-                    || projectPath.equals(ResourceStore.METASTORE_IMAGE)) {
-                continue;
-            }
-            // The "_global" directory is already included in the full backup
-            copyResourceStore(projectPath, resourceStore, backupResourceStore, false, excludeTableExd);
-            if (Thread.currentThread().isInterrupted()) {
-                throw new InterruptedException("metadata task is interrupt");
-            }
-        }
-    }
-
-    private void copyResourceStore(String projectPath, ResourceStore srcResourceStore, ResourceStore destResourceStore,
-            boolean isProjectLevel, boolean excludeTableExd) {
-        if (excludeTableExd) {
-            String tableExdPath = projectPath + ResourceStore.TABLE_EXD_RESOURCE_ROOT;
-            var projectItems = srcResourceStore.listResources(projectPath);
-            for (String item : projectItems) {
-                if (item.equals(tableExdPath)) {
-                    continue;
-                }
-                srcResourceStore.copy(item, destResourceStore);
-            }
-        } else {
-            srcResourceStore.copy(projectPath, destResourceStore);
-        }
-        if (isProjectLevel) {
-            // The project-level backup needs to contain "/_global/project/*.json"
-            val projectName = Paths.get(projectPath).getFileName().toString();
-            srcResourceStore.copy(ProjectInstance.concatResourcePath(projectName), destResourceStore);
-        }
-    }
-
-    private void restore(OptionsHelper optionsHelper, boolean delete) throws IOException {
-        logger.info("Restore metadata with delete : {}", delete);
-        val project = optionsHelper.getOptionValue(OPTION_PROJECT);
-        val restorePath = optionsHelper.getOptionValue(OPTION_DIR);
-
-        val restoreMetadataUrl = getMetadataUrl(restorePath, false);
-        val restoreConfig = KylinConfig.createKylinConfig(kylinConfig);
-        restoreConfig.setMetadataUrl(restoreMetadataUrl);
-        logger.info("The restore metadataUrl is {} and restore path is {} ", restoreMetadataUrl, restorePath);
-
-        val restoreResourceStore = ResourceStore.getKylinMetaStore(restoreConfig);
-        val restoreMetadataStore = restoreResourceStore.getMetadataStore();
-        MetadataChecker metadataChecker = new MetadataChecker(restoreMetadataStore);
-
-        val verifyResult = metadataChecker.verify();
-        if (!verifyResult.isQualified()) {
-            throw new RuntimeException(verifyResult.getResultMessage() + "\n the metadata dir is not qualified");
-        }
-        restore(resourceStore, restoreResourceStore, project, delete);
-        backup(kylinConfig);
-
-    }
-
-    String getMetadataUrl(String rootPath, boolean compressed) {
-        if (HadoopUtil.isHdfsCompatibleSchema(rootPath, kylinConfig)) {
-            val url = String.format(Locale.ROOT, HDFS_METADATA_URL_FORMATTER,
-                    Path.getPathWithoutSchemeAndAuthority(new Path(rootPath)).toString() + "/");
-            return compressed ? url + ",zip=1" : url;
-
-        } else if (rootPath.startsWith("file://")) {
-            rootPath = rootPath.replace("file://", "");
-            return StringUtils.appendIfMissing(rootPath, "/");
-
-        } else {
-            return StringUtils.appendIfMissing(rootPath, "/");
-
-        }
-    }
 }
diff --git a/src/common-service/src/main/java/org/apache/kylin/helper/RoutineToolHelper.java b/src/common-service/src/main/java/org/apache/kylin/helper/RoutineToolHelper.java
new file mode 100644
index 0000000000..8c9b06d441
--- /dev/null
+++ b/src/common-service/src/main/java/org/apache/kylin/helper/RoutineToolHelper.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.helper;
+
+import lombok.extern.slf4j.Slf4j;
+import lombok.val;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.persistence.transaction.UnitOfWork;
+import org.apache.kylin.common.util.SetThreadName;
+import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
+import org.apache.kylin.metadata.project.NProjectManager;
+import org.apache.kylin.metadata.project.ProjectInstance;
+import org.apache.kylin.metadata.query.util.QueryHisStoreUtil;
+import org.apache.kylin.metadata.recommendation.candidate.JdbcRawRecStore;
+import org.apache.kylin.metadata.streaming.util.StreamingJobRecordStoreUtil;
+import org.apache.kylin.metadata.streaming.util.StreamingJobStatsStoreUtil;
+import org.apache.kylin.tool.garbage.GarbageCleaner;
+import org.apache.kylin.tool.garbage.SourceUsageCleaner;
+import org.apache.kylin.tool.garbage.StorageCleaner;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+/*
+ * this class is only for removing dependency of kylin-tool module, and should be refactor later
+ */
+@Slf4j
+public class RoutineToolHelper {
+
+    private RoutineToolHelper() {
+    }
+
+    public static void cleanQueryHistories() {
+        QueryHisStoreUtil.cleanQueryHistory();
+    }
+
+    public static void cleanStreamingStats() {
+        StreamingJobStatsStoreUtil.cleanStreamingJobStats();
+        StreamingJobRecordStoreUtil.cleanStreamingJobRecord();
+    }
+
+    public static void deleteRawRecItems() {
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
+        List<ProjectInstance> projectInstances = NProjectManager.getInstance(config).listAllProjects().stream()
+                .filter(projectInstance -> !projectInstance.isExpertMode()).collect(Collectors.toList());
+        if (projectInstances.isEmpty()) {
+            return;
+        }
+        try (SetThreadName ignored = new SetThreadName("DeleteRawRecItemsInDB")) {
+            val jdbcRawRecStore = new JdbcRawRecStore(KylinConfig.getInstanceFromEnv());
+            jdbcRawRecStore.deleteOutdated();
+        } catch (Exception e) {
+            log.error("delete outdated advice fail: ", e);
+        }
+    }
+
+    public static void cleanGlobalSourceUsage() {
+        log.info("Start to clean up global meta");
+        try {
+            EnhancedUnitOfWork.doInTransactionWithCheckAndRetry(() -> {
+                new SourceUsageCleaner().cleanup();
+                return null;
+            }, UnitOfWork.GLOBAL_UNIT);
+        } catch (Exception e) {
+            log.error("Failed to clean global meta", e);
+        }
+        log.info("Clean up global meta finished");
+
+    }
+
+    public static void cleanMetaByProject(String projectName) {
+        log.info("Start to clean up {} meta", projectName);
+        try {
+            GarbageCleaner.cleanMetadata(projectName);
+        } catch (Exception e) {
+            log.error("Project[{}] cleanup Metadata failed", projectName, e);
+        }
+        log.info("Clean up {} meta finished", projectName);
+    }
+
+    public static void cleanMeta(List<String> projectsToCleanup) {
+        try {
+            cleanGlobalSourceUsage();
+            for (String projName : projectsToCleanup) {
+                cleanMetaByProject(projName);
+            }
+            cleanQueryHistories();
+            cleanStreamingStats();
+            deleteRawRecItems();
+            System.out.println("Metadata cleanup finished");
+        } catch (Exception e) {
+            log.error("Metadata cleanup failed", e);
+            System.out.println(StorageCleaner.ANSI_RED
+                    + "Metadata cleanup failed. Detailed Message is at ${KYLIN_HOME}/logs/shell.stderr"
+                    + StorageCleaner.ANSI_RESET);
+        }
+
+    }
+
+}
diff --git a/src/common-service/src/main/java/org/apache/kylin/helper/UpdateUserAclToolHelper.java b/src/common-service/src/main/java/org/apache/kylin/helper/UpdateUserAclToolHelper.java
new file mode 100644
index 0000000000..a73fd07ce8
--- /dev/null
+++ b/src/common-service/src/main/java/org/apache/kylin/helper/UpdateUserAclToolHelper.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.helper;
+
+import lombok.val;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.util.EncryptUtil;
+import org.apache.kylin.metadata.upgrade.GlobalAclVersionManager;
+import org.apache.kylin.tool.util.LdapUtils;
+import org.springframework.security.ldap.DefaultSpringSecurityContextSource;
+import org.springframework.security.ldap.SpringSecurityLdapTemplate;
+
+import javax.naming.directory.SearchControls;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Properties;
+import java.util.Set;
+
+public class UpdateUserAclToolHelper {
+    private UpdateUserAclToolHelper() {}
+
+    public static UpdateUserAclToolHelper getInstance() {
+        return new UpdateUserAclToolHelper();
+    }
+
+    public Set<String> getLdapAdminUsers() {
+        val ldapTemplate = createLdapTemplate();
+        val ldapUserDNs = LdapUtils.getAllGroupMembers(ldapTemplate,
+                KylinConfig.getInstanceFromEnv().getLDAPAdminRole());
+        val searchControls = new SearchControls();
+        searchControls.setSearchScope(2);
+        Map<String, String> dnMapperMap = LdapUtils.getAllValidUserDnMap(ldapTemplate, searchControls);
+        val users = new HashSet<String>();
+        for (String u : ldapUserDNs) {
+            Optional.ofNullable(dnMapperMap.get(u)).ifPresent(users::add);
+        }
+        return users;
+    }
+
+    private SpringSecurityLdapTemplate createLdapTemplate() {
+        val properties = KylinConfig.getInstanceFromEnv().exportToProperties();
+        val contextSource = new DefaultSpringSecurityContextSource(
+                properties.getProperty("kylin.security.ldap.connection-server"));
+        contextSource.setUserDn(properties.getProperty("kylin.security.ldap.connection-username"));
+        contextSource.setPassword(getPassword(properties));
+        contextSource.afterPropertiesSet();
+        return new SpringSecurityLdapTemplate(contextSource);
+    }
+
+    public String getPassword(Properties properties) {
+        val password = properties.getProperty("kylin.security.ldap.connection-password");
+        return EncryptUtil.decrypt(password);
+    }
+
+    public boolean isUpgraded() {
+        val versionManager = GlobalAclVersionManager.getInstance(KylinConfig.getInstanceFromEnv());
+        return versionManager.exists();
+    }
+
+
+
+
+}
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/security/AdminUserAspect.java b/src/common-service/src/main/java/org/apache/kylin/rest/security/AdminUserAspect.java
index b9f4af991d..f31598aed4 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/security/AdminUserAspect.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/security/AdminUserAspect.java
@@ -24,8 +24,8 @@ import java.util.Objects;
 import org.apache.commons.collections4.CollectionUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.scheduler.EventBusFactory;
+import org.apache.kylin.metadata.upgrade.GlobalAclVersionManager;
 import org.apache.kylin.rest.service.UserAclService;
-import org.apache.kylin.tool.upgrade.UpdateUserAclTool;
 import org.aspectj.lang.annotation.AfterReturning;
 import org.aspectj.lang.annotation.Aspect;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -41,14 +41,22 @@ import lombok.extern.slf4j.Slf4j;
 public class AdminUserAspect {
     private List<String> adminUserList;
 
-    private UpdateUserAclTool tool = new UpdateUserAclTool();
-
     @Autowired
     @Qualifier("userAclService")
     private UserAclService userAclService;
 
     private boolean superAdminInitialized = false;
 
+    private boolean isUpgraded() {
+        val versionManager = GlobalAclVersionManager.getInstance(KylinConfig.getInstanceFromEnv());
+        return versionManager.exists();
+    }
+
+    private boolean isAdminUserUpgraded() {
+        val userAclManager = UserAclManager.getInstance(KylinConfig.getInstanceFromEnv());
+        return userAclManager.listAclUsernames().size() > 0;
+    }
+
     @AfterReturning(value = "execution(* org.apache.kylin.rest.service.OpenUserService.listAdminUsers(..))", returning = "adminUserList")
     public void doAfterListAdminUsers(List<String> adminUserList) {
         val kylinConfig = KylinConfig.getInstanceFromEnv();
@@ -56,7 +64,7 @@ public class AdminUserAspect {
             return;
         }
         // upgrade admin user acl from job node
-        if (kylinConfig.isJobNode() && tool.isUpgraded() && !tool.isAdminUserUpgraded()) {
+        if (kylinConfig.isJobNode() && isUpgraded() && !isAdminUserUpgraded()) {
             userAclService.syncAdminUserAcl(adminUserList, false);
         }
 
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/MetadataBackupService.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/MetadataBackupService.java
index 63c4d22329..bb9fb9bd34 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/MetadataBackupService.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/MetadataBackupService.java
@@ -17,17 +17,15 @@
  */
 package org.apache.kylin.rest.service;
 
-import java.io.IOException;
 import java.time.Clock;
 import java.time.LocalDateTime;
 
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.common.util.HadoopUtil;
 import org.apache.kylin.common.util.SetThreadName;
+import org.apache.kylin.helper.MetadataToolHelper;
 import org.apache.kylin.tool.HDFSMetadataTool;
-import org.apache.kylin.tool.MetadataTool;
 import org.springframework.stereotype.Service;
 
 import lombok.SneakyThrows;
@@ -36,42 +34,35 @@ import lombok.val;
 @Service
 public class MetadataBackupService {
 
-    @SneakyThrows(IOException.class)
-    public void backupAll(){
+    private final MetadataToolHelper helper = new MetadataToolHelper();
+
+    @SneakyThrows(Exception.class)
+    public void backupAll() {
 
         try (SetThreadName ignored = new SetThreadName("MetadataBackupWorker")) {
-            String[] args = new String[] { "-backup", "-compress", "-dir", getBackupDir() };
-            backup(args);
-            rotateAuditLog();
+            val kylinConfig = KylinConfig.getInstanceFromEnv();
+            HDFSMetadataTool.cleanBeforeBackup(kylinConfig);
+            val backupConfig = kylinConfig.getMetadataBackupFromSystem() ? kylinConfig
+                    : KylinConfig.createKylinConfig(kylinConfig);
+            helper.backup(backupConfig, null, getBackupDir(kylinConfig), null, true, false);
+            helper.rotateAuditLog();
         }
     }
 
-    public void backup(String[] args) throws IOException {
+    public String backupProject(String project) throws Exception {
+        val folder = LocalDateTime.now(Clock.systemDefaultZone()).format(MetadataToolHelper.DATE_TIME_FORMATTER)
+                + "_backup";
         val kylinConfig = KylinConfig.getInstanceFromEnv();
-        HDFSMetadataTool.cleanBeforeBackup(KylinConfig.getInstanceFromEnv());
+        HDFSMetadataTool.cleanBeforeBackup(kylinConfig);
         val backupConfig = kylinConfig.getMetadataBackupFromSystem() ? kylinConfig
                 : KylinConfig.createKylinConfig(kylinConfig);
-        val metadataTool = new MetadataTool(backupConfig);
-        metadataTool.execute(args);
-    }
-
-    public void rotateAuditLog() {
-        val kylinConfig = KylinConfig.getInstanceFromEnv();
-        val resourceStore = ResourceStore.getKylinMetaStore(kylinConfig);
-        val auditLogStore = resourceStore.getAuditLogStore();
-        auditLogStore.rotate();
-    }
-
-    public String backupProject(String project) throws IOException {
-        val folder = LocalDateTime.now(Clock.systemDefaultZone()).format(MetadataTool.DATE_TIME_FORMATTER) + "_backup";
-        String[] args = new String[] { "-backup", "-compress", "-project", project, "-folder", folder, "-dir",
-                getBackupDir() };
-        backup(args);
-        return StringUtils.appendIfMissing(getBackupDir(), "/") + folder;
+        String backupDir = getBackupDir(kylinConfig);
+        helper.backup(backupConfig, project, backupDir, folder, true, false);
+        return StringUtils.appendIfMissing(backupDir, "/") + folder;
     }
 
-    private String getBackupDir() {
-        return HadoopUtil.getBackupFolder(KylinConfig.getInstanceFromEnv());
+    private String getBackupDir(KylinConfig kylinConfig) {
+        return HadoopUtil.getBackupFolder(kylinConfig);
 
     }
 
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/ProjectService.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/ProjectService.java
index 3658763858..393d7a0653 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/ProjectService.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/ProjectService.java
@@ -948,7 +948,7 @@ public class ProjectService extends BasicService {
     }
 
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#project, 'ADMINISTRATION')")
-    public String backupProject(String project) throws IOException {
+    public String backupProject(String project) throws Exception {
         return metadataBackupService.backupProject(project);
     }
 
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/SystemService.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/SystemService.java
index 4eda8de07e..63ddcda042 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/SystemService.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/SystemService.java
@@ -29,7 +29,6 @@ import static org.apache.kylin.tool.constant.StageEnum.DONE;
 
 import java.io.File;
 import java.io.IOException;
-import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Objects;
@@ -53,6 +52,7 @@ import org.apache.kylin.common.persistence.transaction.MessageSynchronization;
 import org.apache.kylin.common.scheduler.EventBusFactory;
 import org.apache.kylin.common.util.BufferedLogger;
 import org.apache.kylin.common.util.CliCommandExecutor;
+import org.apache.kylin.helper.MetadataToolHelper;
 import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.NExecutableManager;
 import org.apache.kylin.metadata.cube.model.NIndexPlanManager;
@@ -66,7 +66,6 @@ import org.apache.kylin.rest.request.DiagProgressRequest;
 import org.apache.kylin.rest.response.DiagStatusResponse;
 import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.util.AclEvaluate;
-import org.apache.kylin.tool.MetadataTool;
 import org.apache.kylin.tool.constant.DiagTypeEnum;
 import org.apache.kylin.tool.constant.StageEnum;
 import org.slf4j.Logger;
@@ -78,7 +77,6 @@ import org.springframework.stereotype.Service;
 
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
-import com.google.common.collect.Lists;
 
 import lombok.Data;
 import lombok.NoArgsConstructor;
@@ -89,6 +87,7 @@ public class SystemService extends BasicService {
 
     private static final Logger logger = LoggerFactory.getLogger(SystemService.class);
 
+    private final MetadataToolHelper helper = new MetadataToolHelper();
     @Autowired
     private AclEvaluate aclEvaluate;
 
@@ -112,34 +111,17 @@ public class SystemService extends BasicService {
         }
     }
 
-    private Cache<String, DiagInfo> diagMap = CacheBuilder.newBuilder().expireAfterAccess(1, TimeUnit.DAYS).build();
-    private Cache<String, DiagStatusResponse> exceptionMap = CacheBuilder.newBuilder()
+    private final Cache<String, DiagInfo> diagMap = CacheBuilder.newBuilder().expireAfterAccess(1, TimeUnit.DAYS).build();
+    private final Cache<String, DiagStatusResponse> exceptionMap = CacheBuilder.newBuilder()
             .expireAfterAccess(1, TimeUnit.DAYS).build();
-    private ExecutorService executorService = Executors.newSingleThreadExecutor();
+    private final ExecutorService executorService = Executors.newSingleThreadExecutor();
 
     @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN + " or hasPermission(#backupRequest.getProject(), 'ADMINISTRATION')")
     public void backup(BackupRequest backupRequest) throws Exception {
-        String[] args = createBackupArgs(backupRequest);
-        val metadataTool = new MetadataTool(getConfig());
-        metadataTool.execute(args);
-    }
-
-    private String[] createBackupArgs(BackupRequest backupRequest) {
-        List<String> args = Lists.newArrayList("-backup");
-        if (backupRequest.isCompress()) {
-            args.add("-compress");
-        }
-        if (StringUtils.isNotBlank(backupRequest.getBackupPath())) {
-            args.add("-dir");
-            args.add(backupRequest.getBackupPath());
-        }
-        if (StringUtils.isNotBlank(backupRequest.getProject())) {
-            args.add("-project");
-            args.add(backupRequest.getProject());
-        }
-
-        logger.info("SystemService {}", args);
-        return args.toArray(new String[0]);
+        String project = StringUtils.isNotBlank(backupRequest.getProject()) ? backupRequest.getProject() : null;
+        String path = StringUtils.isNotBlank(backupRequest.getBackupPath()) ? backupRequest.getBackupPath(): null;
+        boolean compress = backupRequest.isCompress();
+        helper.backup(getConfig(), project, path, null, compress, false);
     }
 
     //    @PreAuthorize(Constant.ACCESS_HAS_ROLE_ADMIN)
diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/UserAclService.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/UserAclService.java
index 379291cbdb..90641b3154 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/UserAclService.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/UserAclService.java
@@ -52,7 +52,6 @@ import org.apache.kylin.rest.security.AdminUserSyncEventNotifier;
 import org.apache.kylin.rest.security.ExternalAclProvider;
 import org.apache.kylin.rest.security.UserAcl;
 import org.apache.kylin.rest.security.UserAclManager;
-import org.apache.kylin.tool.upgrade.UpdateUserAclTool;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.security.access.AccessDeniedException;
@@ -291,10 +290,15 @@ public class UserAclService extends BasicService implements UserAclServiceSuppor
         remoteRequest(eventNotifier, StringUtils.EMPTY);
     }
 
+    private static boolean isCustomProfile() {
+        val kylinConfig = KylinConfig.getInstanceFromEnv();
+        return "custom".equals(kylinConfig.getSecurityProfile());
+    }
+
     @SneakyThrows(IOException.class)
     public void syncAdminUserAcl() {
         val config = KylinConfig.getInstanceFromEnv();
-        if (UpdateUserAclTool.isCustomProfile()) {
+        if (isCustomProfile()) {
             // invoke the AdminUserAspect
             userService.listAdminUsers();
         } else if ("ldap".equals(config.getSecurityProfile())) {
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/HDFSMetadataTool.java b/src/common-service/src/main/java/org/apache/kylin/tool/HDFSMetadataTool.java
similarity index 98%
rename from src/tool/src/main/java/org/apache/kylin/tool/HDFSMetadataTool.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/HDFSMetadataTool.java
index 343a750aa7..9b6aceb46e 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/HDFSMetadataTool.java
+++ b/src/common-service/src/main/java/org/apache/kylin/tool/HDFSMetadataTool.java
@@ -29,6 +29,10 @@ import org.apache.kylin.common.util.HadoopUtil;
 import lombok.val;
 
 public class HDFSMetadataTool {
+
+    private HDFSMetadataTool() {
+    }
+
     public static void cleanBeforeBackup(KylinConfig kylinConfig) throws IOException {
         val rootMetadataBackupPath = new Path(HadoopUtil.getBackupFolder(KylinConfig.getInstanceFromEnv()));
         val fs = HadoopUtil.getWorkingFileSystem();
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/constant/DiagTypeEnum.java b/src/common-service/src/main/java/org/apache/kylin/tool/constant/DiagTypeEnum.java
similarity index 100%
rename from src/tool/src/main/java/org/apache/kylin/tool/constant/DiagTypeEnum.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/constant/DiagTypeEnum.java
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/constant/StageEnum.java b/src/common-service/src/main/java/org/apache/kylin/tool/constant/StageEnum.java
similarity index 100%
rename from src/tool/src/main/java/org/apache/kylin/tool/constant/StageEnum.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/constant/StageEnum.java
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/daemon/CheckResult.java b/src/common-service/src/main/java/org/apache/kylin/tool/daemon/CheckResult.java
similarity index 100%
rename from src/tool/src/main/java/org/apache/kylin/tool/daemon/CheckResult.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/daemon/CheckResult.java
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/daemon/CheckStateEnum.java b/src/common-service/src/main/java/org/apache/kylin/tool/daemon/CheckStateEnum.java
similarity index 100%
rename from src/tool/src/main/java/org/apache/kylin/tool/daemon/CheckStateEnum.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/daemon/CheckStateEnum.java
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/daemon/HealthChecker.java b/src/common-service/src/main/java/org/apache/kylin/tool/daemon/HealthChecker.java
similarity index 100%
rename from src/tool/src/main/java/org/apache/kylin/tool/daemon/HealthChecker.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/daemon/HealthChecker.java
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/daemon/KapGuardianHATask.java b/src/common-service/src/main/java/org/apache/kylin/tool/daemon/KapGuardianHATask.java
similarity index 100%
rename from src/tool/src/main/java/org/apache/kylin/tool/daemon/KapGuardianHATask.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/daemon/KapGuardianHATask.java
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/daemon/ServiceOpLevelEnum.java b/src/common-service/src/main/java/org/apache/kylin/tool/daemon/ServiceOpLevelEnum.java
similarity index 100%
rename from src/tool/src/main/java/org/apache/kylin/tool/daemon/ServiceOpLevelEnum.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/daemon/ServiceOpLevelEnum.java
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/daemon/Worker.java b/src/common-service/src/main/java/org/apache/kylin/tool/daemon/Worker.java
similarity index 97%
rename from src/tool/src/main/java/org/apache/kylin/tool/daemon/Worker.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/daemon/Worker.java
index 45001a74c7..b3eb1ad3b4 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/daemon/Worker.java
+++ b/src/common-service/src/main/java/org/apache/kylin/tool/daemon/Worker.java
@@ -40,7 +40,7 @@ public class Worker {
     private static SecretKey kgSecretKey;
 
     @Getter
-    private static String KE_PID;
+    private static String kePid;
 
     static {
         int serverPort = Integer.parseInt(getKylinConfig().getServerPort());
@@ -58,7 +58,7 @@ public class Worker {
     }
 
     public synchronized void setKEPid(String pid) {
-        KE_PID = pid;
+        kePid = pid;
     }
 
     public synchronized void setKgSecretKey(SecretKey secretKey) {
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/daemon/checker/AbstractHealthChecker.java b/src/common-service/src/main/java/org/apache/kylin/tool/daemon/checker/AbstractHealthChecker.java
similarity index 100%
rename from src/tool/src/main/java/org/apache/kylin/tool/daemon/checker/AbstractHealthChecker.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/daemon/checker/AbstractHealthChecker.java
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/daemon/checker/FullGCDurationChecker.java b/src/common-service/src/main/java/org/apache/kylin/tool/daemon/checker/FullGCDurationChecker.java
similarity index 100%
rename from src/tool/src/main/java/org/apache/kylin/tool/daemon/checker/FullGCDurationChecker.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/daemon/checker/FullGCDurationChecker.java
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/daemon/checker/KEProcessChecker.java b/src/common-service/src/main/java/org/apache/kylin/tool/daemon/checker/KEProcessChecker.java
similarity index 100%
rename from src/tool/src/main/java/org/apache/kylin/tool/daemon/checker/KEProcessChecker.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/daemon/checker/KEProcessChecker.java
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/daemon/checker/KEStatusChecker.java b/src/common-service/src/main/java/org/apache/kylin/tool/daemon/checker/KEStatusChecker.java
similarity index 96%
rename from src/tool/src/main/java/org/apache/kylin/tool/daemon/checker/KEStatusChecker.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/daemon/checker/KEStatusChecker.java
index 43f462a643..95896c420a 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/daemon/checker/KEStatusChecker.java
+++ b/src/common-service/src/main/java/org/apache/kylin/tool/daemon/checker/KEStatusChecker.java
@@ -40,7 +40,7 @@ import lombok.Setter;
 
 public class KEStatusChecker extends AbstractHealthChecker {
     public static final String PERMISSION_DENIED = "Check permission failed!";
-    private static final Logger logger = LoggerFactory.getLogger(AbstractHealthChecker.class);
+    private static final Logger logger = LoggerFactory.getLogger(KEStatusChecker.class);
     private int failCount = 0;
 
     public KEStatusChecker() {
@@ -57,12 +57,12 @@ public class KEStatusChecker extends AbstractHealthChecker {
                 setKgSecretKey(SecretKeyUtil.readKGSecretKeyFromFile());
             }
 
-            if (null == getKE_PID()) {
+            if (null == getKePid()) {
                 setKEPid(ToolUtil.getKylinPid());
             }
-            return SecretKeyUtil.generateEncryptedTokenWithPid(getKgSecretKey(), getKE_PID());
+            return SecretKeyUtil.generateEncryptedTokenWithPid(getKgSecretKey(), getKePid());
         } catch (Exception e) {
-            logger.error("Read KG secret key from file failed.", e);
+            logger.error("Read KG secret key from file failed.");
             throw e;
         }
     }
@@ -84,7 +84,7 @@ public class KEStatusChecker extends AbstractHealthChecker {
                     setKgSecretKey(null);
                 }
 
-                throw new RuntimeException("Get KE health status failed: " + response.msg);
+                throw new IllegalStateException("Get KE health status failed: " + response.msg);
             }
 
             Status status = response.getData();
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/garbage/ExecutableCleaner.java b/src/common-service/src/main/java/org/apache/kylin/tool/garbage/ExecutableCleaner.java
similarity index 95%
rename from src/tool/src/main/java/org/apache/kylin/tool/garbage/ExecutableCleaner.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/garbage/ExecutableCleaner.java
index 13a706dfad..498e0fcb35 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/garbage/ExecutableCleaner.java
+++ b/src/common-service/src/main/java/org/apache/kylin/tool/garbage/ExecutableCleaner.java
@@ -53,10 +53,7 @@ public class ExecutableCleaner extends MetadataCleaner {
                 return false;
             }
             ExecutableState state = job.getStatus();
-            if (!state.isFinalState()) {
-                return false;
-            }
-            return true;
+            return state.isFinalState();
         }).collect(Collectors.toList());
 
         for (AbstractExecutable executable : filteredExecutables) {
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/garbage/GarbageCleaner.java b/src/common-service/src/main/java/org/apache/kylin/tool/garbage/GarbageCleaner.java
similarity index 94%
rename from src/tool/src/main/java/org/apache/kylin/tool/garbage/GarbageCleaner.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/garbage/GarbageCleaner.java
index 33daae9e56..5a05e52f8e 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/garbage/GarbageCleaner.java
+++ b/src/common-service/src/main/java/org/apache/kylin/tool/garbage/GarbageCleaner.java
@@ -29,15 +29,11 @@ import org.apache.kylin.common.scheduler.EventBusFactory;
 import org.apache.kylin.common.scheduler.SourceUsageUpdateNotifier;
 import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
 import org.apache.kylin.metadata.project.NProjectManager;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 import lombok.val;
 
 public class GarbageCleaner {
 
-    private static final Logger logger = LoggerFactory.getLogger(GarbageCleaner.class);
-
     private GarbageCleaner() {
     }
 
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/garbage/IndexCleaner.java b/src/common-service/src/main/java/org/apache/kylin/tool/garbage/IndexCleaner.java
similarity index 100%
rename from src/tool/src/main/java/org/apache/kylin/tool/garbage/IndexCleaner.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/garbage/IndexCleaner.java
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/garbage/MetadataCleaner.java b/src/common-service/src/main/java/org/apache/kylin/tool/garbage/MetadataCleaner.java
similarity index 92%
rename from src/tool/src/main/java/org/apache/kylin/tool/garbage/MetadataCleaner.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/garbage/MetadataCleaner.java
index 6cc8c3cb39..84f04556d8 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/garbage/MetadataCleaner.java
+++ b/src/common-service/src/main/java/org/apache/kylin/tool/garbage/MetadataCleaner.java
@@ -19,9 +19,9 @@
 package org.apache.kylin.tool.garbage;
 
 public abstract class MetadataCleaner {
-    protected String project;
+    protected final String project;
 
-    public MetadataCleaner(String project) {
+    protected MetadataCleaner(String project) {
         this.project = project;
     }
 
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/garbage/SnapshotCleaner.java b/src/common-service/src/main/java/org/apache/kylin/tool/garbage/SnapshotCleaner.java
similarity index 96%
rename from src/tool/src/main/java/org/apache/kylin/tool/garbage/SnapshotCleaner.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/garbage/SnapshotCleaner.java
index dea8c537e5..1add64dc9c 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/garbage/SnapshotCleaner.java
+++ b/src/common-service/src/main/java/org/apache/kylin/tool/garbage/SnapshotCleaner.java
@@ -19,6 +19,7 @@
 package org.apache.kylin.tool.garbage;
 
 import java.io.IOException;
+import java.nio.file.FileSystems;
 import java.util.HashSet;
 import java.util.Set;
 
@@ -27,9 +28,9 @@ import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.KapConfig;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.HadoopUtil;
+import org.apache.kylin.metadata.model.NTableMetadataManager;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.TableExtDesc;
-import org.apache.kylin.metadata.model.NTableMetadataManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -59,7 +60,7 @@ public class SnapshotCleaner extends MetadataCleaner {
         }
         FileSystem fs = HadoopUtil.getWorkingFileSystem();
         String baseDir = config.getMetadataWorkingDirectory();
-        String resourcePath = baseDir + "/" + snapshotPath;
+        String resourcePath = baseDir + FileSystems.getDefault().getSeparator() + snapshotPath;
         try {
             return fs.exists(new Path(resourcePath));
         } catch (IOException e) {
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/garbage/SourceUsageCleaner.java b/src/common-service/src/main/java/org/apache/kylin/tool/garbage/SourceUsageCleaner.java
similarity index 100%
rename from src/tool/src/main/java/org/apache/kylin/tool/garbage/SourceUsageCleaner.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/garbage/SourceUsageCleaner.java
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/garbage/StorageCleaner.java b/src/common-service/src/main/java/org/apache/kylin/tool/garbage/StorageCleaner.java
similarity index 92%
rename from src/tool/src/main/java/org/apache/kylin/tool/garbage/StorageCleaner.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/garbage/StorageCleaner.java
index c54726c4f9..ff8e833bcf 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/garbage/StorageCleaner.java
+++ b/src/common-service/src/main/java/org/apache/kylin/tool/garbage/StorageCleaner.java
@@ -30,7 +30,6 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
@@ -82,6 +81,7 @@ import lombok.AllArgsConstructor;
 import lombok.Data;
 import lombok.EqualsAndHashCode;
 import lombok.Getter;
+import lombok.NoArgsConstructor;
 import lombok.NonNull;
 import lombok.RequiredArgsConstructor;
 import lombok.ToString;
@@ -97,17 +97,16 @@ public class StorageCleaner {
     public static final String ANSI_RESET = "\u001B[0m";
 
     private final boolean cleanup;
-    private boolean timeMachineEnabled;
+    private final boolean timeMachineEnabled;
     private final Collection<String> projectNames;
-    private long duration;
-    private KylinConfig kylinConfig;
+    private final KylinConfig kylinConfig;
 
     // for s3 https://olapio.atlassian.net/browse/AL-3154
-    private static RateLimiter rateLimiter = RateLimiter.create(Integer.MAX_VALUE);
+    private static final RateLimiter rateLimiter = RateLimiter.create(Integer.MAX_VALUE);
 
     @Getter
-    private Map<String, String> trashRecord;
-    private ResourceStore resourceStore;
+    private final Map<String, String> trashRecord;
+    private final ResourceStore resourceStore;
 
     public StorageCleaner() throws Exception {
         this(true);
@@ -154,10 +153,8 @@ public class StorageCleaner {
                 .collect(Collectors.toList());
 
         projects.stream().map(project -> NDataflowManager.getInstance(config, project.getName()).listAllDataflows())
-                .flatMap(Collection::stream)
-                .map(dataflow -> KapConfig.wrap(dataflow.getConfig()))
-                .map(KapConfig::getMetadataWorkingDirectory)
-                .forEach(hdfsWorkingDir -> {
+                .flatMap(Collection::stream).map(dataflow -> KapConfig.wrap(dataflow.getConfig()))
+                .map(KapConfig::getMetadataWorkingDirectory).forEach(hdfsWorkingDir -> {
                     val fs = HadoopUtil.getWorkingFileSystem();
                     allFileSystems.add(new StorageItem(FileSystemDecorator.getInstance(fs), hdfsWorkingDir));
                 });
@@ -167,8 +164,9 @@ public class StorageCleaner {
         // For build tasks it is a project-level parameter(Higher project-level priority), but for cleaning up storage garbage,
         // WRITING_CLUSTER_WORKING_DIR is a system-level parameter
         if (kylinConfig.isBuildFilesSeparationEnabled()) {
-            allFileSystems.add(new StorageItem(FileSystemDecorator.getInstance(HadoopUtil.getWritingClusterFileSystem()),
-                    config.getWritingClusterWorkingDir("")));
+            allFileSystems
+                    .add(new StorageItem(FileSystemDecorator.getInstance(HadoopUtil.getWritingClusterFileSystem()),
+                            config.getWritingClusterWorkingDir("")));
         }
         log.info("all file systems are {}", allFileSystems);
         for (StorageItem allFileSystem : allFileSystems) {
@@ -203,11 +201,10 @@ public class StorageCleaner {
             }
         }
         boolean allSuccess = cleanup();
-        duration = System.currentTimeMillis() - start;
-        printConsole(allSuccess);
+        printConsole(allSuccess, System.currentTimeMillis() - start);
     }
 
-    public void printConsole(boolean success) {
+    public void printConsole(boolean success, long duration) {
         System.out.println(ANSI_BLUE + "Kyligence Enterprise garbage report: (cleanup=" + cleanup + ")" + ANSI_RESET);
         for (StorageItem item : outdatedItems) {
             System.out.println("  Storage File: " + item.getPath());
@@ -227,7 +224,7 @@ public class StorageCleaner {
 
     }
 
-    public void collectDeletedProject() throws IOException {
+    public void collectDeletedProject() {
         val config = KylinConfig.getInstanceFromEnv();
         val projects = NProjectManager.getInstance(config).listAllProjects().stream().map(ProjectInstance::getName)
                 .collect(Collectors.toSet());
@@ -369,7 +366,7 @@ public class StorageCleaner {
                     .forEach(layout -> {
                         activeIndexDataPath.add(getDataLayoutDir(layout));
                         layout.getMultiPartition().forEach(partition -> //
-                                activeBucketDataPath.add(getDataPartitionDir(layout, partition)));
+                        activeBucketDataPath.add(getDataPartitionDir(layout, partition)));
                     }));
             activeIndexDataPath
                     .forEach(path -> activeFastBitmapIndexDataPath.add(path + HadoopUtil.FAST_BITMAP_SUFFIX));
@@ -513,8 +510,9 @@ public class StorageCleaner {
     }
 
     private void collectFromHDFS(StorageItem item) throws Exception {
-        val projectFolders = item.getFileSystemDecorator().listStatus(new Path(item.getPath()), path -> !path.getName().startsWith("_")
-                && (this.projectNames.isEmpty() || this.projectNames.contains(path.getName())));
+        val projectFolders = item.getFileSystemDecorator().listStatus(new Path(item.getPath()),
+                path -> !path.getName().startsWith("_")
+                        && (this.projectNames.isEmpty() || this.projectNames.contains(path.getName())));
         for (FileStatus projectFolder : projectFolders) {
             List<FileTreeNode> tableSnapshotParents = Lists.newArrayList();
             val projectNode = new ProjectFileTreeNode(projectFolder.getPath().getName());
@@ -528,7 +526,8 @@ public class StorageCleaner {
                 val treeNode = new FileTreeNode(pair.getFirst(), projectNode);
                 try {
                     log.debug("collect files from {}", pair.getFirst());
-                    Stream.of(item.getFileSystemDecorator().listStatus(new Path(item.getPath(), treeNode.getRelativePath())))
+                    Stream.of(item.getFileSystemDecorator()
+                            .listStatus(new Path(item.getPath(), treeNode.getRelativePath())))
                             .forEach(x -> pair.getSecond().add(new FileTreeNode(x.getPath().getName(), treeNode)));
                 } catch (FileNotFoundException e) {
                     log.info("folder {} not found", new Path(item.getPath(), treeNode.getRelativePath()));
@@ -545,42 +544,43 @@ public class StorageCleaner {
                 val slot = pair.getSecond();
                 for (FileTreeNode node : pair.getFirst()) {
                     log.debug("collect from {} -> {}", node.getName(), node);
-                    Stream.of(item.getFileSystemDecorator().listStatus(new Path(item.getPath(), node.getRelativePath())))
+                    Stream.of(
+                            item.getFileSystemDecorator().listStatus(new Path(item.getPath(), node.getRelativePath())))
                             .forEach(x -> slot.add(new FileTreeNode(x.getPath().getName(), node)));
                 }
             }
-            collectMultiPartitions(item, projectNode);
+            projectNode.getBuckets().addAll(collectMultiPartitions(item, projectNode.getName(), projectNode.getLayouts()));
         }
 
     }
 
-    private void collectMultiPartitions(StorageItem item, ProjectFileTreeNode projectNode) throws IOException {
-        String project = projectNode.getName();
+    private List<FileTreeNode> collectMultiPartitions(StorageItem item, String project, List<FileTreeNode> layouts)
+            throws IOException {
         NDataflowManager manager = NDataflowManager.getInstance(kylinConfig, project);
-        Map<String, Boolean> cached = new HashMap<>();
+        FileSystemDecorator fileSystemDecorator = item.getFileSystemDecorator();
+        String itemPath = item.getPath();
+        List<FileTreeNode> result = Lists.newArrayList();
+        HashSet<String> cached = Sets.newHashSet();
         // Buckets do not certainly exist.
         // Only multi level partition model should do this.
-        val buckets = projectNode.getBuckets();
-        for (FileTreeNode node : projectNode.getLayouts()) {
-            String dataflowId = node.getParent() // segment
-                    .getParent().getName(); // dataflow
-            if (!cached.containsKey(dataflowId)) {
-                NDataflow dataflow = manager.getDataflow(dataflowId);
-                if (Objects.nonNull(dataflow) //
-                        && Objects.nonNull(dataflow.getModel()) //
-                        && dataflow.getModel().isMultiPartitionModel()) {
-                    cached.put(dataflowId, true);
-                } else {
-                    cached.put(dataflowId, false);
-                }
+        for (FileTreeNode node : layouts) {
+            String dataflowId = node.getParent().getParent().getName(); // dataflow
+            if (cached.contains(dataflowId)) {
+                continue;
             }
-
-            if (Boolean.TRUE.equals(cached.get(dataflowId))) {
-                Stream.of(item.getFileSystemDecorator().listStatus(new Path(item.getPath(), node.getRelativePath())))
+            NDataflow dataflow = manager.getDataflow(dataflowId);
+            if (Objects.nonNull(dataflow) //
+                    && Objects.nonNull(dataflow.getModel()) //
+                    && dataflow.getModel().isMultiPartitionModel()) {
+                cached.add(dataflowId);
+                result.addAll(Stream.of(fileSystemDecorator.listStatus(new Path(itemPath, node.getRelativePath())))
                         .filter(FileStatus::isDirectory) // Essential check in case of bad design.
-                        .forEach(x -> buckets.add(new FileTreeNode(x.getPath().getName(), node)));
+                        .map(x -> new FileTreeNode(x.getPath().getName(), node)).collect(Collectors.toList()));
+            } else {
+                cached.add(dataflowId);
             }
         }
+        return result;
     }
 
     @AllArgsConstructor
@@ -610,7 +610,6 @@ public class StorageCleaner {
                     Thread.sleep(1000);
                 } catch (InterruptedException ie) {
                     log.error("Failed to sleep!", ie);
-                    ie.printStackTrace();
                     Thread.currentThread().interrupt();
                 }
             }
@@ -711,6 +710,7 @@ public class StorageCleaner {
     }
 
     @Data
+    @NoArgsConstructor
     @AllArgsConstructor
     @RequiredArgsConstructor
     public static class FileTreeNode {
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/kerberos/DelegationTokenManager.java b/src/common-service/src/main/java/org/apache/kylin/tool/kerberos/DelegationTokenManager.java
similarity index 100%
rename from src/tool/src/main/java/org/apache/kylin/tool/kerberos/DelegationTokenManager.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/kerberos/DelegationTokenManager.java
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/kerberos/KerberosLoginUtil.java b/src/common-service/src/main/java/org/apache/kylin/tool/kerberos/KerberosLoginUtil.java
similarity index 71%
rename from src/tool/src/main/java/org/apache/kylin/tool/kerberos/KerberosLoginUtil.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/kerberos/KerberosLoginUtil.java
index af8ab6d18f..4b8ef3e211 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/kerberos/KerberosLoginUtil.java
+++ b/src/common-service/src/main/java/org/apache/kylin/tool/kerberos/KerberosLoginUtil.java
@@ -19,11 +19,12 @@ package org.apache.kylin.tool.kerberos;
 
 import java.io.BufferedWriter;
 import java.io.File;
-import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.io.OutputStreamWriter;
 import java.nio.charset.Charset;
+import java.nio.file.Files;
+import java.nio.file.Paths;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -72,7 +73,7 @@ public class KerberosLoginUtil {
     private static final String LOGIN_FAILED_CAUSE_TIME_OUT = "(time out) can not connect to kdc server or there is fire wall in the network";
     private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM");
 
-    public synchronized static void login(String userPrincipal, String userKeytabPath, String krb5ConfPath,
+    public static synchronized void login(String userPrincipal, String userKeytabPath, String krb5ConfPath,
             Configuration conf) throws IOException {
         // 1.check input parameters
         if ((userPrincipal == null) || (userPrincipal.length() <= 0)) {
@@ -97,23 +98,29 @@ public class KerberosLoginUtil {
 
         // 2.check file exsits
         File userKeytabFile = new File(userKeytabPath);
+        String userKeytabFilename = "userKeytabFile(" + userKeytabFile.getAbsolutePath() + ")";
         if (!userKeytabFile.exists()) {
-            LOG.error("userKeytabFile(" + userKeytabFile.getAbsolutePath() + ") does not exsit.");
-            throw new IOException("userKeytabFile(" + userKeytabFile.getAbsolutePath() + ") does not exsit.");
+            String message = userKeytabFilename + " does not exist.";
+            LOG.error(message);
+            throw new IOException(message);
         }
         if (!userKeytabFile.isFile()) {
-            LOG.error("userKeytabFile(" + userKeytabFile.getAbsolutePath() + ") is not a file.");
-            throw new IOException("userKeytabFile(" + userKeytabFile.getAbsolutePath() + ") is not a file.");
+            String message = userKeytabFilename + " is not a file.";
+            LOG.error(message);
+            throw new IOException(message);
         }
 
         File krb5ConfFile = new File(krb5ConfPath);
+        String krb5ConfFilename = "krb5ConfFile(" + krb5ConfFile.getAbsolutePath() + ")";
         if (!krb5ConfFile.exists()) {
-            LOG.error("krb5ConfFile(" + krb5ConfFile.getAbsolutePath() + ") does not exsit.");
-            throw new IOException("krb5ConfFile(" + krb5ConfFile.getAbsolutePath() + ") does not exsit.");
+            String message = krb5ConfFilename + " does not exist.";
+            LOG.error(message);
+            throw new IOException(message);
         }
         if (!krb5ConfFile.isFile()) {
-            LOG.error("krb5ConfFile(" + krb5ConfFile.getAbsolutePath() + ") is not a file.");
-            throw new IOException("krb5ConfFile(" + krb5ConfFile.getAbsolutePath() + ") is not a file.");
+            String message = krb5ConfFilename + " is not a file.";
+            LOG.error(message);
+            throw new IOException(message);
         }
 
         // 3.set and check krb5config
@@ -125,50 +132,10 @@ public class KerberosLoginUtil {
         LOG.info("Login fi success!!!!!!!!!!!!!!");
     }
 
-    private static void setConfiguration(Configuration conf) throws IOException {
+    private static void setConfiguration(Configuration conf) {
         UserGroupInformation.setConfiguration(conf);
     }
 
-    private static boolean checkNeedLogin(String principal) throws IOException {
-        if (!UserGroupInformation.isSecurityEnabled()) {
-            LOG.error(
-                    "UserGroupInformation is not SecurityEnabled, please check if core-site.xml exists in classpath.");
-            throw new IOException(
-                    "UserGroupInformation is not SecurityEnabled, please check if core-site.xml exists in classpath.");
-        }
-        UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
-        if ((currentUser != null) && (currentUser.hasKerberosCredentials())) {
-            if (checkCurrentUserCorrect(principal)) {
-                LOG.info("current user is " + currentUser + "has logined.");
-                if (!currentUser.isFromKeytab()) {
-                    LOG.error("current user is not from keytab.");
-                    throw new IOException("current user is not from keytab.");
-                }
-                return false;
-            } else {
-                LOG.error("current user is " + currentUser
-                        + "has logined. please check your enviroment , especially when it used IBM JDK or kerberos for OS count login!!");
-                throw new IOException(
-                        "current user is " + currentUser + " has logined. And please check your enviroment!!");
-            }
-        }
-
-        return true;
-    }
-
-    public static void setKrb5Config(String krb5ConfFile) throws IOException {
-        Unsafe.setProperty(JAVA_SECURITY_KRB5_CONF_KEY, krb5ConfFile);
-        String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF_KEY);
-        if (ret == null) {
-            LOG.error(JAVA_SECURITY_KRB5_CONF_KEY + " is null.");
-            throw new IOException(JAVA_SECURITY_KRB5_CONF_KEY + " is null.");
-        }
-        if (!ret.equals(krb5ConfFile)) {
-            LOG.error(JAVA_SECURITY_KRB5_CONF_KEY + " is " + ret + " is not " + krb5ConfFile + ".");
-            throw new IOException(JAVA_SECURITY_KRB5_CONF_KEY + " is " + ret + " is not " + krb5ConfFile + ".");
-        }
-    }
-
     public static void setJaasFile(String principal, String keytabPath) throws IOException {
         String jaasPath = new File(System.getProperty("java.io.tmpdir")) + File.separator
                 + System.getProperty("user.name") + JAAS_POSTFIX;
@@ -183,8 +150,8 @@ public class KerberosLoginUtil {
     }
 
     private static void writeJaasFile(String jaasPath, String principal, String keytabPath) throws IOException {
-        try (OutputStream os = new FileOutputStream(jaasPath);
-                BufferedWriter writer = new BufferedWriter(
+        try (OutputStream os = Files.newOutputStream(Paths.get(jaasPath));
+             BufferedWriter writer = new BufferedWriter(
                         new OutputStreamWriter(os, Charset.defaultCharset().name()))) {
             writer.write(getJaasConfContext(principal, keytabPath));
             writer.flush();
@@ -196,9 +163,7 @@ public class KerberosLoginUtil {
     private static void deleteJaasFile(String jaasPath) throws IOException {
         File jaasFile = new File(jaasPath);
         if (jaasFile.exists()) {
-            if (!jaasFile.delete()) {
-                throw new IOException("Failed to delete exists jaas file.");
-            }
+            Files.delete(jaasFile.toPath());
         }
     }
 
@@ -311,30 +276,26 @@ public class KerberosLoginUtil {
     }
 
     public static void setZookeeperServerPrincipal(String zkServerPrincipal) throws IOException {
-        Unsafe.setProperty(ZOOKEEPER_SERVER_PRINCIPAL_KEY, zkServerPrincipal);
-        String ret = System.getProperty(ZOOKEEPER_SERVER_PRINCIPAL_KEY);
-        if (ret == null) {
-            LOG.error(ZOOKEEPER_SERVER_PRINCIPAL_KEY + " is null.");
-            throw new IOException(ZOOKEEPER_SERVER_PRINCIPAL_KEY + " is null.");
-        }
-        if (!ret.equals(zkServerPrincipal)) {
-            LOG.error(ZOOKEEPER_SERVER_PRINCIPAL_KEY + " is " + ret + " is not " + zkServerPrincipal + ".");
-            throw new IOException(ZOOKEEPER_SERVER_PRINCIPAL_KEY + " is " + ret + " is not " + zkServerPrincipal + ".");
-        }
+        setZookeeperServerPrincipal(ZOOKEEPER_SERVER_PRINCIPAL_KEY, zkServerPrincipal);
+    }
+    public static void setKrb5Config(String krb5ConfFile) throws IOException {
+        setZookeeperServerPrincipal(JAVA_SECURITY_KRB5_CONF_KEY, krb5ConfFile);
     }
 
-    @Deprecated
+
     public static void setZookeeperServerPrincipal(String zkServerPrincipalKey, String zkServerPrincipal)
             throws IOException {
         Unsafe.setProperty(zkServerPrincipalKey, zkServerPrincipal);
         String ret = System.getProperty(zkServerPrincipalKey);
         if (ret == null) {
-            LOG.error(zkServerPrincipalKey + " is null.");
-            throw new IOException(zkServerPrincipalKey + " is null.");
+            String message = zkServerPrincipalKey + " is null.";
+            LOG.error(message);
+            throw new IOException(message);
         }
         if (!ret.equals(zkServerPrincipal)) {
-            LOG.error(zkServerPrincipalKey + " is " + ret + " is not " + zkServerPrincipal + ".");
-            throw new IOException(zkServerPrincipalKey + " is " + ret + " is not " + zkServerPrincipal + ".");
+            String message = zkServerPrincipalKey + " is " + ret + " is not " + zkServerPrincipal + ".";
+            LOG.error(message);
+            throw new IOException(message);
         }
     }
 
@@ -353,55 +314,6 @@ public class KerberosLoginUtil {
         }
     }
 
-    private static void checkAuthenticateOverKrb() throws IOException {
-        UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
-        UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
-        if (loginUser == null) {
-            LOG.error("current user is " + currentUser + ", but loginUser is null.");
-            throw new IOException("current user is " + currentUser + ", but loginUser is null.");
-        }
-        if (!loginUser.equals(currentUser)) {
-            LOG.error("current user is " + currentUser + ", but loginUser is " + loginUser + ".");
-            throw new IOException("current user is " + currentUser + ", but loginUser is " + loginUser + ".");
-        }
-        if (!loginUser.hasKerberosCredentials()) {
-            LOG.error("current user is " + currentUser + " has no Kerberos Credentials.");
-            throw new IOException("current user is " + currentUser + " has no Kerberos Credentials.");
-        }
-        if (!UserGroupInformation.isLoginKeytabBased()) {
-            LOG.error("current user is " + currentUser + " is not Login Keytab Based.");
-            throw new IOException("current user is " + currentUser + " is not Login Keytab Based.");
-        }
-    }
-
-    private static boolean checkCurrentUserCorrect(String principal) throws IOException {
-        UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
-        if (ugi == null) {
-            LOG.error("current user still null.");
-            throw new IOException("current user still null.");
-        }
-
-        String defaultRealm = null;
-        try {
-            defaultRealm = KerberosUtil.getDefaultRealm();
-        } catch (Exception e) {
-            LOG.warn("getDefaultRealm failed.");
-            throw new IOException(e);
-        }
-
-        if ((defaultRealm != null) && (defaultRealm.length() > 0)) {
-            StringBuilder realm = new StringBuilder();
-            StringBuilder principalWithRealm = new StringBuilder();
-            realm.append("@").append(defaultRealm);
-            if (!principal.endsWith(realm.toString())) {
-                principalWithRealm.append(principal).append(realm);
-                principal = principalWithRealm.toString();
-            }
-        }
-
-        return principal.equals(ugi.getUserName());
-    }
-
     public static boolean checkKeyTabIsValid(String path) {
         return KeyTab.getInstance(new File(path)).isValid();
     }
@@ -429,8 +341,8 @@ public class KerberosLoginUtil {
      * login.
      */
     private static class JaasConfiguration extends javax.security.auth.login.Configuration {
-        private static final Map<String, String> BASIC_JAAS_OPTIONS = new HashMap<String, String>();
-        private static final Map<String, String> KEYTAB_KERBEROS_OPTIONS = new HashMap<String, String>();
+        private static final Map<String, String> BASIC_JAAS_OPTIONS = new HashMap<>();
+        private static final Map<String, String> KEYTAB_KERBEROS_OPTIONS = new HashMap<>();
         private static final AppConfigurationEntry KEYTAB_KERBEROS_LOGIN = new AppConfigurationEntry(
                 KerberosUtil.getKrb5LoginModuleName(), AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
                 KEYTAB_KERBEROS_OPTIONS);
@@ -463,12 +375,12 @@ public class KerberosLoginUtil {
         private final String principal;
         private javax.security.auth.login.Configuration baseConfig;
 
-        public JaasConfiguration(String loginContextName, String principal, String keytabFile) throws IOException {
+        public JaasConfiguration(String loginContextName, String principal, String keytabFile) {
             this(loginContextName, principal, keytabFile, keytabFile == null || keytabFile.length() == 0);
         }
 
-        private JaasConfiguration(String loginContextName, String principal, String keytabFile, boolean useTicketCache)
-                throws IOException {
+        private JaasConfiguration(String loginContextName, String principal, String keytabFile,
+                boolean useTicketCache) {
             try {
                 this.baseConfig = javax.security.auth.login.Configuration.getConfiguration();
             } catch (SecurityException e) {
@@ -484,7 +396,7 @@ public class KerberosLoginUtil {
                     + " useTicketCache=" + useTicketCache + " keytabFile=" + keytabFile);
         }
 
-        private void initKerberosOption() throws IOException {
+        private void initKerberosOption() {
             if (!useTicketCache) {
                 if (IS_IBM_JDK) {
                     KEYTAB_KERBEROS_OPTIONS.put("useKeytab", keytabFile);
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/util/LdapUtils.java b/src/common-service/src/main/java/org/apache/kylin/tool/util/LdapUtils.java
similarity index 100%
rename from src/tool/src/main/java/org/apache/kylin/tool/util/LdapUtils.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/util/LdapUtils.java
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/util/ProjectTemporaryTableCleanerHelper.java b/src/common-service/src/main/java/org/apache/kylin/tool/util/ProjectTemporaryTableCleanerHelper.java
similarity index 100%
rename from src/tool/src/main/java/org/apache/kylin/tool/util/ProjectTemporaryTableCleanerHelper.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/util/ProjectTemporaryTableCleanerHelper.java
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/util/ToolUtil.java b/src/common-service/src/main/java/org/apache/kylin/tool/util/ToolUtil.java
similarity index 90%
rename from src/tool/src/main/java/org/apache/kylin/tool/util/ToolUtil.java
rename to src/common-service/src/main/java/org/apache/kylin/tool/util/ToolUtil.java
index ab91162b3d..b4cccb2868 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/util/ToolUtil.java
+++ b/src/common-service/src/main/java/org/apache/kylin/tool/util/ToolUtil.java
@@ -23,6 +23,7 @@ import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.Socket;
 import java.net.UnknownHostException;
+import java.nio.charset.Charset;
 import java.util.Locale;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -35,10 +36,10 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.ResourceStore;
+import org.apache.kylin.common.util.AddressUtil;
 import org.apache.kylin.common.util.CliCommandExecutor;
 import org.apache.kylin.common.util.HadoopUtil;
 import org.apache.kylin.common.util.ShellException;
-import org.apache.kylin.common.util.AddressUtil;
 import org.apache.kylin.query.util.ExtractFactory;
 import org.apache.spark.sql.SparderEnv;
 
@@ -56,19 +57,19 @@ public class ToolUtil {
     public static void dumpKylinJStack(File outputFile) throws IOException, ShellException {
         String jstackDumpCmd = String.format(Locale.ROOT, "jstack -l %s", getKylinPid());
         val result = new CliCommandExecutor().execute(jstackDumpCmd, null);
-        FileUtils.writeStringToFile(outputFile, result.getCmd());
+        FileUtils.writeStringToFile(outputFile, result.getCmd(), Charset.defaultCharset());
     }
 
     public static String getKylinPid() {
         File pidFile = new File(getKylinHome(), "pid");
         if (pidFile.exists()) {
             try {
-                return FileUtils.readFileToString(pidFile);
+                return FileUtils.readFileToString(pidFile, Charset.defaultCharset());
             } catch (IOException e) {
-                throw new RuntimeException("Error reading KYLIN PID file.", e);
+                throw new IllegalStateException("Error reading KYLIN PID file.", e);
             }
         }
-        throw new RuntimeException("Cannot find KYLIN PID file.");
+        throw new IllegalStateException("Cannot find KYLIN PID file.");
     }
 
     public static String getKylinHome() {
@@ -80,7 +81,7 @@ public class ToolUtil {
         if (StringUtils.isNotEmpty(path)) {
             return path;
         }
-        throw new RuntimeException("Cannot find KYLIN_HOME.");
+        throw new IllegalStateException("Cannot find KYLIN_HOME.");
     }
 
     public static String getBinFolder() {
@@ -157,12 +158,11 @@ public class ToolUtil {
     }
 
     public static boolean waitForSparderRollUp() {
-        boolean isRollUp = false;
         val extractor = ExtractFactory.create();
         String check = SparderEnv.rollUpEventLog();
         if (StringUtils.isBlank(check)) {
             log.info("Failed to roll up eventLog because the spader is closed.");
-            return isRollUp;
+            return false;
         }
         String logDir = extractor.getSparderEvenLogDir();
         ExecutorService es = Executors.newSingleThreadExecutor();
@@ -176,16 +176,19 @@ public class ToolUtil {
                     Thread.sleep(1000);
                 }
             });
-            if (task.get(10, TimeUnit.SECONDS)) {
+            if (Boolean.TRUE.equals(task.get(10, TimeUnit.SECONDS))) {
                 fs.delete(new Path(logDir, check), false);
-                isRollUp = true;
+                return true;
             }
+        } catch (InterruptedException e) {
+            log.warn("Sparder eventLog rollUp failed.", e);
+            Thread.currentThread().interrupt();
         } catch (Exception e) {
             log.warn("Sparder eventLog rollUp failed.", e);
         } finally {
             es.shutdown();
         }
-        return isRollUp;
+        return false;
     }
 
     public static boolean isPortAvailable(String ip, int port) {
diff --git a/src/common-service/src/test/java/org/apache/kylin/rest/service/LdapUserServiceTest.java b/src/common-service/src/test/java/org/apache/kylin/rest/service/LdapUserServiceTest.java
index f897e4735a..1b07ad2985 100644
--- a/src/common-service/src/test/java/org/apache/kylin/rest/service/LdapUserServiceTest.java
+++ b/src/common-service/src/test/java/org/apache/kylin/rest/service/LdapUserServiceTest.java
@@ -40,6 +40,7 @@ import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.EncryptUtil;
 import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
 import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.helper.UpdateUserAclToolHelper;
 import org.apache.kylin.metadata.usergroup.UserGroup;
 import org.apache.kylin.rest.response.UserGroupResponseKI;
 import org.apache.kylin.rest.security.AclPermission;
@@ -47,7 +48,6 @@ import org.apache.kylin.rest.security.UserAclManager;
 import org.apache.kylin.rest.util.AclEvaluate;
 import org.apache.kylin.rest.util.AclUtil;
 import org.apache.kylin.rest.util.SpringContext;
-import org.apache.kylin.tool.upgrade.UpdateUserAclTool;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -255,6 +255,9 @@ public class LdapUserServiceTest extends NLocalFileMetadataTestCase {
     public void testCompleteUserInfoWithNotExistUser() {
         ManagedUser user = new ManagedUser("NotExist", "", false);
         ldapUserService.completeUserInfo(user);
+        Assert.assertEquals("NotExist", user.getUsername());
+        Assert.assertEquals("", user.getPassword());
+        Assert.assertFalse(user.isDefaultPassword());
     }
 
     @Test
@@ -395,11 +398,11 @@ public class LdapUserServiceTest extends NLocalFileMetadataTestCase {
 
     @Test
     public void testGetLdapAdminUsers() {
-        UpdateUserAclTool tool = Mockito.spy(new UpdateUserAclTool());
         val properties = getTestConfig().exportToProperties();
         val password = properties.getProperty("kylin.security.ldap.connection-password");
-        Mockito.when(tool.getPassword(properties)).thenReturn(EncryptUtil.decrypt(password));
-        Assert.assertNotNull(tool.getLdapAdminUsers());
+        UpdateUserAclToolHelper helper = Mockito.spy(UpdateUserAclToolHelper.getInstance());
+        Mockito.when(helper.getPassword(properties)).thenReturn(EncryptUtil.decrypt(password));
+        Assert.assertNotNull(helper.getLdapAdminUsers());
     }
 
     @Test
diff --git a/src/common-service/src/test/java/org/apache/kylin/rest/service/OpenUserServiceTest.java b/src/common-service/src/test/java/org/apache/kylin/rest/service/OpenUserServiceTest.java
index ef2311de30..85852ff3ab 100644
--- a/src/common-service/src/test/java/org/apache/kylin/rest/service/OpenUserServiceTest.java
+++ b/src/common-service/src/test/java/org/apache/kylin/rest/service/OpenUserServiceTest.java
@@ -28,13 +28,13 @@ import java.util.Properties;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.scheduler.EventBusFactory;
 import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
+import org.apache.kylin.helper.UpdateUserAclToolHelper;
 import org.apache.kylin.rest.config.initialize.UserAclListener;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.security.AclPermission;
 import org.apache.kylin.rest.security.AdminUserAspect;
 import org.apache.kylin.rest.security.UserAclManager;
 import org.apache.kylin.rest.util.SpringContext;
-import org.apache.kylin.tool.upgrade.UpdateUserAclTool;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -143,9 +143,8 @@ public class OpenUserServiceTest extends NLocalFileMetadataTestCase {
         //test list admin
         getTestConfig().setProperty("kylin.security.profile", "custom");
         val adminUserAspect = SpringContext.getBean(AdminUserAspect.class);
-        ReflectionTestUtils.setField(adminUserAspect, "tool", Mockito.spy(new UpdateUserAclTool()));
-        val tool = (UpdateUserAclTool) ReflectionTestUtils.getField(adminUserAspect, "tool");
-        Mockito.when(tool.isUpgraded()).thenReturn(true);
+        UpdateUserAclToolHelper helper = Mockito.spy(UpdateUserAclToolHelper.getInstance());
+        Mockito.when(helper.isUpgraded()).thenReturn(true);
         adminUserAspect.doAfterListAdminUsers(Collections.emptyList());
         Assert.assertFalse((Boolean) ReflectionTestUtils.getField(adminUserAspect, "superAdminInitialized"));
         List<String> admins = userService.listAdminUsers();
diff --git a/src/tool/src/test/java/org/apache/kylin/tool/util/ProjectTemporaryTableCleanerHelperTest.java b/src/common-service/src/test/java/org/apache/kylin/tool/util/ProjectTemporaryTableCleanerHelperTest.java
similarity index 100%
rename from src/tool/src/test/java/org/apache/kylin/tool/util/ProjectTemporaryTableCleanerHelperTest.java
rename to src/common-service/src/test/java/org/apache/kylin/tool/util/ProjectTemporaryTableCleanerHelperTest.java
diff --git a/src/tool/src/test/java/org/apache/kylin/tool/util/ToolUtilTest.java b/src/common-service/src/test/java/org/apache/kylin/tool/util/ToolUtilTest.java
similarity index 100%
rename from src/tool/src/test/java/org/apache/kylin/tool/util/ToolUtilTest.java
rename to src/common-service/src/test/java/org/apache/kylin/tool/util/ToolUtilTest.java
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/util/Pair.java b/src/core-common/src/main/java/org/apache/kylin/common/util/Pair.java
index 102a81d0cf..2fd62b6450 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/util/Pair.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/util/Pair.java
@@ -56,7 +56,7 @@ public class Pair<T1, T2> implements Serializable {
      * @return a new pair containing the passed arguments
      */
     public static <T1, T2> Pair<T1, T2> newPair(T1 a, T2 b) {
-        return new Pair<T1, T2>(a, b);
+        return new Pair<>(a, b);
     }
 
     private static boolean equals(Object x, Object y) {
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/util/HashFunction.java b/src/core-common/src/main/java/org/apache/kylin/tool/util/HashFunction.java
similarity index 100%
rename from src/tool/src/main/java/org/apache/kylin/tool/util/HashFunction.java
rename to src/core-common/src/main/java/org/apache/kylin/tool/util/HashFunction.java
diff --git a/src/tool/src/test/java/org/apache/kylin/tool/util/HashFunctionTest.java b/src/core-common/src/test/java/org/apache/kylin/tool/util/HashFunctionTest.java
similarity index 100%
rename from src/tool/src/test/java/org/apache/kylin/tool/util/HashFunctionTest.java
rename to src/core-common/src/test/java/org/apache/kylin/tool/util/HashFunctionTest.java
diff --git a/src/core-job/src/main/java/org/apache/kylin/job/execution/NExecutableManager.java b/src/core-job/src/main/java/org/apache/kylin/job/execution/NExecutableManager.java
index 66582f5bb8..b22dee724f 100644
--- a/src/core-job/src/main/java/org/apache/kylin/job/execution/NExecutableManager.java
+++ b/src/core-job/src/main/java/org/apache/kylin/job/execution/NExecutableManager.java
@@ -245,8 +245,6 @@ public class NExecutableManager {
         }
     }
 
-    //for ut
-    @VisibleForTesting
     public void deleteJob(String jobId) {
         checkJobCanBeDeleted(jobId);
         executableDao.deleteJob(jobId);
diff --git a/src/core-metadata/pom.xml b/src/core-metadata/pom.xml
index 74c4765391..d3c0bf0c87 100644
--- a/src/core-metadata/pom.xml
+++ b/src/core-metadata/pom.xml
@@ -40,6 +40,10 @@
             <groupId>org.apache.kylin</groupId>
             <artifactId>kylin-core-common</artifactId>
         </dependency>
+        <dependency>
+            <groupId>com.google.guava</groupId>
+            <artifactId>guava</artifactId>
+        </dependency>
         <dependency>
             <groupId>com.tdunning</groupId>
             <artifactId>t-digest</artifactId>
diff --git a/src/core-metadata/src/test/java/org/apache/kylin/metrics/HdfsCapacityMetricsTest.java b/src/core-metadata/src/test/java/org/apache/kylin/metrics/HdfsCapacityMetricsTest.java
index 689fbdc831..ce18241853 100644
--- a/src/core-metadata/src/test/java/org/apache/kylin/metrics/HdfsCapacityMetricsTest.java
+++ b/src/core-metadata/src/test/java/org/apache/kylin/metrics/HdfsCapacityMetricsTest.java
@@ -88,7 +88,10 @@ public class HdfsCapacityMetricsTest extends NLocalFileMetadataTestCase {
             fs.mkdirs(projectPath);
             fs.createNewFile(projectPath);
         }
+        Assert.assertTrue(hdfsCapacityMetrics.getWorkingDirCapacity().isEmpty());
         hdfsCapacityMetrics.writeHdfsMetrics();
+        Assert.assertEquals(28, hdfsCapacityMetrics.getWorkingDirCapacity().size());
+
     }
 
     @Test
diff --git a/src/job-service/src/main/java/org/apache/kylin/rest/service/ScheduleService.java b/src/job-service/src/main/java/org/apache/kylin/rest/service/ScheduleService.java
index 05b744c6fe..d78e05d64a 100644
--- a/src/job-service/src/main/java/org/apache/kylin/rest/service/ScheduleService.java
+++ b/src/job-service/src/main/java/org/apache/kylin/rest/service/ScheduleService.java
@@ -17,6 +17,7 @@
  */
 package org.apache.kylin.rest.service;
 
+import java.util.Collections;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -30,8 +31,8 @@ import org.apache.kylin.common.metrics.MetricsGroup;
 import org.apache.kylin.common.metrics.MetricsName;
 import org.apache.kylin.common.util.NamedThreadFactory;
 import org.apache.kylin.common.util.SetThreadName;
-import org.apache.kylin.tool.routine.FastRoutineTool;
-import org.apache.kylin.tool.routine.RoutineTool;
+import org.apache.kylin.helper.MetadataToolHelper;
+import org.apache.kylin.helper.RoutineToolHelper;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.stereotype.Service;
@@ -61,6 +62,7 @@ public class ScheduleService {
     private long opsCronTimeout;
 
     private static final ThreadLocal<Future<?>> CURRENT_FUTURE = new ThreadLocal<>();
+    private MetadataToolHelper metadataToolHelper = new MetadataToolHelper();
 
     @Scheduled(cron = "${kylin.metadata.ops-cron:0 0 0 * * *}")
     public void routineTask() {
@@ -74,15 +76,16 @@ public class ScheduleService {
             try (SetThreadName ignored = new SetThreadName("RoutineOpsWorker")) {
                 if (epochManager.checkEpochOwner(EpochManager.GLOBAL)) {
                     executeTask(() -> backupService.backupAll(), "MetadataBackup", startTime);
-                    executeTask(RoutineTool::cleanQueryHistories, "QueryHistoriesCleanup", startTime);
-                    executeTask(RoutineTool::cleanStreamingStats, "StreamingStatsCleanup", startTime);
-                    executeTask(RoutineTool::deleteRawRecItems, "RawRecItemsDeletion", startTime);
-                    executeTask(RoutineTool::cleanGlobalSourceUsage, "SourceUsageCleanup", startTime);
+                    executeTask(RoutineToolHelper::cleanQueryHistories, "QueryHistoriesCleanup", startTime);
+                    executeTask(RoutineToolHelper::cleanStreamingStats, "StreamingStatsCleanup", startTime);
+                    executeTask(RoutineToolHelper::deleteRawRecItems, "RawRecItemsDeletion", startTime);
+                    executeTask(RoutineToolHelper::cleanGlobalSourceUsage, "SourceUsageCleanup", startTime);
                     executeTask(() -> projectService.cleanupAcl(), "AclCleanup", startTime);
                 }
                 executeTask(() -> projectService.garbageCleanup(getRemainingTime(startTime)), "ProjectGarbageCleanup",
                         startTime);
-                executeTask(() -> newFastRoutineTool().execute(new String[] { "-c" }), "HdfsCleanup", startTime);
+                executeTask(() -> metadataToolHelper.cleanStorage(true, Collections.emptyList(), 0, 0), "HdfsCleanup",
+                        startTime);
                 log.info("Finish to work, cost {}ms", System.currentTimeMillis() - startTime);
             }
         } catch (InterruptedException e) {
@@ -114,7 +117,4 @@ public class ScheduleService {
         return opsCronTimeout - (System.currentTimeMillis() - startTime);
     }
 
-    public FastRoutineTool newFastRoutineTool() {
-        return new FastRoutineTool();
-    }
 }
diff --git a/src/kylin-it/pom.xml b/src/kylin-it/pom.xml
index ebeb329508..091bdaaea6 100644
--- a/src/kylin-it/pom.xml
+++ b/src/kylin-it/pom.xml
@@ -102,6 +102,10 @@
             <groupId>org.apache.kylin</groupId>
             <artifactId>kylin-streaming</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.apache.kylin</groupId>
+            <artifactId>kylin-tool</artifactId>
+        </dependency>
         <dependency>
             <groupId>org.apache.calcite</groupId>
             <artifactId>calcite-linq4j</artifactId>
diff --git a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NMetaStoreController.java b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NMetaStoreController.java
index 84649c4bcb..8fad872876 100644
--- a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NMetaStoreController.java
+++ b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NMetaStoreController.java
@@ -161,8 +161,7 @@ public class NMetaStoreController extends NBasicController {
     @ApiOperation(value = "cleanupStorage", tags = { "SM" })
     @PostMapping(value = "/cleanup_storage")
     @ResponseBody
-    public EnvelopeResponse<String> cleanupStorage(@RequestBody StorageCleanupRequest request) throws Exception {
-
+    public EnvelopeResponse<String> cleanupStorage(@RequestBody StorageCleanupRequest request) {
         metaStoreService.cleanupStorage(request.getProjectsToClean(), request.isCleanupStorage());
         return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, "", "");
     }
diff --git a/src/modeling-service/src/main/java/org/apache/kylin/rest/service/MetaStoreService.java b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/MetaStoreService.java
index 32d3f93901..3680b15cc4 100644
--- a/src/modeling-service/src/main/java/org/apache/kylin/rest/service/MetaStoreService.java
+++ b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/MetaStoreService.java
@@ -37,6 +37,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
@@ -72,6 +73,8 @@ import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.MetadataChecker;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.common.util.RandomUtil;
+import org.apache.kylin.helper.MetadataToolHelper;
+import org.apache.kylin.helper.RoutineToolHelper;
 import org.apache.kylin.metadata.cube.model.IndexEntity;
 import org.apache.kylin.metadata.cube.model.IndexPlan;
 import org.apache.kylin.metadata.cube.model.NDataflowManager;
@@ -106,7 +109,6 @@ import org.apache.kylin.rest.util.AclEvaluate;
 import org.apache.kylin.rest.util.AclPermissionUtil;
 import org.apache.kylin.source.ISourceMetadataExplorer;
 import org.apache.kylin.source.SourceFactory;
-import org.apache.kylin.tool.routine.RoutineTool;
 import org.apache.kylin.tool.util.HashFunction;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -152,6 +154,8 @@ public class MetaStoreService extends BasicService {
     @Autowired(required = false)
     private List<ModelChangeSupporter> modelChangeSupporters = Lists.newArrayList();
 
+    MetadataToolHelper metadataToolHelper = new MetadataToolHelper();
+
     public List<ModelPreviewResponse> getPreviewModels(String project, List<String> ids) {
         aclEvaluate.checkProjectWritePermission(project);
         return modelService.getManager(NDataflowManager.class, project).listAllDataflows(true).stream()
@@ -706,8 +710,7 @@ public class MetaStoreService extends BasicService {
                     updateIndexPlan(project, nDataModel, targetIndexPlan, hasModelOverrideProps);
                     addWhiteListIndex(project, modelSchemaChange, targetIndexPlan);
 
-                    importRecommendations(project, nDataModel.getUuid(), importDataModel.getUuid(),
-                            targetKylinConfig);
+                    importRecommendations(project, nDataModel.getUuid(), importDataModel.getUuid(), targetKylinConfig);
                 }
             } catch (Exception e) {
                 logger.warn("Import model {} exception", modelImport.getOriginalName(), e);
@@ -787,17 +790,14 @@ public class MetaStoreService extends BasicService {
 
     public void cleanupMeta(String project) {
         if (project.equals(UnitOfWork.GLOBAL_UNIT)) {
-            RoutineTool.cleanGlobalSourceUsage();
+            RoutineToolHelper.cleanGlobalSourceUsage();
             QueryHisStoreUtil.cleanQueryHistory();
         } else {
-            RoutineTool.cleanMetaByProject(project);
+            RoutineToolHelper.cleanMetaByProject(project);
         }
     }
 
     public void cleanupStorage(String[] projectsToClean, boolean cleanupStorage) {
-        RoutineTool routineTool = new RoutineTool();
-        routineTool.setProjects(projectsToClean);
-        routineTool.setStorageCleanup(cleanupStorage);
-        routineTool.cleanStorage();
+        metadataToolHelper.cleanStorage(cleanupStorage, Arrays.asList(projectsToClean), 0D, 0);
     }
 }
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/BISyncModel.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/BISyncModel.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/BISyncModel.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/BISyncModel.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/BISyncModelConverter.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/BISyncModelConverter.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/BISyncModelConverter.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/BISyncModelConverter.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/BISyncTool.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/BISyncTool.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/BISyncTool.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/BISyncTool.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/SyncContext.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/SyncContext.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/SyncContext.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/SyncContext.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/SyncModelBuilder.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/SyncModelBuilder.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/SyncModelBuilder.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/SyncModelBuilder.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/model/ColumnDef.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/model/ColumnDef.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/model/ColumnDef.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/model/ColumnDef.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/model/JoinTreeNode.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/model/JoinTreeNode.java
similarity index 96%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/model/JoinTreeNode.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/model/JoinTreeNode.java
index 0613b23d81..1547608907 100644
--- a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/model/JoinTreeNode.java
+++ b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/model/JoinTreeNode.java
@@ -17,6 +17,7 @@
  */
 package org.apache.kylin.tool.bisync.model;
 
+import java.util.Collections;
 import java.util.Deque;
 import java.util.LinkedList;
 import java.util.List;
@@ -50,7 +51,7 @@ public class JoinTreeNode {
      */
     public List<JoinTableDesc> iteratorAsList() {
         if (this.value == null) {
-            return null;
+            return Collections.emptyList();
         }
 
         Deque<JoinTreeNode> nodeDeque = new LinkedList<>();
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/model/MeasureDef.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/model/MeasureDef.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/model/MeasureDef.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/model/MeasureDef.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/model/SyncModel.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/model/SyncModel.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/model/SyncModel.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/model/SyncModel.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDataSourceConverter.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDataSourceConverter.java
similarity index 99%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDataSourceConverter.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDataSourceConverter.java
index 6540df9238..b1860cc4f9 100644
--- a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDataSourceConverter.java
+++ b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDataSourceConverter.java
@@ -61,7 +61,7 @@ import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.dataformat.xml.XmlMapper;
 
-public class TableauDataSourceConverter implements BISyncModelConverter {
+public class TableauDataSourceConverter implements BISyncModelConverter<TableauDatasourceModel> {
 
     private static final String ODBC_CONNECTION_PROJECT_PREFIX = "PROJECT=";
     private static final String ODBC_CONNECTION_MODEL_PREFIX = "CUBE=";
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDatasourceModel.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDatasourceModel.java
similarity index 95%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDatasourceModel.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDatasourceModel.java
index dbec106791..f0f22ba0fc 100644
--- a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDatasourceModel.java
+++ b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDatasourceModel.java
@@ -42,7 +42,7 @@ public class TableauDatasourceModel implements BISyncModel {
         this.tableauDatasource = tableauDatasource;
     }
 
-    public static void dumpModelAsXML(TableauDatasource BISyncModel, OutputStream outputStream)
+    public static void dumpModelAsXML(TableauDatasource biSyncModel, OutputStream outputStream)
             throws XMLStreamException, IOException {
         XmlMapper xmlMapper = new XmlMapper();
         XMLStreamWriter writer = xmlMapper.getFactory().getXMLOutputFactory().createXMLStreamWriter(outputStream);
@@ -50,7 +50,7 @@ public class TableauDatasourceModel implements BISyncModel {
         xmlMapper.enable(SerializationFeature.INDENT_OUTPUT);
         xmlMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
         xmlMapper.getFactory().getXMLOutputFactory().setProperty("javax.xml.stream.isRepairingNamespaces", false);
-        xmlMapper.writeValue(writer, BISyncModel);
+        xmlMapper.writeValue(writer, biSyncModel);
     }
 
     @Override
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/Aliases.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/Aliases.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/Aliases.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/Aliases.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/DrillPath.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/DrillPath.java
similarity index 80%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/DrillPath.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/DrillPath.java
index c4838da6bd..b18deb6c10 100644
--- a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/DrillPath.java
+++ b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/DrillPath.java
@@ -66,17 +66,4 @@ public class DrillPath {
         return Objects.hash(getName(), getFields());
     }
 
-    private boolean fieldsEquals(List<String> thatFields) {
-        if (getFields() == thatFields) {
-            return true;
-        }
-        if (getFields() != null && thatFields != null && getFields().size() == thatFields.size()) {
-            boolean flag = true;
-            for (int i = 0; i < getFields().size() && flag; i++) {
-                flag = Objects.equals(getFields().get(i), thatFields.get(i));
-            }
-            return flag;
-        }
-        return false;
-    }
 }
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/DrillPaths.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/DrillPaths.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/DrillPaths.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/DrillPaths.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/Layout.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/Layout.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/Layout.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/Layout.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/SemanticValue.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/SemanticValue.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/SemanticValue.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/SemanticValue.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/SemanticValueList.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/SemanticValueList.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/SemanticValueList.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/SemanticValueList.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/TableauConnection.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/TableauConnection.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/TableauConnection.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/TableauConnection.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/TableauDatasource.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/TableauDatasource.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/TableauDatasource.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/TableauDatasource.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/column/Calculation.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/column/Calculation.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/column/Calculation.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/column/Calculation.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/column/Column.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/column/Column.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/column/Column.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/column/Column.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Col.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Col.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Col.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Col.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Cols.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Cols.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Cols.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Cols.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Connection.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Connection.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Connection.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/Connection.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/ConnectionCustomization.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/ConnectionCustomization.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/ConnectionCustomization.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/ConnectionCustomization.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/NamedConnection.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/NamedConnection.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/NamedConnection.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/NamedConnection.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/NamedConnectionList.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/NamedConnectionList.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/NamedConnectionList.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/NamedConnectionList.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/Customization.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/Customization.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/Customization.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/Customization.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/CustomizationList.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/CustomizationList.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/CustomizationList.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/CustomizationList.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/Driver.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/Driver.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/Driver.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/Driver.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/Vendor.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/Vendor.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/Vendor.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/customization/Vendor.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/Attribute.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/Attribute.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/Attribute.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/Attribute.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/AttributeList.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/AttributeList.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/AttributeList.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/AttributeList.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/Collation.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/Collation.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/Collation.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/Collation.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/MetadataRecord.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/MetadataRecord.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/MetadataRecord.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/MetadataRecord.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/MetadataRecordList.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/MetadataRecordList.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/MetadataRecordList.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/metadata/MetadataRecordList.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/relation/Clause.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/relation/Clause.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/relation/Clause.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/relation/Clause.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/relation/Expression.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/relation/Expression.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/relation/Expression.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/relation/Expression.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/relation/Relation.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/relation/Relation.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/relation/Relation.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/datasource/connection/relation/Relation.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/mapping/FunctionMapping.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/mapping/FunctionMapping.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/mapping/FunctionMapping.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/mapping/FunctionMapping.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/mapping/Mappings.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/mapping/Mappings.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/mapping/Mappings.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/mapping/Mappings.java
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/mapping/TypeMapping.java b/src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/mapping/TypeMapping.java
similarity index 100%
rename from src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/mapping/TypeMapping.java
rename to src/modeling-service/src/main/java/org/apache/kylin/tool/bisync/tableau/mapping/TypeMapping.java
diff --git a/src/tool/src/main/resources/bisync/tds/tableau.connector.template.xml b/src/modeling-service/src/main/resources/bisync/tds/tableau.connector.template.xml
similarity index 100%
rename from src/tool/src/main/resources/bisync/tds/tableau.connector.template.xml
rename to src/modeling-service/src/main/resources/bisync/tds/tableau.connector.template.xml
diff --git a/src/tool/src/main/resources/bisync/tds/tableau.mappings.xml b/src/modeling-service/src/main/resources/bisync/tds/tableau.mappings.xml
similarity index 100%
rename from src/tool/src/main/resources/bisync/tds/tableau.mappings.xml
rename to src/modeling-service/src/main/resources/bisync/tds/tableau.mappings.xml
diff --git a/src/tool/src/main/resources/bisync/tds/tableau.template.xml b/src/modeling-service/src/main/resources/bisync/tds/tableau.template.xml
similarity index 100%
rename from src/tool/src/main/resources/bisync/tds/tableau.template.xml
rename to src/modeling-service/src/main/resources/bisync/tds/tableau.template.xml
diff --git a/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelTdsServiceTest.java b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelTdsServiceTest.java
index f4194a0be4..f1db49147f 100644
--- a/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelTdsServiceTest.java
+++ b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelTdsServiceTest.java
@@ -27,6 +27,7 @@ import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Objects;
 import java.util.Set;
 import java.util.stream.Collectors;
 
@@ -449,7 +450,7 @@ public class ModelTdsServiceTest extends SourceTestCase {
         val modelId = "cb596712-3a09-46f8-aea1-988b43fe9b6c";
         prepareBasic(project);
         SyncContext syncContext = tdsService.prepareSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
-                SyncContext.ModelElement.AGG_INDEX_AND_TABLE_INDEX_COL, "localhost", 8080);
+                SyncContext.ModelElement.AGG_INDEX_AND_TABLE_INDEX_COL, "localhost", 7070);
         SyncModel syncModel = tdsService.exportModel(syncContext);
         TableauDatasourceModel datasource1 = (TableauDatasourceModel) BISyncTool.getBISyncModel(syncContext, syncModel);
         ByteArrayOutputStream outStream4 = new ByteArrayOutputStream();
@@ -459,7 +460,8 @@ public class ModelTdsServiceTest extends SourceTestCase {
     }
 
     private String getExpectedTds(String path) throws IOException {
-        return CharStreams.toString(new InputStreamReader(getClass().getResourceAsStream(path), Charsets.UTF_8));
+        return CharStreams.toString(
+                new InputStreamReader(Objects.requireNonNull(getClass().getResourceAsStream(path)), Charsets.UTF_8));
     }
 
     private void prepareBasic(String project) {
diff --git a/src/tool/src/test/java/org/apache/kylin/tool/bisync/SyncModelBuilderTest.java b/src/modeling-service/src/test/java/org/apache/kylin/tool/bisync/SyncModelBuilderTest.java
similarity index 100%
rename from src/tool/src/test/java/org/apache/kylin/tool/bisync/SyncModelBuilderTest.java
rename to src/modeling-service/src/test/java/org/apache/kylin/tool/bisync/SyncModelBuilderTest.java
diff --git a/src/tool/src/test/java/org/apache/kylin/tool/bisync/SyncModelTestUtil.java b/src/modeling-service/src/test/java/org/apache/kylin/tool/bisync/SyncModelTestUtil.java
similarity index 100%
rename from src/tool/src/test/java/org/apache/kylin/tool/bisync/SyncModelTestUtil.java
rename to src/modeling-service/src/test/java/org/apache/kylin/tool/bisync/SyncModelTestUtil.java
diff --git a/src/tool/src/test/java/org/apache/kylin/tool/bisync/tableau/TableauDatasourceTest.java b/src/modeling-service/src/test/java/org/apache/kylin/tool/bisync/tableau/TableauDatasourceTest.java
similarity index 100%
rename from src/tool/src/test/java/org/apache/kylin/tool/bisync/tableau/TableauDatasourceTest.java
rename to src/modeling-service/src/test/java/org/apache/kylin/tool/bisync/tableau/TableauDatasourceTest.java
diff --git a/src/tool/src/test/resources/bisync_tableau/nmodel_basic_all_cols.tds b/src/modeling-service/src/test/resources/bisync_tableau/nmodel_basic_all_cols.tds
similarity index 100%
rename from src/tool/src/test/resources/bisync_tableau/nmodel_basic_all_cols.tds
rename to src/modeling-service/src/test/resources/bisync_tableau/nmodel_basic_all_cols.tds
diff --git a/src/tool/src/test/resources/bisync_tableau/nmodel_basic_inner_all_cols.tds b/src/modeling-service/src/test/resources/bisync_tableau/nmodel_basic_inner_all_cols.tds
similarity index 100%
rename from src/tool/src/test/resources/bisync_tableau/nmodel_basic_inner_all_cols.tds
rename to src/modeling-service/src/test/resources/bisync_tableau/nmodel_basic_inner_all_cols.tds
diff --git a/src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector.tds b/src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector.tds
index f8bcbfcd62..ac3e77ff62 100644
--- a/src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector.tds
+++ b/src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector.tds
@@ -3,7 +3,7 @@
   <connection class="federated">
     <named-connections>
       <named-connection caption="localhost" name="kyligence_odbc.06xjot407mgsfe1bnnyt60p4vjuf">
-        <connection class="kyligence_odbc" dbname="" odbc-connect-string-extras="PROJECT=default;CUBE=nmodel_full_measure_test" port="8080" schema="DEFAULT" server="localhost" username="ADMIN" vendor1="default" vendor2="nmodel_full_measure_test"/>
+        <connection class="kyligence_odbc" dbname="" odbc-connect-string-extras="PROJECT=default;CUBE=nmodel_full_measure_test" port="7070" schema="DEFAULT" server="localhost" username="ADMIN" vendor1="default" vendor2="nmodel_full_measure_test"/>
       </named-connection>
     </named-connections>
     <relation join="left" type="join">
diff --git a/src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_cc.tds b/src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_cc.tds
similarity index 100%
rename from src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_cc.tds
rename to src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_cc.tds
diff --git a/src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_cc_admin.tds b/src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_cc_admin.tds
similarity index 100%
rename from src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_cc_admin.tds
rename to src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_cc_admin.tds
diff --git a/src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_hierarchies.tds b/src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_hierarchies.tds
similarity index 100%
rename from src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_hierarchies.tds
rename to src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_hierarchies.tds
diff --git a/src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_no_hierarchies.tds b/src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_no_hierarchies.tds
similarity index 100%
rename from src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_no_hierarchies.tds
rename to src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_no_hierarchies.tds
diff --git a/src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission.tds b/src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission.tds
similarity index 100%
rename from src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission.tds
rename to src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission.tds
diff --git a/src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission_agg_index_col.tds b/src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission_agg_index_col.tds
similarity index 100%
rename from src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission_agg_index_col.tds
rename to src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission_agg_index_col.tds
diff --git a/src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission_all_col.tds b/src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission_all_col.tds
similarity index 100%
rename from src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission_all_col.tds
rename to src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission_all_col.tds
diff --git a/src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission_no_measure.tds b/src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission_no_measure.tds
similarity index 100%
rename from src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission_no_measure.tds
rename to src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector_permission_no_measure.tds
diff --git a/src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.table_index_connector.tds b/src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.table_index_connector.tds
similarity index 100%
rename from src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.table_index_connector.tds
rename to src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.table_index_connector.tds
diff --git a/src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.tds b/src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.tds
similarity index 100%
rename from src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.tds
rename to src/modeling-service/src/test/resources/bisync_tableau/nmodel_full_measure_test.tds
diff --git a/src/server/src/test/java/org/apache/kylin/rest/HAConfigurationTest.java b/src/server/src/test/java/org/apache/kylin/rest/HAConfigurationTest.java
index 3b25cd6717..d0e35ad355 100644
--- a/src/server/src/test/java/org/apache/kylin/rest/HAConfigurationTest.java
+++ b/src/server/src/test/java/org/apache/kylin/rest/HAConfigurationTest.java
@@ -23,9 +23,9 @@ import javax.sql.DataSource;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.metadata.jdbc.JdbcUtil;
+import org.apache.kylin.helper.MetadataToolHelper;
 import org.apache.kylin.junit.annotation.MetadataInfo;
 import org.apache.kylin.junit.annotation.OverwriteProp;
-import org.apache.kylin.tool.util.MetadataUtil;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
@@ -48,11 +48,13 @@ class HAConfigurationTest {
     @Mock
     SessionProperties sessionProperties;
 
+    MetadataToolHelper metadataToolHelper = new MetadataToolHelper();
+
     DataSource dataSource;
 
     @BeforeEach
     public void setup() throws Exception {
-        dataSource = Mockito.spy(MetadataUtil.getDataSource(getTestConfig()));
+        dataSource = Mockito.spy(metadataToolHelper.getDataSource(getTestConfig()));
         ReflectionTestUtils.setField(configuration, "dataSource", dataSource);
     }
 
diff --git a/src/systools/pom.xml b/src/systools/pom.xml
index 9e68da2eba..e69de29bb2 100644
--- a/src/systools/pom.xml
+++ b/src/systools/pom.xml
@@ -1,113 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <parent>
-        <artifactId>kylin</artifactId>
-        <groupId>org.apache.kylin</groupId>
-        <version>5.0.0-alpha-SNAPSHOT</version>
-        <relativePath>../../pom.xml</relativePath>
-    </parent>
-    <modelVersion>4.0.0</modelVersion>
-    <name>Kylin - System Tools</name>
-    <artifactId>kylin-systools</artifactId>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-core-metadata</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.springframework</groupId>
-            <artifactId>spring-webmvc</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.springframework.security</groupId>
-            <artifactId>spring-security-ldap</artifactId>
-        </dependency>
-        <dependency>
-            <groupId>org.springframework.security</groupId>
-            <artifactId>spring-security-acl</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.hadoop</groupId>
-            <artifactId>hadoop-common</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.projectlombok</groupId>
-            <artifactId>lombok</artifactId>
-            <scope>provided</scope>
-        </dependency>
-        <dependency>
-            <groupId>javax.servlet</groupId>
-            <artifactId>servlet-api</artifactId>
-        </dependency>
-
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-core-common</artifactId>
-            <type>test-jar</type>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.springframework.security</groupId>
-            <artifactId>spring-security-test</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.mockito</groupId>
-            <artifactId>mockito-core</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.javassist</groupId>
-            <artifactId>javassist</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.powermock</groupId>
-            <artifactId>powermock-module-junit4</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.powermock</groupId>
-            <artifactId>powermock-api-mockito2</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.awaitility</groupId>
-            <artifactId>awaitility</artifactId>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.junit.vintage</groupId>
-            <artifactId>junit-vintage-engine</artifactId>
-            <scope>test</scope>
-        </dependency>
-
-        <!--FIX ME-->
-        <dependency>
-            <groupId>org.apache.kylin</groupId>
-            <artifactId>kylin-engine-spark</artifactId>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-</project>
diff --git a/src/tool/pom.xml b/src/tool/pom.xml
index d7ff907fb3..9af9d5d9ab 100644
--- a/src/tool/pom.xml
+++ b/src/tool/pom.xml
@@ -42,6 +42,10 @@
             <groupId>org.apache.kylin</groupId>
             <artifactId>kylin-streaming</artifactId>
         </dependency>
+        <dependency>
+            <groupId>org.apache.kylin</groupId>
+            <artifactId>kylin-common-service</artifactId>
+        </dependency>
 
         <dependency>
             <groupId>org.springframework.security</groupId>
@@ -127,6 +131,12 @@
             <type>test-jar</type>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>org.apache.kylin</groupId>
+            <artifactId>kylin-common-service</artifactId>
+            <type>test-jar</type>
+            <scope>test</scope>
+        </dependency>
         <dependency>
             <groupId>org.junit.jupiter</groupId>
             <artifactId>junit-jupiter-api</artifactId>
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractorTool.java b/src/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractorTool.java
index b92d652dc1..8548fea87e 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractorTool.java
+++ b/src/tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractorTool.java
@@ -667,7 +667,7 @@ public abstract class AbstractInfoExtractorTool extends ExecutableApplication {
     }
 
     protected void exportJstack(File recordTime) {
-        Future jstackTask = executorService.submit(() -> {
+        Future<?> jstackTask = executorService.submit(() -> {
             recordTaskStartTime(JSTACK);
             JStackTool.extractJstack(exportDir);
             recordTaskExecutorTimeToFile(JSTACK, recordTime);
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/MetadataTool.java b/src/tool/src/main/java/org/apache/kylin/tool/MetadataTool.java
index daa8aed307..91217d4e48 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/MetadataTool.java
+++ b/src/tool/src/main/java/org/apache/kylin/tool/MetadataTool.java
@@ -18,67 +18,33 @@
 
 package org.apache.kylin.tool;
 
-import static org.apache.kylin.common.exception.code.ErrorCodeTool.FILE_ALREADY_EXISTS;
 import static org.apache.kylin.common.exception.code.ErrorCodeTool.PARAMETER_NOT_SPECIFY;
 
-import java.io.File;
 import java.io.IOException;
-import java.net.URI;
-import java.nio.file.Paths;
-import java.time.Clock;
-import java.time.LocalDateTime;
-import java.time.format.DateTimeFormatter;
-import java.util.Collections;
-import java.util.Locale;
-import java.util.NavigableSet;
-import java.util.Objects;
-import java.util.Set;
-import java.util.stream.Collectors;
 
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.OptionGroup;
 import org.apache.commons.cli.Options;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.KylinConfigBase;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.persistence.ResourceStore;
+import org.apache.kylin.common.util.AddressUtil;
 import org.apache.kylin.common.util.ExecutableApplication;
 import org.apache.kylin.common.util.HadoopUtil;
-import org.apache.kylin.common.util.JsonUtil;
-import org.apache.kylin.common.util.OptionsHelper;
-import org.apache.kylin.metadata.project.ProjectInstance;
-import org.apache.kylin.common.metrics.MetricsCategory;
-import org.apache.kylin.common.metrics.MetricsGroup;
-import org.apache.kylin.common.metrics.MetricsName;
-import org.apache.kylin.common.persistence.ImageDesc;
-import org.apache.kylin.common.persistence.metadata.AuditLogStore;
-import org.apache.kylin.common.persistence.transaction.UnitOfWork;
-import org.apache.kylin.common.persistence.transaction.UnitOfWorkParams;
-import org.apache.kylin.common.util.AddressUtil;
-import org.apache.kylin.common.util.MetadataChecker;
 import org.apache.kylin.common.util.OptionBuilder;
+import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.common.util.Unsafe;
+import org.apache.kylin.helper.MetadataToolHelper;
 import org.apache.kylin.tool.util.ScreenPrintUtil;
 import org.apache.kylin.tool.util.ToolMainWrapper;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import com.google.common.collect.Sets;
-
-import io.kyligence.kap.guava20.shaded.common.io.ByteSource;
-import lombok.Getter;
 import lombok.val;
-import lombok.var;
 
 public class MetadataTool extends ExecutableApplication {
-    public static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd-HH-mm-ss",
-            Locale.getDefault(Locale.Category.FORMAT));
-    private static final Logger logger = LoggerFactory.getLogger("diag");
-    private static final String HDFS_METADATA_URL_FORMATTER = "kylin_metadata@hdfs,path=%s";
 
-    private static final String GLOBAL = "global";
+    private static final Logger logger = LoggerFactory.getLogger("diag");
 
     @SuppressWarnings("static-access")
     private static final Option OPERATE_BACKUP = OptionBuilder.getInstance()
@@ -119,44 +85,39 @@ public class MetadataTool extends ExecutableApplication {
     private final Options options;
 
     private final KylinConfig kylinConfig;
+    private final MetadataToolHelper helper;
 
-    private ResourceStore resourceStore;
-
-    @Getter
-    private String backupPath;
-
-    @Getter
-    private String fetchPath;
-
-    MetadataTool() {
-        kylinConfig = KylinConfig.getInstanceFromEnv();
-        this.options = new Options();
-        initOptions();
+    public MetadataTool() {
+        this(KylinConfig.getInstanceFromEnv());
     }
 
     public MetadataTool(KylinConfig kylinConfig) {
+        this(kylinConfig, new MetadataToolHelper());
+    }
+
+    public MetadataTool(KylinConfig kylinConfig, MetadataToolHelper helper) {
         this.kylinConfig = kylinConfig;
-        this.options = new Options();
-        initOptions();
+        this.helper = helper;
+        this.options = initOptions();
     }
 
     public static void backup(KylinConfig kylinConfig) throws IOException {
         HDFSMetadataTool.cleanBeforeBackup(kylinConfig);
-        String[] args = new String[] { "-backup", "-compress", "-dir", HadoopUtil.getBackupFolder(kylinConfig) };
+        String[] args = new String[]{"-backup", "-compress", "-dir", HadoopUtil.getBackupFolder(kylinConfig)};
         val backupTool = new MetadataTool(kylinConfig);
         backupTool.execute(args);
     }
 
     public static void backup(KylinConfig kylinConfig, String dir, String folder) throws IOException {
         HDFSMetadataTool.cleanBeforeBackup(kylinConfig);
-        String[] args = new String[] { "-backup", "-compress", "-dir", dir, "-folder", folder };
+        String[] args = new String[]{"-backup", "-compress", "-dir", dir, "-folder", folder};
         val backupTool = new MetadataTool(kylinConfig);
         backupTool.execute(args);
     }
 
     public static void restore(KylinConfig kylinConfig, String folder) throws IOException {
         val tool = new MetadataTool(kylinConfig);
-        tool.execute(new String[] { "-restore", "-dir", folder, "--after-truncate" });
+        tool.execute(new String[]{"-restore", "-dir", folder, "--after-truncate"});
     }
 
     public static void main(String[] args) {
@@ -173,116 +134,28 @@ public class MetadataTool extends ExecutableApplication {
             val resourceStore = ResourceStore.getKylinMetaStore(config);
             resourceStore.getAuditLogStore().setInstance(AddressUtil.getMockPortAddress());
             tool.execute(args);
-            if (isBackup && StringUtils.isNotEmpty(tool.getBackupPath())) {
-                System.out.printf(Locale.ROOT, "The metadata backup path is %s.%n", tool.getBackupPath());
-            }
         });
         Unsafe.systemExit(0);
     }
 
-    public static void restore(ResourceStore currentResourceStore, ResourceStore restoreResourceStore, String project,
-            boolean delete) {
-        if (StringUtils.isBlank(project)) {
-            logger.info("start to restore all projects");
-            var srcProjectFolders = restoreResourceStore.listResources("/");
-            var destProjectFolders = currentResourceStore.listResources("/");
-            srcProjectFolders = srcProjectFolders == null ? Sets.newTreeSet() : srcProjectFolders;
-            destProjectFolders = destProjectFolders == null ? Sets.newTreeSet() : destProjectFolders;
-            val projectFolders = Sets.union(srcProjectFolders, destProjectFolders);
-
-            for (String projectPath : projectFolders) {
-                if (projectPath.equals(ResourceStore.METASTORE_UUID_TAG)
-                        || projectPath.equals(ResourceStore.METASTORE_IMAGE)) {
-                    continue;
-                }
-                val projectName = Paths.get(projectPath).getName(0).toString();
-                val destResources = currentResourceStore.listResourcesRecursively(projectPath);
-                val srcResources = restoreResourceStore.listResourcesRecursively(projectPath);
-                UnitOfWork.doInTransactionWithRetry(() -> doRestore(currentResourceStore, restoreResourceStore,
-                        destResources, srcResources, delete), projectName, 1);
-            }
-
-        } else {
-            logger.info("start to restore project {}", project);
-            val destGlobalProjectResources = currentResourceStore.listResourcesRecursively(ResourceStore.PROJECT_ROOT);
-
-            Set<String> globalDestResources = null;
-            if (Objects.nonNull(destGlobalProjectResources)) {
-                globalDestResources = destGlobalProjectResources.stream().filter(x -> Paths.get(x).getFileName()
-                        .toString().equals(String.format(Locale.ROOT, "%s.json", project))).collect(Collectors.toSet());
-            }
-
-            val globalSrcResources = restoreResourceStore
-                    .listResourcesRecursively(ResourceStore.PROJECT_ROOT).stream().filter(x -> Paths.get(x)
-                            .getFileName().toString().equals(String.format(Locale.ROOT, "%s.json", project)))
-                    .collect(Collectors.toSet());
-
-            Set<String> finalGlobalDestResources = globalDestResources;
-
-            UnitOfWork.doInTransactionWithRetry(() -> doRestore(currentResourceStore, restoreResourceStore,
-                    finalGlobalDestResources, globalSrcResources, delete), UnitOfWork.GLOBAL_UNIT, 1);
-
-            val projectPath = "/" + project;
-            val destResources = currentResourceStore.listResourcesRecursively(projectPath);
-            val srcResources = restoreResourceStore.listResourcesRecursively(projectPath);
-
-            UnitOfWork.doInTransactionWithRetry(
-                    () -> doRestore(currentResourceStore, restoreResourceStore, destResources, srcResources, delete),
-                    project, 1);
-        }
-
-        logger.info("restore successfully");
-    }
-
-    private static int doRestore(ResourceStore currentResourceStore, ResourceStore restoreResourceStore,
-            Set<String> destResources, Set<String> srcResources, boolean delete) throws IOException {
-        val threadViewRS = ResourceStore.getKylinMetaStore(KylinConfig.getInstanceFromEnv());
-
-        //check destResources and srcResources are null,because  Sets.difference(srcResources, destResources) will report NullPointerException
-        destResources = destResources == null ? Collections.emptySet() : destResources;
-        srcResources = srcResources == null ? Collections.emptySet() : srcResources;
-
-        logger.info("Start insert metadata resource...");
-        val insertRes = Sets.difference(srcResources, destResources);
-        for (val res : insertRes) {
-            val metadataRaw = restoreResourceStore.getResource(res);
-            threadViewRS.checkAndPutResource(res, metadataRaw.getByteSource(), -1L);
-        }
-
-        logger.info("Start update metadata resource...");
-        val updateRes = Sets.intersection(destResources, srcResources);
-        for (val res : updateRes) {
-            val raw = currentResourceStore.getResource(res);
-            val metadataRaw = restoreResourceStore.getResource(res);
-            threadViewRS.checkAndPutResource(res, metadataRaw.getByteSource(), raw.getMvcc());
-        }
-        if (delete) {
-            logger.info("Start delete metadata resource...");
-            val deleteRes = Sets.difference(destResources, srcResources);
-            for (val res : deleteRes) {
-                threadViewRS.deleteResource(res);
-            }
-        }
-
-        return 0;
-    }
-
-    private void initOptions() {
-        final OptionGroup optionGroup = new OptionGroup();
+    private Options initOptions() {
+        Options result = new Options();
+        OptionGroup optionGroup = new OptionGroup();
         optionGroup.setRequired(true);
         optionGroup.addOption(OPERATE_BACKUP);
         optionGroup.addOption(OPERATE_FETCH);
         optionGroup.addOption(OPERATE_LIST);
         optionGroup.addOption(OPERATE_RESTORE);
 
-        options.addOptionGroup(optionGroup);
-        options.addOption(OPTION_DIR);
-        options.addOption(OPTION_PROJECT);
-        options.addOption(FOLDER_NAME);
-        options.addOption(OPTION_TARGET);
-        options.addOption(OPERATE_COMPRESS);
-        options.addOption(OPTION_EXCLUDE_TABLE_EXD);
-        options.addOption(OPTION_AFTER_TRUNCATE);
+        result.addOptionGroup(optionGroup);
+        result.addOption(OPTION_DIR);
+        result.addOption(OPTION_PROJECT);
+        result.addOption(FOLDER_NAME);
+        result.addOption(OPTION_TARGET);
+        result.addOption(OPERATE_COMPRESS);
+        result.addOption(OPTION_EXCLUDE_TABLE_EXD);
+        result.addOption(OPTION_AFTER_TRUNCATE);
+        return result;
     }
 
     @Override
@@ -293,273 +166,24 @@ public class MetadataTool extends ExecutableApplication {
     @Override
     protected void execute(OptionsHelper optionsHelper) throws Exception {
         logger.info("start to init ResourceStore");
-        resourceStore = ResourceStore.getKylinMetaStore(kylinConfig);
+        String project = optionsHelper.getOptionValue(OPTION_PROJECT);
+        String path = optionsHelper.getOptionValue(OPTION_DIR);
+        String folder = optionsHelper.getOptionValue(FOLDER_NAME);
+        String target = optionsHelper.getOptionValue(OPTION_TARGET);
+        boolean compress = optionsHelper.hasOption(OPERATE_COMPRESS);
+        boolean excludeTableExd = optionsHelper.hasOption(OPTION_EXCLUDE_TABLE_EXD);
         if (optionsHelper.hasOption(OPERATE_BACKUP)) {
-            boolean isGlobal = null == optionsHelper.getOptionValue(OPTION_PROJECT);
-            long startAt = System.currentTimeMillis();
-
-            try {
-                backup(optionsHelper);
-            } catch (Exception be) {
-                if (isGlobal) {
-                    MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP_FAILED, MetricsCategory.GLOBAL, GLOBAL);
-                } else {
-                    MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP_FAILED, MetricsCategory.PROJECT,
-                            optionsHelper.getOptionValue(OPTION_PROJECT));
-                }
-                throw be;
-            } finally {
-                if (isGlobal) {
-                    MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP, MetricsCategory.GLOBAL, GLOBAL);
-                    MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP_DURATION, MetricsCategory.GLOBAL, GLOBAL,
-                            System.currentTimeMillis() - startAt);
-                } else {
-                    MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP, MetricsCategory.PROJECT,
-                            optionsHelper.getOptionValue(OPTION_PROJECT));
-                    MetricsGroup.hostTagCounterInc(MetricsName.METADATA_BACKUP_DURATION, MetricsCategory.PROJECT,
-                            optionsHelper.getOptionValue(OPTION_PROJECT), System.currentTimeMillis() - startAt);
-                }
-            }
-
+            helper.backup(kylinConfig, project, path, folder, compress, excludeTableExd);
         } else if (optionsHelper.hasOption(OPERATE_FETCH)) {
-            fetch(optionsHelper);
+            helper.fetch(kylinConfig, path, folder, target, excludeTableExd);
         } else if (optionsHelper.hasOption(OPERATE_LIST)) {
-            list(optionsHelper);
+            helper.list(kylinConfig, target);
         } else if (optionsHelper.hasOption(OPERATE_RESTORE)) {
-            restore(optionsHelper, optionsHelper.hasOption(OPTION_AFTER_TRUNCATE));
+            boolean delete = optionsHelper.hasOption(OPTION_AFTER_TRUNCATE);
+            helper.restore(kylinConfig, project, path, delete);
         } else {
             throw new KylinException(PARAMETER_NOT_SPECIFY, "-restore");
         }
     }
 
-    private void abortIfAlreadyExists(String path) throws IOException {
-        URI uri = HadoopUtil.makeURI(path);
-        if (!uri.isAbsolute()) {
-            logger.info("no scheme specified for {}, try local file system file://", path);
-            File localFile = new File(path);
-            if (localFile.exists()) {
-                logger.error("[UNEXPECTED_THINGS_HAPPENED] local file {} already exists ", path);
-                throw new KylinException(FILE_ALREADY_EXISTS, path);
-            }
-            return;
-        }
-        val fs = HadoopUtil.getWorkingFileSystem();
-        if (fs.exists(new Path(path))) {
-            logger.error("[UNEXPECTED_THINGS_HAPPENED] specified file {} already exists ", path);
-            throw new KylinException(FILE_ALREADY_EXISTS, path);
-        }
-    }
-
-    private void fetch(OptionsHelper optionsHelper) throws Exception {
-        var path = optionsHelper.getOptionValue(OPTION_DIR);
-        var folder = optionsHelper.getOptionValue(FOLDER_NAME);
-        val excludeTableExd = optionsHelper.hasOption(OPTION_EXCLUDE_TABLE_EXD);
-        val target = optionsHelper.getOptionValue(OPTION_TARGET);
-        if (StringUtils.isBlank(path)) {
-            path = KylinConfigBase.getKylinHome() + File.separator + "meta_fetch";
-        }
-        if (StringUtils.isEmpty(folder)) {
-            folder = LocalDateTime.now(Clock.systemDefaultZone()).format(DATE_TIME_FORMATTER) + "_fetch";
-        }
-        if (target == null) {
-            System.out.println("target file must be set with fetch mode");
-        } else {
-            fetchPath = StringUtils.appendIfMissing(path, "/") + folder;
-            // currently do not support compress with fetch
-            val fetchMetadataUrl = getMetadataUrl(fetchPath, false);
-            val fetchConfig = KylinConfig.createKylinConfig(kylinConfig);
-            fetchConfig.setMetadataUrl(fetchMetadataUrl);
-            abortIfAlreadyExists(fetchPath);
-            logger.info("The fetch metadataUrl is {} and backup path is {}", fetchMetadataUrl, fetchPath);
-
-            try (val fetchResourceStore = ResourceStore.getKylinMetaStore(fetchConfig)) {
-
-                val fetchMetadataStore = fetchResourceStore.getMetadataStore();
-
-                String targetPath = target.startsWith("/") ? target.substring(1) : target;
-
-                logger.info("start to copy target file {} from ResourceStore.", target);
-                UnitOfWork.doInTransactionWithRetry(
-                        UnitOfWorkParams.builder().readonly(true).unitName(target).processor(() -> {
-                            copyResourceStore("/" + targetPath, resourceStore, fetchResourceStore, true, excludeTableExd);
-                            // uuid
-                            val uuid = resourceStore.getResource(ResourceStore.METASTORE_UUID_TAG);
-                            fetchResourceStore.putResourceWithoutCheck(uuid.getResPath(), uuid.getByteSource(),
-                                    uuid.getTimestamp(), -1);
-                            return null;
-                        }).build());
-                if (Thread.currentThread().isInterrupted()) {
-                    throw new InterruptedException("metadata task is interrupt");
-                }
-                logger.info("start to fetch target file {}", target);
-
-                // fetchResourceStore is read-only, currently we don't do any write operation on it.
-                // fetchResourceStore.deleteResource(ResourceStore.METASTORE_TRASH_RECORD);
-                fetchMetadataStore.dump(fetchResourceStore);
-                logger.info("fetch successfully at {}", fetchPath);
-            }
-        }
-    }
-
-    private NavigableSet<String> list(OptionsHelper optionsHelper) throws Exception {
-        val target = optionsHelper.getOptionValue(OPTION_TARGET);
-        var res = resourceStore.listResources(target);
-        if (res == null) {
-            System.out.printf("%s is not exist%n", target);
-        } else {
-            System.out.println("" + res);
-        }
-        return res;
-    }
-
-    private void backup(OptionsHelper optionsHelper) throws Exception {
-        val project = optionsHelper.getOptionValue(OPTION_PROJECT);
-        var path = optionsHelper.getOptionValue(OPTION_DIR);
-        var folder = optionsHelper.getOptionValue(FOLDER_NAME);
-        var compress = optionsHelper.hasOption(OPERATE_COMPRESS);
-        val excludeTableExd = optionsHelper.hasOption(OPTION_EXCLUDE_TABLE_EXD);
-        if (StringUtils.isBlank(path)) {
-            path = KylinConfigBase.getKylinHome() + File.separator + "meta_backups";
-        }
-        if (StringUtils.isEmpty(folder)) {
-            folder = LocalDateTime.now(Clock.systemDefaultZone()).format(DATE_TIME_FORMATTER) + "_backup";
-        }
-        backupPath = StringUtils.appendIfMissing(path, "/") + folder;
-        val backupMetadataUrl = getMetadataUrl(backupPath, compress);
-        val backupConfig = KylinConfig.createKylinConfig(kylinConfig);
-        backupConfig.setMetadataUrl(backupMetadataUrl);
-        abortIfAlreadyExists(backupPath);
-        logger.info("The backup metadataUrl is {} and backup path is {}", backupMetadataUrl, backupPath);
-
-        try (val backupResourceStore = ResourceStore.getKylinMetaStore(backupConfig)) {
-
-            val backupMetadataStore = backupResourceStore.getMetadataStore();
-
-            if (StringUtils.isBlank(project)) {
-                logger.info("start to copy all projects from ResourceStore.");
-                val auditLogStore = resourceStore.getAuditLogStore();
-                long finalOffset = getOffset(auditLogStore);
-                backupResourceStore.putResourceWithoutCheck(ResourceStore.METASTORE_IMAGE,
-                        ByteSource.wrap(JsonUtil.writeValueAsBytes(new ImageDesc(finalOffset))),
-                        System.currentTimeMillis(), -1);
-                var projectFolders = resourceStore.listResources("/");
-                if (projectFolders == null) {
-                    return;
-                }
-                UnitOfWork.doInTransactionWithRetry(() -> {
-                    backupProjects(projectFolders, backupResourceStore, excludeTableExd);
-                    return null;
-                }, UnitOfWork.GLOBAL_UNIT);
-
-                val uuid = resourceStore.getResource(ResourceStore.METASTORE_UUID_TAG);
-                if (uuid != null) {
-                    backupResourceStore.putResourceWithoutCheck(uuid.getResPath(), uuid.getByteSource(),
-                            uuid.getTimestamp(), -1);
-                }
-                logger.info("start to backup all projects");
-
-            } else {
-                logger.info("start to copy project {} from ResourceStore.", project);
-                UnitOfWork.doInTransactionWithRetry(
-                        UnitOfWorkParams.builder().readonly(true).unitName(project).processor(() -> {
-                            copyResourceStore("/" + project, resourceStore, backupResourceStore, true, excludeTableExd);
-                            val uuid = resourceStore.getResource(ResourceStore.METASTORE_UUID_TAG);
-                            backupResourceStore.putResourceWithoutCheck(uuid.getResPath(), uuid.getByteSource(),
-                                    uuid.getTimestamp(), -1);
-                            return null;
-                        }).build());
-                if (Thread.currentThread().isInterrupted()) {
-                    throw new InterruptedException("metadata task is interrupt");
-                }
-                logger.info("start to backup project {}", project);
-            }
-            backupResourceStore.deleteResource(ResourceStore.METASTORE_TRASH_RECORD);
-            backupMetadataStore.dump(backupResourceStore);
-            logger.info("backup successfully at {}", backupPath);
-        }
-    }
-
-    private long getOffset(AuditLogStore auditLogStore) {
-        long offset = 0;
-        if (kylinConfig.isUTEnv())
-            offset = auditLogStore.getMaxId();
-        else
-            offset = auditLogStore.getLogOffset() == 0 ? resourceStore.getOffset() : auditLogStore.getLogOffset();
-        return offset;
-    }
-
-    private void backupProjects(NavigableSet<String> projectFolders, ResourceStore backupResourceStore,
-            boolean excludeTableExd) throws InterruptedException {
-        for (String projectPath : projectFolders) {
-            if (projectPath.equals(ResourceStore.METASTORE_UUID_TAG)
-                    || projectPath.equals(ResourceStore.METASTORE_IMAGE)) {
-                continue;
-            }
-            // The "_global" directory is already included in the full backup
-            copyResourceStore(projectPath, resourceStore, backupResourceStore, false, excludeTableExd);
-            if (Thread.currentThread().isInterrupted()) {
-                throw new InterruptedException("metadata task is interrupt");
-            }
-        }
-    }
-
-    private void copyResourceStore(String projectPath, ResourceStore srcResourceStore, ResourceStore destResourceStore,
-            boolean isProjectLevel, boolean excludeTableExd) {
-        if (excludeTableExd) {
-            String tableExdPath = projectPath + ResourceStore.TABLE_EXD_RESOURCE_ROOT;
-            var projectItems = srcResourceStore.listResources(projectPath);
-            for (String item : projectItems) {
-                if (item.equals(tableExdPath)) {
-                    continue;
-                }
-                srcResourceStore.copy(item, destResourceStore);
-            }
-        } else {
-            srcResourceStore.copy(projectPath, destResourceStore);
-        }
-        if (isProjectLevel) {
-            // The project-level backup needs to contain "/_global/project/*.json"
-            val projectName = Paths.get(projectPath).getFileName().toString();
-            srcResourceStore.copy(ProjectInstance.concatResourcePath(projectName), destResourceStore);
-        }
-    }
-
-    private void restore(OptionsHelper optionsHelper, boolean delete) throws IOException {
-        logger.info("Restore metadata with delete : {}", delete);
-        val project = optionsHelper.getOptionValue(OPTION_PROJECT);
-        val restorePath = optionsHelper.getOptionValue(OPTION_DIR);
-
-        val restoreMetadataUrl = getMetadataUrl(restorePath, false);
-        val restoreConfig = KylinConfig.createKylinConfig(kylinConfig);
-        restoreConfig.setMetadataUrl(restoreMetadataUrl);
-        logger.info("The restore metadataUrl is {} and restore path is {} ", restoreMetadataUrl, restorePath);
-
-        val restoreResourceStore = ResourceStore.getKylinMetaStore(restoreConfig);
-        val restoreMetadataStore = restoreResourceStore.getMetadataStore();
-        MetadataChecker metadataChecker = new MetadataChecker(restoreMetadataStore);
-
-        val verifyResult = metadataChecker.verify();
-        if (!verifyResult.isQualified()) {
-            throw new RuntimeException(verifyResult.getResultMessage() + "\n the metadata dir is not qualified");
-        }
-        restore(resourceStore, restoreResourceStore, project, delete);
-        backup(kylinConfig);
-
-    }
-
-    String getMetadataUrl(String rootPath, boolean compressed) {
-        if (HadoopUtil.isHdfsCompatibleSchema(rootPath, kylinConfig)) {
-            val url = String.format(Locale.ROOT, HDFS_METADATA_URL_FORMATTER,
-                    Path.getPathWithoutSchemeAndAuthority(new Path(rootPath)).toString() + "/");
-            return compressed ? url + ",zip=1" : url;
-
-        } else if (rootPath.startsWith("file://")) {
-            rootPath = rootPath.replace("file://", "");
-            return StringUtils.appendIfMissing(rootPath, "/");
-
-        } else {
-            return StringUtils.appendIfMissing(rootPath, "/");
-
-        }
-    }
 }
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/RollbackTool.java b/src/tool/src/main/java/org/apache/kylin/tool/RollbackTool.java
index 0f3d737145..40073cf398 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/RollbackTool.java
+++ b/src/tool/src/main/java/org/apache/kylin/tool/RollbackTool.java
@@ -42,21 +42,21 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.KapConfig;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.persistence.ResourceStore;
-import org.apache.kylin.common.util.ExecutableApplication;
-import org.apache.kylin.common.util.HadoopUtil;
-import org.apache.kylin.common.util.JsonUtil;
-import org.apache.kylin.common.util.OptionsHelper;
-import org.apache.kylin.job.execution.NExecutableManager;
-import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.common.persistence.AuditLog;
 import org.apache.kylin.common.persistence.ImageDesc;
+import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.common.persistence.event.Event;
 import org.apache.kylin.common.persistence.event.ResourceCreateOrUpdateEvent;
 import org.apache.kylin.common.persistence.event.ResourceDeleteEvent;
+import org.apache.kylin.common.util.ExecutableApplication;
+import org.apache.kylin.common.util.HadoopUtil;
+import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.MetadataChecker;
 import org.apache.kylin.common.util.OptionBuilder;
+import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.common.util.Unsafe;
+import org.apache.kylin.helper.MetadataToolHelper;
+import org.apache.kylin.job.execution.NExecutableManager;
 import org.apache.kylin.metadata.cube.model.NDataLayout;
 import org.apache.kylin.metadata.cube.model.NDataSegDetails;
 import org.apache.kylin.metadata.cube.model.NDataSegment;
@@ -67,6 +67,7 @@ import org.apache.kylin.metadata.model.NTableMetadataManager;
 import org.apache.kylin.metadata.project.NProjectManager;
 import org.apache.kylin.metadata.user.ManagedUser;
 import org.apache.kylin.metadata.user.NKylinUserManager;
+import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.tool.general.RollbackStatusEnum;
 import org.joda.time.format.DateTimeFormat;
 
@@ -81,6 +82,7 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 public class RollbackTool extends ExecutableApplication {
 
+    private MetadataToolHelper helper = new MetadataToolHelper();
     @SuppressWarnings("static-access")
     private static final String HDFS_METADATA_URL_FORMATTER = "kylin_metadata@hdfs,path=%s";
 
@@ -133,7 +135,6 @@ public class RollbackTool extends ExecutableApplication {
         return options;
     }
 
-    @Override
     protected void execute(OptionsHelper optionsHelper) throws Exception {
         log.info("start roll back");
         log.info("start to init ResourceStore");
@@ -227,7 +228,7 @@ public class RollbackTool extends ExecutableApplication {
         long userTargetTimeMillis = formatter.parseDateTime(userTargetTime).getMillis();
         long protectionTime = System.currentTimeMillis() - kylinConfig.getStorageResourceSurvivalTimeThreshold();
         if (userTargetTimeMillis < protectionTime) {
-            log.error("user specified time is less than protection time");
+            log.error("user specified time is less than protection time");
             return false;
         }
 
@@ -443,10 +444,9 @@ public class RollbackTool extends ExecutableApplication {
             if (!verifyResult.isQualified()) {
                 log.error("{} \n the metadata dir is not qualified", verifyResult.getResultMessage());
             }
-
-            MetadataTool.restore(currentResourceStore, restoreResourceStore, project, true);
+            helper.restore(currentResourceStore, restoreResourceStore, project, true);
         } catch (Exception e) {
-            log.error("restore mirror resource store failed: {} ", e);
+            log.error("restore mirror resource store failed", e);
         }
         return true;
     }
@@ -552,9 +552,9 @@ public class RollbackTool extends ExecutableApplication {
     }
 
     private String backupCurrentMetadata(KylinConfig kylinConfig) throws Exception {
-        val currentBackupFolder = LocalDateTime.now(Clock.systemDefaultZone()).format(MetadataTool.DATE_TIME_FORMATTER)
+        val currentBackupFolder = LocalDateTime.now(Clock.systemDefaultZone()).format(MetadataToolHelper.DATE_TIME_FORMATTER)
                 + "_backup";
-        MetadataTool.backup(kylinConfig, kylinConfig.getHdfsWorkingDirectory() + "_current_backup",
+        helper.backup(kylinConfig, kylinConfig.getHdfsWorkingDirectory() + "_current_backup",
                 currentBackupFolder);
         return currentBackupFolder;
     }
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/daemon/handler/AbstractCheckStateHandler.java b/src/tool/src/main/java/org/apache/kylin/tool/daemon/handler/AbstractCheckStateHandler.java
index f7ef926b16..4773c53c09 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/daemon/handler/AbstractCheckStateHandler.java
+++ b/src/tool/src/main/java/org/apache/kylin/tool/daemon/handler/AbstractCheckStateHandler.java
@@ -48,10 +48,10 @@ public abstract class AbstractCheckStateHandler extends Worker implements CheckS
             }
             Preconditions.checkNotNull(getKgSecretKey(), "kg secret key is null!");
 
-            if (null == getKE_PID()) {
+            if (null == getKePid()) {
                 setKEPid(ToolUtil.getKylinPid());
             }
-            byte[] encryptedToken = SecretKeyUtil.generateEncryptedTokenWithPid(getKgSecretKey(), getKE_PID());
+            byte[] encryptedToken = SecretKeyUtil.generateEncryptedTokenWithPid(getKgSecretKey(), getKePid());
             getRestClient().downOrUpGradeKE(opLevelEnum.getOpType(), encryptedToken);
         } catch (Exception e) {
             logger.error("Failed to operate service {}", opLevelEnum.getOpType(), e);
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/routine/FastRoutineTool.java b/src/tool/src/main/java/org/apache/kylin/tool/routine/FastRoutineTool.java
index 1648e9ee4b..a623ef8046 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/routine/FastRoutineTool.java
+++ b/src/tool/src/main/java/org/apache/kylin/tool/routine/FastRoutineTool.java
@@ -18,15 +18,11 @@
 
 package org.apache.kylin.tool.routine;
 
-import java.util.Arrays;
 import java.util.List;
-import java.util.stream.Collectors;
 
-import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.common.util.Unsafe;
-import org.apache.kylin.metadata.project.NProjectManager;
-import org.apache.kylin.metadata.project.ProjectInstance;
+import org.apache.kylin.helper.RoutineToolHelper;
 import org.apache.kylin.tool.MaintainModeTool;
 import org.apache.kylin.tool.util.ToolMainWrapper;
 
@@ -42,12 +38,7 @@ public class FastRoutineTool extends RoutineTool {
             return;
         }
         initOptionValues(optionsHelper);
-        KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-        List<ProjectInstance> instances = NProjectManager.getInstance(kylinConfig).listAllProjects();
-        List<String> projectsToCleanup = Arrays.asList(getProjects());
-        if (projectsToCleanup.isEmpty()) {
-            projectsToCleanup = instances.stream().map(ProjectInstance::getName).collect(Collectors.toList());
-        }
+        List<String> projectsToCleanup = getProjectsToCleanup();
         try {
             if (isMetadataCleanup()) {
                 System.out.println("Start to fast cleanup metadata");
@@ -57,7 +48,7 @@ public class FastRoutineTool extends RoutineTool {
                 if (EpochManager.getInstance().isMaintenanceMode()) {
                     Runtime.getRuntime().addShutdownHook(new Thread(maintainModeTool::releaseEpochs));
                 }
-                cleanMeta(projectsToCleanup);
+                RoutineToolHelper.cleanMeta(projectsToCleanup);
             }
             System.out.println("Start to fast cleanup hdfs");
             cleanStorage();
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/routine/RoutineTool.java b/src/tool/src/main/java/org/apache/kylin/tool/routine/RoutineTool.java
index 0eb64ae63e..a1f0500f8e 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/routine/RoutineTool.java
+++ b/src/tool/src/main/java/org/apache/kylin/tool/routine/RoutineTool.java
@@ -17,7 +17,6 @@
  */
 package org.apache.kylin.tool.routine;
 
-import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
 import java.util.stream.Collectors;
@@ -25,27 +24,18 @@ import java.util.stream.Collectors;
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.persistence.transaction.UnitOfWork;
 import org.apache.kylin.common.util.ExecutableApplication;
 import org.apache.kylin.common.util.OptionsHelper;
-import org.apache.kylin.common.util.SetThreadName;
 import org.apache.kylin.common.util.Unsafe;
-import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
+import org.apache.kylin.helper.MetadataToolHelper;
+import org.apache.kylin.helper.RoutineToolHelper;
 import org.apache.kylin.metadata.project.NProjectManager;
 import org.apache.kylin.metadata.project.ProjectInstance;
-import org.apache.kylin.metadata.query.util.QueryHisStoreUtil;
-import org.apache.kylin.metadata.streaming.util.StreamingJobRecordStoreUtil;
-import org.apache.kylin.metadata.streaming.util.StreamingJobStatsStoreUtil;
 import org.apache.kylin.tool.MaintainModeTool;
-import org.apache.kylin.tool.garbage.GarbageCleaner;
-import org.apache.kylin.tool.garbage.SourceUsageCleaner;
-import org.apache.kylin.tool.garbage.StorageCleaner;
 import org.apache.kylin.tool.util.ToolMainWrapper;
-
 import org.apache.kylin.metadata.epoch.EpochManager;
-import org.apache.kylin.metadata.recommendation.candidate.JdbcRawRecStore;
+
 import lombok.Getter;
-import lombok.val;
 import lombok.extern.slf4j.Slf4j;
 
 @Getter
@@ -64,6 +54,8 @@ public class RoutineTool extends ExecutableApplication {
     private int retryTimes;
     private double requestFSRate;
 
+    private MetadataToolHelper helper = new MetadataToolHelper();
+
     public static void main(String[] args) {
         ToolMainWrapper.wrap(args, () -> {
             RoutineTool tool = new RoutineTool();
@@ -73,27 +65,15 @@ public class RoutineTool extends ExecutableApplication {
     }
 
     public static void deleteRawRecItems() {
-        KylinConfig config = KylinConfig.getInstanceFromEnv();
-        List<ProjectInstance> projectInstances = NProjectManager.getInstance(config).listAllProjects().stream()
-                .filter(projectInstance -> !projectInstance.isExpertMode()).collect(Collectors.toList());
-        if (projectInstances.isEmpty()) {
-            return;
-        }
-        try (SetThreadName ignored = new SetThreadName("DeleteRawRecItemsInDB")) {
-            val jdbcRawRecStore = new JdbcRawRecStore(KylinConfig.getInstanceFromEnv());
-            jdbcRawRecStore.deleteOutdated();
-        } catch (Exception e) {
-            log.error("delete outdated advice fail: ", e);
-        }
+        RoutineToolHelper.deleteRawRecItems();
     }
 
     public static void cleanQueryHistories() {
-        QueryHisStoreUtil.cleanQueryHistory();
+        RoutineToolHelper.cleanQueryHistories();
     }
 
     public static void cleanStreamingStats() {
-        StreamingJobStatsStoreUtil.cleanStreamingJobStats();
-        StreamingJobRecordStoreUtil.cleanStreamingJobRecord();
+        RoutineToolHelper.cleanStreamingStats();
     }
 
     @Override
@@ -108,19 +88,25 @@ public class RoutineTool extends ExecutableApplication {
         return options;
     }
 
+    protected final List<String> getProjectsToCleanup() {
+        if (getProjects().length != 0) {
+            return Arrays.asList(getProjects());
+        } else {
+            KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+            List<ProjectInstance> instances = NProjectManager.getInstance(kylinConfig).listAllProjects();
+            return instances.stream().map(ProjectInstance::getName).collect(Collectors.toList());
+        }
+    }
+
+
     @Override
     protected void execute(OptionsHelper optionsHelper) throws Exception {
         if (printUsage(optionsHelper)) {
             return;
         }
         initOptionValues(optionsHelper);
-        KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
-        List<ProjectInstance> instances = NProjectManager.getInstance(kylinConfig).listAllProjects();
         System.out.println("Start to cleanup metadata");
-        List<String> projectsToCleanup = Arrays.asList(projects);
-        if (projectsToCleanup.isEmpty()) {
-            projectsToCleanup = instances.stream().map(ProjectInstance::getName).collect(Collectors.toList());
-        }
+        List<String> projectsToCleanup = getProjectsToCleanup();
         MaintainModeTool maintainModeTool = new MaintainModeTool("routine tool");
         maintainModeTool.init();
         maintainModeTool.markEpochs();
@@ -133,7 +119,7 @@ public class RoutineTool extends ExecutableApplication {
     private void doCleanup(List<String> projectsToCleanup) {
         try {
             if (metadataCleanup) {
-                cleanMeta(projectsToCleanup);
+                RoutineToolHelper.cleanMeta(projectsToCleanup);
             }
             cleanStorage();
         } catch (Exception e) {
@@ -141,62 +127,8 @@ public class RoutineTool extends ExecutableApplication {
         }
     }
 
-    protected void cleanMeta(List<String> projectsToCleanup) throws IOException {
-        try {
-            cleanGlobalSourceUsage();
-            for (String projName : projectsToCleanup) {
-                cleanMetaByProject(projName);
-            }
-            cleanQueryHistories();
-            cleanStreamingStats();
-            deleteRawRecItems();
-            System.out.println("Metadata cleanup finished");
-        } catch (Exception e) {
-            log.error("Metadata cleanup failed", e);
-            System.out.println(StorageCleaner.ANSI_RED
-                    + "Metadata cleanup failed. Detailed Message is at ${KYLIN_HOME}/logs/shell.stderr"
-                    + StorageCleaner.ANSI_RESET);
-        }
-
-    }
-
-    public static void cleanGlobalSourceUsage() {
-        log.info("Start to clean up global meta");
-        try {
-            EnhancedUnitOfWork.doInTransactionWithCheckAndRetry(() -> {
-                new SourceUsageCleaner().cleanup();
-                return null;
-            }, UnitOfWork.GLOBAL_UNIT);
-        } catch (Exception e) {
-            log.error("Failed to clean global meta", e);
-        }
-        log.info("Clean up global meta finished");
-
-    }
-
-    public static void cleanMetaByProject(String projectName) {
-        log.info("Start to clean up {} meta", projectName);
-        try {
-            GarbageCleaner.cleanMetadata(projectName);
-        } catch (Exception e) {
-            log.error("Project[{}] cleanup Metadata failed", projectName, e);
-        }
-        log.info("Clean up {} meta finished", projectName);
-    }
-
     public void cleanStorage() {
-        try {
-            StorageCleaner storageCleaner = new StorageCleaner(storageCleanup, Arrays.asList(projects), requestFSRate,
-                    retryTimes);
-            System.out.println("Start to cleanup HDFS");
-            storageCleaner.execute();
-            System.out.println("cleanup HDFS finished");
-        } catch (Exception e) {
-            log.error("cleanup HDFS failed", e);
-            System.out.println(StorageCleaner.ANSI_RED
-                    + "cleanup HDFS failed. Detailed Message is at ${KYLIN_HOME}/logs/shell.stderr"
-                    + StorageCleaner.ANSI_RESET);
-        }
+        helper.cleanStorage(storageCleanup, Arrays.asList(projects), requestFSRate, retryTimes);
     }
 
     protected boolean printUsage(OptionsHelper optionsHelper) {
@@ -230,11 +162,5 @@ public class RoutineTool extends ExecutableApplication {
                         + " Request FileSystem rate: " + requestFSRate + " Retry Times: " + retryTimes);
     }
 
-    public void setProjects(String[] projects) {
-        this.projects = projects;
-    }
 
-    public void setStorageCleanup(boolean storageCleanup) {
-        this.storageCleanup = storageCleanup;
-    }
 }
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/upgrade/UpdateUserAclTool.java b/src/tool/src/main/java/org/apache/kylin/tool/upgrade/UpdateUserAclTool.java
index e58738fd2c..4f6700d790 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/upgrade/UpdateUserAclTool.java
+++ b/src/tool/src/main/java/org/apache/kylin/tool/upgrade/UpdateUserAclTool.java
@@ -25,15 +25,11 @@ import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
-import java.util.Map;
 import java.util.Optional;
-import java.util.Properties;
 import java.util.Set;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
-import javax.naming.directory.SearchControls;
-
 import org.apache.commons.cli.Option;
 import org.apache.commons.cli.Options;
 import org.apache.commons.io.FileUtils;
@@ -41,14 +37,15 @@ import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.transaction.UnitOfWork;
 import org.apache.kylin.common.util.CliCommandExecutor;
-import org.apache.kylin.common.util.EncryptUtil;
 import org.apache.kylin.common.util.ExecutableApplication;
 import org.apache.kylin.common.util.OptionBuilder;
 import org.apache.kylin.common.util.OptionsHelper;
 import org.apache.kylin.common.util.ShellException;
+import org.apache.kylin.helper.UpdateUserAclToolHelper;
 import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
 import org.apache.kylin.metadata.upgrade.GlobalAclVersion;
 import org.apache.kylin.metadata.upgrade.GlobalAclVersionManager;
+import org.apache.kylin.metadata.user.ManagedUser;
 import org.apache.kylin.metadata.user.NKylinUserManager;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.security.AclManager;
@@ -59,13 +56,10 @@ import org.apache.kylin.rest.security.UserAcl;
 import org.apache.kylin.rest.security.UserAclManager;
 import org.apache.kylin.rest.util.AclPermissionUtil;
 import org.apache.kylin.tool.MaintainModeTool;
-import org.apache.kylin.tool.util.LdapUtils;
 import org.springframework.security.acls.domain.ConsoleAuditLogger;
 import org.springframework.security.acls.model.Permission;
 import org.springframework.security.acls.model.Sid;
 import org.springframework.security.core.authority.SimpleGrantedAuthority;
-import org.springframework.security.ldap.DefaultSpringSecurityContextSource;
-import org.springframework.security.ldap.SpringSecurityLdapTemplate;
 
 import lombok.val;
 import lombok.extern.slf4j.Slf4j;
@@ -168,7 +162,7 @@ public class UpdateUserAclTool extends ExecutableApplication {
 
     @Override
     protected void execute(OptionsHelper optionsHelper) throws Exception {
-        if (isUpgraded() && !optionsHelper.hasOption(OPTION_ROLLBACK)
+        if (UpdateUserAclToolHelper.getInstance().isUpgraded() && !optionsHelper.hasOption(OPTION_ROLLBACK)
                 && !optionsHelper.hasOption(OPTION_FORCE_UPGRADE)) {
             log.info("The acl related metadata have been upgraded.");
             return;
@@ -197,11 +191,6 @@ public class UpdateUserAclTool extends ExecutableApplication {
         return userAclManager.listAclUsernames().size() > 0;
     }
 
-    public boolean isUpgraded() {
-        val versionManager = GlobalAclVersionManager.getInstance(KylinConfig.getInstanceFromEnv());
-        return versionManager.exists();
-    }
-
     private Set<String> getAdminUsers() {
         val config = KylinConfig.getInstanceFromEnv();
         val profile = config.getSecurityProfile().toLowerCase(Locale.ROOT);
@@ -249,41 +238,11 @@ public class UpdateUserAclTool extends ExecutableApplication {
         val userManager = NKylinUserManager.getInstance(KylinConfig.getInstanceFromEnv());
         return userManager.list().stream()
                 .filter(user -> user.getAuthorities().contains(new SimpleGrantedAuthority(Constant.ROLE_ADMIN)))
-                .map(user -> user.getUsername()).collect(Collectors.toList());
+                .map(ManagedUser::getUsername).collect(Collectors.toList());
     }
 
     public Set<String> getLdapAdminUsers() {
-        val ldapTemplate = createLdapTemplate();
-        val ldapUserDNs = LdapUtils.getAllGroupMembers(ldapTemplate,
-                KylinConfig.getInstanceFromEnv().getLDAPAdminRole());
-        val searchControls = new SearchControls();
-        searchControls.setSearchScope(2);
-        Map<String, String> dnMapperMap = LdapUtils.getAllValidUserDnMap(ldapTemplate, searchControls);
-        val users = new HashSet<String>();
-        for (String u : ldapUserDNs) {
-            Optional.ofNullable(dnMapperMap.get(u)).ifPresent(users::add);
-        }
-        return users;
-    }
-
-    public static boolean isCustomProfile() {
-        val kylinConfig = KylinConfig.getInstanceFromEnv();
-        return "custom".equals(kylinConfig.getSecurityProfile());
-    }
-
-    private SpringSecurityLdapTemplate createLdapTemplate() {
-        val properties = KylinConfig.getInstanceFromEnv().exportToProperties();
-        val contextSource = new DefaultSpringSecurityContextSource(
-                properties.getProperty("kylin.security.ldap.connection-server"));
-        contextSource.setUserDn(properties.getProperty("kylin.security.ldap.connection-username"));
-        contextSource.setPassword(getPassword(properties));
-        contextSource.afterPropertiesSet();
-        return new SpringSecurityLdapTemplate(contextSource);
-    }
-
-    public String getPassword(Properties properties) {
-        val password = properties.getProperty("kylin.security.ldap.connection-password");
-        return EncryptUtil.decrypt(password);
+        return UpdateUserAclToolHelper.getInstance().getLdapAdminUsers();
     }
 
     public void updateProjectAcl(String operation) {
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/util/MetadataUtil.java b/src/tool/src/main/java/org/apache/kylin/tool/util/MetadataUtil.java
index 86f649f3c6..0863295492 100644
--- a/src/tool/src/main/java/org/apache/kylin/tool/util/MetadataUtil.java
+++ b/src/tool/src/main/java/org/apache/kylin/tool/util/MetadataUtil.java
@@ -36,18 +36,18 @@ import org.apache.commons.dbcp2.BasicDataSource;
 import org.apache.ibatis.jdbc.ScriptRunner;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.logging.LogOutputStream;
-import org.apache.kylin.common.persistence.metadata.JdbcDataSource;
 import org.apache.kylin.common.persistence.metadata.jdbc.JdbcUtil;
+import org.apache.kylin.helper.MetadataToolHelper;
 
 import com.google.common.collect.Lists;
 
-import lombok.val;
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
 public class MetadataUtil {
 
     private static final Charset DEFAULT_CHARSET = Charset.defaultCharset();
+    private static MetadataToolHelper metadataToolHelper = new MetadataToolHelper();
 
     private MetadataUtil() {
     }
@@ -60,10 +60,7 @@ public class MetadataUtil {
     }
 
     public static DataSource getDataSource(KylinConfig kylinConfig) throws Exception {
-        val url = kylinConfig.getMetadataUrl();
-        val props = JdbcUtil.datasourceParameters(url);
-
-        return JdbcDataSource.getDataSource(props);
+        return metadataToolHelper.getDataSource(kylinConfig);
     }
 
     public static void createTableIfNotExist(BasicDataSource dataSource, String tableName, String tableSql,
diff --git a/src/tool/src/test/java/org/apache/kylin/tool/MetadataToolTest.java b/src/tool/src/test/java/org/apache/kylin/tool/MetadataToolTest.java
index f906029aea..7db3669a90 100644
--- a/src/tool/src/test/java/org/apache/kylin/tool/MetadataToolTest.java
+++ b/src/tool/src/test/java/org/apache/kylin/tool/MetadataToolTest.java
@@ -64,6 +64,7 @@ import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
 import org.apache.kylin.common.util.OptionBuilder;
 import org.apache.kylin.common.util.OptionsHelper;
+import org.apache.kylin.helper.MetadataToolHelper;
 import org.apache.kylin.metadata.model.NDataModel;
 import org.apache.kylin.metadata.model.NDataModelManager;
 import org.apache.kylin.metadata.project.NProjectManager;
@@ -76,7 +77,6 @@ import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TemporaryFolder;
-import org.mockito.Mockito;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.jdbc.core.JdbcTemplate;
@@ -118,11 +118,13 @@ public class MetadataToolTest extends NLocalFileMetadataTestCase {
     }
 
     private MetadataTool tool(String path) {
-        val originTool = new MetadataTool(getTestConfig());
-        val tool = Mockito.spy(originTool);
-        Mockito.when(tool.getMetadataUrl(Mockito.anyString(), Mockito.anyBoolean()))
-                .thenReturn("kylin_metadata@hdfs,zip=1,path=file://" + path);
-        return tool;
+        KylinConfig kylinConfig = getTestConfig();
+        return new MetadataTool(kylinConfig, new MetadataToolHelper() {
+            @Override
+            public String getMetadataUrl(String rootPath, boolean compressed, KylinConfig kylinConfig) {
+                return "kylin_metadata@hdfs,zip=1,path=file://" + path;
+            }
+        });
     }
 
     @Test
@@ -235,7 +237,6 @@ public class MetadataToolTest extends NLocalFileMetadataTestCase {
         val archiveFolder = junitFolder.listFiles()[0];
         Assertions.assertThat(archiveFolder).exists();
         Assertions.assertThat(archiveFolder.list()).isNotEmpty().contains(COMPRESSED_FILE);
-        Assert.assertNotNull(tool.getBackupPath());
     }
 
     private boolean assertProjectFolder(File projectFolder, File archiveFolder) {
@@ -463,15 +464,12 @@ public class MetadataToolTest extends NLocalFileMetadataTestCase {
         Assertions.assertThat(NProjectManager.getInstance(getTestConfig()).getProject("demo")).isNotNull();
         Assertions.assertThat(NProjectManager.getInstance(getTestConfig()).getProject("ssb")).isNotNull();
         Assertions.assertThat(NProjectManager.getInstance(getTestConfig()).getProject("default")).isNotNull();
-        val tool = tool(emptyFolder.getAbsolutePath());
+        MetadataTool tool = tool(emptyFolder.getAbsolutePath());
         tool.execute(new String[] { "-restore", "-compress", "-dir", "ignored", "--after-truncate" });
         Assertions.assertThat(NProjectManager.getInstance(getTestConfig()).listAllProjects()).isEmpty();
 
-        Mockito.when(tool.getMetadataUrl(Mockito.anyString(), Mockito.anyBoolean()))
-                .thenReturn("kylin_metadata@hdfs,zip=1,path=file://" + restoreFolder.getAbsolutePath());
-
+        tool = tool(restoreFolder.getAbsolutePath());
         Thread.sleep(TimeUnit.SECONDS.toMillis(1));
-
         tool.execute(new String[] { "-restore", "-compress", "-dir", "ignored", "--after-truncate" });
         Assertions.assertThat(NProjectManager.getInstance(getTestConfig()).getProject("demo")).isNotNull();
         Assertions.assertThat(NProjectManager.getInstance(getTestConfig()).getProject("ssb")).isNotNull();
@@ -702,48 +700,49 @@ public class MetadataToolTest extends NLocalFileMetadataTestCase {
 
     @Test
     public void testGetMetadataUrl() {
-        val tool = new MetadataTool();
+        KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+        MetadataToolHelper metadataToolHelper = new MetadataToolHelper();
 
         var hdfsPath = "hdfs://host/path/to/hdfs/dir";
-        var hdfsMetadataUrl = tool.getMetadataUrl(hdfsPath, true);
+        var hdfsMetadataUrl = metadataToolHelper.getMetadataUrl(hdfsPath, true, kylinConfig);
         Assert.assertEquals("kylin_metadata@hdfs,path=/path/to/hdfs/dir/,zip=1", hdfsMetadataUrl);
-        hdfsMetadataUrl = tool.getMetadataUrl(hdfsPath, false);
+        hdfsMetadataUrl = metadataToolHelper.getMetadataUrl(hdfsPath, false, kylinConfig);
         Assert.assertEquals("kylin_metadata@hdfs,path=/path/to/hdfs/dir/", hdfsMetadataUrl);
 
         var maprfsPath = "maprfs://host/path/to/maprfs/dir";
-        var maprfsMetadataUrl = tool.getMetadataUrl(maprfsPath, true);
+        var maprfsMetadataUrl = metadataToolHelper.getMetadataUrl(maprfsPath, true, kylinConfig);
         Assert.assertEquals("kylin_metadata@hdfs,path=/path/to/maprfs/dir/,zip=1", maprfsMetadataUrl);
-        maprfsMetadataUrl = tool.getMetadataUrl(maprfsPath, false);
+        maprfsMetadataUrl = metadataToolHelper.getMetadataUrl(maprfsPath, false, kylinConfig);
         Assert.assertEquals("kylin_metadata@hdfs,path=/path/to/maprfs/dir/", maprfsMetadataUrl);
 
         var s3Path = "s3://host/path/to/s3/dir";
-        var s3MetadataUrl = tool.getMetadataUrl(s3Path, true);
+        var s3MetadataUrl = metadataToolHelper.getMetadataUrl(s3Path, true, kylinConfig);
         Assert.assertEquals("kylin_metadata@hdfs,path=/path/to/s3/dir/,zip=1", s3MetadataUrl);
-        s3MetadataUrl = tool.getMetadataUrl(s3Path, false);
+        s3MetadataUrl = metadataToolHelper.getMetadataUrl(s3Path, false, kylinConfig);
         Assert.assertEquals("kylin_metadata@hdfs,path=/path/to/s3/dir/", s3MetadataUrl);
 
         var s3aPath = "s3a://host/path/to/s3a/dir";
-        var s3aMetadataUrl = tool.getMetadataUrl(s3aPath, true);
+        var s3aMetadataUrl = metadataToolHelper.getMetadataUrl(s3aPath, true, kylinConfig);
         Assert.assertEquals("kylin_metadata@hdfs,path=/path/to/s3a/dir/,zip=1", s3aMetadataUrl);
-        s3aMetadataUrl = tool.getMetadataUrl(s3aPath, false);
+        s3aMetadataUrl = metadataToolHelper.getMetadataUrl(s3aPath, false, kylinConfig);
         Assert.assertEquals("kylin_metadata@hdfs,path=/path/to/s3a/dir/", s3aMetadataUrl);
 
         var wasbPath = "wasb://host/path/to/wasb/dir";
-        var wasbMetadataUrl = tool.getMetadataUrl(wasbPath, true);
+        var wasbMetadataUrl = metadataToolHelper.getMetadataUrl(wasbPath, true, kylinConfig);
         Assert.assertEquals("kylin_metadata@hdfs,path=/path/to/wasb/dir/,zip=1", wasbMetadataUrl);
-        wasbMetadataUrl = tool.getMetadataUrl(wasbPath, false);
+        wasbMetadataUrl = metadataToolHelper.getMetadataUrl(wasbPath, false, kylinConfig);
         Assert.assertEquals("kylin_metadata@hdfs,path=/path/to/wasb/dir/", wasbMetadataUrl);
 
         var filePath = "file:///path/to/file/dir";
-        var fileMetadataUrl = tool.getMetadataUrl(filePath, true);
+        var fileMetadataUrl = metadataToolHelper.getMetadataUrl(filePath, true, kylinConfig);
         Assert.assertEquals("/path/to/file/dir/", fileMetadataUrl);
-        fileMetadataUrl = tool.getMetadataUrl(filePath, false);
+        fileMetadataUrl = metadataToolHelper.getMetadataUrl(filePath, false, kylinConfig);
         Assert.assertEquals("/path/to/file/dir/", fileMetadataUrl);
 
         var simplePath = "/just/a/path";
-        var simpleMetadataUrl = tool.getMetadataUrl(simplePath, true);
+        var simpleMetadataUrl = metadataToolHelper.getMetadataUrl(simplePath, true, kylinConfig);
         Assert.assertEquals("/just/a/path/", simpleMetadataUrl);
-        simpleMetadataUrl = tool.getMetadataUrl(simplePath, false);
+        simpleMetadataUrl = metadataToolHelper.getMetadataUrl(simplePath, false, kylinConfig);
         Assert.assertEquals("/just/a/path/", simpleMetadataUrl);
     }
 
diff --git a/src/tool/src/test/java/org/apache/kylin/tool/security/KylinPasswordResetCLITest.java b/src/tool/src/test/java/org/apache/kylin/tool/security/KylinPasswordResetCLITest.java
index e8ca0ae812..6e677dd0e4 100644
--- a/src/tool/src/test/java/org/apache/kylin/tool/security/KylinPasswordResetCLITest.java
+++ b/src/tool/src/test/java/org/apache/kylin/tool/security/KylinPasswordResetCLITest.java
@@ -68,6 +68,7 @@ public class KylinPasswordResetCLITest extends LogOutputTestCase {
         overwriteSystemProp("kylin.metadata.random-admin-password.enabled", "true");
         val pwdEncoder = new BCryptPasswordEncoder();
         overwriteSystemProp("kylin.security.user-password-encoder", pwdEncoder.getClass().getName());
+        overwriteSystemProp("kylin.metadata.random-admin-password.enabled", "true");
         val user = new ManagedUser("ADMIN", "KYLIN", true, Constant.ROLE_ADMIN, Constant.GROUP_ALL_USERS);
         user.setPassword(pwdEncoder.encode(user.getPassword()));
         val config = KylinConfig.getInstanceFromEnv();
diff --git a/src/tool/src/test/java/org/apache/kylin/tool/upgrade/UpdateUserAclToolTest.java b/src/tool/src/test/java/org/apache/kylin/tool/upgrade/UpdateUserAclToolTest.java
index f4b78db084..eec36ce0f3 100644
--- a/src/tool/src/test/java/org/apache/kylin/tool/upgrade/UpdateUserAclToolTest.java
+++ b/src/tool/src/test/java/org/apache/kylin/tool/upgrade/UpdateUserAclToolTest.java
@@ -20,6 +20,7 @@ package org.apache.kylin.tool.upgrade;
 
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
+import org.apache.kylin.helper.UpdateUserAclToolHelper;
 import org.apache.kylin.rest.security.AclManager;
 import org.apache.kylin.rest.security.AclPermission;
 import org.apache.kylin.rest.security.UserAclManager;
@@ -56,7 +57,7 @@ public class UpdateUserAclToolTest extends NLocalFileMetadataTestCase {
         Mockito.when(tool.matchUpgradeCondition(args)).thenReturn(true);
         tool.execute(args);
         Assert.assertTrue(tool.isAdminUserUpgraded());
-        Assert.assertTrue(tool.isUpgraded());
+        Assert.assertTrue(UpdateUserAclToolHelper.getInstance().isUpgraded());
         val userAclManager = UserAclManager.getInstance(getTestConfig());
         Assert.assertTrue(userAclManager.get("admin_user").hasPermission(AclPermission.DATA_QUERY.getMask()));
         val aclManager = createAclManager(tool);
@@ -96,7 +97,7 @@ public class UpdateUserAclToolTest extends NLocalFileMetadataTestCase {
     public void testUpdateUserAcl() {
         getTestConfig().setProperty("kylin.security.profile", "custom");
         tool.execute(new String[] { "-f", "-s=migrate", "-v=4.5.10", "-h=." });
-        Assert.assertTrue(tool.isUpgraded());
+        Assert.assertTrue(UpdateUserAclToolHelper.getInstance().isUpgraded());
     }
 
     @Test
diff --git a/src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector.tds b/src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector.tds
deleted file mode 100644
index ac3e77ff62..0000000000
--- a/src/tool/src/test/resources/bisync_tableau/nmodel_full_measure_test.connector.tds
+++ /dev/null
@@ -1,125 +0,0 @@
-<?xml version='1.0' encoding='UTF-8'?>
-<datasource formatted-name="federated.0e6gjbn18cj0a41an9pi309itkyi" inline="true" source-platform="win" version="10.0">
-  <connection class="federated">
-    <named-connections>
-      <named-connection caption="localhost" name="kyligence_odbc.06xjot407mgsfe1bnnyt60p4vjuf">
-        <connection class="kyligence_odbc" dbname="" odbc-connect-string-extras="PROJECT=default;CUBE=nmodel_full_measure_test" port="7070" schema="DEFAULT" server="localhost" username="ADMIN" vendor1="default" vendor2="nmodel_full_measure_test"/>
-      </named-connection>
-    </named-connections>
-    <relation join="left" type="join">
-      <clause type="join">
-        <expression op="=">
-          <expression op="[TEST_MEASURE].[ID1]"/>
-          <expression op="[TEST_MEASURE1].[ID1]"/>
-        </expression>
-      </clause>
-      <relation type="table" connection="kyligence_odbc.06xjot407mgsfe1bnnyt60p4vjuf" name="TEST_MEASURE" table="[DEFAULT].[TEST_MEASURE]"/>
-      <relation type="table" connection="kyligence_odbc.06xjot407mgsfe1bnnyt60p4vjuf" name="TEST_MEASURE1" table="[DEFAULT].[TEST_MEASURE1]"/>
-    </relation>
-    <cols>
-      <map key="[FLAG (TEST_MEASURE)]" value="[TEST_MEASURE].[FLAG]"/>
-      <map key="[ID2 (TEST_MEASURE1)]" value="[TEST_MEASURE1].[ID2]"/>
-      <map key="[PRICE1 (TEST_MEASURE)]" value="[TEST_MEASURE].[PRICE1]"/>
-      <map key="[PRICE7 (TEST_MEASURE1)]" value="[TEST_MEASURE1].[PRICE7]"/>
-      <map key="[ID1 (TEST_MEASURE1)]" value="[TEST_MEASURE1].[ID1]"/>
-      <map key="[PRICE2 (TEST_MEASURE)]" value="[TEST_MEASURE].[PRICE2]"/>
-      <map key="[FLAG (TEST_MEASURE1)]" value="[TEST_MEASURE1].[FLAG]"/>
-      <map key="[ID4 (TEST_MEASURE1)]" value="[TEST_MEASURE1].[ID4]"/>
-      <map key="[PRICE3 (TEST_MEASURE)]" value="[TEST_MEASURE].[PRICE3]"/>
-      <map key="[ID3 (TEST_MEASURE1)]" value="[TEST_MEASURE1].[ID3]"/>
-      <map key="[CC_PRICE8]" value="[TEST_MEASURE].[CC_PRICE8]"/>
-      <map key="[CC_PRICE7]" value="[TEST_MEASURE].[CC_PRICE7]"/>
-      <map key="[PRICE3 (TEST_MEASURE1)]" value="[TEST_MEASURE1].[PRICE3]"/>
-      <map key="[PRICE6 (TEST_MEASURE1)]" value="[TEST_MEASURE1].[PRICE6]"/>
-      <map key="[CC_PRICE9]" value="[TEST_MEASURE].[CC_PRICE9]"/>
-      <map key="[PRICE5 (TEST_MEASURE1)]" value="[TEST_MEASURE1].[PRICE5]"/>
-      <map key="[PRICE2 (TEST_MEASURE1)]" value="[TEST_MEASURE1].[PRICE2]"/>
-      <map key="[PRICE1 (TEST_MEASURE1)]" value="[TEST_MEASURE1].[PRICE1]"/>
-      <map key="[NAME3 (TEST_MEASURE1)]" value="[TEST_MEASURE1].[NAME3]"/>
-      <map key="[PRICE5 (TEST_MEASURE)]" value="[TEST_MEASURE].[PRICE5]"/>
-      <map key="[NAME4 (TEST_MEASURE1)]" value="[TEST_MEASURE1].[NAME4]"/>
-      <map key="[PRICE6 (TEST_MEASURE)]" value="[TEST_MEASURE].[PRICE6]"/>
-      <map key="[NAME1 (TEST_MEASURE1)]" value="[TEST_MEASURE1].[NAME1]"/>
-      <map key="[PRICE7 (TEST_MEASURE)]" value="[TEST_MEASURE].[PRICE7]"/>
-      <map key="[NAME2 (TEST_MEASURE1)]" value="[TEST_MEASURE1].[NAME2]"/>
-      <map key="[NAME2 (TEST_MEASURE)]" value="[TEST_MEASURE].[NAME2]"/>
-      <map key="[ID3 (TEST_MEASURE)]" value="[TEST_MEASURE].[ID3]"/>
-      <map key="[NAME3 (TEST_MEASURE)]" value="[TEST_MEASURE].[NAME3]"/>
-      <map key="[ID2 (TEST_MEASURE)]" value="[TEST_MEASURE].[ID2]"/>
-      <map key="[ID1 (TEST_MEASURE)]" value="[TEST_MEASURE].[ID1]"/>
-      <map key="[NAME1 (TEST_MEASURE)]" value="[TEST_MEASURE].[NAME1]"/>
-      <map key="[NAME4 (TEST_MEASURE)]" value="[TEST_MEASURE].[NAME4]"/>
-      <map key="[ID4 (TEST_MEASURE)]" value="[TEST_MEASURE].[ID4]"/>
-      <map key="[CC_PRICE3]" value="[TEST_MEASURE].[CC_PRICE3]"/>
-      <map key="[CC_PRICE6]" value="[TEST_MEASURE].[CC_PRICE6]"/>
-      <map key="[CC_PRICE5]" value="[TEST_MEASURE].[CC_PRICE5]"/>
-      <map key="[CC_PRICE2]" value="[TEST_MEASURE].[CC_PRICE2]"/>
-      <map key="[CC_PRICE1]" value="[TEST_MEASURE].[CC_PRICE1]"/>
-      <map key="[TIME1 (TEST_MEASURE)]" value="[TEST_MEASURE].[TIME1]"/>
-      <map key="[TIME2 (TEST_MEASURE1)]" value="[TEST_MEASURE1].[TIME2]"/>
-      <map key="[CC_PRICE10]" value="[TEST_MEASURE].[CC_PRICE10]"/>
-      <map key="[TIME1 (TEST_MEASURE1)]" value="[TEST_MEASURE1].[TIME1]"/>
-      <map key="[TIME2 (TEST_MEASURE)]" value="[TEST_MEASURE].[TIME2]"/>
-    </cols>
-  </connection>
-  <aliases enabled="yes"/>
-  <column caption="FLAG" datatype="boolean" name="[FLAG (TEST_MEASURE)]" role="dimension" type="nominal" hidden="true"/>
-  <column caption="ID2-2" datatype="integer" name="[ID2 (TEST_MEASURE1)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="PRICE1" datatype="real" name="[PRICE1 (TEST_MEASURE)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="PRICE7" datatype="integer" name="[PRICE7 (TEST_MEASURE1)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="ID1-2" datatype="integer" name="[ID1 (TEST_MEASURE1)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="PRICE2" datatype="real" name="[PRICE2 (TEST_MEASURE)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="FLAG" datatype="boolean" name="[FLAG (TEST_MEASURE1)]" role="dimension" type="nominal" hidden="true"/>
-  <column caption="ID4" datatype="integer" name="[ID4 (TEST_MEASURE1)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="PRICE3" datatype="real" name="[PRICE3 (TEST_MEASURE)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="ID3" datatype="integer" name="[ID3 (TEST_MEASURE1)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="CC_CROSSTABLE_PRICE1" datatype="integer" name="[CC_PRICE8]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="CC_PRICE7" datatype="integer" name="[CC_PRICE7]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="PRICE3" datatype="real" name="[PRICE3 (TEST_MEASURE1)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="PRICE6" datatype="integer" name="[PRICE6 (TEST_MEASURE1)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="CC_CROSSTABLE_PRICE2" datatype="integer" name="[CC_PRICE9]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="PRICE5" datatype="integer" name="[PRICE5 (TEST_MEASURE1)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="PRICE2" datatype="real" name="[PRICE2 (TEST_MEASURE1)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="PRICE1-2" datatype="real" name="[PRICE1 (TEST_MEASURE1)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="NAME3" datatype="string" name="[NAME3 (TEST_MEASURE1)]" role="dimension" type="nominal" hidden="true"/>
-  <column caption="PRICE5" datatype="integer" name="[PRICE5 (TEST_MEASURE)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="NAME4" datatype="integer" name="[NAME4 (TEST_MEASURE1)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="PRICE6" datatype="integer" name="[PRICE6 (TEST_MEASURE)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="NAME1" datatype="string" name="[NAME1 (TEST_MEASURE1)]" role="dimension" type="nominal" hidden="true"/>
-  <column caption="PRICE7" datatype="integer" name="[PRICE7 (TEST_MEASURE)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="NAME2" datatype="string" name="[NAME2 (TEST_MEASURE1)]" role="dimension" type="nominal" hidden="true"/>
-  <column caption="NAME2" datatype="string" name="[NAME2 (TEST_MEASURE)]" role="dimension" type="nominal" hidden="true"/>
-  <column caption="ID3" datatype="integer" name="[ID3 (TEST_MEASURE)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="NAME2" datatype="string" name="[NAME3 (TEST_MEASURE)]" role="dimension" type="nominal" hidden="true"/>
-  <column caption="ID2" datatype="integer" name="[ID2 (TEST_MEASURE)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="ID1" datatype="integer" name="[ID1 (TEST_MEASURE)]" role="dimension" type="ordinal"/>
-  <column caption="NAME1" datatype="string" name="[NAME1 (TEST_MEASURE)]" role="dimension" type="nominal" hidden="true"/>
-  <column caption="NAME4" datatype="integer" name="[NAME4 (TEST_MEASURE)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="ID4" datatype="integer" name="[ID4 (TEST_MEASURE)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="CC_PRICE3" datatype="real" name="[CC_PRICE3]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="CC_PRICE6" datatype="integer" name="[CC_PRICE6]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="CC_PRICE5" datatype="integer" name="[CC_PRICE5]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="CC_PRICE2" datatype="real" name="[CC_PRICE2]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="CC_PRICE1" datatype="real" name="[CC_PRICE1]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="TIME1" datatype="date" name="[TIME1 (TEST_MEASURE)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="TIME2" datatype="datetime" name="[TIME2 (TEST_MEASURE1)]" role="dimension" type="nominal" hidden="true"/>
-  <column caption="CC_PRICE10" datatype="integer" name="[CC_PRICE10]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="TIME1" datatype="date" name="[TIME1 (TEST_MEASURE1)]" role="dimension" type="ordinal" hidden="true"/>
-  <column caption="TIME2" datatype="datetime" name="[TIME2 (TEST_MEASURE)]" role="dimension" type="nominal" hidden="true"/>
-  <column caption="COUNT_STAR" datatype="integer" name="[COUNT_STAR]" role="measure" type="quantitative">
-    <calculation class="tableau" formula="COUNT(*)"/>
-  </column>
-  <column caption="SUM_1" datatype="integer" name="[SUM_1]" role="measure" type="quantitative">
-    <calculation class="tableau" formula="SUM(1)"/>
-  </column>
-  <column caption="SUM_2" datatype="real" name="[SUM_2]" role="measure" type="quantitative">
-    <calculation class="tableau" formula="SUM(1.0)"/>
-  </column>
-  <column caption="SUM_3" datatype="real" name="[SUM_3]" role="measure" type="quantitative">
-    <calculation class="tableau" formula="SUM(1.0)"/>
-  </column>
-  <drill-paths/>
-  <semantic-values>
-    <semantic-value key="[Country].[Name]" value="&quot;美国&quot;"/>
-  </semantic-values>
-</datasource>