You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kylin.apache.org by xx...@apache.org on 2022/12/05 10:20:52 UTC

[kylin] branch kylin5 updated (0408501504 -> 21a6b9f7f6)

This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a change to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git


    from 0408501504 KYLIN-5308 add transaction timeout when epoch renew
     new 7cab52b1f0 fix secondstorage index refresh locked
     new c583f43324 KYLIN-5311 Improve performance of getSubstitutor
     new 4989854945 KYLIN-5312 Add verification to the parameters about update password
     new 59f05cd572 fix second storage skipping index status error after rebalance
     new 3937edeaa0 KYLIN-5313 Support move flat_table from readCluster to writeCluster
     new f4f2b21ee8 KYLIN-5314 check name conflict before export tds file
     new 275d57dea5 KYLIN-5315 update AutoRefreshSnapshotScheduler afterPropertiesSet
     new b04afb32a2 add second storage index lock check
     new 6836fba580 KYLIN-5316 fix stackOverflowError when cc column name equals dimension name
     new 13bd871bdf skip secondary index while modify column
     new a79a9b487d KYLIN-5317 Change parameter kylin.metrics.hdfs-periodic-calculation-enabled to default true
     new b55dcac51b fix skipping index status wrong when deletenode
     new 8b31539dab KYLIN-5318 adjust CC names Adjust dimensions measure and filter condition simultaneously
     new 2902a64d43 check project admin permission
     new 19232aba8f KYLIN-5319 Earlier Init Segment LayoutInfo In FilePruner
     new 294895e4ee KYLIN-5320 check and update dataflow lastQueryTime
     new 6cba5e6339 KYLIN-5322 fix select count when out of segment range
     new 5c8bb01017 KYLIN-5323 fix segment matched to wrong model
     new a5875f29c1 KYLIN-5324 tableindex answer select start
     new fb7f07c680 Revert Fix QueryHistory Clean
     new c359214319 KYLIN-5325 Fix the number type conversion problem caused by writing JSON files
     new 21a6b9f7f6 KYLIN-5326 Fix request parameter json deserializer

The 22 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 build/sbin/prepare-flat-table.sh                   | 122 +++++
 .../kylin/rest/controller/NBasicController.java    |  39 +-
 .../service/task/QueryHistoryTaskScheduler.java    |  19 +-
 .../kylin/rest/service/UserAclServiceTest.java     |   7 +-
 .../task/QueryHistoryTaskSchedulerRunnerTest.java  |  34 +-
 .../task/QueryHistoryTaskSchedulerTest.java        |  48 +-
 .../kylin/common/ICachedExternalConfigLoader.java  |   2 +-
 .../org/apache/kylin/common/KylinConfigBase.java   |  60 +-
 .../kylin/common/KylinExternalConfigLoader.java    |   9 +-
 .../apache/kylin/common/PropertiesDelegate.java    |  34 +-
 .../common/exception/code/ErrorCodeServer.java     |   2 +
 .../org/apache/kylin/common/msg/CnMessage.java     |  16 -
 .../java/org/apache/kylin/common/msg/Message.java  |  14 +-
 .../common/util/ArgsTypeJsonDeserializer.java      |  35 +-
 .../resources/kylin_error_msg_conf_cn.properties   |   4 +-
 .../resources/kylin_error_msg_conf_en.properties   |   4 +-
 .../main/resources/kylin_errorcode_conf.properties |   2 +
 .../apache/kylin/common/KylinConfigBaseTest.java   |   9 +
 .../kylin/common/PropertiesDelegateTest.java       | 116 ++++
 .../common/util/ArgsTypeJsonDeserializerTest.java  | 156 ++++++
 .../kylin/common/util/CompositeMapViewTest.java    | 167 ++++++
 .../metadata/cube/cuboid/NQueryLayoutChooser.java  |  33 +-
 .../metadata/cube/cuboid/TableIndexMatcher.java    |  24 +-
 .../kylin/metadata/cube/model/IndexPlan.java       |  22 +-
 .../kylin/metadata/cube/model/NDataSegment.java    |   4 +-
 .../kylin/metadata/cube/model/NDataflow.java       |  21 +-
 .../metadata/cube/model/NDataflowManager.java      |  57 +-
 .../cube/realization/HybridRealization.java        |   1 -
 .../metadata/model/util/ComputedColumnUtil.java    |  20 +-
 .../metadata/query/JdbcQueryHistoryStore.java      |  73 +--
 .../kylin/metadata/query/RDBMSQueryHistoryDAO.java |  44 +-
 .../metadata/realization/CapabilityResult.java     |   6 +-
 .../apache/kylin/metrics/HdfsCapacityMetrics.java  |  38 +-
 .../kylin/metadata/cube/model/NDataflowTest.java   |  91 ++-
 .../metadata/query/RDBMSQueryHistoryDaoTest.java   |  68 ---
 .../kylin/metrics/HdfsCapacityMetricsTest.java     |  26 +-
 .../rest/scheduler/AutoRefreshSnapshotRunner.java  |  64 +--
 .../scheduler/AutoRefreshSnapshotScheduler.java    |  38 +-
 ...pshotThread.java => BuildSnapshotRunnable.java} |   4 +-
 ...leThread.java => CheckSourceTableRunnable.java} |   2 +-
 .../kylin/rest/service/ModelBuildService.java      |  34 +-
 .../scheduler/AutoRefreshSnapshotConfigTest.java   |  58 ++
 .../scheduler/AutoRefreshSnapshotRunnerTest.java   |  66 +--
 ...eadTest.java => BuildSnapshotRunnableTest.java} |  20 +-
 ...Test.java => CheckSourceTableRunnableTest.java} |  11 +-
 .../rest/service/FusionModelServiceBuildTest.java  |   8 +-
 .../kylin/rest/service/ModelServiceBuildTest.java  |  68 +--
 .../rest/controller/open/OpenModelController.java  |  59 +-
 .../kylin/rest/controller/NModelController.java    |  95 +---
 .../kylin/rest/controller/NTableController.java    |  21 +-
 .../kylin/rest/controller/NUserController.java     |  13 +-
 .../rest/controller/v2/NModelControllerV2.java     |   5 +-
 .../rest/controller/v2/NProjectControllerV2.java   |   6 +-
 .../controller/open/OpenModelControllerTest.java   |  69 +++
 .../rest/controller/NModelControllerTest.java      |  74 +--
 .../kylin/rest/controller/NUserControllerTest.java |   6 +-
 .../controller/open/OpenTableControllerTest.java   |  50 +-
 .../kylin/rest/service/AbstractModelService.java   | 140 +++++
 .../kylin/rest/service/FusionModelService.java     |   6 +-
 .../apache/kylin/rest/service/ModelService.java    | 460 +++-------------
 .../apache/kylin/rest/service/ModelTdsService.java | 336 ++++++++++++
 .../kylin/rest/service/ModelServiceTest.java       | 577 ++++----------------
 .../kylin/rest/service/ModelTdsServiceTest.java    | 607 +++++++++++++++++++++
 .../org/apache/kylin/query/schema/OLAPTable.java   |  32 +-
 .../org/apache/kylin/tool/bisync/BISyncTool.java   |  20 +-
 .../org/apache/kylin/tool/bisync/SyncContext.java  |   2 +
 .../apache/kylin/tool/bisync/SyncModelBuilder.java | 308 +++++------
 .../apache/kylin/tool/bisync/model/ColumnDef.java  |  80 +--
 .../apache/kylin/tool/bisync/model/SyncModel.java  |  72 +--
 .../bisync/tableau/TableauDataSourceConverter.java |   2 +-
 .../SnapshotSourceTableStatsServiceTest.scala      |   7 +-
 .../service/SnapshotSourceTableStatsService.java   |  21 +-
 .../TestSnapshotSourceTableStatsService.java       |  36 --
 .../engine/exec/sparder/SparderQueryPlanExec.java  |  10 +-
 .../kylin/query/routing/CandidateSortTest.java     |  59 ++
 .../kap/secondstorage/SecondStorageIndexTest.java  |  23 +-
 .../kap/secondstorage/SecondStorageLockTest.java   |  20 +-
 .../database/ClickHouseQueryOperator.java          |   8 +-
 .../job/ClickhouseRefreshSecondaryIndex.java       |  47 +-
 .../kap/clickhouse/job/RefreshSecondaryIndex.java  |  60 +-
 .../metadata/ClickHouseMetadataOperator.java       | 101 ++--
 .../management/SecondStorageScheduleService.java   |   7 +-
 .../management/SecondStorageService.java           |  19 +-
 .../kap/secondstorage/database/QueryOperator.java  |   3 +-
 .../kap/secondstorage/metadata/TableData.java      |   1 +
 .../kap/secondstorage/metadata/TableEntity.java    |   9 +-
 .../kylin/query/runtime/plan/TableScanPlan.scala   |  43 +-
 .../apache/kylin/query/util/RuntimeHelper.scala    |  17 +-
 .../query/runtime/plan/SegmentEmptyTest.scala      |  50 ++
 .../org/apache/spark/sql/SparderTypeUtil.scala     |  13 +-
 .../sql/execution/datasource/FilePruner.scala      |  21 +-
 .../sql/execution/datasource/FilePrunerSuite.scala |  10 +-
 .../java/org/apache/kylin/tool/ProjectTool.java}   |  54 +-
 .../org/apache/kylin/tool/ProjectToolTest.java}    |  32 +-
 .../kylin/tool/bisync/SyncModelBuilderTest.java    |  78 ++-
 .../kylin/tool/bisync/SyncModelTestUtil.java       |   1 +
 .../upgrade/RenameProjectResourceToolTest.java     |   2 +-
 97 files changed, 3349 insertions(+), 2243 deletions(-)
 create mode 100644 build/sbin/prepare-flat-table.sh
 rename src/{query-service => common-service}/src/test/java/org/apache/kylin/rest/service/task/QueryHistoryTaskSchedulerRunnerTest.java (82%)
 rename src/{query-service => common-service}/src/test/java/org/apache/kylin/rest/service/task/QueryHistoryTaskSchedulerTest.java (93%)
 create mode 100644 src/core-common/src/test/java/org/apache/kylin/common/util/ArgsTypeJsonDeserializerTest.java
 create mode 100644 src/core-common/src/test/java/org/apache/kylin/common/util/CompositeMapViewTest.java
 rename src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/{BuildSnapshotThread.java => BuildSnapshotRunnable.java} (99%)
 rename src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/{CheckSourceTableThread.java => CheckSourceTableRunnable.java} (98%)
 create mode 100644 src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotConfigTest.java
 rename src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/{BuildSnapshotThreadTest.java => BuildSnapshotRunnableTest.java} (97%)
 rename src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/{CheckSourceTableThreadTest.java => CheckSourceTableRunnableTest.java} (93%)
 create mode 100644 src/modeling-service/src/main/java/org/apache/kylin/rest/service/AbstractModelService.java
 create mode 100644 src/modeling-service/src/main/java/org/apache/kylin/rest/service/ModelTdsService.java
 create mode 100644 src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelTdsServiceTest.java
 delete mode 100644 src/query-service/src/test/java/org/apache/kylin/rest/service/TestSnapshotSourceTableStatsService.java
 create mode 100644 src/spark-project/sparder/src/test/scala/org/apache/kylin/query/runtime/plan/SegmentEmptyTest.scala
 copy src/{common-service/src/main/java/org/apache/kylin/rest/util/JStackDumpTask.java => tool/src/main/java/org/apache/kylin/tool/ProjectTool.java} (50%)
 copy src/{common-service/src/test/java/org/apache/kylin/rest/util/JStackDumpTaskTest.java => tool/src/test/java/org/apache/kylin/tool/ProjectToolTest.java} (61%)


[kylin] 08/22: add second storage index lock check

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit b04afb32a291a1ba7c223f3b71525444e931539c
Author: Shuai li <lo...@live.cn>
AuthorDate: Fri Oct 14 11:06:41 2022 +0800

    add second storage index lock check
---
 .../kyligence/kap/secondstorage/SecondStorageIndexTest.java | 13 ++++++++++---
 .../kap/secondstorage/management/SecondStorageService.java  |  8 ++++++--
 2 files changed, 16 insertions(+), 5 deletions(-)

diff --git a/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageIndexTest.java b/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageIndexTest.java
index 38065420ca..0a8ad1b714 100644
--- a/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageIndexTest.java
+++ b/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageIndexTest.java
@@ -28,7 +28,6 @@ import java.io.File;
 import java.io.IOException;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Locale;
 import java.util.Set;
 import java.util.stream.Collectors;
 
@@ -349,8 +348,8 @@ public class SecondStorageIndexTest implements JobWaiter {
         String jobId = updatePrimaryIndexAndSecondaryIndex(modelName, null, Sets.newHashSet());
         waitJobEnd(getProject(), jobId);
 
-        assertThrows(String.format(Locale.ROOT, MsgPicker.getMsg().getSecondStorageConcurrentOperate(), getProject()),
-                KylinException.class, () -> updatePrimaryIndexAndSecondaryIndex(modelName, null, secondaryIndex));
+        assertThrows(MsgPicker.getMsg().getSecondStorageConcurrentOperate(), KylinException.class,
+                () -> updatePrimaryIndexAndSecondaryIndex(modelName, null, secondaryIndex));
         clickhouse[0].start();
         ClickHouseUtils.internalConfigClickHouse(clickhouse, replica);
 
@@ -465,6 +464,14 @@ public class SecondStorageIndexTest implements JobWaiter {
         assertEquals(jobCnt, getNExecutableManager().getAllExecutables().stream()
                 .filter(job -> job instanceof ClickHouseRefreshSecondaryIndexJob).count());
 
+        // test range lock
+        val lockSecondaryIndex = Sets.newHashSet("TEST_KYLIN_FACT.TRANS_ID");
+        SegmentRange<Long> range = SegmentRange.TimePartitionedSegmentRange.createInfinite();
+        SecondStorageLockUtils.acquireLock(modelId, range).lock();
+        assertThrows(MsgPicker.getMsg().getSecondStorageConcurrentOperate(), KylinException.class,
+                () -> updatePrimaryIndexAndSecondaryIndex(modelName, null, lockSecondaryIndex));
+        SecondStorageLockUtils.unlock(modelId, range);
+
         tableData = getTableFlow(modelId).getTableDataList().get(0);
         partition = tableData.getPartitions().get(0);
         assertTrue(partition.getSecondaryIndexColumns().isEmpty());
diff --git a/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageService.java b/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageService.java
index b0886204aa..460e40faed 100644
--- a/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageService.java
+++ b/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageService.java
@@ -1159,13 +1159,17 @@ public class SecondStorageService extends BasicService implements SecondStorageU
         List<AbstractExecutable> jobs = getRelationJobsWithoutFinish(project, modelId);
         if (!jobs.isEmpty()) {
             throw new KylinException(JobErrorCode.SECOND_STORAGE_JOB_EXISTS,
-                    String.format(Locale.ROOT, MsgPicker.getMsg().getSecondStorageConcurrentOperate(), project));
+                    MsgPicker.getMsg().getSecondStorageConcurrentOperate());
         }
         jobs = getJobs(project, modelId, Sets.newHashSet(ExecutableState.ERROR),
                 Sets.newHashSet(JobTypeEnum.SECOND_STORAGE_REFRESH_SECONDARY_INDEXES));
         if (!jobs.isEmpty()) {
             throw new KylinException(JobErrorCode.SECOND_STORAGE_JOB_EXISTS,
-                    String.format(Locale.ROOT, MsgPicker.getMsg().getSecondStorageConcurrentOperate(), project));
+                    MsgPicker.getMsg().getSecondStorageConcurrentOperate());
+        }
+        if (SecondStorageLockUtils.containsKey(modelId)) {
+            throw new KylinException(JobErrorCode.SECOND_STORAGE_JOB_EXISTS,
+                    MsgPicker.getMsg().getSecondStorageConcurrentOperate());
         }
     }
 


[kylin] 09/22: KYLIN-5316 fix stackOverflowError when cc column name equals dimension name

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 6836fba58026aaa913808259facc13863e83f17d
Author: Shuai li <lo...@live.cn>
AuthorDate: Fri Oct 14 12:27:20 2022 +0800

    KYLIN-5316 fix stackOverflowError when cc column name equals dimension name
---
 .../apache/kylin/metadata/model/util/ComputedColumnUtil.java | 12 ++++--------
 1 file changed, 4 insertions(+), 8 deletions(-)

diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/util/ComputedColumnUtil.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/util/ComputedColumnUtil.java
index 451b8280d4..b07c7cf5b0 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/util/ComputedColumnUtil.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/util/ComputedColumnUtil.java
@@ -702,7 +702,7 @@ public class ComputedColumnUtil {
                 if (checkedCC.contains(cc)) {
                     continue;
                 }
-                ccUsedColsMap.put(cc.getColumnName(), ComputedColumnUtil.getCCUsedColsWithModel(model, cc));
+                ccUsedColsMap.put(cc.getIdentName(), ComputedColumnUtil.getCCUsedColsWithModel(model, cc));
             }
 
             // parse inner expression might cause error, for example timestampdiff
@@ -712,7 +712,7 @@ public class ComputedColumnUtil {
                     continue;
                 }
                 val ccUsedSourceCols = Sets.<String> newHashSet();
-                collectCCUsedSourceCols(cc.getColumnName(), ccUsedColsMap, ccUsedSourceCols);
+                collectCCUsedSourceCols(cc.getIdentName(), ccUsedColsMap, ccUsedSourceCols);
                 ccUsedSourceCols.removeIf(checkedCCUsedSourceCols::contains);
                 if (ccUsedSourceCols.isEmpty() || isColumnAuthorizedFunc.test(ccUsedSourceCols)) {
                     authorizedCC.add(cc);
@@ -726,15 +726,11 @@ public class ComputedColumnUtil {
 
     public static void collectCCUsedSourceCols(String ccColName, Map<String, Set<String>> ccUsedColsMap,
             Set<String> ccUsedSourceCols) {
-        String ccColNameWithoutDot = ccColName.contains(".") ? ccColName.substring(ccColName.lastIndexOf(".") + 1)
-                : ccColName;
-
-        if (!ccUsedColsMap.containsKey(ccColNameWithoutDot)) {
+        if (!ccUsedColsMap.containsKey(ccColName)) {
             ccUsedSourceCols.add(ccColName);
             return;
         }
-
-        for (String usedColumn : ccUsedColsMap.get(ccColNameWithoutDot)) {
+        for (String usedColumn : ccUsedColsMap.get(ccColName)) {
             collectCCUsedSourceCols(usedColumn, ccUsedColsMap, ccUsedSourceCols);
         }
     }


[kylin] 05/22: KYLIN-5313 Support move flat_table from readCluster to writeCluster

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 3937edeaa09154ace480bf8138e506d2722632af
Author: Guoliang Sun <gu...@kyligence.io>
AuthorDate: Thu Oct 13 22:20:51 2022 +0800

    KYLIN-5313 Support move flat_table from readCluster to writeCluster
---
 build/sbin/prepare-flat-table.sh                   | 122 +++++++++++++++++++++
 .../java/org/apache/kylin/tool/ProjectTool.java    |  51 +++++++++
 .../org/apache/kylin/tool/ProjectToolTest.java     |  54 +++++++++
 .../upgrade/RenameProjectResourceToolTest.java     |   2 +-
 4 files changed, 228 insertions(+), 1 deletion(-)

diff --git a/build/sbin/prepare-flat-table.sh b/build/sbin/prepare-flat-table.sh
new file mode 100644
index 0000000000..0dec6a749f
--- /dev/null
+++ b/build/sbin/prepare-flat-table.sh
@@ -0,0 +1,122 @@
+#!/bin/bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+function help() {
+  echo "Usage: prepare-flat-table.sh <projects> [--update]"
+  echo
+  echo "<projects>          Optional, Specify the projects to move flat table directory from"
+  echo "                    readCluster to writeCluster. Use ',' as separator."
+  echo "                    If projects is empty, all projects are fetched by default."
+  echo
+  echo "--update            Optional, whether to update an existing flat table directory."
+  echo "Note:               Not support [--overwrite], please execute manually if necessary."
+  exit 1
+}
+
+if [[ "$1" == "--help" ]] || [[ "$1" == "--h" ]] || [[ "$1" == "-h" ]] || [[ "$1" == "-help" ]]; then
+  help
+  exit 0
+fi
+
+projects_or_update=$1
+projects_to_handle=
+update_flag="false"
+projects_arr=
+
+# 1. script with no args
+# 2. script --update
+# 3. script project1,project2,...
+# 4. script project1,project2,... --update
+if [[ -n ${projects_or_update} ]]; then
+  if [[ "${projects_or_update}" == "--update" ]]; then
+    update_flag="true"
+    shift
+  else
+    projects_to_handle=${projects_or_update}
+    shift
+    update=$1
+    if [[ -n ${update} ]]; then
+      if [[ "${update}" == "--update" ]]; then
+        update_flag="true"
+        shift
+      else
+        help
+        exit 0
+      fi
+    fi
+  fi
+fi
+
+source "$(cd -P -- "$(dirname -- "$0")" && pwd -P)"/header.sh
+source "${KYLIN_HOME}"/sbin/init-kerberos.sh
+
+prepare_flat_table_log=${KYLIN_HOME}/logs/prepare-flatTable.log
+
+KYLIN_METADATA_URL_IDENTIFIER=$("${KYLIN_HOME}"/bin/get-properties.sh kylin.metadata.url.identifier)
+
+READ_CLUSTER_WORKING_DIR=$("${KYLIN_HOME}"/bin/get-properties.sh kylin.env.hdfs-working-dir)
+READ_CLUSTER_FLAT_TABLE_DIR=${READ_CLUSTER_WORKING_DIR}/${KYLIN_METADATA_URL_IDENTIFIER}
+
+WRITE_CLUSTER_WORKING_DIR=$("${KYLIN_HOME}"/bin/get-properties.sh kylin.env.hdfs-write-working-dir)
+WRITE_CLUSTER_FLAT_TABLE_DIR=${WRITE_CLUSTER_WORKING_DIR}/${KYLIN_METADATA_URL_IDENTIFIER}
+
+TIME=$(date "+%Y-%m-%d %H:%M:%S")
+echo "INFO : [Operation: move flat table start] user: $(whoami), time:${TIME}" | tee -a "${prepare_flat_table_log}"
+echo "Preparing to move flat table from ${READ_CLUSTER_FLAT_TABLE_DIR} to ${WRITE_CLUSTER_FLAT_TABLE_DIR}" | tee -a "${prepare_flat_table_log}"
+
+function printInfo() {
+  result=$1
+  print_msg=$2
+  if [[ ${result} == 0 ]]; then
+    echo "${print_msg}" succeed
+  else
+    echo "${print_msg}" failed
+  fi
+}
+
+function moveFlatTable() {
+  for project in ${projects_arr}; do
+    project="${project//,/}"
+    echo "Ready to move project: ${project}"
+    if [ "${update_flag}" == "true" ]; then
+      hadoop distcp -update "${READ_CLUSTER_FLAT_TABLE_DIR}"/"${project}"/flat_table "${WRITE_CLUSTER_FLAT_TABLE_DIR}"/"${project}"/flat_table
+      printInfo $? "hadoop -update distcp ${READ_CLUSTER_FLAT_TABLE_DIR}/${project}/flat_table ${WRITE_CLUSTER_FLAT_TABLE_DIR}/${project}/flat_table"
+    else
+      hadoop distcp "${READ_CLUSTER_FLAT_TABLE_DIR}"/"${project}"/flat_table "${WRITE_CLUSTER_FLAT_TABLE_DIR}"/"${project}/flat_table"
+      printInfo $? "hadoop distcp ${READ_CLUSTER_FLAT_TABLE_DIR}/${project}/flat_table ${WRITE_CLUSTER_FLAT_TABLE_DIR}/${project}/flat_table"
+    fi
+  done
+}
+
+# If no project is passed in, all projects will be retrieved by default, multiple projects are supported, separated by ","
+# If the project already has a flat_table directory, it is skipped by default unless updated by passing update
+if [[ -z ${projects_to_handle} ]]; then
+  echo "Ready to get all projects..." | tee -a "${prepare_flat_table_log}"
+  projects_arr=$("${KYLIN_HOME}"/bin/kylin.sh org.apache.kylin.tool.ProjectTool | tail -1)
+else
+  ## init Kerberos if needed
+  initKerberosIfNeeded
+  projects_arr="${projects_to_handle//,/, }"
+fi
+echo "project_list: ${projects_arr[*]}" | tee -a "${prepare_flat_table_log}"
+moveFlatTable "${projects_arr}" | tee -a "${prepare_flat_table_log}"
+
+TIME=$(date "+%Y-%m-%d %H:%M:%S")
+echo "INFO : [Operation: move flat table end.] user: $(whoami), time:${TIME}" | tee -a "${prepare_flat_table_log}"
+echo >>"${prepare_flat_table_log}"
diff --git a/src/tool/src/main/java/org/apache/kylin/tool/ProjectTool.java b/src/tool/src/main/java/org/apache/kylin/tool/ProjectTool.java
new file mode 100644
index 0000000000..ef89073c84
--- /dev/null
+++ b/src/tool/src/main/java/org/apache/kylin/tool/ProjectTool.java
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.tool;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.metadata.project.NProjectManager;
+import org.apache.kylin.metadata.project.ProjectInstance;
+
+public class ProjectTool {
+
+    /**
+     * System.out must be used to pass values to the shell script
+     * @param args Don't pass in parameters
+     */
+    public static void main(String[] args) {
+        ProjectTool projectTool = new ProjectTool();
+        System.out.println(projectTool.projectsList());
+    }
+
+    /**
+     *
+     * @return all projects such as: project1,project2,...
+     */
+    public String projectsList() {
+        KylinConfig kylinConfig = KylinConfig.getInstanceFromEnv();
+        List<String> projects = NProjectManager.getInstance(kylinConfig).listAllProjects().stream()
+                .map(ProjectInstance::getName).collect(Collectors.toList());
+        return StringUtils.strip(projects.toString(), "[]");
+    }
+
+}
diff --git a/src/tool/src/test/java/org/apache/kylin/tool/ProjectToolTest.java b/src/tool/src/test/java/org/apache/kylin/tool/ProjectToolTest.java
new file mode 100644
index 0000000000..51644ae2ce
--- /dev/null
+++ b/src/tool/src/test/java/org/apache/kylin/tool/ProjectToolTest.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.tool;
+
+import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+public class ProjectToolTest extends NLocalFileMetadataTestCase {
+
+    @Before
+    public void setup() throws Exception {
+        createTestMetadata();
+    }
+
+    @After
+    public void teardown() {
+        cleanupTestMetadata();
+    }
+
+    @Test
+    public void testProjectsList() {
+        Assert.assertFalse(new ProjectTool().projectsList().isEmpty());
+    }
+
+    @Test
+    public void testProjectTool() {
+        String errorMsg = "";
+        try {
+            ProjectTool.main(new String[]{});
+        } catch (Exception e) {
+            errorMsg = e.getMessage();
+        }
+        Assert.assertTrue(errorMsg.isEmpty());
+    }
+}
\ No newline at end of file
diff --git a/src/tool/src/test/java/org/apache/kylin/tool/upgrade/RenameProjectResourceToolTest.java b/src/tool/src/test/java/org/apache/kylin/tool/upgrade/RenameProjectResourceToolTest.java
index 9d89bc6aa2..f68138410b 100644
--- a/src/tool/src/test/java/org/apache/kylin/tool/upgrade/RenameProjectResourceToolTest.java
+++ b/src/tool/src/test/java/org/apache/kylin/tool/upgrade/RenameProjectResourceToolTest.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.persistence.ResourceStore;
 import org.apache.kylin.common.util.HadoopUtil;
-import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
 import org.apache.kylin.metadata.cube.model.NDataflowManager;
 import org.apache.kylin.metadata.cube.model.NIndexPlanManager;
@@ -35,6 +34,7 @@ import org.apache.kylin.metadata.favorite.FavoriteRuleManager;
 import org.apache.kylin.metadata.model.NDataModelManager;
 import org.apache.kylin.metadata.model.NTableMetadataManager;
 import org.apache.kylin.metadata.project.NProjectManager;
+import org.apache.kylin.metadata.project.ProjectInstance;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;


[kylin] 04/22: fix second storage skipping index status error after rebalance

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 59f05cd5725604cff96df3f89a39454a73deb169
Author: Shuai li <lo...@live.cn>
AuthorDate: Thu Oct 13 18:01:11 2022 +0800

    fix second storage skipping index status error after rebalance
---
 .../kap/secondstorage/SecondStorageIndexTest.java  |  10 ++
 .../metadata/ClickHouseMetadataOperator.java       | 101 +++++++++++----------
 .../kap/secondstorage/metadata/TableEntity.java    |   9 +-
 3 files changed, 65 insertions(+), 55 deletions(-)

diff --git a/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageIndexTest.java b/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageIndexTest.java
index 767b045172..38065420ca 100644
--- a/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageIndexTest.java
+++ b/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageIndexTest.java
@@ -488,6 +488,16 @@ public class SecondStorageIndexTest implements JobWaiter {
             assertEquals(SecondStorageIndexLoadStatus.ALL, r.getSecondaryIndexStatus());
         });
 
+        secondStorageService.sizeInNode(getProject());
+        EnvelopeResponse<List<SecondStorageIndexResponse>> res3 = secondStorageEndpoint.listIndex(getProject(),
+                modelName);
+        assertEquals(KylinException.CODE_SUCCESS, res3.getCode());
+        assertEquals(1, res3.getData().size());
+        res3.getData().forEach(r -> {
+            assertEquals(SecondStorageIndexLoadStatus.ALL, r.getPrimaryIndexStatus());
+            assertEquals(SecondStorageIndexLoadStatus.ALL, r.getSecondaryIndexStatus());
+        });
+
         secondStorageService.triggerSegmentsClean(getProject(), modelId,
                 getDataFlow(modelId).getSegments().stream().map(NDataSegment::getId).collect(Collectors.toSet()));
         waitAllJoEnd();
diff --git a/src/second-storage/clickhouse/src/main/java/io/kyligence/kap/clickhouse/metadata/ClickHouseMetadataOperator.java b/src/second-storage/clickhouse/src/main/java/io/kyligence/kap/clickhouse/metadata/ClickHouseMetadataOperator.java
index 18a04037a0..6185ba5765 100644
--- a/src/second-storage/clickhouse/src/main/java/io/kyligence/kap/clickhouse/metadata/ClickHouseMetadataOperator.java
+++ b/src/second-storage/clickhouse/src/main/java/io/kyligence/kap/clickhouse/metadata/ClickHouseMetadataOperator.java
@@ -17,13 +17,28 @@
  */
 package io.kyligence.kap.clickhouse.metadata;
 
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.apache.commons.lang3.exception.ExceptionUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.persistence.transaction.UnitOfWork;
+import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 import com.google.common.base.Preconditions;
+
 import io.kyligence.kap.clickhouse.job.ClickHouse;
 import io.kyligence.kap.clickhouse.job.ClickHouseTableStorageMetric;
 import io.kyligence.kap.clickhouse.parser.ExistsQueryParser;
 import io.kyligence.kap.clickhouse.parser.ShowCreateQueryParser;
-import org.apache.kylin.common.persistence.transaction.UnitOfWork;
-import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
 import io.kyligence.kap.secondstorage.SecondStorageConstants;
 import io.kyligence.kap.secondstorage.SecondStorageNodeHelper;
 import io.kyligence.kap.secondstorage.SecondStorageQueryRouteUtil;
@@ -49,19 +64,6 @@ import io.kyligence.kap.secondstorage.response.SizeInNodeResponse;
 import io.kyligence.kap.secondstorage.response.TableSyncResponse;
 import io.kyligence.kap.secondstorage.util.SecondStorageSqlUtils;
 import lombok.val;
-import org.apache.commons.lang3.exception.ExceptionUtils;
-import org.apache.kylin.common.KylinConfig;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.stream.Collectors;
 
 public class ClickHouseMetadataOperator implements MetadataOperator {
     private static final Logger logger = LoggerFactory.getLogger(ClickHouseMetadataOperator.class);
@@ -80,13 +82,10 @@ public class ClickHouseMetadataOperator implements MetadataOperator {
         KylinConfig config = KylinConfig.getInstanceFromEnv();
 
         List<NodeGroup> nodeGroups = SecondStorageUtil.listNodeGroup(config, project);
-        Set<String> nodes = nodeGroups.stream()
-                .flatMap(x -> x.getNodeNames().stream())
-                .collect(Collectors.toSet());
+        Set<String> nodes = nodeGroups.stream().flatMap(x -> x.getNodeNames().stream()).collect(Collectors.toSet());
 
         List<TableFlow> tableFlows = SecondStorageUtil.listTableFlow(config, project);
-        tableFlows = tableFlows.stream()
-                .filter(x -> x.getTableDataList() != null && x.getTableDataList().size() > 0)
+        tableFlows = tableFlows.stream().filter(x -> x.getTableDataList() != null && x.getTableDataList().size() > 0)
                 .collect(Collectors.toList());
 
         if (tableFlows.isEmpty()) {
@@ -97,9 +96,7 @@ public class ClickHouseMetadataOperator implements MetadataOperator {
         //one project one database
         String database = tableFlows.get(0).getTableDataList().get(0).getDatabase();
 
-        Set<String> tables = tableFlows.stream()
-                .flatMap(x -> x.getTableDataList().stream())
-                .map(TableData::getTable)
+        Set<String> tables = tableFlows.stream().flatMap(x -> x.getTableDataList().stream()).map(TableData::getTable)
                 .collect(Collectors.toSet());
 
         Map<String, String> tableCreateSqlMap = new HashMap<>();
@@ -112,17 +109,24 @@ public class ClickHouseMetadataOperator implements MetadataOperator {
 
             try (ClickHouse clickHouse = new ClickHouse(SecondStorageNodeHelper.resolve(node))) {
                 if (databaseCreateSql == null) {
-                    int existCode = clickHouse.query(new ExistsDatabase(database).toSql(), ExistsQueryParser.EXISTS).get(0);
+                    int existCode = clickHouse.query(new ExistsDatabase(database).toSql(), ExistsQueryParser.EXISTS)
+                            .get(0);
                     if (existCode == 1) {
-                        databaseCreateSql = clickHouse.query(new ShowCreateDatabase(database).toSql(), ShowCreateQueryParser.SHOW_CREATE).get(0);
+                        databaseCreateSql = clickHouse
+                                .query(new ShowCreateDatabase(database).toSql(), ShowCreateQueryParser.SHOW_CREATE)
+                                .get(0);
                     }
                 }
                 for (String table : tables) {
                     if (tableCreateSqlMap.get(table) == null) {
-                        int existCode = clickHouse.query(new ExistsTable(TableIdentifier.table(database, table)).toSql(), ExistsQueryParser.EXISTS).get(0);
+                        int existCode = clickHouse
+                                .query(new ExistsTable(TableIdentifier.table(database, table)).toSql(),
+                                        ExistsQueryParser.EXISTS)
+                                .get(0);
                         if (existCode == 1) {
                             tableCreateSqlMap.put(table,
-                                    SecondStorageSqlUtils.addIfNotExists(clickHouse.query(new ShowCreateTable(TableIdentifier.table(database, table)).toSql(),
+                                    SecondStorageSqlUtils.addIfNotExists(clickHouse.query(
+                                            new ShowCreateTable(TableIdentifier.table(database, table)).toSql(),
                                             ShowCreateQueryParser.SHOW_CREATE).get(0), "TABLE")
                             );
                         }
@@ -150,15 +154,15 @@ public class ClickHouseMetadataOperator implements MetadataOperator {
         return new TableSyncResponse(project, new ArrayList<>(nodes), database, new ArrayList<>(tables));
     }
 
-    private NodeGroup getNodeGroup(List<NodeGroup> nodeGroups, Set<String> existShardNodes){
+    private NodeGroup getNodeGroup(List<NodeGroup> nodeGroups, Set<String> existShardNodes) {
         Preconditions.checkArgument(!nodeGroups.isEmpty());
         val existShardNodesList = new ArrayList<>(existShardNodes);
         NodeGroup addGroup = nodeGroups.get(0);
         if (existShardNodesList.size() > 0) {
-            for (NodeGroup nodeGroup : nodeGroups){
+            for (NodeGroup nodeGroup : nodeGroups) {
                 val nodeNames = nodeGroup.getNodeNames();
                 val item = existShardNodesList.get(0);
-                if (nodeNames.contains(item)){
+                if (nodeNames.contains(item)) {
                     addGroup = nodeGroup;
                     break;
                 }
@@ -169,7 +173,8 @@ public class ClickHouseMetadataOperator implements MetadataOperator {
 
     @Override
     public SizeInNodeResponse sizeInNode() {
-        SecondStorageProjectModelSegment projectModelSegment = properties.get(new ConfigOption<>(SecondStorageConstants.PROJECT_MODEL_SEGMENT_PARAM, SecondStorageProjectModelSegment.class));
+        SecondStorageProjectModelSegment projectModelSegment = properties.get(new ConfigOption<>(
+                SecondStorageConstants.PROJECT_MODEL_SEGMENT_PARAM, SecondStorageProjectModelSegment.class));
         String project = projectModelSegment.getProject();
         Map<String, SecondStorageModelSegment> modelSegmentMap = projectModelSegment.getModelSegmentMap();
         KylinConfig config = KylinConfig.getInstanceFromEnv();
@@ -177,9 +182,7 @@ public class ClickHouseMetadataOperator implements MetadataOperator {
         List<TableFlow> tableFlows = SecondStorageUtil.listTableFlow(config, project);
 
         List<NodeGroup> nodeGroups = SecondStorageUtil.listNodeGroup(config, project);
-        Set<String> nodes = nodeGroups.stream()
-                .flatMap(x -> x.getNodeNames().stream())
-                .collect(Collectors.toSet());
+        Set<String> nodes = nodeGroups.stream().flatMap(x -> x.getNodeNames().stream()).collect(Collectors.toSet());
 
         ClickHouseTableStorageMetric storageMetric = new ClickHouseTableStorageMetric(new ArrayList<>(nodes));
         storageMetric.collect(true);
@@ -191,28 +194,28 @@ public class ClickHouseMetadataOperator implements MetadataOperator {
                         val newTablePartitions = new ArrayList<TablePartition>();
                         for (TablePartition tablePartition : tablePartitions) {
                             SecondStorageModelSegment modelSegment = modelSegmentMap.get(tableFlow.getUuid());
-                            SecondStorageSegment secondStorageSegment = modelSegment.getSegmentMap().get(tablePartition.getSegmentId());
-                            Map<String, Long> sizeInNodeMap = storageMetric.getByPartitions(tableData.getDatabase(), tableData.getTable(), secondStorageSegment.getSegmentRange(), modelSegment.getDateFormat());
+                            SecondStorageSegment secondStorageSegment = modelSegment.getSegmentMap()
+                                    .get(tablePartition.getSegmentId());
+                            Map<String, Long> sizeInNodeMap = storageMetric.getByPartitions(tableData.getDatabase(),
+                                    tableData.getTable(), secondStorageSegment.getSegmentRange(),
+                                    modelSegment.getDateFormat());
                             Set<String> existShardNodes = new HashSet<>(tablePartition.getShardNodes());
                             NodeGroup addGroup = getNodeGroup(nodeGroups, existShardNodes);
                             List<String> addShardNodes = addGroup.getNodeNames().stream()
-                                    .filter(node -> !existShardNodes.contains(node))
-                                    .collect(Collectors.toList());
+                                    .filter(node -> !existShardNodes.contains(node)).collect(Collectors.toList());
 
-                            tablePartition.getSizeInNode().entrySet().forEach(
-                                    e -> e.setValue(sizeInNodeMap.getOrDefault(e.getKey(), 0L))
-                            );
+                            tablePartition.getSizeInNode().entrySet()
+                                    .forEach(e -> e.setValue(sizeInNodeMap.getOrDefault(e.getKey(), 0L)));
 
                             List<String> shardNodes = new ArrayList<>(tablePartition.getShardNodes());
                             shardNodes.addAll(addShardNodes);
 
                             Map<String, Long> sizeInNode = new HashMap<>(tablePartition.getSizeInNode());
 
-                            sizeInNode.entrySet().forEach(
-                                    e -> e.setValue(sizeInNodeMap.getOrDefault(e.getKey(), 0L))
-                            );
+                            sizeInNode.entrySet().forEach(e -> e.setValue(sizeInNodeMap.getOrDefault(e.getKey(), 0L)));
 
-                            Map<String, List<SegmentFileStatus>> nodeFileMap = new HashMap<>(tablePartition.getNodeFileMap());
+                            Map<String, List<SegmentFileStatus>> nodeFileMap = new HashMap<>(
+                                    tablePartition.getNodeFileMap());
 
                             for (String node : addShardNodes) {
                                 sizeInNode.put(node, sizeInNodeMap.getOrDefault(node, 0L));
@@ -220,11 +223,9 @@ public class ClickHouseMetadataOperator implements MetadataOperator {
                             }
 
                             TablePartition.Builder builder = new TablePartition.Builder();
-                            builder.setId(tablePartition.getId())
-                                    .setSegmentId(tablePartition.getSegmentId())
-                                    .setShardNodes(shardNodes)
-                                    .setSizeInNode(sizeInNode)
-                                    .setNodeFileMap(nodeFileMap);
+                            builder.setId(tablePartition.getId()).setSegmentId(tablePartition.getSegmentId())
+                                    .setShardNodes(shardNodes).setSizeInNode(sizeInNode).setNodeFileMap(nodeFileMap)
+                                    .setSecondaryIndexColumns(tablePartition.getSecondaryIndexColumns());
                             newTablePartitions.add(builder.build());
                         }
                         newTablePartitions.forEach(tableData::addPartition);
diff --git a/src/second-storage/core/src/main/java/io/kyligence/kap/secondstorage/metadata/TableEntity.java b/src/second-storage/core/src/main/java/io/kyligence/kap/secondstorage/metadata/TableEntity.java
index c21feb7ebd..3bc5719b8a 100644
--- a/src/second-storage/core/src/main/java/io/kyligence/kap/secondstorage/metadata/TableEntity.java
+++ b/src/second-storage/core/src/main/java/io/kyligence/kap/secondstorage/metadata/TableEntity.java
@@ -31,11 +31,7 @@ import io.kyligence.kap.guava20.shaded.common.collect.Lists;
 import io.kyligence.kap.guava20.shaded.common.collect.Sets;
 import io.kyligence.kap.secondstorage.metadata.annotation.TableDefinition;
 
-@JsonAutoDetect(
-        fieldVisibility = JsonAutoDetect.Visibility.NONE,
-        getterVisibility = JsonAutoDetect.Visibility.NONE,
-        isGetterVisibility = JsonAutoDetect.Visibility.NONE,
-        setterVisibility = JsonAutoDetect.Visibility.NONE)
+@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
 @TableDefinition
 public class TableEntity implements Serializable, WithLayout {
 
@@ -67,13 +63,16 @@ public class TableEntity implements Serializable, WithLayout {
             table.layoutID = layoutEntity.getId();
             if (primaryIndexColumns != null) {
                 table.primaryIndexColumns = primaryIndexColumns;
+                table.primaryIndexLastModified = System.currentTimeMillis();
             }
             if (secondaryIndexColumns != null) {
                 table.secondaryIndexColumns = secondaryIndexColumns;
+                table.secondaryIndexLastModified = System.currentTimeMillis();
             }
             return table;
         }
     }
+
     public static Builder builder() {
         return new Builder();
     }


[kylin] 15/22: KYLIN-5319 Earlier Init Segment LayoutInfo In FilePruner

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 19232aba8f01b42360199c481ed08856f4e65522
Author: Jiale He <35...@users.noreply.github.com>
AuthorDate: Thu Sep 29 15:31:01 2022 +0800

    KYLIN-5319 Earlier Init Segment LayoutInfo In FilePruner
---
 .../kylin/metadata/cube/model/NDataSegment.java    |  4 +-
 .../kylin/metadata/cube/model/NDataflow.java       | 17 +++-
 .../metadata/cube/model/NDataflowManager.java      | 57 ++++++++++----
 .../kylin/metadata/cube/model/NDataflowTest.java   | 91 ++++++++++++++++++++--
 .../sql/execution/datasource/FilePruner.scala      | 21 ++---
 .../sql/execution/datasource/FilePrunerSuite.scala | 10 +--
 6 files changed, 161 insertions(+), 39 deletions(-)

diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataSegment.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataSegment.java
index 9f9c3dfec3..a3abc92b37 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataSegment.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataSegment.java
@@ -34,12 +34,12 @@ import org.apache.commons.lang.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.RandomUtil;
 import org.apache.kylin.metadata.model.ISegment;
+import org.apache.kylin.metadata.model.NDataModel;
 import org.apache.kylin.metadata.model.SegmentRange;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.model.Segments;
 import org.apache.kylin.metadata.model.TblColRef;
 import org.apache.kylin.metadata.model.TimeRange;
-import org.apache.kylin.metadata.model.NDataModel;
 import org.apache.kylin.metadata.model.util.MultiPartitionUtil;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
@@ -303,7 +303,7 @@ public class NDataSegment implements ISegment, Serializable {
         return getLayoutInfo().isAlreadyBuilt(layoutId);
     }
 
-    private LayoutInfo getLayoutInfo() {
+    public LayoutInfo getLayoutInfo() {
         if (layoutInfo == null) {
             synchronized (this) {
                 if (layoutInfo == null) {
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflow.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflow.java
index 37936ba4c0..58e3c3abb2 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflow.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflow.java
@@ -36,9 +36,13 @@ import org.apache.kylin.common.KylinConfigExt;
 import org.apache.kylin.common.persistence.MissingRootPersistentEntity;
 import org.apache.kylin.common.persistence.RootPersistentEntity;
 import org.apache.kylin.metadata.MetadataConstants;
+import org.apache.kylin.metadata.cube.optimization.FrequencyMap;
 import org.apache.kylin.metadata.model.FunctionDesc;
 import org.apache.kylin.metadata.model.IStorageAware;
 import org.apache.kylin.metadata.model.MeasureDesc;
+import org.apache.kylin.metadata.model.NDataModel;
+import org.apache.kylin.metadata.model.NDataModelManager;
+import org.apache.kylin.metadata.model.NTableMetadataManager;
 import org.apache.kylin.metadata.model.SegmentRange;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.model.Segments;
@@ -48,10 +52,6 @@ import org.apache.kylin.metadata.realization.CapabilityResult;
 import org.apache.kylin.metadata.realization.IRealization;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
 import org.apache.kylin.metadata.realization.SQLDigest;
-import org.apache.kylin.metadata.cube.optimization.FrequencyMap;
-import org.apache.kylin.metadata.model.NDataModel;
-import org.apache.kylin.metadata.model.NDataModelManager;
-import org.apache.kylin.metadata.model.NTableMetadataManager;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 import com.fasterxml.jackson.annotation.JsonIgnore;
@@ -585,4 +585,13 @@ public class NDataflow extends RootPersistentEntity implements Serializable, IRe
     public boolean hasReadySegments() {
         return isReady() && CollectionUtils.isNotEmpty(getQueryableSegments());
     }
+
+    public void initAllSegLayoutInfo() {
+        getSegments().forEach(NDataSegment::getLayoutInfo);
+    }
+
+    public void initSegLayoutInfoById(Set<String> segmentIdList) {
+        getSegments(segmentIdList).forEach(NDataSegment::getLayoutInfo);
+    }
+
 }
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflowManager.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflowManager.java
index 53eee8b63c..f22dd1838d 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflowManager.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflowManager.java
@@ -22,8 +22,8 @@ import static java.util.stream.Collectors.groupingBy;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_MERGE_CHECK_INDEX_ILLEGAL;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_MERGE_CHECK_PARTITION_ILLEGAL;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_MERGE_CONTAINS_GAPS;
-import static org.apache.kylin.metadata.realization.RealizationStatusEnum.ONLINE;
 import static org.apache.kylin.common.util.SegmentMergeStorageChecker.checkMergeSegmentThreshold;
+import static org.apache.kylin.metadata.realization.RealizationStatusEnum.ONLINE;
 
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -41,28 +41,28 @@ import java.util.stream.Collectors;
 
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.collections.MapUtils;
-import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang3.ArrayUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.KylinConfigExt;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.persistence.ResourceStore;
+import org.apache.kylin.common.persistence.transaction.UnitOfWork;
 import org.apache.kylin.metadata.cachesync.CachedCrudAssist;
+import org.apache.kylin.metadata.model.ManagementType;
+import org.apache.kylin.metadata.model.NDataModel;
+import org.apache.kylin.metadata.model.NDataModelManager;
+import org.apache.kylin.metadata.model.NTableMetadataManager;
 import org.apache.kylin.metadata.model.SegmentRange;
 import org.apache.kylin.metadata.model.SegmentStatusEnum;
 import org.apache.kylin.metadata.model.Segments;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.TimeRange;
+import org.apache.kylin.metadata.model.util.scd2.SCD2CondChecker;
+import org.apache.kylin.metadata.project.NProjectManager;
 import org.apache.kylin.metadata.realization.IRealization;
 import org.apache.kylin.metadata.realization.IRealizationProvider;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
-import org.apache.kylin.common.persistence.transaction.UnitOfWork;
-import org.apache.kylin.metadata.model.ManagementType;
-import org.apache.kylin.metadata.model.NDataModel;
-import org.apache.kylin.metadata.model.NDataModelManager;
-import org.apache.kylin.metadata.model.NTableMetadataManager;
-import org.apache.kylin.metadata.model.util.scd2.SCD2CondChecker;
-import org.apache.kylin.metadata.project.NProjectManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -258,10 +258,7 @@ public class NDataflowManager implements IRealizationProvider {
     }
 
     public NDataflow getDataflow(String id) {
-        if (StringUtils.isEmpty(id)) {
-            return null;
-        }
-        return crud.get(id);
+        return getDataflow(id, false);
     }
 
     public NDataflow getDataflowByModelAlias(String name) {
@@ -874,4 +871,38 @@ public class NDataflowManager implements IRealizationProvider {
         return offlineManually || isOfflineMultiPartitionModel || isOfflineScdModel;
     }
 
+    /**
+     * get dataflow choose whether init all Segment LayoutInfo.
+     * Segment LayoutInfo is lazy load, It can be loaded immediately if needed.
+     */
+    public NDataflow getDataflow(String id, boolean loadSegLayoutInfo) {
+        if (StringUtils.isEmpty(id)) {
+            return null;
+        }
+        NDataflow dataflow = crud.get(id);
+        if (!loadSegLayoutInfo) {
+            return dataflow;
+        }
+        dataflow.initAllSegLayoutInfo();
+        return dataflow;
+    }
+
+    /**
+     * get dataflow and init specified Segment LayoutInfo.
+     */
+    public NDataflow getDataflow(String id, Set<String> segmentIds) {
+        if (StringUtils.isEmpty(id)) {
+            return null;
+        }
+        NDataflow dataflow = getDataflow(id, false);
+        if (CollectionUtils.isEmpty(segmentIds)) {
+            return dataflow;
+        }
+        if (Objects.isNull(dataflow)) {
+            return null;
+        }
+        dataflow.initSegLayoutInfoById(segmentIds);
+        return dataflow;
+    }
+
 }
diff --git a/src/core-metadata/src/test/java/org/apache/kylin/metadata/cube/model/NDataflowTest.java b/src/core-metadata/src/test/java/org/apache/kylin/metadata/cube/model/NDataflowTest.java
index f962c863a7..983d70c94b 100644
--- a/src/core-metadata/src/test/java/org/apache/kylin/metadata/cube/model/NDataflowTest.java
+++ b/src/core-metadata/src/test/java/org/apache/kylin/metadata/cube/model/NDataflowTest.java
@@ -20,19 +20,22 @@ package org.apache.kylin.metadata.cube.model;
 
 import java.io.IOException;
 
+import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
 import org.apache.kylin.metadata.model.Segments;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
-import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import org.springframework.test.util.ReflectionTestUtils;
 
+import io.kyligence.kap.guava20.shaded.common.collect.Sets;
 import lombok.val;
 import lombok.var;
 
 public class NDataflowTest extends NLocalFileMetadataTestCase {
-    private String projectDefault = "default";
+    private final String projectDefault = "default";
+    private final String projectStreaming = "streaming_test";
 
     @Before
     public void setUp() throws Exception {
@@ -76,9 +79,8 @@ public class NDataflowTest extends NLocalFileMetadataTestCase {
         Assert.assertEquals(indexPlanConfig.base(), config.base());
         Assert.assertEquals(2, config.getExtendedOverrides().size());
 
-        indexPlanManager.updateIndexPlan("89af4ee2-2cdb-4b07-b39e-4c29856309aa", copyForWrite -> {
-            copyForWrite.getOverrideProps().put("test", "test");
-        });
+        indexPlanManager.updateIndexPlan("89af4ee2-2cdb-4b07-b39e-4c29856309aa",
+                copyForWrite -> copyForWrite.getOverrideProps().put("test", "test"));
 
         config = df.getConfig();
         Assert.assertEquals(indexPlanConfig.base(), config.base());
@@ -103,7 +105,7 @@ public class NDataflowTest extends NLocalFileMetadataTestCase {
 
     @Test
     public void testCollectPrecalculationResource_Streaming() {
-        val dsMgr = NDataflowManager.getInstance(getTestConfig(), "streaming_test");
+        val dsMgr = NDataflowManager.getInstance(getTestConfig(), projectStreaming);
         val df = dsMgr.getDataflow("4965c827-fbb4-4ea1-a744-3f341a3b030d");
         val strings = df.collectPrecalculationResource();
         Assert.assertEquals(7, strings.size());
@@ -122,4 +124,81 @@ public class NDataflowTest extends NLocalFileMetadataTestCase {
         Assert.assertTrue(
                 strings.stream().anyMatch(path -> path.startsWith("/streaming_test/kafka/DEFAULT.SSB_STREAMING.json")));
     }
+
+    @Test
+    public void testGetDataflow() {
+        val dsMgr = NDataflowManager.getInstance(getTestConfig(), projectStreaming);
+        {
+            val df = dsMgr.getDataflow(null);
+            Assert.assertNull(df);
+        }
+
+        {
+            val df = dsMgr.getDataflow("4965c827-fbb4-4ea1-a744-3f341a3b030d");
+            Assert.assertNotNull(df);
+        }
+
+        {
+            val df = dsMgr.getDataflow("4965c827-fbb4-4ea1-a744-3f341a3b030d-AAA", Sets.newHashSet("1"));
+            Assert.assertNull(df);
+        }
+
+        {
+            val df = dsMgr.getDataflow(null, Sets.newHashSet("1"));
+            Assert.assertNull(df);
+        }
+    }
+
+    @Test
+    public void testLazyLoadSegmentDetail() {
+        val fieldName = "layoutInfo";
+        val dsMgr = NDataflowManager.getInstance(getTestConfig(), projectStreaming);
+        val df = dsMgr.getDataflow("4965c827-fbb4-4ea1-a744-3f341a3b030d");
+
+        df.getSegments().forEach(segment -> {
+            // lazy init Segment LayoutInfo, it is null
+            Object layoutInfoBefore = ReflectionTestUtils.getField(segment, fieldName);
+            Assert.assertNull(layoutInfoBefore);
+
+            // init Segment LayoutInfo, it is not null
+            segment.getLayoutInfo();
+            Object layoutInfoAfter = ReflectionTestUtils.getField(segment, fieldName);
+            Assert.assertNotNull(layoutInfoAfter);
+        });
+    }
+
+    @Test
+    public void testLoadSegmentDetail() {
+        val dsMgr = NDataflowManager.getInstance(getTestConfig(), projectStreaming);
+        // init Segment LayoutInfo right now
+        val df = dsMgr.getDataflow("4965c827-fbb4-4ea1-a744-3f341a3b030d", true);
+        df.getSegments().forEach(segment -> {
+            val layoutInfoAfter = ReflectionTestUtils.getField(segment, "layoutInfo");
+            Assert.assertNotNull(layoutInfoAfter);
+        });
+    }
+
+    @Test
+    public void testLoadSpecifiedSegmentDetail() {
+        val dataflowId = "4965c827-fbb4-4ea1-a744-3f341a3b030d";
+        val segmentId = "3e560d22-b749-48c3-9f64-d4230207f120";
+        val fieldName = "layoutInfo";
+
+        val dsMgr = NDataflowManager.getInstance(getTestConfig(), projectStreaming);
+        {
+            val df = dsMgr.getDataflow(dataflowId, Sets.newHashSet());
+            val segment = df.getSegment(segmentId);
+            val layoutInfo = ReflectionTestUtils.getField(segment, fieldName);
+            Assert.assertNull(layoutInfo);
+        }
+
+        {
+            // init Specified Segment LayoutInfo
+            val df = dsMgr.getDataflow(dataflowId, Sets.newHashSet(segmentId));
+            val segmentAfter = df.getSegment(segmentId);
+            val layoutInfo = ReflectionTestUtils.getField(segmentAfter, fieldName);
+            Assert.assertNotNull(layoutInfo);
+        }
+    }
+
 }
diff --git a/src/spark-project/spark-common/src/main/scala/org/apache/spark/sql/execution/datasource/FilePruner.scala b/src/spark-project/spark-common/src/main/scala/org/apache/spark/sql/execution/datasource/FilePruner.scala
index 9937f3628b..8e8c046410 100644
--- a/src/spark-project/spark-common/src/main/scala/org/apache/spark/sql/execution/datasource/FilePruner.scala
+++ b/src/spark-project/spark-common/src/main/scala/org/apache/spark/sql/execution/datasource/FilePruner.scala
@@ -18,6 +18,8 @@
 
 package org.apache.spark.sql.execution.datasource
 
+import io.kyligence.kap.guava20.shaded.common.collect.Sets
+
 import org.apache.hadoop.fs.{FileStatus, Path}
 import org.apache.kylin.common.exception.TargetSegmentNotFoundException
 import org.apache.kylin.common.util.{DateFormat, HadoopUtil}
@@ -39,6 +41,7 @@ import org.apache.spark.util.collection.BitSet
 import java.sql.{Date, Timestamp}
 import java.util
 import scala.collection.JavaConverters._
+import scala.collection.mutable
 
 case class SegmentDirectory(segmentID: String, partitions: List[Long], files: Seq[FileStatus])
 
@@ -79,9 +82,11 @@ class FilePruner(val session: SparkSession,
   private val dataflow: NDataflow = {
     val dataflowId = options.getOrElse("dataflowId", sys.error("dataflowId option is required"))
     val prj = options.getOrElse("project", sys.error("project option is required"))
+    val prunedSegmentIds = Sets.newHashSet(prunedSegmentDirs.map(_.segmentID).asJavaCollection)
     val dfMgr = NDataflowManager.getInstance(KylinConfig.getInstanceFromEnv, prj)
-    val dataflow = dfMgr.getDataflow(dataflowId)
-    FilePruner.checkSegmentStatus(prunedSegmentDirs, dataflow)
+    // init pruned Segment LayoutInfo immediately
+    val dataflow = dfMgr.getDataflow(dataflowId, prunedSegmentIds)
+    FilePruner.checkSegmentStatus(prunedSegmentIds, dataflow)
     dataflow
   }
 
@@ -546,14 +551,12 @@ object FilePruner {
     }
   }
 
-  def checkSegmentStatus(segDirs: Seq[SegmentDirectory], dataflow: NDataflow): Unit = {
+  def checkSegmentStatus(prunedSegmentIds: util.HashSet[String], dataflow: NDataflow): Unit = {
     // check whether each segment id corresponds to the segment in NDataflow
-    val candidateSegIds = new util.HashSet[String]
-    segDirs.foreach(seg => candidateSegIds.add(seg.segmentID))
-    val filterSegmentIds = dataflow.getSegments(candidateSegIds).asScala.map(e => e.getId).toSet
-    if(candidateSegIds.size != filterSegmentIds.size) {
-      val missSegId = new StringBuilder
-      candidateSegIds.asScala.foreach(e => {
+    val filterSegmentIds = dataflow.getSegments(prunedSegmentIds).asScala.map(e => e.getId).toSet
+    if (prunedSegmentIds.size != filterSegmentIds.size) {
+      val missSegId = new mutable.StringBuilder
+      prunedSegmentIds.asScala.foreach(e => {
         if (!filterSegmentIds.contains(e)) {
           missSegId.append(e).append(";")
         }
diff --git a/src/spark-project/spark-common/src/test/scala/org/apache/spark/sql/execution/datasource/FilePrunerSuite.scala b/src/spark-project/spark-common/src/test/scala/org/apache/spark/sql/execution/datasource/FilePrunerSuite.scala
index 1bc6191e65..240bd85b86 100644
--- a/src/spark-project/spark-common/src/test/scala/org/apache/spark/sql/execution/datasource/FilePrunerSuite.scala
+++ b/src/spark-project/spark-common/src/test/scala/org/apache/spark/sql/execution/datasource/FilePrunerSuite.scala
@@ -18,11 +18,14 @@
 
 package org.apache.spark.sql.execution.datasource
 
+import io.kyligence.kap.guava20.shaded.common.collect.Sets
 import org.apache.kylin.common.exception.TargetSegmentNotFoundException
 import org.apache.kylin.metadata.cube.model.{NDataSegment, NDataflow}
 import org.apache.kylin.metadata.model.{SegmentStatusEnum, Segments}
 import org.apache.spark.sql.common.SparderBaseFunSuite
 
+import scala.collection.JavaConverters._
+
 class FilePrunerSuite extends SparderBaseFunSuite {
 
   test("KE-37730: test check segment status") {
@@ -37,12 +40,9 @@ class FilePrunerSuite extends SparderBaseFunSuite {
     val segDir1 = SegmentDirectory("1", List.empty[Long], null)
     val segDir2 = SegmentDirectory("2", List.empty[Long], null)
 
-    val segDirSeq1 = Seq(segDir1)
-    FilePruner.checkSegmentStatus(segDirSeq1, mockDataFlow)
-
-    val segDirSeq2 = Seq(segDir1, segDir2)
+    FilePruner.checkSegmentStatus(Sets.newHashSet(Seq(segDir1).map(_.segmentID).asJavaCollection), mockDataFlow)
     val catchEx = intercept[TargetSegmentNotFoundException] {
-      FilePruner.checkSegmentStatus(segDirSeq2, mockDataFlow)
+      FilePruner.checkSegmentStatus(Sets.newHashSet(Seq(segDir1, segDir2).map(_.segmentID).asJavaCollection), mockDataFlow)
     }
     assert(catchEx.getMessage.equals("Cannot find target segment, and missing segment id: 2;"))
   }


[kylin] 03/22: KYLIN-5312 Add verification to the parameters about update password

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 4989854945e1ba3dfe3b49e51a5c921c71227e31
Author: KmCherry0 <86...@users.noreply.github.com>
AuthorDate: Thu Oct 13 15:18:44 2022 +0800

    KYLIN-5312 Add verification to the parameters about update password
---
 .../org/apache/kylin/rest/controller/NUserController.java   | 13 +++++--------
 .../apache/kylin/rest/controller/NUserControllerTest.java   |  6 +++---
 2 files changed, 8 insertions(+), 11 deletions(-)

diff --git a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NUserController.java b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NUserController.java
index baf08f0375..5321112578 100644
--- a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NUserController.java
+++ b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NUserController.java
@@ -54,6 +54,7 @@ import org.apache.kylin.common.persistence.transaction.AclTCRRevokeEventNotifier
 import org.apache.kylin.common.scheduler.EventBusFactory;
 import org.apache.kylin.common.util.RandomUtil;
 import org.apache.kylin.metadata.MetadataConstants;
+import org.apache.kylin.metadata.user.ManagedUser;
 import org.apache.kylin.rest.config.initialize.AfterMetadataReadyEvent;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.exception.UnauthorizedException;
@@ -101,7 +102,6 @@ import org.springframework.web.bind.annotation.ResponseBody;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 
-import org.apache.kylin.metadata.user.ManagedUser;
 import io.swagger.annotations.ApiOperation;
 import lombok.SneakyThrows;
 import lombok.val;
@@ -423,21 +423,15 @@ public class NUserController extends NBasicController implements ApplicationList
             throw new KylinException(PERMISSION_DENIED, msg.getPermissionDenied());
         }
         accessService.checkDefaultAdmin(username, true);
-        val oldPassword = pwdBase64Decode(StringUtils.isEmpty(user.getPassword()) ? StringUtils.EMPTY : user.getPassword());
-        val newPassword = pwdBase64Decode(user.getNewPassword());
 
         checkUsername(username);
 
-        checkPasswordLength(newPassword);
-
-        checkPasswordCharacter(newPassword);
-
         ManagedUser existingUser = getManagedUser(username);
         if (existingUser == null) {
             throw new KylinException(USER_NOT_EXIST, String.format(Locale.ROOT, msg.getUserNotFound(), username));
         }
         val actualOldPassword = existingUser.getPassword();
-
+        val oldPassword = pwdBase64Decode(StringUtils.isEmpty(user.getPassword()) ? StringUtils.EMPTY : user.getPassword());
         // when reset oneself's password (includes ADMIN users), check old password
         if (StringUtils.equals(getPrincipal(), username)) {
             checkRequiredArg("password", user.getPassword());
@@ -447,6 +441,9 @@ public class NUserController extends NBasicController implements ApplicationList
         }
 
         checkRequiredArg("new_password", user.getNewPassword());
+        val newPassword = pwdBase64Decode(StringUtils.isEmpty(user.getNewPassword()) ? StringUtils.EMPTY : user.getNewPassword());
+        checkPasswordLength(newPassword);
+        checkPasswordCharacter(newPassword);
 
         if (newPassword.equals(oldPassword)) {
             throw new KylinException(FAILED_UPDATE_PASSWORD, msg.getNewPasswordSameAsOld());
diff --git a/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/NUserControllerTest.java b/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/NUserControllerTest.java
index 06fa4416f7..74a4f4b467 100644
--- a/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/NUserControllerTest.java
+++ b/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/NUserControllerTest.java
@@ -42,6 +42,7 @@ import org.apache.kylin.common.msg.Message;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
 import org.apache.kylin.junit.rule.ClearKEPropertiesRule;
+import org.apache.kylin.metadata.user.ManagedUser;
 import org.apache.kylin.rest.request.PasswordChangeRequest;
 import org.apache.kylin.rest.request.UserRequest;
 import org.apache.kylin.rest.response.EnvelopeResponse;
@@ -79,7 +80,6 @@ import org.springframework.web.accept.ContentNegotiationManager;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
-import org.apache.kylin.metadata.user.ManagedUser;
 import lombok.val;
 
 public class NUserControllerTest extends NLocalFileMetadataTestCase {
@@ -408,7 +408,7 @@ public class NUserControllerTest extends NLocalFileMetadataTestCase {
 
     @Test
     public void testUpdatePassword_InvalidPasswordPattern() throws Exception {
-        val user = new ManagedUser();
+        val user = new ManagedUser("ADMIN", pwdEncoder.encode("KYLIN"), false);
         val request = new PasswordChangeRequest();
 
         request.setUsername("ADMIN");
@@ -427,7 +427,7 @@ public class NUserControllerTest extends NLocalFileMetadataTestCase {
 
     @Test
     public void testUpdatePassword_InvalidPasswordLength() throws Exception {
-        val user = new ManagedUser();
+        val user = new ManagedUser("ADMIN", pwdEncoder.encode("KYLIN"), false);
         val request = new PasswordChangeRequest();
 
         request.setUsername("ADMIN");


[kylin] 12/22: fix wrong skipping index status when deleting a node

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit b55dcac51b4102fd266f183d4bfb4b03c177602a
Author: Shuai li <lo...@live.cn>
AuthorDate: Fri Oct 14 14:00:25 2022 +0800

    fix wrong skipping index status when deleting a node
---
 .../src/main/java/io/kyligence/kap/secondstorage/metadata/TableData.java | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/second-storage/core/src/main/java/io/kyligence/kap/secondstorage/metadata/TableData.java b/src/second-storage/core/src/main/java/io/kyligence/kap/secondstorage/metadata/TableData.java
index 9c9c96d83a..e6c58e7f79 100644
--- a/src/second-storage/core/src/main/java/io/kyligence/kap/secondstorage/metadata/TableData.java
+++ b/src/second-storage/core/src/main/java/io/kyligence/kap/secondstorage/metadata/TableData.java
@@ -281,6 +281,7 @@ public class TableData implements Serializable, WithLayout {
                     .setShardNodes(shardNodes)
                     .setSizeInNode(sizeInNode)
                     .setNodeFileMap(nodeFileMap)
+                    .setSecondaryIndexColumns(partition.getSecondaryIndexColumns())
                     .build();
         }).collect(Collectors.toList());
 


[kylin] 06/22: KYLIN-5314 check name conflict before export tds file

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit f4f2b21ee8127424f13f3739b4324a7bcfcc8f53
Author: Pengfei Zhan <pe...@kyligence.io>
AuthorDate: Wed Oct 12 20:50:19 2022 +0800

    KYLIN-5314 check name conflict before export tds file
---
 .../kylin/rest/controller/NBasicController.java    |  39 +-
 .../org/apache/kylin/common/KylinConfigBase.java   |  40 +-
 .../common/exception/code/ErrorCodeServer.java     |   2 +
 .../org/apache/kylin/common/msg/CnMessage.java     |  16 -
 .../java/org/apache/kylin/common/msg/Message.java  |  14 +-
 .../resources/kylin_error_msg_conf_cn.properties   |   2 +
 .../resources/kylin_error_msg_conf_en.properties   |   2 +
 .../main/resources/kylin_errorcode_conf.properties |   2 +
 .../kylin/rest/service/ModelBuildService.java      |  34 +-
 .../rest/service/FusionModelServiceBuildTest.java  |   8 +-
 .../kylin/rest/service/ModelServiceBuildTest.java  |  68 +--
 .../rest/controller/open/OpenModelController.java  |  59 +-
 .../kylin/rest/controller/NModelController.java    |  95 +---
 .../kylin/rest/controller/NTableController.java    |  21 +-
 .../rest/controller/v2/NModelControllerV2.java     |   5 +-
 .../rest/controller/v2/NProjectControllerV2.java   |   6 +-
 .../controller/open/OpenModelControllerTest.java   |  69 +++
 .../rest/controller/NModelControllerTest.java      |  74 +--
 .../kylin/rest/service/AbstractModelService.java   | 140 +++++
 .../kylin/rest/service/FusionModelService.java     |   6 +-
 .../apache/kylin/rest/service/ModelService.java    | 407 +-------------
 .../apache/kylin/rest/service/ModelTdsService.java | 336 ++++++++++++
 .../kylin/rest/service/ModelServiceTest.java       | 479 +---------------
 .../kylin/rest/service/ModelTdsServiceTest.java    | 607 +++++++++++++++++++++
 .../org/apache/kylin/tool/bisync/BISyncTool.java   |  20 +-
 .../org/apache/kylin/tool/bisync/SyncContext.java  |   2 +
 .../apache/kylin/tool/bisync/SyncModelBuilder.java | 308 +++++------
 .../apache/kylin/tool/bisync/model/ColumnDef.java  |  80 +--
 .../apache/kylin/tool/bisync/model/SyncModel.java  |  72 +--
 .../bisync/tableau/TableauDataSourceConverter.java |   2 +-
 .../kap/secondstorage/SecondStorageLockTest.java   |   6 +-
 .../kylin/tool/bisync/SyncModelBuilderTest.java    |  78 ++-
 .../kylin/tool/bisync/SyncModelTestUtil.java       |   1 +
 33 files changed, 1597 insertions(+), 1503 deletions(-)

diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/controller/NBasicController.java b/src/common-service/src/main/java/org/apache/kylin/rest/controller/NBasicController.java
index 24bce526ed..4ffe05c013 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/controller/NBasicController.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/controller/NBasicController.java
@@ -58,7 +58,9 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Locale;
+import java.util.Map;
 import java.util.Objects;
+import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
@@ -78,28 +80,28 @@ import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.exception.ServerErrorCode;
 import org.apache.kylin.common.msg.Message;
 import org.apache.kylin.common.msg.MsgPicker;
+import org.apache.kylin.common.persistence.transaction.TransactionException;
 import org.apache.kylin.common.util.DateFormat;
 import org.apache.kylin.common.util.JsonUtil;
+import org.apache.kylin.common.util.Unsafe;
 import org.apache.kylin.job.constant.JobStatusEnum;
 import org.apache.kylin.job.dao.ExecutablePO;
 import org.apache.kylin.job.execution.JobTypeEnum;
+import org.apache.kylin.metadata.model.NDataModel;
+import org.apache.kylin.metadata.model.NDataModelManager;
+import org.apache.kylin.metadata.project.NProjectManager;
 import org.apache.kylin.metadata.project.ProjectInstance;
+import org.apache.kylin.metadata.streaming.KafkaConfigManager;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.exception.ForbiddenException;
 import org.apache.kylin.rest.exception.NotFoundException;
 import org.apache.kylin.rest.exception.UnauthorizedException;
+import org.apache.kylin.rest.request.Validation;
 import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.response.ErrorResponse;
+import org.apache.kylin.rest.service.ProjectService;
 import org.apache.kylin.rest.service.UserService;
 import org.apache.kylin.rest.util.PagingUtil;
-import org.apache.kylin.common.persistence.transaction.TransactionException;
-import org.apache.kylin.common.util.Unsafe;
-import org.apache.kylin.metadata.model.NDataModel;
-import org.apache.kylin.metadata.model.NDataModelManager;
-import org.apache.kylin.metadata.project.NProjectManager;
-import org.apache.kylin.metadata.streaming.KafkaConfigManager;
-import org.apache.kylin.rest.request.Validation;
-import org.apache.kylin.rest.service.ProjectService;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
@@ -370,8 +372,8 @@ public class NBasicController {
         return isAdmin;
     }
 
-    public HashMap<String, Object> getDataResponse(String name, List<?> result, int offset, int limit) {
-        HashMap<String, Object> data = new HashMap<>();
+    public Map<String, Object> getDataResponse(String name, List<?> result, int offset, int limit) {
+        Map<String, Object> data = new HashMap<>();
         data.put(name, PagingUtil.cutPage(result, offset, limit));
         data.put("size", result.size());
         return data;
@@ -381,6 +383,19 @@ public class NBasicController {
         return PagingUtil.cutPage(result, offset, limit);
     }
 
+    public String getHost(String serverHost, String serverName) {
+        String host = KylinConfig.getInstanceFromEnv().getModelExportHost();
+        host = Optional.ofNullable(Optional.ofNullable(host).orElse(serverHost)).orElse(serverName);
+        return host;
+    }
+
+    public int getPort(Integer serverPort, Integer requestServerPort) {
+        Integer port = KylinConfig.getInstanceFromEnv().getModelExportPort() == -1 ? null
+                : KylinConfig.getInstanceFromEnv().getModelExportPort();
+        port = Optional.ofNullable(Optional.ofNullable(port).orElse(serverPort)).orElse(requestServerPort);
+        return port;
+    }
+
     public String checkProjectName(String project) {
         if (StringUtils.isEmpty(project)) {
             throw new KylinException(EMPTY_PROJECT_NAME, MsgPicker.getMsg().getEmptyProjectName());
@@ -632,8 +647,8 @@ public class NBasicController {
         if (CollectionUtils.isEmpty(statuses)) {
             return;
         }
-        List<String> streamingJobsStatus = Arrays.asList(JobStatusEnum.STARTING.name(),
-                JobStatusEnum.RUNNING.name(), JobStatusEnum.STOPPING.name(), JobStatusEnum.ERROR.name(), JobStatusEnum.STOPPED.name());
+        List<String> streamingJobsStatus = Arrays.asList(JobStatusEnum.STARTING.name(), JobStatusEnum.RUNNING.name(),
+                JobStatusEnum.STOPPING.name(), JobStatusEnum.ERROR.name(), JobStatusEnum.STOPPED.name());
         for (String status : statuses) {
             if (!streamingJobsStatus.contains(status)) {
                 throw new KylinException(PARAMETER_INVALID_SUPPORT_LIST, "statuses",
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index d0b4fda104..acfe61c924 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -553,6 +553,7 @@ public abstract class KylinConfigBase implements Serializable {
     public boolean getSecondStorageUseLowCardinality() {
         return Boolean.parseBoolean(getOptional("kylin.second-storage.use-low-cardinality", TRUE));
     }
+
     public long getSecondStorageLowCardinalityNumber() {
         return Long.parseLong(getOptional("kylin.second-storage.low-cardinality-number", "10000"));
     }
@@ -966,8 +967,9 @@ public abstract class KylinConfigBase implements Serializable {
     public String getWritingClusterWorkingDir(String withSuffix) {
         // This step will remove the '/' symbol from the end of the writingClusterWorkingDir
         Path writingClusterPath = new Path(getWritingClusterWorkingDir());
-        if (!writingClusterPath.isAbsolute()){
-            throw new IllegalArgumentException("kylin.env.hdfs-write-working-dir must be absolute, but got " + writingClusterPath);
+        if (!writingClusterPath.isAbsolute()) {
+            throw new IllegalArgumentException(
+                    "kylin.env.hdfs-write-working-dir must be absolute, but got " + writingClusterPath);
         }
 
         // make sure path is qualified
@@ -2032,16 +2034,16 @@ public abstract class KylinConfigBase implements Serializable {
 
     public String[] getTableDetectorTransformers() {
         String value = getOptional("kylin.query.table-detect-transformers");
-        return value == null ? new String[] { POWER_BI_CONVERTER,
-                "org.apache.kylin.query.util.DefaultQueryTransformer", "org.apache.kylin.query.util.EscapeTransformer" }
+        return value == null
+                ? new String[] { POWER_BI_CONVERTER, "org.apache.kylin.query.util.DefaultQueryTransformer",
+                        "org.apache.kylin.query.util.EscapeTransformer" }
                 : getOptionalStringArray("kylin.query.table-detect-transformers", new String[0]);
     }
 
     public String[] getQueryTransformers() {
         String value = getOptional("kylin.query.transformers");
-        return value == null ? new String[] { POWER_BI_CONVERTER,
-                "org.apache.kylin.query.util.DefaultQueryTransformer", "org.apache.kylin.query.util.EscapeTransformer",
-                "org.apache.kylin.query.util.ConvertToComputedColumn",
+        return value == null ? new String[] { POWER_BI_CONVERTER, "org.apache.kylin.query.util.DefaultQueryTransformer",
+                "org.apache.kylin.query.util.EscapeTransformer", "org.apache.kylin.query.util.ConvertToComputedColumn",
                 "org.apache.kylin.query.util.KeywordDefaultDirtyHack", "org.apache.kylin.query.security.RowFilter" }
                 : getOptionalStringArray("kylin.query.transformers", new String[0]);
     }
@@ -2211,12 +2213,13 @@ public abstract class KylinConfigBase implements Serializable {
     }
 
     public String[] getPushDownConverterClassNames() {
-        return getOptionalStringArray("kylin.query.pushdown.converter-class-names", new String[] {
-                "org.apache.kylin.source.adhocquery.DoubleQuotePushDownConverter",
-                POWER_BI_CONVERTER, "org.apache.kylin.query.util.KeywordDefaultDirtyHack",
-                "org.apache.kylin.query.util.RestoreFromComputedColumn", "org.apache.kylin.query.security.RowFilter",
-                "org.apache.kylin.query.security.HackSelectStarWithColumnACL",
-                "org.apache.kylin.query.util.SparkSQLFunctionConverter" });
+        return getOptionalStringArray("kylin.query.pushdown.converter-class-names",
+                new String[] { "org.apache.kylin.source.adhocquery.DoubleQuotePushDownConverter", POWER_BI_CONVERTER,
+                        "org.apache.kylin.query.util.KeywordDefaultDirtyHack",
+                        "org.apache.kylin.query.util.RestoreFromComputedColumn",
+                        "org.apache.kylin.query.security.RowFilter",
+                        "org.apache.kylin.query.security.HackSelectStarWithColumnACL",
+                        "org.apache.kylin.query.util.SparkSQLFunctionConverter" });
     }
 
     @ThirdPartyDependencies({
@@ -2821,11 +2824,13 @@ public abstract class KylinConfigBase implements Serializable {
     }
 
     public int getQueryHistoryAccelerateMaxSize() {
-        return Integer.parseInt(this.getOptional("kylin.favorite.query-history-accelerate-max-size", ONE_HUNDRED_THOUSAND));
+        return Integer
+                .parseInt(this.getOptional("kylin.favorite.query-history-accelerate-max-size", ONE_HUNDRED_THOUSAND));
     }
 
     public int getQueryHistoryStatMetaUpdateMaxSize() {
-        return Integer.parseInt(this.getOptional("kylin.query.query-history-stat-update-max-size", ONE_HUNDRED_THOUSAND));
+        return Integer
+                .parseInt(this.getOptional("kylin.query.query-history-stat-update-max-size", ONE_HUNDRED_THOUSAND));
     }
 
     public long getQueryHistoryAccelerateInterval() {
@@ -3606,6 +3611,10 @@ public abstract class KylinConfigBase implements Serializable {
         return Boolean.parseBoolean(getOptional("kylin.model.tds-expose-model-join-key", TRUE));
     }
 
+    public boolean skipCheckTds() {
+        return Boolean.parseBoolean(getOptional("kylin.model.skip-check-tds", FALSE));
+    }
+
     public boolean isHdfsMetricsPeriodicCalculationEnabled() {
         return Boolean.parseBoolean(getOptional("kylin.metrics.hdfs-periodic-calculation-enabled", FALSE));
     }
@@ -3622,6 +3631,7 @@ public abstract class KylinConfigBase implements Serializable {
     public boolean isSkipResourceCheck() {
         return Boolean.parseBoolean(getOptional("kylin.build.resource.skip-resource-check", FALSE));
     }
+
     public int getSecondStorageSkippingIndexGranularity() {
         int granularity = Integer.parseInt(getOptional("kylin.second-storage.skipping-index.granularity", "3"));
         return granularity <= 0 ? 3 : granularity;
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/exception/code/ErrorCodeServer.java b/src/core-common/src/main/java/org/apache/kylin/common/exception/code/ErrorCodeServer.java
index 3a0ca0195d..c8abaa1142 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/exception/code/ErrorCodeServer.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/exception/code/ErrorCodeServer.java
@@ -32,6 +32,8 @@ public enum ErrorCodeServer implements ErrorCodeProducer {
     MODEL_NAME_DUPLICATE("KE-010002206"),
     SIMPLIFIED_MEASURES_MISSING_ID("KE-010002207"),
     MODEL_NOT_EXIST_SEGMENTS("KE-010002208"),
+    MODEL_TDS_EXPORT_DIM_COL_AND_MEASURE_NAME_CONFLICT("KE-010002301"),
+    MODEL_TDS_EXPORT_COLUMN_AND_MEASURE_NAME_CONFLICT("KE-010002302"),
 
     // 100252XX Cube
     CUBE_NOT_EXIST("KE-010025201"),
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/msg/CnMessage.java b/src/core-common/src/main/java/org/apache/kylin/common/msg/CnMessage.java
index 482d58fef5..3688c2db26 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/msg/CnMessage.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/msg/CnMessage.java
@@ -1700,21 +1700,6 @@ public class CnMessage extends Message {
         return "当前 Segments 所包含的索引未加载至 HDFS 存储/对象存储,请确保索引加载至 HDFS 存储后再合并。";
     }
 
-    @Override
-    public String getDuplicateModelColumnAndMeasureName() {
-        return "模型中的列名 %s 与度量名 %s 重复,无法导出 TDS。请去除重名后再重试。";
-    }
-
-    @Override
-    public String getDuplicateDimensionNameAndMeasureName() {
-        return "维度名 %s 与度量名 %s 重复,无法导出 TDS。请去除重名后再重试。";
-    }
-
-    @Override
-    public String getDuplicateDimensionColAndMeasureName() {
-        return "维度的列名 %s 与度量名 %s 重复,无法导出 TDS。请去除重名后再重试。";
-    }
-
     @Override
     public String getProfilingNotEnabled() {
         return "构建火焰图" + PARAMETER_NOT_ENABLED;
@@ -1750,7 +1735,6 @@ public class CnMessage extends Message {
         return "构建火焰图任务" + TASK_TIMEOUT;
     }
 
-
     @Override
     public String getSecondStorageIndexNotSupport() {
         return "Order by列和Skipping Index列不支持使用分区列";
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/msg/Message.java b/src/core-common/src/main/java/org/apache/kylin/common/msg/Message.java
index 84a274ca50..cad28ace11 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/msg/Message.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/msg/Message.java
@@ -39,7 +39,6 @@ public class Message {
     private static final String LACK_PROJECT = "Please fill in the project parameters.";
     private static final String NON_EXIST_PROJECT = "Project %s doesn't exist. Please confirm and try again later.";
     private static final String DUP_MODCOL_MEASURE_NAME = "There are duplicated names among model column %s and measure name %s. Cannot export a valid TDS file. Please correct the duplicated names and try again.";
-    private static final String DUP_DIM_MEASURE_NAME = "There are duplicated names among dimension name %s and measure name %s. Cannot export a valid TDS file. Please correct the duplicated names and try again.";
     private static final String DUP_DIMCOL_MEASURE_NAME = "There are duplicated names among dimension column %s and measure name %s. Cannot export a valid TDS file. Please correct the duplicated names and try again.";
     private static final String MODIFY_PERMISSION_OF_SUPER_ADMIN = "Super Admin’s permission can’t be modified.";
     private static final String ILLEGAL_AUTHORIZING_USER = "Unable to modify. Only Super Admin or System Admin with query permission can modify query permission.";
@@ -1303,6 +1302,7 @@ public class Message {
     public String getInvalidLowCardinalityDataType() {
         return SECOND_STORAGE_CARDINALITY_DATATYPE_INVALID;
     }
+
     public String getJobRestartFailed() {
         return "Tiered storage task doesn't support restart.\n";
     }
@@ -1525,18 +1525,6 @@ public class Message {
         return "The indexes included in the selected segments are not loaded to HDFS storage/object storage. Please ensure the indexes are loaded into HDFS storage and try merging again.";
     }
 
-    public String getDuplicateModelColumnAndMeasureName() {
-        return DUP_MODCOL_MEASURE_NAME;
-    }
-
-    public String getDuplicateDimensionNameAndMeasureName() {
-        return DUP_DIM_MEASURE_NAME;
-    }
-
-    public String getDuplicateDimensionColAndMeasureName() {
-        return DUP_DIMCOL_MEASURE_NAME;
-    }
-
     public String getProfilingNotEnabled() {
         return PROFILING_NOT_ENABLED;
     }
diff --git a/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties b/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties
index 27641ca5b2..327c28d18d 100644
--- a/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties
+++ b/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties
@@ -32,6 +32,8 @@ KE-010002205=无效的模型名称 “%s”。请使用字母、数字或下划
 KE-010002206=模型 “%s” 已存在。请重新命名。
 KE-010002207=修改模型时,simplified_measures 参数中的每个度量必须传入 id 值。请为以下度量传入 id 之后重试:%s。
 KE-010002208=模型上线必须存在 Segment。请重新输入。
+KE-010002301=维度的列名 %s 与度量名 %s 重复,无法导出 TDS。请去除重名后再重试。
+KE-010002302=模型中的列名 %s 与度量名 %s 重复,无法导出 TDS。请去除重名后再重试。
 
 ## 100252XX Cube
 KE-010025201=无法找到相关 Cube。
diff --git a/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties b/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties
index 3f87894a80..5a5f9ea5fb 100644
--- a/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties
+++ b/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties
@@ -32,6 +32,8 @@ KE-010002205=The model name "%s" is invalid. Please use letters, numbers and und
 KE-010002206=Model "%s" already exists. Please rename it.
 KE-010002207=When modifying model, each measure id is required in simplified_measures parameter. Please pass ids for following measures and try again: %s.
 KE-010002208=The online model must have a segment. Please re-enter.
+KE-010002301=There are duplicated names among dimension column %s and measure name %s. Cannot export a valid TDS file. Please correct the duplicated names and try again.
+KE-010002302=There are duplicated names among model column %s and measure name %s. Cannot export a valid TDS file. Please correct the duplicated names and try again.
 
 ## 100252XX Cube
 KE-010025201=Can't find the cube.
diff --git a/src/core-common/src/main/resources/kylin_errorcode_conf.properties b/src/core-common/src/main/resources/kylin_errorcode_conf.properties
index 5b506d68fb..7f07828a6a 100644
--- a/src/core-common/src/main/resources/kylin_errorcode_conf.properties
+++ b/src/core-common/src/main/resources/kylin_errorcode_conf.properties
@@ -33,6 +33,8 @@ KE-010002205
 KE-010002206
 KE-010002207
 KE-010002208
+KE-010002301
+KE-010002302
 
 ## 100252XX Cube
 KE-010025201
diff --git a/src/data-loading-service/src/main/java/org/apache/kylin/rest/service/ModelBuildService.java b/src/data-loading-service/src/main/java/org/apache/kylin/rest/service/ModelBuildService.java
index 8b08ee171d..5548fd30d5 100644
--- a/src/data-loading-service/src/main/java/org/apache/kylin/rest/service/ModelBuildService.java
+++ b/src/data-loading-service/src/main/java/org/apache/kylin/rest/service/ModelBuildService.java
@@ -48,14 +48,6 @@ import org.apache.kylin.job.exception.JobSubmissionException;
 import org.apache.kylin.job.execution.JobTypeEnum;
 import org.apache.kylin.job.manager.JobManager;
 import org.apache.kylin.job.model.JobParam;
-import org.apache.kylin.metadata.model.PartitionDesc;
-import org.apache.kylin.metadata.model.SegmentRange;
-import org.apache.kylin.metadata.model.SegmentStatusEnum;
-import org.apache.kylin.metadata.model.SegmentStatusEnumToDisplay;
-import org.apache.kylin.metadata.model.TableDesc;
-import org.apache.kylin.query.util.PushDownUtil;
-import org.apache.kylin.rest.util.AclEvaluate;
-import org.apache.kylin.source.SourceFactory;
 import org.apache.kylin.metadata.cube.model.IndexPlan;
 import org.apache.kylin.metadata.cube.model.NBatchConstants;
 import org.apache.kylin.metadata.cube.model.NDataSegment;
@@ -67,9 +59,15 @@ import org.apache.kylin.metadata.model.MultiPartitionDesc;
 import org.apache.kylin.metadata.model.NDataModel;
 import org.apache.kylin.metadata.model.NDataModelManager;
 import org.apache.kylin.metadata.model.NTableMetadataManager;
+import org.apache.kylin.metadata.model.PartitionDesc;
+import org.apache.kylin.metadata.model.SegmentRange;
+import org.apache.kylin.metadata.model.SegmentStatusEnum;
+import org.apache.kylin.metadata.model.SegmentStatusEnumToDisplay;
+import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.util.MultiPartitionUtil;
 import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
 import org.apache.kylin.metadata.sourceusage.SourceUsageManager;
+import org.apache.kylin.query.util.PushDownUtil;
 import org.apache.kylin.rest.aspect.Transaction;
 import org.apache.kylin.rest.request.PartitionsRefreshRequest;
 import org.apache.kylin.rest.request.SegmentTimeRequest;
@@ -82,6 +80,7 @@ import org.apache.kylin.rest.service.params.FullBuildSegmentParams;
 import org.apache.kylin.rest.service.params.IncrementBuildSegmentParams;
 import org.apache.kylin.rest.service.params.MergeSegmentParams;
 import org.apache.kylin.rest.service.params.RefreshSegmentParams;
+import org.apache.kylin.source.SourceFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
@@ -93,14 +92,11 @@ import lombok.val;
 import lombok.var;
 
 @Component("modelBuildService")
-public class ModelBuildService extends BasicService implements ModelBuildSupporter {
+public class ModelBuildService extends AbstractModelService implements ModelBuildSupporter {
 
     @Autowired
     private ModelService modelService;
 
-    @Autowired
-    private AclEvaluate aclEvaluate;
-
     @Autowired
     private SegmentHelper segmentHelper;
 
@@ -150,7 +146,7 @@ public class ModelBuildService extends BasicService implements ModelBuildSupport
 
     public JobInfoResponse fullBuildSegmentsManually(FullBuildSegmentParams params) {
         aclEvaluate.checkProjectOperationPermission(params.getProject());
-        modelService.checkModelPermission(params.getProject(), params.getModelId());
+        checkModelPermission(params.getProject(), params.getModelId());
         List<JobInfoResponse.JobInfo> jobIds = EnhancedUnitOfWork
                 .doInTransactionWithCheckAndRetry(() -> constructFullBuild(params), params.getProject());
         JobInfoResponse jobInfoResponse = new JobInfoResponse();
@@ -228,7 +224,7 @@ public class ModelBuildService extends BasicService implements ModelBuildSupport
         List<JobInfoResponse.JobInfo> jobIds = new ArrayList<>();
         NDataflowManager dfMgr = getManager(NDataflowManager.class, params.getProject());
         val jobManager = getManager(JobManager.class, params.getProject());
-        IndexPlan indexPlan = modelService.getIndexPlan(params.getModelId(), params.getProject());
+        IndexPlan indexPlan = getIndexPlan(params.getModelId(), params.getProject());
         NDataflow df = dfMgr.getDataflow(indexPlan.getUuid());
 
         for (String id : params.getSegmentIds()) {
@@ -263,7 +259,7 @@ public class ModelBuildService extends BasicService implements ModelBuildSupport
     public JobInfoResponse incrementBuildSegmentsManually(IncrementBuildSegmentParams params) throws Exception {
         String project = params.getProject();
         aclEvaluate.checkProjectOperationPermission(project);
-        modelService.checkModelPermission(project, params.getModelId());
+        checkModelPermission(project, params.getModelId());
         val modelManager = getManager(NDataModelManager.class, project);
         if (PartitionDesc.isEmptyPartitionDesc(params.getPartitionDesc())) {
             throw new KylinException(EMPTY_PARTITION_COLUMN, "Partition column is null.'");
@@ -441,7 +437,7 @@ public class ModelBuildService extends BasicService implements ModelBuildSupport
             List<String[]> partitionValues, boolean parallelBuild, boolean buildAllPartitions, int priority,
             String yarnQueue, Object tag) {
         aclEvaluate.checkProjectOperationPermission(project);
-        modelService.checkModelPermission(project, modelId);
+        checkModelPermission(project, modelId);
         modelService.checkSegmentsExistById(modelId, project, new String[] { segmentId });
         modelService.checkModelIsMLP(modelId, project);
         val dfm = getManager(NDataflowManager.class, project);
@@ -527,7 +523,7 @@ public class ModelBuildService extends BasicService implements ModelBuildSupport
         val segment = df.getSegment(param.getSegmentId());
         var partitions = param.getPartitionIds();
         aclEvaluate.checkProjectOperationPermission(project);
-        modelService.checkModelPermission(project, modelId);
+        checkModelPermission(project, modelId);
 
         if (CollectionUtils.isEmpty(param.getPartitionIds())) {
             partitions = modelService.getModelById(modelId, project).getMultiPartitionDesc()
@@ -561,7 +557,7 @@ public class ModelBuildService extends BasicService implements ModelBuildSupport
 
         val dfManager = getManager(NDataflowManager.class, project);
         val jobManager = getManager(JobManager.class, project);
-        val indexPlan = modelService.getIndexPlan(modelId, project);
+        val indexPlan = getIndexPlan(modelId, project);
         val df = dfManager.getDataflow(indexPlan.getUuid());
 
         NDataSegment mergeSeg = NDataflowManager.getInstance(KylinConfig.getInstanceFromEnv(), project).mergeSegments(
@@ -588,7 +584,7 @@ public class ModelBuildService extends BasicService implements ModelBuildSupport
             List<Long> indexIds, boolean parallelBuildBySegment, int priority, boolean partialBuild, String yarnQueue,
             Object tag) {
         aclEvaluate.checkProjectOperationPermission(project);
-        modelService.checkModelPermission(project, modelId);
+        checkModelPermission(project, modelId);
         val dfManger = getManager(NDataflowManager.class, project);
         NDataflow dataflow = dfManger.getDataflow(modelId);
         modelService.checkSegmentsExistById(modelId, project, segmentIds.toArray(new String[0]));
diff --git a/src/data-loading-service/src/test/java/org/apache/kylin/rest/service/FusionModelServiceBuildTest.java b/src/data-loading-service/src/test/java/org/apache/kylin/rest/service/FusionModelServiceBuildTest.java
index 9b9e5f814b..cad5a66e73 100644
--- a/src/data-loading-service/src/test/java/org/apache/kylin/rest/service/FusionModelServiceBuildTest.java
+++ b/src/data-loading-service/src/test/java/org/apache/kylin/rest/service/FusionModelServiceBuildTest.java
@@ -22,17 +22,17 @@ import java.util.Arrays;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.exception.KylinException;
-import org.apache.kylin.rest.constant.Constant;
-import org.apache.kylin.rest.util.AclEvaluate;
-import org.apache.kylin.rest.util.AclUtil;
 import org.apache.kylin.common.scheduler.EventBusFactory;
 import org.apache.kylin.metadata.cube.model.NDataflowManager;
 import org.apache.kylin.metadata.model.ManagementType;
 import org.apache.kylin.metadata.model.NDataModel;
 import org.apache.kylin.metadata.model.NDataModelManager;
 import org.apache.kylin.rest.config.initialize.ModelUpdateListener;
+import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.request.IndexesToSegmentsRequest;
 import org.apache.kylin.rest.service.params.IncrementBuildSegmentParams;
+import org.apache.kylin.rest.util.AclEvaluate;
+import org.apache.kylin.rest.util.AclUtil;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -98,7 +98,9 @@ public class FusionModelServiceBuildTest extends SourceTestCase {
         ReflectionTestUtils.setField(modelService, "aclEvaluate", aclEvaluate);
         ReflectionTestUtils.setField(modelBuildService, "aclEvaluate", aclEvaluate);
         ReflectionTestUtils.setField(modelBuildService, "modelService", modelService);
+        ReflectionTestUtils.setField(modelBuildService, "userGroupService", userGroupService);
         ReflectionTestUtils.setField(fusionModelService, "modelBuildService", modelBuildService);
+        ReflectionTestUtils.setField(fusionModelService, "userGroupService", userGroupService);
         ReflectionTestUtils.setField(modelService, "userGroupService", userGroupService);
         ReflectionTestUtils.setField(semanticService, "userGroupService", userGroupService);
         ReflectionTestUtils.setField(indexPlanService, "aclEvaluate", aclEvaluate);
diff --git a/src/data-loading-service/src/test/java/org/apache/kylin/rest/service/ModelServiceBuildTest.java b/src/data-loading-service/src/test/java/org/apache/kylin/rest/service/ModelServiceBuildTest.java
index 03dad20631..3dad56193f 100644
--- a/src/data-loading-service/src/test/java/org/apache/kylin/rest/service/ModelServiceBuildTest.java
+++ b/src/data-loading-service/src/test/java/org/apache/kylin/rest/service/ModelServiceBuildTest.java
@@ -18,14 +18,31 @@
 
 package org.apache.kylin.rest.service;
 
-import com.google.common.collect.Lists;
-import com.google.common.collect.Sets;
-import org.apache.kylin.engine.spark.job.ExecutableAddCuboidHandler;
-import org.apache.kylin.engine.spark.job.ExecutableAddSegmentHandler;
-import org.apache.kylin.engine.spark.job.ExecutableMergeOrRefreshHandler;
-import org.apache.kylin.engine.spark.job.NSparkCubingJob;
-import lombok.val;
-import lombok.var;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.JOB_CONCURRENT_SUBMIT_LIMIT;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.JOB_CREATE_CHECK_FAIL;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.JOB_CREATE_CHECK_MULTI_PARTITION_ABANDON;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.JOB_CREATE_CHECK_MULTI_PARTITION_DUPLICATE;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_LOCKED;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_MERGE_CHECK_INDEX_ILLEGAL;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_MERGE_CHECK_PARTITION_ILLEGAL;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_NOT_EXIST_ID;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_REFRESH_SELECT_RANGE_EMPTY;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_STATUS;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Locale;
+import java.util.Set;
+import java.util.TimeZone;
+import java.util.stream.Collectors;
+
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.exception.KylinException;
@@ -35,6 +52,10 @@ import org.apache.kylin.common.persistence.transaction.UnitOfWork;
 import org.apache.kylin.common.scheduler.EventBusFactory;
 import org.apache.kylin.common.util.DateFormat;
 import org.apache.kylin.common.util.JsonUtil;
+import org.apache.kylin.engine.spark.job.ExecutableAddCuboidHandler;
+import org.apache.kylin.engine.spark.job.ExecutableAddSegmentHandler;
+import org.apache.kylin.engine.spark.job.ExecutableMergeOrRefreshHandler;
+import org.apache.kylin.engine.spark.job.NSparkCubingJob;
 import org.apache.kylin.engine.spark.utils.ComputedColumnEvalUtil;
 import org.apache.kylin.job.dao.ExecutablePO;
 import org.apache.kylin.job.execution.AbstractExecutable;
@@ -103,30 +124,11 @@ import org.mockito.Mock;
 import org.mockito.Mockito;
 import org.springframework.test.util.ReflectionTestUtils;
 
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Locale;
-import java.util.Set;
-import java.util.TimeZone;
-import java.util.stream.Collectors;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
 
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.JOB_CONCURRENT_SUBMIT_LIMIT;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.JOB_CREATE_CHECK_FAIL;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.JOB_CREATE_CHECK_MULTI_PARTITION_ABANDON;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.JOB_CREATE_CHECK_MULTI_PARTITION_DUPLICATE;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_LOCKED;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_MERGE_CHECK_INDEX_ILLEGAL;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_MERGE_CHECK_PARTITION_ILLEGAL;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_NOT_EXIST_ID;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_REFRESH_SELECT_RANGE_EMPTY;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_STATUS;
-import static org.mockito.Mockito.doReturn;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
+import lombok.val;
+import lombok.var;
 
 public class ModelServiceBuildTest extends SourceTestCase {
     @InjectMocks
@@ -147,9 +149,6 @@ public class ModelServiceBuildTest extends SourceTestCase {
     @InjectMocks
     private final TableService tableService = Mockito.spy(new TableService());
 
-    @InjectMocks
-    private final TableExtService tableExtService = Mockito.spy(new TableExtService());
-
     @InjectMocks
     private final IndexPlanService indexPlanService = Mockito.spy(new IndexPlanService());
 
@@ -196,6 +195,7 @@ public class ModelServiceBuildTest extends SourceTestCase {
         ReflectionTestUtils.setField(modelService, "accessService", accessService);
         ReflectionTestUtils.setField(modelService, "userGroupService", userGroupService);
         ReflectionTestUtils.setField(semanticService, "userGroupService", userGroupService);
+        ReflectionTestUtils.setField(modelBuildService, "userGroupService", userGroupService);
         ReflectionTestUtils.setField(semanticService, "expandableMeasureUtil",
                 new ExpandableMeasureUtil((model, ccDesc) -> {
                     String ccExpression = KapQueryUtil.massageComputedColumn(model, model.getProject(), ccDesc,
diff --git a/src/metadata-server/src/main/java/io/kyligence/kap/rest/controller/open/OpenModelController.java b/src/metadata-server/src/main/java/io/kyligence/kap/rest/controller/open/OpenModelController.java
index 8f2b405c4e..d9642ade0c 100644
--- a/src/metadata-server/src/main/java/io/kyligence/kap/rest/controller/open/OpenModelController.java
+++ b/src/metadata-server/src/main/java/io/kyligence/kap/rest/controller/open/OpenModelController.java
@@ -28,6 +28,7 @@ import static org.apache.kylin.common.exception.code.ErrorCodeServer.PROJECT_MUL
 
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
 import java.util.Set;
@@ -70,8 +71,12 @@ import org.apache.kylin.rest.response.OpenGetIndexResponse.IndexDetail;
 import org.apache.kylin.rest.service.FusionIndexService;
 import org.apache.kylin.rest.service.FusionModelService;
 import org.apache.kylin.rest.service.ModelService;
+import org.apache.kylin.rest.service.ModelTdsService;
+import org.apache.kylin.rest.util.AclPermissionUtil;
 import org.apache.kylin.tool.bisync.SyncContext;
+import org.apache.kylin.tool.bisync.model.SyncModel;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Controller;
 import org.springframework.web.bind.annotation.DeleteMapping;
 import org.springframework.web.bind.annotation.GetMapping;
@@ -84,6 +89,7 @@ import org.springframework.web.bind.annotation.RequestParam;
 import org.springframework.web.bind.annotation.ResponseBody;
 
 import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Lists;
 
@@ -108,6 +114,10 @@ public class OpenModelController extends NBasicController {
     @Autowired
     private NModelController modelController;
 
+    @Autowired
+    @Qualifier("modelTdsService")
+    private ModelTdsService tdsService;
+
     @Autowired
     private FusionIndexService fusionIndexService;
 
@@ -150,8 +160,8 @@ public class OpenModelController extends NBasicController {
             @RequestParam(value = "only_normal_dim", required = false, defaultValue = "true") boolean onlyNormalDim) {
         String projectName = checkProjectName(project);
         return modelController.getModels(modelId, modelAlias, exactMatch, projectName, owner, status, table, offset,
-                limit, sortBy, reverse, modelAliasOrOwner, Arrays.asList(ModelAttributeEnum.BATCH), lastModifyFrom,
-                lastModifyTo, onlyNormalDim);
+                limit, sortBy, reverse, modelAliasOrOwner, Collections.singletonList(ModelAttributeEnum.BATCH),
+                lastModifyFrom, lastModifyTo, onlyNormalDim);
     }
 
     @ApiOperation(value = "getIndexes", tags = { "AI" })
@@ -354,28 +364,24 @@ public class OpenModelController extends NBasicController {
         return modelController.updatePartitionSemantic(modelId, param);
     }
 
-    @ApiOperation(value = "validate tds export", tags = { "QE" })
-    @GetMapping(value = "/validate_export")
-    @ResponseBody
-    public EnvelopeResponse<Boolean> validateExport(@RequestParam(value = "model_name") String modelAlias,
-            @RequestParam(value = "project") String project) {
-        String projectName = checkProjectName(project);
-        String modelId = getModel(modelAlias, projectName).getId();
-        return modelController.validateExport(modelId, projectName);
-    }
-
     @ApiOperation(value = "export model", tags = { "QE" }, notes = "Add URL: {model}")
     @GetMapping(value = "/{model_name:.+}/export")
     @ResponseBody
     public void exportModel(@PathVariable("model_name") String modelAlias,
             @RequestParam(value = "project") String project, @RequestParam(value = "export_as") SyncContext.BI exportAs,
             @RequestParam(value = "element", required = false, defaultValue = "AGG_INDEX_COL") SyncContext.ModelElement element,
-            @RequestParam(value = "server_host", required = false) String host,
-            @RequestParam(value = "server_port", required = false) Integer port, HttpServletRequest request,
+            @RequestParam(value = "server_host", required = false) String serverHost,
+            @RequestParam(value = "server_port", required = false) Integer serverPort, HttpServletRequest request,
             HttpServletResponse response) throws IOException {
         String projectName = checkProjectName(project);
         String modelId = getModel(modelAlias, projectName).getId();
-        modelController.exportModel(modelId, projectName, exportAs, element, host, port, request, response);
+        String host = getHost(serverHost, request.getServerName());
+        int port = getPort(serverPort, request.getServerPort());
+
+        SyncContext syncContext = tdsService.prepareSyncContext(projectName, modelId, exportAs, element, host, port);
+        SyncModel syncModel = tdsService.exportModel(syncContext);
+        tdsService.preCheckNameConflict(syncModel);
+        tdsService.dumpSyncModel(syncContext, syncModel, response);
     }
 
     @ApiOperation(value = "bi export", tags = { "QE" })
@@ -384,15 +390,30 @@ public class OpenModelController extends NBasicController {
     public void biExport(@RequestParam("model_name") String modelAlias, @RequestParam(value = "project") String project,
             @RequestParam(value = "export_as") SyncContext.BI exportAs,
             @RequestParam(value = "element", required = false, defaultValue = "AGG_INDEX_COL") SyncContext.ModelElement element,
-            @RequestParam(value = "server_host", required = false) String host,
-            @RequestParam(value = "server_port", required = false) Integer port,
+            @RequestParam(value = "server_host", required = false) String serverHost,
+            @RequestParam(value = "server_port", required = false) Integer serverPort,
             @RequestParam(value = "dimensions", required = false) List<String> dimensions,
             @RequestParam(value = "measures", required = false) List<String> measures, HttpServletRequest request,
             HttpServletResponse response) throws IOException {
         String projectName = checkProjectName(project);
         String modelId = getModel(modelAlias, projectName).getId();
-        modelController.biExport(modelId, projectName, exportAs, element, host, port, dimensions, measures, request,
-                response);
+        String host = getHost(serverHost, request.getServerName());
+        int port = getPort(serverPort, request.getServerPort());
+        if (dimensions == null) {
+            // no need filter of given dimensions
+            dimensions = ImmutableList.of();
+        }
+        if (measures == null) {
+            // no need filter of given measures
+            measures = ImmutableList.of();
+        }
+
+        SyncContext syncContext = tdsService.prepareSyncContext(projectName, modelId, exportAs, element, host, port);
+        SyncModel syncModel = AclPermissionUtil.isAdmin()
+                ? tdsService.exportTDSDimensionsAndMeasuresByAdmin(syncContext, dimensions, measures)
+                : tdsService.exportTDSDimensionsAndMeasuresByNormalUser(syncContext, dimensions, measures);
+        tdsService.preCheckNameConflict(syncModel);
+        tdsService.dumpSyncModel(syncContext, syncModel, response);
     }
 
     @ApiOperation(value = "updateModelName", tags = { "AI" })
diff --git a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NModelController.java b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NModelController.java
index 8b3ffd04b1..6efa43ec6e 100644
--- a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NModelController.java
+++ b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NModelController.java
@@ -30,18 +30,14 @@ import java.util.Collections;
 import java.util.Date;
 import java.util.List;
 import java.util.Locale;
-import java.util.Optional;
 
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.lang.exception.ExceptionUtils;
-import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.exception.CommonErrorCode;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.metadata.model.NDataModel;
-import org.apache.kylin.metadata.model.NDataModelManager;
 import org.apache.kylin.metadata.model.PartitionDesc;
 import org.apache.kylin.metadata.model.exception.LookupTableException;
 import org.apache.kylin.rest.constant.ModelAttributeEnum;
@@ -73,13 +69,14 @@ import org.apache.kylin.rest.response.ModelConfigResponse;
 import org.apache.kylin.rest.response.ModelSaveCheckResponse;
 import org.apache.kylin.rest.response.MultiPartitionValueResponse;
 import org.apache.kylin.rest.response.PurgeModelAffectedResponse;
+import org.apache.kylin.rest.service.AbstractModelService;
 import org.apache.kylin.rest.service.FusionIndexService;
 import org.apache.kylin.rest.service.FusionModelService;
 import org.apache.kylin.rest.service.IndexPlanService;
 import org.apache.kylin.rest.service.ModelService;
-import org.apache.kylin.rest.util.AclPermissionUtil;
-import org.apache.kylin.tool.bisync.BISyncModel;
+import org.apache.kylin.rest.service.ModelTdsService;
 import org.apache.kylin.tool.bisync.SyncContext;
+import org.apache.kylin.tool.bisync.model.SyncModel;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.stereotype.Controller;
@@ -112,6 +109,10 @@ public class NModelController extends NBasicController {
     @Qualifier("modelService")
     private ModelService modelService;
 
+    @Autowired
+    @Qualifier("modelTdsService")
+    private ModelTdsService tdsService;
+
     @Autowired
     private FusionModelService fusionModelService;
 
@@ -484,7 +485,7 @@ public class NModelController extends NBasicController {
         checkRequiredArg(MODEL_ID, modelId);
         String newAlias = modelRenameRequest.getNewModelName();
         String description = modelRenameRequest.getDescription();
-        if (!StringUtils.containsOnly(newAlias, ModelService.VALID_NAME_FOR_MODEL)) {
+        if (!StringUtils.containsOnly(newAlias, AbstractModelService.VALID_NAME_FOR_MODEL)) {
             throw new KylinException(MODEL_NAME_INVALID, newAlias);
         }
 
@@ -545,7 +546,7 @@ public class NModelController extends NBasicController {
         String newModelName = request.getNewModelName();
         checkRequiredArg(MODEL_ID, modelId);
         checkRequiredArg(NEW_MODEL_NAME, newModelName);
-        if (!StringUtils.containsOnly(newModelName, ModelService.VALID_NAME_FOR_MODEL)) {
+        if (!StringUtils.containsOnly(newModelName, AbstractModelService.VALID_NAME_FOR_MODEL)) {
             throw new KylinException(MODEL_NAME_INVALID, newModelName);
         }
         modelService.cloneModel(modelId, request.getNewModelName(), request.getProject());
@@ -636,9 +637,12 @@ public class NModelController extends NBasicController {
     @GetMapping(value = "/validate_export")
     @ResponseBody
     public EnvelopeResponse<Boolean> validateExport(@RequestParam(value = "model") String modelId,
-            @RequestParam(value = "project") String project) {
+            @RequestParam(value = "project") String project,
+            @RequestParam(value = "element", required = false, defaultValue = "AGG_INDEX_COL") SyncContext.ModelElement element) {
         String projectName = checkProjectName(project);
-        Boolean result = modelService.validateExport(projectName, modelId);
+        SyncContext virtualContext = tdsService.prepareSyncContext(projectName, modelId, null, element, "", -1);
+        SyncModel syncModel = tdsService.exportModel(virtualContext);
+        Boolean result = tdsService.preCheckNameConflict(syncModel);
         return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, result, "");
     }
 
@@ -652,75 +656,12 @@ public class NModelController extends NBasicController {
             @RequestParam(value = "server_port", required = false) Integer serverPort, HttpServletRequest request,
             HttpServletResponse response) throws IOException {
         String projectName = checkProjectName(project);
-
         String host = getHost(serverHost, request.getServerName());
-        Integer port = getPort(serverPort, request.getServerPort());
-
-        modelService.validateExport(projectName, modelId);
-        BISyncModel syncModel = modelService.exportModel(projectName, modelId, exportAs, element, host, port);
-
-        dumpSyncModel(modelId, exportAs, projectName, syncModel, response);
-    }
-
-    @ApiOperation(value = "biExport", tags = { "QE" })
-    @GetMapping(value = "/bi_export")
-    @ResponseBody
-    public void biExport(@RequestParam("model") String modelId, @RequestParam(value = "project") String project,
-            @RequestParam(value = "export_as") SyncContext.BI exportAs,
-            @RequestParam(value = "element", required = false, defaultValue = "AGG_INDEX_COL") SyncContext.ModelElement element,
-            @RequestParam(value = "server_host", required = false) String serverHost,
-            @RequestParam(value = "server_port", required = false) Integer serverPort,
-            @RequestParam(value = "dimensions", required = false) List<String> dimensions,
-            @RequestParam(value = "measures", required = false) List<String> measures, HttpServletRequest request,
-            HttpServletResponse response) throws IOException {
-        String projectName = checkProjectName(project);
-
-        String host = getHost(serverHost, request.getServerName());
-        Integer port = getPort(serverPort, request.getServerPort());
-
-        modelService.validateExport(projectName, modelId);
-        SyncContext syncContext = modelService.getSyncContext(projectName, modelId, exportAs, element, host, port);
-
-        BISyncModel syncModel = AclPermissionUtil.isAdmin()
-                ? modelService.exportTDSDimensionsAndMeasuresByAdmin(syncContext, dimensions, measures)
-                : modelService.exportTDSDimensionsAndMeasuresByNormalUser(syncContext, dimensions, measures);
-
-        dumpSyncModel(modelId, exportAs, projectName, syncModel, response);
-    }
-
-    private void dumpSyncModel(String modelId, SyncContext.BI exportAs, String projectName, BISyncModel syncModel,
-            HttpServletResponse response) throws IOException {
-        NDataModelManager manager = NDataModelManager.getInstance(KylinConfig.getInstanceFromEnv(), projectName);
-        NDataModel dataModel = manager.getDataModelDesc(modelId);
-        String alias = dataModel.getAlias();
-        String fileName = String.format(Locale.ROOT, "%s_%s_%s", projectName, alias,
-                new SimpleDateFormat("yyyyMMddHHmmss", Locale.getDefault(Locale.Category.FORMAT)).format(new Date()));
-        switch (exportAs) {
-        case TABLEAU_CONNECTOR_TDS:
-        case TABLEAU_ODBC_TDS:
-            response.setContentType("application/xml");
-            response.setHeader("Content-Disposition",
-                    String.format(Locale.ROOT, "attachment; filename=\"%s.tds\"", fileName));
-            break;
-        default:
-            throw new KylinException(CommonErrorCode.UNKNOWN_ERROR_CODE, "unrecognized export target");
-        }
-        syncModel.dump(response.getOutputStream());
-        response.getOutputStream().flush();
-        response.getOutputStream().close();
-    }
-
-    private String getHost(String serverHost, String serverName) {
-        String host = KylinConfig.getInstanceFromEnv().getModelExportHost();
-        host = Optional.ofNullable(Optional.ofNullable(host).orElse(serverHost)).orElse(serverName);
-        return host;
-    }
+        int port = getPort(serverPort, request.getServerPort());
 
-    private Integer getPort(Integer serverPort, Integer requestServerPort) {
-        Integer port = KylinConfig.getInstanceFromEnv().getModelExportPort() == -1 ? null
-                : KylinConfig.getInstanceFromEnv().getModelExportPort();
-        port = Optional.ofNullable(Optional.ofNullable(port).orElse(serverPort)).orElse(requestServerPort);
-        return port;
+        SyncContext syncContext = tdsService.prepareSyncContext(projectName, modelId, exportAs, element, host, port);
+        SyncModel syncModel = tdsService.exportModel(syncContext);
+        tdsService.dumpSyncModel(syncContext, syncModel, response);
     }
 
     @ApiOperation(value = "updateMultiPartitionMapping", tags = { "QE" }, notes = "Add URL: {model}")
diff --git a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NTableController.java b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NTableController.java
index 8b6181ef8f..5e43fc3ea3 100644
--- a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NTableController.java
+++ b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/NTableController.java
@@ -246,13 +246,12 @@ public class NTableController extends NBasicController {
         return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, loadTableResponse, "");
     }
 
-
-    @ApiOperation(value = "loadAWSTablesCompatibleCrossAccount", tags = {"KC"},
-            notes = "Update Body: data_source_type, need_sampling, sampling_rows, data_source_properties")
+    @ApiOperation(value = "loadAWSTablesCompatibleCrossAccount", tags = {
+            "KC" }, notes = "Update Body: data_source_type, need_sampling, sampling_rows, data_source_properties")
     @PostMapping(value = "/compatibility/aws")
     @ResponseBody
-    public EnvelopeResponse<LoadTableResponse> loadAWSTablesCompatibleCrossAccount(@RequestBody AWSTableLoadRequest tableLoadRequest)
-            throws Exception {
+    public EnvelopeResponse<LoadTableResponse> loadAWSTablesCompatibleCrossAccount(
+            @RequestBody AWSTableLoadRequest tableLoadRequest) throws Exception {
         checkProjectName(tableLoadRequest.getProject());
         if (NProjectManager.getInstance(KylinConfig.getInstanceFromEnv())
                 .getProject(tableLoadRequest.getProject()) == null) {
@@ -263,8 +262,8 @@ public class NTableController extends NBasicController {
         }
 
         LoadTableResponse loadTableResponse = new LoadTableResponse();
-        LoadTableResponse loadByTable = tableExtService.loadAWSTablesCompatibleCrossAccount(tableLoadRequest.getTables(),
-                tableLoadRequest.getProject());
+        LoadTableResponse loadByTable = tableExtService
+                .loadAWSTablesCompatibleCrossAccount(tableLoadRequest.getTables(), tableLoadRequest.getProject());
         loadTableResponse.getFailed().addAll(loadByTable.getFailed());
         loadTableResponse.getLoaded().addAll(loadByTable.getLoaded());
 
@@ -277,13 +276,13 @@ public class NTableController extends NBasicController {
         return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, loadTableResponse, "");
     }
 
-    @ApiOperation(value = "updateLoadedAWSTableExtProp", tags = {"KC" }, notes = "Update Body: data_source_properties")
+    @ApiOperation(value = "updateLoadedAWSTableExtProp", tags = { "KC" }, notes = "Update Body: data_source_properties")
     @PutMapping(value = "/ext/prop/aws")
     @ResponseBody
-    public EnvelopeResponse<UpdateAWSTableExtDescResponse> updateLoadedAWSTableExtProp(@RequestBody UpdateAWSTableExtDescRequest request) {
+    public EnvelopeResponse<UpdateAWSTableExtDescResponse> updateLoadedAWSTableExtProp(
+            @RequestBody UpdateAWSTableExtDescRequest request) {
         checkProjectName(request.getProject());
-        if (NProjectManager.getInstance(KylinConfig.getInstanceFromEnv())
-                .getProject(request.getProject()) == null) {
+        if (NProjectManager.getInstance(KylinConfig.getInstanceFromEnv()).getProject(request.getProject()) == null) {
             throw new KylinException(PROJECT_NOT_EXIST, request.getProject());
         }
         if (CollectionUtils.isEmpty(request.getTables())) {
diff --git a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/v2/NModelControllerV2.java b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/v2/NModelControllerV2.java
index 5de11e8846..7552b476d5 100644
--- a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/v2/NModelControllerV2.java
+++ b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/v2/NModelControllerV2.java
@@ -20,14 +20,13 @@ package org.apache.kylin.rest.controller.v2;
 import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_V2_JSON;
 
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
 import org.apache.kylin.common.exception.KylinException;
-import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.metadata.model.NDataModel;
 import org.apache.kylin.rest.controller.NBasicController;
+import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.service.ModelService;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -65,7 +64,7 @@ public class NModelControllerV2 extends NBasicController {
                 modelService.getModels(modelAlias, project, exactMatch, null, Lists.newArrayList(), sortBy, reverse));
         models = modelService.addOldParams(project, models);
 
-        HashMap<String, Object> modelResponse = getDataResponse("models", models, offset, limit);
+        Map<String, Object> modelResponse = getDataResponse("models", models, offset, limit);
         return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, modelResponse, "");
     }
 
diff --git a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/v2/NProjectControllerV2.java b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/v2/NProjectControllerV2.java
index e9f864080d..060dc323a0 100644
--- a/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/v2/NProjectControllerV2.java
+++ b/src/metadata-server/src/main/java/org/apache/kylin/rest/controller/v2/NProjectControllerV2.java
@@ -19,13 +19,13 @@ package org.apache.kylin.rest.controller.v2;
 
 import static org.apache.kylin.common.constant.HttpConstant.HTTP_VND_APACHE_KYLIN_V2_JSON;
 
-import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.metadata.project.ProjectInstance;
-import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.controller.NBasicController;
+import org.apache.kylin.rest.response.EnvelopeResponse;
 import org.apache.kylin.rest.service.ProjectService;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
@@ -54,7 +54,7 @@ public class NProjectControllerV2 extends NBasicController {
             @RequestParam(value = "exact", required = false, defaultValue = "true") boolean exactMatch) {
 
         List<ProjectInstance> readableProjects = projectService.getReadableProjects(project, exactMatch);
-        HashMap<String, Object> projects = getDataResponse("projects", readableProjects, offset, size);
+        Map<String, Object> projects = getDataResponse("projects", readableProjects, offset, size);
         return new EnvelopeResponse<>(KylinException.CODE_SUCCESS, projects, "");
     }
 
diff --git a/src/metadata-server/src/test/java/io/kyligence/kap/rest/controller/open/OpenModelControllerTest.java b/src/metadata-server/src/test/java/io/kyligence/kap/rest/controller/open/OpenModelControllerTest.java
index 0d22ea3cef..e36ccf48de 100644
--- a/src/metadata-server/src/test/java/io/kyligence/kap/rest/controller/open/OpenModelControllerTest.java
+++ b/src/metadata-server/src/test/java/io/kyligence/kap/rest/controller/open/OpenModelControllerTest.java
@@ -37,6 +37,7 @@ import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.common.util.RandomUtil;
 import org.apache.kylin.metadata.cube.model.IndexEntity;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
 import org.apache.kylin.metadata.model.MultiPartitionDesc;
 import org.apache.kylin.metadata.model.NDataModel;
 import org.apache.kylin.metadata.model.NDataModelManager;
@@ -61,7 +62,10 @@ import org.apache.kylin.rest.response.OpenGetIndexResponse;
 import org.apache.kylin.rest.service.FusionIndexService;
 import org.apache.kylin.rest.service.FusionModelService;
 import org.apache.kylin.rest.service.ModelService;
+import org.apache.kylin.rest.service.ModelTdsService;
 import org.apache.kylin.rest.util.AclEvaluate;
+import org.apache.kylin.tool.bisync.SyncContext;
+import org.apache.kylin.tool.bisync.model.SyncModel;
 import org.hamcrest.Matchers;
 import org.junit.After;
 import org.junit.Assert;
@@ -102,6 +106,9 @@ public class OpenModelControllerTest extends NLocalFileMetadataTestCase {
     @Mock
     private FusionModelService fusionModelService;
 
+    @Mock
+    private ModelTdsService tdsService;
+
     @Mock
     private AclEvaluate aclEvaluate;
 
@@ -502,6 +509,66 @@ public class OpenModelControllerTest extends NLocalFileMetadataTestCase {
                 .andExpect(MockMvcResultMatchers.status().isOk());
     }
 
+    @Test
+    public void testBIExportByADMIN() throws Exception {
+        String project = "default";
+        String modelName = "741ca86a-1f13-46da-a59f-95fb68615e3a";
+        SyncContext syncContext = new SyncContext();
+        syncContext.setProjectName(project);
+        syncContext.setModelId(modelName);
+        syncContext.setTargetBI(SyncContext.BI.TABLEAU_CONNECTOR_TDS);
+        syncContext.setModelElement(SyncContext.ModelElement.AGG_INDEX_AND_TABLE_INDEX_COL);
+        syncContext.setHost("localhost");
+        syncContext.setPort(8080);
+        syncContext.setDataflow(NDataflowManager.getInstance(getTestConfig(), project).getDataflow(modelName));
+        syncContext.setKylinConfig(getTestConfig());
+        SyncModel syncModel = Mockito.mock(SyncModel.class);
+        NDataModel model = new NDataModel();
+        model.setUuid("aaa");
+        Mockito.doReturn(model).when(openModelController).getModel(Mockito.anyString(), Mockito.anyString());
+        Mockito.doReturn(syncContext).when(tdsService).prepareSyncContext(project, modelName,
+                SyncContext.BI.TABLEAU_CONNECTOR_TDS, SyncContext.ModelElement.CUSTOM_COLS, "localhost", 8080);
+        Mockito.doReturn(syncModel).when(tdsService).exportTDSDimensionsAndMeasuresByAdmin(syncContext,
+                Lists.newArrayList(), Lists.newArrayList());
+        mockMvc.perform(MockMvcRequestBuilders.get("/api/models/bi_export").param("model_name", modelName)
+                .param("project", project).param("export_as", "TABLEAU_CONNECTOR_TDS").param("element", "CUSTOM_COLS")
+                .param("server_host", "localhost").param("server_port", "8080").param("dimensions", "")
+                .param("measures", "").contentType(MediaType.APPLICATION_JSON)
+                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON)))
+                .andExpect(MockMvcResultMatchers.status().isOk());
+    }
+
+    @Test
+    public void testBIExportByNormalUser() throws Exception {
+        String project = "default";
+        String modelName = "741ca86a-1f13-46da-a59f-95fb68615e3a";
+        SyncContext syncContext = new SyncContext();
+        syncContext.setProjectName(project);
+        syncContext.setModelId(modelName);
+        syncContext.setTargetBI(SyncContext.BI.TABLEAU_CONNECTOR_TDS);
+        syncContext.setModelElement(SyncContext.ModelElement.AGG_INDEX_AND_TABLE_INDEX_COL);
+        syncContext.setHost("localhost");
+        syncContext.setPort(8080);
+        syncContext.setDataflow(NDataflowManager.getInstance(getTestConfig(), project).getDataflow(modelName));
+        syncContext.setKylinConfig(getTestConfig());
+        SyncModel syncModel = Mockito.mock(SyncModel.class);
+        SecurityContextHolder.getContext()
+                .setAuthentication(new TestingAuthenticationToken("u1", "ANALYST", Constant.ROLE_ANALYST));
+        NDataModel model = new NDataModel();
+        model.setUuid("aaa");
+        Mockito.doReturn(model).when(openModelController).getModel(Mockito.anyString(), Mockito.anyString());
+        Mockito.doReturn(syncContext).when(tdsService).prepareSyncContext(project, modelName,
+                SyncContext.BI.TABLEAU_CONNECTOR_TDS, SyncContext.ModelElement.CUSTOM_COLS, "localhost", 8080);
+        Mockito.doReturn(syncModel).when(tdsService).exportTDSDimensionsAndMeasuresByNormalUser(syncContext,
+                Lists.newArrayList(), Lists.newArrayList());
+        mockMvc.perform(MockMvcRequestBuilders.get("/api/models/bi_export").param("model_name", modelName)
+                .param("project", project).param("export_as", "TABLEAU_CONNECTOR_TDS").param("element", "CUSTOM_COLS")
+                .param("server_host", "localhost").param("server_port", "8080").param("dimensions", "")
+                .param("measures", "").contentType(MediaType.APPLICATION_JSON)
+                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON)))
+                .andExpect(MockMvcResultMatchers.status().isOk());
+    }
+
     @Test
     public void testUpdateModelStatus() throws Exception {
         String project = "default";
@@ -566,6 +633,8 @@ public class OpenModelControllerTest extends NLocalFileMetadataTestCase {
         OpenModelRequest request = new OpenModelRequest();
         request.setProject(project);
         request.setModelName(modelAlias);
+        Mockito.doReturn(Mockito.mock(BuildBaseIndexResponse.class)).when(fusionModelService)
+                .updateDataModelSemantic(request.getProject(), request);
         mockMvc.perform(MockMvcRequestBuilders.put("/api/models/modification").contentType(MediaType.APPLICATION_JSON)
                 .content(JsonUtil.writeValueAsString(request))
                 .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON)))
diff --git a/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/NModelControllerTest.java b/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/NModelControllerTest.java
index 7fccbc44aa..47520fa814 100644
--- a/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/NModelControllerTest.java
+++ b/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/NModelControllerTest.java
@@ -62,9 +62,9 @@ import org.apache.kylin.rest.response.NDataSegmentResponse;
 import org.apache.kylin.rest.response.RelatedModelResponse;
 import org.apache.kylin.rest.service.FusionModelService;
 import org.apache.kylin.rest.service.ModelService;
-import org.apache.kylin.tool.bisync.BISyncModel;
-import org.apache.kylin.tool.bisync.BISyncTool;
+import org.apache.kylin.rest.service.ModelTdsService;
 import org.apache.kylin.tool.bisync.SyncContext;
+import org.apache.kylin.tool.bisync.model.SyncModel;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Rule;
@@ -96,6 +96,9 @@ public class NModelControllerTest extends NLocalFileMetadataTestCase {
     @Mock
     private ModelService modelService;
 
+    @Mock
+    private ModelTdsService tdsService;
+
     @Mock
     private FusionModelService fusionModelService;
 
@@ -673,7 +676,7 @@ public class NModelControllerTest extends NLocalFileMetadataTestCase {
         UpdateMultiPartitionValueRequest request = new UpdateMultiPartitionValueRequest();
         request.setProject("default");
         List<String[]> partition_values = Lists.newArrayList();
-        String[] value = new String[]{"5"};
+        String[] value = new String[] { "5" };
         partition_values.add(value);
         request.setSubPartitionValues(partition_values);
         Mockito.doNothing().when(modelService).addMultiPartitionValues(request.getProject(),
@@ -713,64 +716,18 @@ public class NModelControllerTest extends NLocalFileMetadataTestCase {
     }
 
     @Test
-    public void testBIExportByADMIN() throws Exception {
-        String project = "default";
-        String modelName = "741ca86a-1f13-46da-a59f-95fb68615e3a";
-        SyncContext syncContext = new SyncContext();
-        syncContext.setProjectName(project);
-        syncContext.setModelId(modelName);
-        syncContext.setTargetBI(SyncContext.BI.TABLEAU_CONNECTOR_TDS);
-        syncContext.setModelElement(SyncContext.ModelElement.AGG_INDEX_AND_TABLE_INDEX_COL);
-        syncContext.setHost("localhost");
-        syncContext.setPort(8080);
-        syncContext.setDataflow(NDataflowManager.getInstance(getTestConfig(), project).getDataflow(modelName));
-        syncContext.setKylinConfig(getTestConfig());
-        BISyncModel syncModel = BISyncTool.dumpToBISyncModel(syncContext);
-        Mockito.doReturn(syncContext).when(modelService).getSyncContext(project, modelName,
-                SyncContext.BI.TABLEAU_CONNECTOR_TDS, SyncContext.ModelElement.CUSTOM_COLS, "localhost", 8080);
-        Mockito.doReturn(syncModel).when(modelService).exportTDSDimensionsAndMeasuresByAdmin(syncContext,
-                Lists.newArrayList(), Lists.newArrayList());
-        mockMvc.perform(MockMvcRequestBuilders.get("/api/models/bi_export").param("model", modelName)
-                .param("project", project).param("export_as", "TABLEAU_CONNECTOR_TDS").param("element", "CUSTOM_COLS")
-                .param("server_host", "localhost").param("server_port", "8080").param("dimensions", "")
-                .param("measures", "").contentType(MediaType.APPLICATION_JSON)
-                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
-                .andExpect(MockMvcResultMatchers.status().isOk());
-    }
-
-    @Test
-    public void testBIExportByNormalUser() throws Exception {
+    public void testValidateExport() throws Exception {
         String project = "default";
         String modelName = "741ca86a-1f13-46da-a59f-95fb68615e3a";
         SyncContext syncContext = new SyncContext();
         syncContext.setProjectName(project);
         syncContext.setModelId(modelName);
-        syncContext.setTargetBI(SyncContext.BI.TABLEAU_CONNECTOR_TDS);
-        syncContext.setModelElement(SyncContext.ModelElement.AGG_INDEX_AND_TABLE_INDEX_COL);
-        syncContext.setHost("localhost");
-        syncContext.setPort(8080);
-        syncContext.setDataflow(NDataflowManager.getInstance(getTestConfig(), project).getDataflow(modelName));
-        syncContext.setKylinConfig(getTestConfig());
-        BISyncModel syncModel = BISyncTool.dumpToBISyncModel(syncContext);
-        SecurityContextHolder.getContext()
-                .setAuthentication(new TestingAuthenticationToken("u1", "ANALYST", Constant.ROLE_ANALYST));
-        Mockito.doReturn(syncContext).when(modelService).getSyncContext(project, modelName,
+        syncContext.setModelElement(SyncContext.ModelElement.ALL_COLS);
+        SyncModel syncModel = Mockito.mock(SyncModel.class);
+        Mockito.doReturn(syncContext).when(tdsService).prepareSyncContext(project, modelName,
                 SyncContext.BI.TABLEAU_CONNECTOR_TDS, SyncContext.ModelElement.CUSTOM_COLS, "localhost", 8080);
-        Mockito.doReturn(syncModel).when(modelService).exportTDSDimensionsAndMeasuresByNormalUser(syncContext,
-                Lists.newArrayList(), Lists.newArrayList());
-        mockMvc.perform(MockMvcRequestBuilders.get("/api/models/bi_export").param("model", modelName)
-                .param("project", project).param("export_as", "TABLEAU_CONNECTOR_TDS").param("element", "CUSTOM_COLS")
-                .param("server_host", "localhost").param("server_port", "8080").param("dimensions", "")
-                .param("measures", "").contentType(MediaType.APPLICATION_JSON)
-                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_JSON)))
-                .andExpect(MockMvcResultMatchers.status().isOk());
-    }
-
-    @Test
-    public void testValidateExport() throws Exception {
-        String project = "default";
-        String modelName = "741ca86a-1f13-46da-a59f-95fb68615e3a";
-        when(modelService.validateExport(project, modelName)).thenReturn(Boolean.TRUE);
+        Mockito.doReturn(syncModel).when(tdsService).exportModel(syncContext);
+        Mockito.doReturn(Boolean.TRUE).when(tdsService).preCheckNameConflict(syncModel);
         mockMvc.perform(MockMvcRequestBuilders.get("/api/models/validate_export").param("model", modelName)
                 .param("project", project).contentType(MediaType.APPLICATION_JSON))
                 .andExpect(MockMvcResultMatchers.status().isOk());
@@ -789,10 +746,9 @@ public class NModelControllerTest extends NLocalFileMetadataTestCase {
         syncContext.setPort(8080);
         syncContext.setDataflow(NDataflowManager.getInstance(getTestConfig(), project).getDataflow(modelName));
         syncContext.setKylinConfig(getTestConfig());
-        BISyncModel syncModel = BISyncTool.dumpToBISyncModel(syncContext);
-        Mockito.doReturn(syncModel).when(modelService).exportModel(project, modelName,
-                SyncContext.BI.TABLEAU_CONNECTOR_TDS, SyncContext.ModelElement.AGG_INDEX_AND_TABLE_INDEX_COL,
-                "localhost", 8080);
+        syncContext.setAdmin(true);
+        SyncModel syncModel = Mockito.mock(SyncModel.class);
+        Mockito.doReturn(syncModel).when(tdsService).exportModel(syncContext);
         mockMvc.perform(MockMvcRequestBuilders.get("/api/models/{model}/export", modelName).param("project", project)
                 .param("export_as", "TABLEAU_CONNECTOR_TDS").param("element", "AGG_INDEX_AND_TABLE_INDEX_COL")
                 .param("server_host", "localhost").param("server_port", "8080").contentType(MediaType.APPLICATION_JSON)
diff --git a/src/modeling-service/src/main/java/org/apache/kylin/rest/service/AbstractModelService.java b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/AbstractModelService.java
new file mode 100644
index 0000000000..b46314b2e1
--- /dev/null
+++ b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/AbstractModelService.java
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.rest.service;
+
+import static org.apache.kylin.common.exception.ServerErrorCode.FAILED_UPDATE_MODEL;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.MODEL_ID_NOT_EXIST;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.MODEL_NAME_EMPTY;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.MODEL_NAME_INVALID;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.MODEL_NAME_NOT_EXIST;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.MODEL_NOT_EXIST;
+
+import java.util.Arrays;
+import java.util.Set;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.kylin.common.exception.KylinException;
+import org.apache.kylin.common.msg.MsgPicker;
+import org.apache.kylin.metadata.acl.AclTCRManager;
+import org.apache.kylin.metadata.cube.model.IndexPlan;
+import org.apache.kylin.metadata.cube.model.NIndexPlanManager;
+import org.apache.kylin.metadata.model.ColumnDesc;
+import org.apache.kylin.metadata.model.NDataModel;
+import org.apache.kylin.metadata.model.NDataModelManager;
+import org.apache.kylin.metadata.model.NTableMetadataManager;
+import org.apache.kylin.rest.util.AclEvaluate;
+import org.apache.kylin.rest.util.AclPermissionUtil;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import com.google.common.collect.Sets;
+
+import lombok.val;
+import lombok.var;
+
+public class AbstractModelService extends BasicService {
+
+    public static final String VALID_NAME_FOR_MODEL = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_";
+
+    @Autowired
+    public AclEvaluate aclEvaluate;
+
+    @Autowired
+    public AccessService accessService;
+
+    public void checkModelPermission(String project, String modelId) {
+        String userName = aclEvaluate.getCurrentUserName();
+        Set<String> groups = getCurrentUserGroups();
+        if (AclPermissionUtil.isAdmin() || AclPermissionUtil.isAdminInProject(project, groups)) {
+            return;
+        }
+        Set<String> allAuthTables = Sets.newHashSet();
+        Set<String> allAuthColumns = Sets.newHashSet();
+        var auths = getManager(AclTCRManager.class, project).getAuthTablesAndColumns(project, userName, true);
+        allAuthTables.addAll(auths.getTables());
+        allAuthColumns.addAll(auths.getColumns());
+        for (val group : groups) {
+            auths = getManager(AclTCRManager.class, project).getAuthTablesAndColumns(project, group, false);
+            allAuthTables.addAll(auths.getTables());
+            allAuthColumns.addAll(auths.getColumns());
+        }
+
+        NDataModel model = getModelById(modelId, project);
+        Set<String> tablesInModel = Sets.newHashSet();
+        model.getJoinTables().forEach(table -> tablesInModel.add(table.getTable()));
+        tablesInModel.add(model.getRootFactTableName());
+        tablesInModel.forEach(table -> {
+            if (!allAuthTables.contains(table)) {
+                throw new KylinException(FAILED_UPDATE_MODEL, MsgPicker.getMsg().getModelModifyAbandon(table));
+            }
+        });
+        tablesInModel.stream().filter(allAuthTables::contains).forEach(table -> {
+            ColumnDesc[] columnDescs = NTableMetadataManager.getInstance(getConfig(), project).getTableDesc(table)
+                    .getColumns();
+            Arrays.stream(columnDescs).map(column -> table + "." + column.getName()).forEach(column -> {
+                if (!allAuthColumns.contains(column)) {
+                    throw new KylinException(FAILED_UPDATE_MODEL, MsgPicker.getMsg().getModelModifyAbandon(column));
+                }
+            });
+        });
+    }
+
+    public NDataModel getModelById(String modelId, String project) {
+        NDataModelManager modelManager = getManager(NDataModelManager.class, project);
+        NDataModel nDataModel = modelManager.getDataModelDesc(modelId);
+        if (null == nDataModel) {
+            throw new KylinException(MODEL_ID_NOT_EXIST, modelId);
+        }
+        return nDataModel;
+    }
+
+    public NDataModel getModelByAlias(String modelAlias, String project) {
+        NDataModelManager modelManager = getManager(NDataModelManager.class, project);
+        NDataModel nDataModel = modelManager.getDataModelDescByAlias(modelAlias);
+        if (null == nDataModel) {
+            throw new KylinException(MODEL_NAME_NOT_EXIST, modelAlias);
+        }
+        return nDataModel;
+    }
+
+    public Set<String> listAllModelIdsInProject(String project) {
+        NDataModelManager dataModelManager = getManager(NDataModelManager.class, project);
+        return dataModelManager.listAllModelIds();
+    }
+
+    public IndexPlan getIndexPlan(String modelId, String project) {
+        NIndexPlanManager indexPlanManager = getManager(NIndexPlanManager.class, project);
+        return indexPlanManager.getIndexPlan(modelId);
+    }
+
+    public void primaryCheck(NDataModel modelDesc) {
+        if (modelDesc == null) {
+            throw new KylinException(MODEL_NOT_EXIST);
+        }
+
+        String modelAlias = modelDesc.getAlias();
+
+        if (StringUtils.isEmpty(modelAlias)) {
+            throw new KylinException(MODEL_NAME_EMPTY);
+        }
+        if (!StringUtils.containsOnly(modelAlias, VALID_NAME_FOR_MODEL)) {
+            throw new KylinException(MODEL_NAME_INVALID, modelAlias);
+        }
+    }
+
+}
diff --git a/src/modeling-service/src/main/java/org/apache/kylin/rest/service/FusionModelService.java b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/FusionModelService.java
index 23f2e11328..84bf26d971 100644
--- a/src/modeling-service/src/main/java/org/apache/kylin/rest/service/FusionModelService.java
+++ b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/FusionModelService.java
@@ -26,10 +26,9 @@ import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.exception.ServerErrorCode;
 import org.apache.kylin.common.msg.MsgPicker;
+import org.apache.kylin.common.scheduler.EventBusFactory;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.Pair;
-import org.apache.kylin.rest.response.DataResult;
-import org.apache.kylin.common.scheduler.EventBusFactory;
 import org.apache.kylin.metadata.model.FusionModel;
 import org.apache.kylin.metadata.model.FusionModelManager;
 import org.apache.kylin.metadata.model.NDataModel;
@@ -39,6 +38,7 @@ import org.apache.kylin.rest.request.IndexesToSegmentsRequest;
 import org.apache.kylin.rest.request.ModelRequest;
 import org.apache.kylin.rest.request.OwnerChangeRequest;
 import org.apache.kylin.rest.response.BuildBaseIndexResponse;
+import org.apache.kylin.rest.response.DataResult;
 import org.apache.kylin.rest.response.JobInfoResponse;
 import org.apache.kylin.rest.response.JobInfoResponseWithFailure;
 import org.apache.kylin.rest.response.NDataModelResponse;
@@ -53,7 +53,7 @@ import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
 @Service("fusionModelService")
-public class FusionModelService extends BasicService implements TableFusionModelSupporter {
+public class FusionModelService extends AbstractModelService implements TableFusionModelSupporter {
 
     @Autowired
     private ModelService modelService;
diff --git a/src/modeling-service/src/main/java/org/apache/kylin/rest/service/ModelService.java b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/ModelService.java
index 474b10fe82..7082c4890b 100644
--- a/src/modeling-service/src/main/java/org/apache/kylin/rest/service/ModelService.java
+++ b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/ModelService.java
@@ -42,13 +42,11 @@ import static org.apache.kylin.common.exception.ServerErrorCode.INVALID_PARTITIO
 import static org.apache.kylin.common.exception.ServerErrorCode.INVALID_RANGE;
 import static org.apache.kylin.common.exception.ServerErrorCode.INVALID_SEGMENT_PARAMETER;
 import static org.apache.kylin.common.exception.ServerErrorCode.MODEL_BROKEN;
-import static org.apache.kylin.common.exception.ServerErrorCode.MODEL_EXPORT_ERROR;
 import static org.apache.kylin.common.exception.ServerErrorCode.MODEL_ONLINE_ABANDON;
 import static org.apache.kylin.common.exception.ServerErrorCode.PERMISSION_DENIED;
 import static org.apache.kylin.common.exception.ServerErrorCode.STREAMING_INDEX_UPDATE_DISABLE;
 import static org.apache.kylin.common.exception.ServerErrorCode.TABLE_NOT_EXIST;
 import static org.apache.kylin.common.exception.ServerErrorCode.TIMESTAMP_COLUMN_NOT_EXIST;
-import static org.apache.kylin.common.exception.ServerErrorCode.UNAUTHORIZED_ENTITY;
 import static org.apache.kylin.common.exception.ServerErrorCode.VIEW_PARTITION_DATE_FORMAT_DETECTION_FORBIDDEN;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.COMPUTED_COLUMN_CONFLICT;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.COMPUTED_COLUMN_NAME_OR_EXPR_EMPTY;
@@ -56,10 +54,7 @@ import static org.apache.kylin.common.exception.code.ErrorCodeServer.DATETIME_FO
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.DATETIME_FORMAT_PARSE_ERROR;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.MODEL_ID_NOT_EXIST;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.MODEL_NAME_DUPLICATE;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.MODEL_NAME_EMPTY;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.MODEL_NAME_INVALID;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.MODEL_NAME_NOT_EXIST;
-import static org.apache.kylin.common.exception.code.ErrorCodeServer.MODEL_NOT_EXIST;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.PARAMETER_INVALID_SUPPORT_LIST;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.PROJECT_NOT_EXIST;
 import static org.apache.kylin.common.exception.code.ErrorCodeServer.SEGMENT_BUILD_RANGE_OVERLAP;
@@ -133,7 +128,6 @@ import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.common.util.RandomUtil;
 import org.apache.kylin.common.util.StringUtil;
-import org.apache.kylin.engine.spark.smarter.IndexDependencyParser;
 import org.apache.kylin.engine.spark.utils.ComputedColumnEvalUtil;
 import org.apache.kylin.job.SecondStorageJobParamUtil;
 import org.apache.kylin.job.common.SegmentUtil;
@@ -146,7 +140,6 @@ import org.apache.kylin.job.handler.SecondStorageSegmentCleanJobHandler;
 import org.apache.kylin.job.handler.SecondStorageSegmentLoadJobHandler;
 import org.apache.kylin.job.manager.JobManager;
 import org.apache.kylin.job.model.JobParam;
-import org.apache.kylin.metadata.acl.AclTCRDigest;
 import org.apache.kylin.metadata.acl.AclTCRManager;
 import org.apache.kylin.metadata.acl.NDataModelAclParams;
 import org.apache.kylin.metadata.cube.cuboid.NAggregationGroup;
@@ -163,6 +156,7 @@ import org.apache.kylin.metadata.cube.model.NDataflowManager;
 import org.apache.kylin.metadata.cube.model.NDataflowUpdate;
 import org.apache.kylin.metadata.cube.model.NIndexPlanManager;
 import org.apache.kylin.metadata.cube.model.RuleBasedIndex;
+import org.apache.kylin.metadata.favorite.FavoriteRuleManager;
 import org.apache.kylin.metadata.model.AutoMergeTimeEnum;
 import org.apache.kylin.metadata.model.ColumnDesc;
 import org.apache.kylin.metadata.model.ComputedColumnDesc;
@@ -196,7 +190,6 @@ import org.apache.kylin.metadata.model.UpdateImpact;
 import org.apache.kylin.metadata.model.VolatileRange;
 import org.apache.kylin.metadata.model.schema.AffectedModelContext;
 import org.apache.kylin.metadata.model.tool.CalciteParser;
-import org.apache.kylin.metadata.model.util.ComputedColumnUtil;
 import org.apache.kylin.metadata.model.util.MultiPartitionUtil;
 import org.apache.kylin.metadata.model.util.scd2.SCD2CondChecker;
 import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
@@ -204,6 +197,7 @@ import org.apache.kylin.metadata.project.NProjectManager;
 import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
 import org.apache.kylin.metadata.streaming.KafkaConfig;
+import org.apache.kylin.query.util.KapQueryUtil;
 import org.apache.kylin.query.util.PushDownUtil;
 import org.apache.kylin.query.util.QueryParams;
 import org.apache.kylin.rest.aspect.Transaction;
@@ -244,12 +238,10 @@ import org.apache.kylin.rest.response.SegmentCheckResponse;
 import org.apache.kylin.rest.response.SegmentPartitionResponse;
 import org.apache.kylin.rest.response.SegmentRangeResponse;
 import org.apache.kylin.rest.response.SimplifiedMeasure;
-import org.apache.kylin.rest.security.MutableAclRecord;
 import org.apache.kylin.rest.service.params.FullBuildSegmentParams;
 import org.apache.kylin.rest.service.params.IncrementBuildSegmentParams;
 import org.apache.kylin.rest.service.params.MergeSegmentParams;
 import org.apache.kylin.rest.service.params.ModelQueryParams;
-import org.apache.kylin.rest.util.AclEvaluate;
 import org.apache.kylin.rest.util.AclPermissionUtil;
 import org.apache.kylin.rest.util.ModelTriple;
 import org.apache.kylin.rest.util.ModelUtils;
@@ -260,10 +252,6 @@ import org.apache.kylin.source.adhocquery.PushDownConverterKeyWords;
 import org.apache.kylin.streaming.event.StreamingJobDropEvent;
 import org.apache.kylin.streaming.event.StreamingJobKillEvent;
 import org.apache.kylin.streaming.manager.StreamingJobManager;
-import org.apache.kylin.tool.bisync.BISyncModel;
-import org.apache.kylin.tool.bisync.BISyncTool;
-import org.apache.kylin.tool.bisync.SyncContext;
-import org.apache.kylin.tool.bisync.model.MeasureDef;
 import org.apache.spark.sql.SparderEnv;
 import org.apache.spark.sql.SparkSession;
 import org.slf4j.Logger;
@@ -286,8 +274,6 @@ import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
 import io.kyligence.kap.guava20.shaded.common.base.Supplier;
-import org.apache.kylin.metadata.favorite.FavoriteRuleManager;
-import org.apache.kylin.query.util.KapQueryUtil;
 import io.kyligence.kap.secondstorage.SecondStorage;
 import io.kyligence.kap.secondstorage.SecondStorageNodeHelper;
 import io.kyligence.kap.secondstorage.SecondStorageUpdater;
@@ -304,15 +290,13 @@ import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
 @Component("modelService")
-public class ModelService extends BasicService implements TableModelSupporter, ProjectModelSupporter {
+public class ModelService extends AbstractModelService implements TableModelSupporter, ProjectModelSupporter {
 
     private static final Logger logger = LoggerFactory.getLogger(ModelService.class);
 
     private static final String LAST_MODIFY = "last_modify";
     public static final String REC_COUNT = "recommendations_count";
 
-    public static final String VALID_NAME_FOR_MODEL = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890_";
-
     public static final String VALID_NAME_FOR_DIMENSION = "^[\\u4E00-\\u9FA5a-zA-Z0-9 _\\-()%?()]+$";
 
     public static final String VALID_NAME_FOR_MEASURE = "^[\\u4E00-\\u9FA5a-zA-Z0-9 _\\-()%?().]+$";
@@ -334,15 +318,9 @@ public class ModelService extends BasicService implements TableModelSupporter, P
     @Qualifier("segmentHelper")
     private SegmentHelperSupporter segmentHelper;
 
-    @Autowired
-    public AclEvaluate aclEvaluate;
-
     @Autowired
     private ProjectService projectService;
 
-    @Autowired
-    private AccessService accessService;
-
     @Setter
     @Autowired(required = false)
     private ModelQuerySupporter modelQuerySupporter;
@@ -359,24 +337,6 @@ public class ModelService extends BasicService implements TableModelSupporter, P
     @Autowired(required = false)
     private List<ModelChangeSupporter> modelChangeSupporters = Lists.newArrayList();
 
-    public NDataModel getModelById(String modelId, String project) {
-        NDataModelManager modelManager = getManager(NDataModelManager.class, project);
-        NDataModel nDataModel = modelManager.getDataModelDesc(modelId);
-        if (null == nDataModel) {
-            throw new KylinException(MODEL_ID_NOT_EXIST, modelId);
-        }
-        return nDataModel;
-    }
-
-    public NDataModel getModelByAlias(String modelAlias, String project) {
-        NDataModelManager modelManager = getManager(NDataModelManager.class, project);
-        NDataModel nDataModel = modelManager.getDataModelDescByAlias(modelAlias);
-        if (null == nDataModel) {
-            throw new KylinException(MODEL_NAME_NOT_EXIST, modelAlias);
-        }
-        return nDataModel;
-    }
-
     /**
      * for 3x rest api
      *
@@ -875,10 +835,10 @@ public class ModelService extends BasicService implements TableModelSupporter, P
         return modelResponseStatus;
     }
 
-    private long getEmptyIndexesCount(String project, String id) {
+    private long getEmptyIndexesCount(String project, String modelId) {
         val indexPlanManager = getManager(NIndexPlanManager.class, project);
-        val indexPlan = indexPlanManager.getIndexPlan(id);
-        return indexPlan.getAllLayoutsReadOnly().size() - indexPlanManager.getAvailableIndexesCount(project, id);
+        val indexPlan = indexPlanManager.getIndexPlan(modelId);
+        return indexPlan.getAllLayoutsReadOnly().size() - indexPlanManager.getAvailableIndexesCount(project, modelId);
     }
 
     private List<NDataModelResponse> sortExpansionRate(boolean reverse, List<NDataModelResponse> filterModels) {
@@ -934,7 +894,7 @@ public class ModelService extends BasicService implements TableModelSupporter, P
                 && isSCD2;
     }
 
-    protected RealizationStatusEnum getModelStatus(String modelId, String projectName) {
+    public RealizationStatusEnum getModelStatus(String modelId, String projectName) {
         val indexPlan = getIndexPlan(modelId, projectName);
         if (indexPlan != null) {
             return getManager(NDataflowManager.class, projectName).getDataflow(indexPlan.getUuid()).getStatus();
@@ -1366,12 +1326,6 @@ public class ModelService extends BasicService implements TableModelSupporter, P
         return relatedModel;
     }
 
-    @VisibleForTesting
-    public IndexPlan getIndexPlan(String modelId, String project) {
-        NIndexPlanManager indexPlanManager = getManager(NIndexPlanManager.class, project);
-        return indexPlanManager.getIndexPlan(modelId);
-    }
-
     private void checkAliasExist(String modelId, String newAlias, String project) {
         if (!checkModelAliasUniqueness(modelId, newAlias, project)) {
             throw new KylinException(MODEL_NAME_DUPLICATE, newAlias);
@@ -1539,11 +1493,6 @@ public class ModelService extends BasicService implements TableModelSupporter, P
         }
     }
 
-    public Set<String> listAllModelIdsInProject(String project) {
-        NDataModelManager dataModelManager = getManager(NDataModelManager.class, project);
-        return dataModelManager.listAllModelIds();
-    }
-
     @Transaction(project = 0)
     public void offlineAllModelsInProject(String project) {
         aclEvaluate.checkProjectWritePermission(project);
@@ -1848,7 +1797,7 @@ public class ModelService extends BasicService implements TableModelSupporter, P
                         .anyMatch(column -> column.getName().equalsIgnoreCase(columnName));
                 if (!hasPartitionColumn && !modelRequest.getDimensionNameIdMap().containsKey(fullColumnName)) {
                     throw new KylinException(TIMESTAMP_COLUMN_NOT_EXIST,
-                            String.format(Locale.ROOT, MsgPicker.getMsg().getTimestampPartitionColumnNotExist()));
+                            MsgPicker.getMsg().getTimestampPartitionColumnNotExist());
                 }
             }
         }
@@ -2624,21 +2573,6 @@ public class ModelService extends BasicService implements TableModelSupporter, P
         }
     }
 
-    public void primaryCheck(NDataModel modelDesc) {
-        if (modelDesc == null) {
-            throw new KylinException(MODEL_NOT_EXIST);
-        }
-
-        String modelAlias = modelDesc.getAlias();
-
-        if (StringUtils.isEmpty(modelAlias)) {
-            throw new KylinException(MODEL_NAME_EMPTY);
-        }
-        if (!StringUtils.containsOnly(modelAlias, VALID_NAME_FOR_MODEL)) {
-            throw new KylinException(MODEL_NAME_INVALID, modelAlias);
-        }
-    }
-
     public ComputedColumnUsageResponse getComputedColumnUsages(String project) {
         aclEvaluate.checkProjectWritePermission(project);
         ComputedColumnUsageResponse ret = new ComputedColumnUsageResponse();
@@ -4007,241 +3941,6 @@ public class ModelService extends BasicService implements TableModelSupporter, P
                 : dataModelDesc.getPartitionDesc().getPartitionDateFormat();
     }
 
-    public BISyncModel exportModel(String projectName, String modelId, SyncContext.BI targetBI,
-            SyncContext.ModelElement modelElement, String host, int port) {
-        SyncContext syncContext = getADMINSyncContext(projectName, modelId, targetBI, modelElement, host, port);
-
-        return BISyncTool.dumpToBISyncModel(syncContext);
-    }
-
-    public BISyncModel exportTDSDimensionsAndMeasuresByNormalUser(SyncContext syncContext, List<String> dimensions,
-            List<String> measures) {
-        Set<String> groups = getCurrentUserGroups();
-        String currentUserName = aclEvaluate.getCurrentUserName();
-        String projectName = syncContext.getProjectName();
-        String modelId = syncContext.getModelId();
-        NDataflow dataflow = getManager(NDataflowManager.class, projectName).getDataflow(modelId);
-        if (dataflow.getStatus() == RealizationStatusEnum.BROKEN) {
-            throw new KylinException(ServerErrorCode.MODEL_BROKEN,
-                    "The model is broken and cannot be exported TDS file");
-        }
-
-        Set<String> authTables = getAllAuthTables(projectName, groups, currentUserName);
-        Set<String> authColumns = getAllAuthColumns(projectName, groups, currentUserName);
-
-        Set<String> newAuthColumns = Sets.newHashSet();
-        dataflow.getModel().getAllTables().forEach(tableRef -> {
-            List<TblColRef> collect = tableRef.getColumns().stream()
-                    .filter(column -> authColumns.contains(column.getCanonicalName())).collect(Collectors.toList());
-            collect.forEach(x -> newAuthColumns.add(x.getAliasDotName()));
-        });
-
-        checkTableHasColumnPermission(syncContext.getModelElement(), projectName, modelId, newAuthColumns, dimensions,
-                measures);
-
-        return BISyncTool.dumpHasPermissionToBISyncModel(syncContext, authTables, newAuthColumns, dimensions, measures);
-    }
-
-    public BISyncModel exportTDSDimensionsAndMeasuresByAdmin(SyncContext syncContext, List<String> dimensions,
-            List<String> measures) {
-        String projectName = syncContext.getProjectName();
-        String modelId = syncContext.getModelId();
-        NDataflow dataflow = getManager(NDataflowManager.class, projectName).getDataflow(modelId);
-        if (dataflow.getStatus() == RealizationStatusEnum.BROKEN) {
-            throw new KylinException(MODEL_BROKEN, "The model is broken and cannot be exported TDS file");
-        }
-        checkModelExportPermission(projectName, modelId);
-        checkModelPermission(projectName, modelId);
-        return BISyncTool.dumpBISyncModel(syncContext, dimensions, measures);
-    }
-
-    public SyncContext getADMINSyncContext(String projectName, String modelId, SyncContext.BI targetBI,
-            SyncContext.ModelElement element, String host, int port) {
-        NDataflow dataflow = getManager(NDataflowManager.class, projectName).getDataflow(modelId);
-        if (dataflow.getStatus() == RealizationStatusEnum.BROKEN) {
-            throw new KylinException(MODEL_BROKEN, "The model is broken and cannot be exported TDS file");
-        }
-        checkModelExportPermission(projectName, modelId);
-        checkModelPermission(projectName, modelId);
-
-        return getSyncContext(projectName, modelId, targetBI, element, host, port);
-    }
-
-    public SyncContext getSyncContext(String projectName, String modelId, SyncContext.BI targetBI,
-            SyncContext.ModelElement modelElement, String host, int port) {
-        SyncContext syncContext = new SyncContext();
-        syncContext.setProjectName(projectName);
-        syncContext.setModelId(modelId);
-        syncContext.setTargetBI(targetBI);
-        syncContext.setModelElement(modelElement);
-        syncContext.setHost(host);
-        syncContext.setPort(port);
-        syncContext.setDataflow(getManager(NDataflowManager.class, projectName).getDataflow(modelId));
-        syncContext.setKylinConfig(getManager(NProjectManager.class).getProject(projectName).getConfig());
-        return syncContext;
-    }
-
-    public void checkTableHasColumnPermission(SyncContext.ModelElement modelElement, String project, String modeId,
-            Set<String> authColumns, List<String> dimensions, List<String> measures) {
-        if (AclPermissionUtil.isAdmin()) {
-            return;
-        }
-        aclEvaluate.checkProjectReadPermission(project);
-
-        NDataModel model = getManager(NDataModelManager.class, project).getDataModelDesc(modeId);
-        long jointCount = model.getJoinTables().stream()
-                .filter(table -> authColumns
-                        .containsAll(Arrays.stream(table.getJoin().getPrimaryKeyColumns())
-                                .map(TblColRef::getAliasDotName).collect(Collectors.toSet()))
-                        && authColumns.containsAll(Arrays.stream(table.getJoin().getForeignKeyColumns())
-                                .map(TblColRef::getAliasDotName).collect(Collectors.toSet())))
-                .count();
-        long singleTableCount = model.getAllTables().stream().filter(ref -> ref.getColumns().stream()
-                .map(TblColRef::getAliasDotName).collect(Collectors.toSet()).stream().anyMatch(authColumns::contains))
-                .count();
-
-        if (jointCount != model.getJoinTables().size() || singleTableCount == 0
-                || (modelElement.equals(SyncContext.ModelElement.CUSTOM_COLS)
-                        && !checkColumnPermission(model, authColumns, dimensions, measures))) {
-            throw new KylinException(ServerErrorCode.INVALID_TABLE_AUTH,
-                    MsgPicker.getMsg().getTableNoColumnsPermission());
-        }
-    }
-
-    public boolean checkColumnPermission(NDataModel model, Set<String> authColumns, List<String> dimensions,
-            List<String> measures) {
-
-        if (!checkDimensionPermission(model, authColumns, dimensions)) {
-            return false;
-        }
-        if (CollectionUtils.isEmpty(measures)) {
-            return true;
-        }
-        List<MeasureDef> authMeasures = model.getEffectiveMeasures().values().stream()
-                .filter(measure -> measures.contains(measure.getName()))
-                .filter(measure -> checkMeasurePermission(authColumns, measure, model)).map(MeasureDef::new)
-                .collect(Collectors.toList());
-        return authMeasures.size() == measures.size();
-
-    }
-
-    private boolean checkDimensionPermission(NDataModel model, Set<String> authColumns, List<String> dimensions) {
-        if (CollectionUtils.isEmpty(dimensions)) {
-            return true;
-        }
-        List<ComputedColumnDesc> computedColumnDescs = model.getComputedColumnDescs().stream()
-                .filter(cc -> dimensions.contains(cc.getFullName())).collect(Collectors.toList());
-
-        long authComputedCount = computedColumnDescs.stream()
-                .filter(cc -> authColumns.containsAll(convertCCToNormalCols(model, cc))).count();
-
-        if (computedColumnDescs.size() != authComputedCount) {
-            return false;
-        }
-
-        List<String> normalColumns = dimensions.stream().filter(column -> !computedColumnDescs.stream()
-                .map(ComputedColumnDesc::getFullName).collect(Collectors.toList()).contains(column))
-                .collect(Collectors.toList());
-        return authColumns.containsAll(normalColumns);
-    }
-
-    public Set<String> convertCCToNormalCols(NDataModel model, ComputedColumnDesc computedColumnDesc) {
-        IndexDependencyParser parser = new IndexDependencyParser(model);
-        try {
-            Set<TblColRef> tblColRefList = parser.unwrapComputeColumn(computedColumnDesc.getInnerExpression());
-            return tblColRefList.stream().map(TblColRef::getAliasDotName).collect(Collectors.toSet());
-        } catch (Exception e) {
-            log.warn("UnWrap computed column {} in project {} model {} exception",
-                    computedColumnDesc.getInnerExpression(), model.getProject(), model.getAlias(), e);
-        }
-        return Collections.emptySet();
-    }
-
-    private boolean checkMeasurePermission(Set<String> authColumns, NDataModel.Measure measure, NDataModel model) {
-        Set<String> measureColumns = measure.getFunction().getParameters().stream()
-                .filter(parameterDesc -> parameterDesc.getColRef() != null)
-                .map(parameterDesc -> parameterDesc.getColRef().getAliasDotName()).collect(Collectors.toSet());
-
-        List<ComputedColumnDesc> computedColumnDescs = model.getComputedColumnDescs().stream()
-                .filter(cc -> measureColumns.contains(cc.getFullName())).collect(Collectors.toList());
-
-        long authComputedCount = computedColumnDescs.stream()
-                .filter(cc -> authColumns.containsAll(convertCCToNormalCols(model, cc))).count();
-
-        if (computedColumnDescs.size() != authComputedCount) {
-            return false;
-        }
-
-        List<String> normalColumns = measureColumns.stream().filter(column -> !computedColumnDescs.stream()
-                .map(ComputedColumnDesc::getFullName).collect(Collectors.toList()).contains(column))
-                .collect(Collectors.toList());
-
-        return authColumns.containsAll(normalColumns);
-    }
-
-    private Set<String> getAllAuthTables(String project, Set<String> groups, String user) {
-        Set<String> allAuthTables = Sets.newHashSet();
-        AclTCRDigest auths = getManager(AclTCRManager.class, project).getAuthTablesAndColumns(project, user, true);
-        allAuthTables.addAll(auths.getTables());
-        for (String group : groups) {
-            auths = getManager(AclTCRManager.class, project).getAuthTablesAndColumns(project, group, false);
-            allAuthTables.addAll(auths.getTables());
-        }
-        return allAuthTables;
-    }
-
-    private Set<String> getAllAuthColumns(String project, Set<String> groups, String user) {
-        Set<String> allAuthColumns = Sets.newHashSet();
-        AclTCRDigest auths = getManager(AclTCRManager.class, project).getAuthTablesAndColumns(project, user, true);
-        allAuthColumns.addAll(auths.getColumns());
-        for (String group : groups) {
-            auths = getManager(AclTCRManager.class, project).getAuthTablesAndColumns(project, group, false);
-            allAuthColumns.addAll(auths.getColumns());
-        }
-        return allAuthColumns;
-    }
-
-    private void checkModelExportPermission(String project, String modeId) {
-        if (AclPermissionUtil.isAdmin()) {
-            return;
-        }
-        aclEvaluate.checkProjectReadPermission(project);
-
-        NDataModel model = getManager(NDataModelManager.class, project).getDataModelDesc(modeId);
-        Map<String, Set<String>> modelTableColumns = new HashMap<>();
-        for (TableRef tableRef : model.getAllTables()) {
-            modelTableColumns.putIfAbsent(tableRef.getTableIdentity(), new HashSet<>());
-            modelTableColumns.get(tableRef.getTableIdentity())
-                    .addAll(tableRef.getColumns().stream().map(TblColRef::getName).collect(Collectors.toSet()));
-        }
-        AclTCRManager aclManager = AclTCRManager.getInstance(KylinConfig.getInstanceFromEnv(), project);
-
-        String currentUserName = AclPermissionUtil.getCurrentUsername();
-        Set<String> groupsOfExecuteUser = accessService.getGroupsOfExecuteUser(currentUserName);
-        MutableAclRecord acl = AclPermissionUtil.getProjectAcl(project);
-        Set<String> groupsInProject = AclPermissionUtil.filterGroupsInProject(groupsOfExecuteUser, acl);
-        if (AclPermissionUtil.isAdminInProject(project, groupsOfExecuteUser)) {
-            return;
-        }
-        AclTCRDigest digest = aclManager.getAllUnauthorizedTableColumn(currentUserName, groupsInProject,
-                modelTableColumns);
-        Set<String> authorizedCC = ComputedColumnUtil
-                .getAuthorizedCC(Arrays.asList(model),
-                        ccSourceCols -> aclManager.isColumnsAuthorized(currentUserName, groupsOfExecuteUser,
-                                ccSourceCols))
-                .stream().map(ccDesc -> ccDesc.getTableIdentity() + "." + ccDesc.getColumnName())
-                .collect(Collectors.toSet());
-        if (digest.getColumns() != null && !digest.getColumns().isEmpty()
-                && digest.getColumns().stream().anyMatch(column -> !authorizedCC.contains(column))) {
-            throw new KylinException(UNAUTHORIZED_ENTITY,
-                    "current user does not have full permission on requesting model");
-        }
-        if (digest.getTables() != null && !digest.getTables().isEmpty()) {
-            throw new KylinException(UNAUTHORIZED_ENTITY,
-                    "current user does not have full permission on requesting model");
-        }
-    }
-
     public List<SegmentPartitionResponse> getSegmentPartitions(String project, String modelId, String segmentId,
             List<String> status, String sortBy, boolean reverse) {
         aclEvaluate.checkProjectReadPermission(project);
@@ -4300,20 +3999,17 @@ public class ModelService extends BasicService implements TableModelSupporter, P
         if (CollectionUtils.isEmpty(partitions)) {
             return;
         }
+        NDataflowManager dataflowManager = getManager(NDataflowManager.class, project);
         if (StringUtils.isNotEmpty(segmentId)) {
             // remove partition in target segment
-            getManager(NDataflowManager.class, project).removeLayoutPartition(modelId, partitions,
-                    Sets.newHashSet(segmentId));
+            dataflowManager.removeLayoutPartition(modelId, partitions, Sets.newHashSet(segmentId));
             // remove partition in target segment
-            getManager(NDataflowManager.class, project).removeSegmentPartition(modelId, partitions,
-                    Sets.newHashSet(segmentId));
+            dataflowManager.removeSegmentPartition(modelId, partitions, Sets.newHashSet(segmentId));
         } else {
             // remove partition in all layouts
-            getManager(NDataflowManager.class, project).removeLayoutPartition(modelId, Sets.newHashSet(partitions),
-                    null);
+            dataflowManager.removeLayoutPartition(modelId, Sets.newHashSet(partitions), null);
             // remove partition in all  segments
-            getManager(NDataflowManager.class, project).removeSegmentPartition(modelId, Sets.newHashSet(partitions),
-                    null);
+            dataflowManager.removeSegmentPartition(modelId, Sets.newHashSet(partitions), null);
             // remove partition in model
             getManager(NDataModelManager.class, project).updateDataModel(modelId, copyForWrite -> {
                 val multiPartitionDesc = copyForWrite.getMultiPartitionDesc();
@@ -4331,43 +4027,6 @@ public class ModelService extends BasicService implements TableModelSupporter, P
         return model;
     }
 
-    public void checkModelPermission(String project, String modelId) {
-        String userName = aclEvaluate.getCurrentUserName();
-        Set<String> groups = getCurrentUserGroups();
-        if (AclPermissionUtil.isAdmin() || AclPermissionUtil.isAdminInProject(project, groups)) {
-            return;
-        }
-        Set<String> allAuthTables = Sets.newHashSet();
-        Set<String> allAuthColumns = Sets.newHashSet();
-        var auths = getManager(AclTCRManager.class, project).getAuthTablesAndColumns(project, userName, true);
-        allAuthTables.addAll(auths.getTables());
-        allAuthColumns.addAll(auths.getColumns());
-        for (val group : groups) {
-            auths = getManager(AclTCRManager.class, project).getAuthTablesAndColumns(project, group, false);
-            allAuthTables.addAll(auths.getTables());
-            allAuthColumns.addAll(auths.getColumns());
-        }
-
-        NDataModel model = getModelById(modelId, project);
-        Set<String> tablesInModel = Sets.newHashSet();
-        model.getJoinTables().forEach(table -> tablesInModel.add(table.getTable()));
-        tablesInModel.add(model.getRootFactTableName());
-        tablesInModel.forEach(table -> {
-            if (!allAuthTables.contains(table)) {
-                throw new KylinException(FAILED_UPDATE_MODEL, MsgPicker.getMsg().getModelModifyAbandon(table));
-            }
-        });
-        tablesInModel.stream().filter(allAuthTables::contains).forEach(table -> {
-            ColumnDesc[] columnDescs = NTableMetadataManager.getInstance(getConfig(), project).getTableDesc(table)
-                    .getColumns();
-            Arrays.stream(columnDescs).map(column -> table + "." + column.getName()).forEach(column -> {
-                if (!allAuthColumns.contains(column)) {
-                    throw new KylinException(FAILED_UPDATE_MODEL, MsgPicker.getMsg().getModelModifyAbandon(column));
-                }
-            });
-        });
-    }
-
     public InvalidIndexesResponse detectInvalidIndexes(ModelRequest request) {
         String project = request.getProject();
         aclEvaluate.checkProjectReadPermission(project);
@@ -4540,46 +4199,6 @@ public class ModelService extends BasicService implements TableModelSupporter, P
         request.setProject(projectName);
     }
 
-    public Boolean validateExport(String projectName, String modelId) {
-        val dataModelDesc = getModelById(modelId, projectName);
-        List<MeasureDef> measureDefs = dataModelDesc.getEffectiveMeasures().values().stream().map(MeasureDef::new)
-                .collect(Collectors.toList());
-        val measures = measureDefs.stream().map(measureDef -> measureDef.getMeasure().getName())
-                .collect(Collectors.toSet());
-        val columns = dataModelDesc.getAllNamedColumns().stream().filter(column -> !column.isDimension())
-                .map(NDataModel.NamedColumn::getName).collect(Collectors.toSet());
-
-        // check duplicate name in measures and other model columns
-        val duplicateMeasureAndModelColumn = Sets.intersection(measures, columns);
-        if (CollectionUtils.isNotEmpty(duplicateMeasureAndModelColumn)) {
-            val duplicateName = duplicateMeasureAndModelColumn.stream().findFirst().orElse(null);
-            throw new KylinException(MODEL_EXPORT_ERROR, String.format(Locale.ROOT,
-                    MsgPicker.getMsg().getDuplicateModelColumnAndMeasureName(), duplicateName, duplicateName));
-        }
-
-        // check duplicate name in dimension columns and measures
-        val dimensionCols = dataModelDesc.getEffectiveDimensions().values().stream().map(TblColRef::getColumnDesc)
-                .map(ColumnDesc::getName).collect(Collectors.toSet());
-        val duplicateDimColAndMeasureNames = Sets.intersection(dimensionCols, measures);
-        if (CollectionUtils.isNotEmpty(duplicateDimColAndMeasureNames)) {
-            val duplicateName = duplicateDimColAndMeasureNames.stream().findFirst().orElse(null);
-            throw new KylinException(MODEL_EXPORT_ERROR, String.format(Locale.ROOT,
-                    MsgPicker.getMsg().getDuplicateDimensionColAndMeasureName(), duplicateName, duplicateName));
-        }
-
-        // check duplicate name in dimensions and measures
-        val dimNames = dataModelDesc.getAllNamedColumns().stream().filter(NDataModel.NamedColumn::isDimension)
-                .map(NDataModel.NamedColumn::getName).collect(Collectors.toSet());
-        val duplicateDimMeasureNames = Sets.intersection(dimNames, measures);
-        if (CollectionUtils.isNotEmpty(duplicateDimMeasureNames)) {
-            val duplicateName = duplicateDimMeasureNames.stream().findFirst().orElse(null);
-            throw new KylinException(MODEL_EXPORT_ERROR, String.format(Locale.ROOT,
-                    MsgPicker.getMsg().getDuplicateDimensionNameAndMeasureName(), duplicateName, duplicateName));
-        }
-
-        return true;
-    }
-
     public void checkCCEmpty(ModelRequest modelRequest) {
         List<ComputedColumnDesc> ccList = modelRequest.getComputedColumnDescs();
         if (CollectionUtils.isEmpty(ccList)) {
diff --git a/src/modeling-service/src/main/java/org/apache/kylin/rest/service/ModelTdsService.java b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/ModelTdsService.java
new file mode 100644
index 0000000000..df277831e5
--- /dev/null
+++ b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/ModelTdsService.java
@@ -0,0 +1,336 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.rest.service;
+
+import static org.apache.kylin.common.exception.ServerErrorCode.MODEL_BROKEN;
+import static org.apache.kylin.common.exception.ServerErrorCode.UNAUTHORIZED_ENTITY;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.MODEL_TDS_EXPORT_COLUMN_AND_MEASURE_NAME_CONFLICT;
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.MODEL_TDS_EXPORT_DIM_COL_AND_MEASURE_NAME_CONFLICT;
+
+import java.io.IOException;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+import javax.servlet.http.HttpServletResponse;
+
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.exception.CommonErrorCode;
+import org.apache.kylin.common.exception.KylinException;
+import org.apache.kylin.common.exception.ServerErrorCode;
+import org.apache.kylin.common.msg.MsgPicker;
+import org.apache.kylin.engine.spark.smarter.IndexDependencyParser;
+import org.apache.kylin.metadata.acl.AclTCRDigest;
+import org.apache.kylin.metadata.acl.AclTCRManager;
+import org.apache.kylin.metadata.cube.model.NDataflow;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
+import org.apache.kylin.metadata.model.ComputedColumnDesc;
+import org.apache.kylin.metadata.model.NDataModel;
+import org.apache.kylin.metadata.model.NDataModelManager;
+import org.apache.kylin.metadata.model.TableRef;
+import org.apache.kylin.metadata.model.TblColRef;
+import org.apache.kylin.metadata.model.util.ComputedColumnUtil;
+import org.apache.kylin.metadata.project.NProjectManager;
+import org.apache.kylin.metadata.project.ProjectInstance;
+import org.apache.kylin.metadata.realization.RealizationStatusEnum;
+import org.apache.kylin.rest.security.MutableAclRecord;
+import org.apache.kylin.rest.util.AclPermissionUtil;
+import org.apache.kylin.tool.bisync.BISyncModel;
+import org.apache.kylin.tool.bisync.BISyncTool;
+import org.apache.kylin.tool.bisync.SyncContext;
+import org.apache.kylin.tool.bisync.SyncModelBuilder;
+import org.apache.kylin.tool.bisync.model.ColumnDef;
+import org.apache.kylin.tool.bisync.model.MeasureDef;
+import org.apache.kylin.tool.bisync.model.SyncModel;
+import org.springframework.stereotype.Component;
+
+import com.google.common.collect.Sets;
+
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+@Component("modelTdsService")
+public class ModelTdsService extends AbstractModelService {
+
+    /**
+     * Streams the given BI sync model to the HTTP response as a downloadable
+     * TDS attachment named {@code <project>_<modelAlias>_<timestamp>.tds}.
+     *
+     * @param syncContext carries project/model ids and the target BI type
+     * @param syncModel   the already-built source sync model to serialize
+     * @param response    servlet response the TDS payload is written to
+     * @throws IOException    if writing to the response stream fails
+     * @throws KylinException if the target BI is not a Tableau TDS variant
+     */
+    public void dumpSyncModel(SyncContext syncContext, SyncModel syncModel, HttpServletResponse response)
+            throws IOException {
+        String projectName = syncContext.getProjectName();
+        String modelId = syncContext.getModelId();
+        SyncContext.BI exportAs = syncContext.getTargetBI();
+        BISyncModel biSyncModel = BISyncTool.getBISyncModel(syncContext, syncModel);
+
+        NDataModelManager manager = NDataModelManager.getInstance(KylinConfig.getInstanceFromEnv(), projectName);
+        NDataModel dataModel = manager.getDataModelDesc(modelId);
+        String alias = dataModel.getAlias();
+        // Timestamp suffix keeps repeated exports distinct. SimpleDateFormat is not
+        // thread-safe in general, but a fresh instance is created per call, so this is safe.
+        String fileName = String.format(Locale.ROOT, "%s_%s_%s", projectName, alias,
+                new SimpleDateFormat("yyyyMMddHHmmss", Locale.getDefault(Locale.Category.FORMAT)).format(new Date()));
+        switch (exportAs) {
+        case TABLEAU_CONNECTOR_TDS:
+        case TABLEAU_ODBC_TDS:
+            response.setContentType("application/xml");
+            response.setHeader("Content-Disposition",
+                    String.format(Locale.ROOT, "attachment; filename=\"%s.tds\"", fileName));
+            break;
+        default:
+            throw new KylinException(CommonErrorCode.UNKNOWN_ERROR_CODE, "unrecognized export target");
+        }
+        biSyncModel.dump(response.getOutputStream());
+        response.getOutputStream().flush();
+        response.getOutputStream().close();
+    }
+
+    /**
+     * Verifies that no visible measure name collides with a column name before export.
+     * The check is skipped entirely when the project config enables {@code skipCheckTds()}.
+     *
+     * @param syncModel model about to be exported
+     * @return {@code true} when there is no conflict (or the check is skipped)
+     * @throws KylinException naming the first conflicting identifier otherwise
+     */
+    public boolean preCheckNameConflict(SyncModel syncModel) {
+        ProjectInstance prjInstance = getManager(NProjectManager.class).getProject(syncModel.getProject());
+        boolean skipCheckTds = prjInstance.getConfig().skipCheckTds();
+        // Only measures that will actually appear in the TDS (not hidden) can conflict.
+        Set<String> measureNames = syncModel.getMetrics().stream().filter(measureDef -> !measureDef.isHidden())
+                .map(measureDef -> measureDef.getMeasure().getName()).collect(Collectors.toSet());
+        // NOTE(review): Collectors.toMap throws IllegalStateException on duplicate keys —
+        // presumably column names are unique across the model's columnDefMap; verify.
+        Map<String, ColumnDef> nameOfColDefMap = syncModel.getColumnDefMap().values().stream()
+                .collect(Collectors.toMap(ColumnDef::getColumnName, Function.identity()));
+        // Sets.intersection returns a lazy view; it is only evaluated by isEmpty/iterator below.
+        Sets.SetView<String> intersection = Sets.intersection(nameOfColDefMap.keySet(), measureNames);
+        if (skipCheckTds || CollectionUtils.isEmpty(intersection)) {
+            return true;
+        }
+
+        // Report only the first conflict; the error code distinguishes dimension vs plain column.
+        String name = intersection.iterator().next();
+        ColumnDef columnDef = nameOfColDefMap.get(name);
+        if (columnDef.isDimension()) {
+            throw new KylinException(MODEL_TDS_EXPORT_DIM_COL_AND_MEASURE_NAME_CONFLICT, name, name);
+        } else {
+            throw new KylinException(MODEL_TDS_EXPORT_COLUMN_AND_MEASURE_NAME_CONFLICT, name, name);
+        }
+    }
+
+    /**
+     * Builds the full source sync model for the context's model after verifying
+     * both export permission (table/column ACL) and general model permission.
+     */
+    public SyncModel exportModel(SyncContext syncContext) {
+        checkModelExportPermission(syncContext.getProjectName(), syncContext.getModelId());
+        checkModelPermission(syncContext.getProjectName(), syncContext.getModelId());
+        return new SyncModelBuilder(syncContext).buildSourceSyncModel();
+    }
+
+    /**
+     * Builds a sync model restricted to what a non-admin user is authorized to see.
+     * Authorized tables/columns are the union of the current user's ACL digest and
+     * the digests of all groups the user belongs to.
+     *
+     * @param syncContext export context (project, model, BI target, element scope)
+     * @param dimensions  requested dimension identifiers (alias.column form)
+     * @param measures    requested measure names
+     * @throws KylinException if the user lacks column permission on the request
+     */
+    public SyncModel exportTDSDimensionsAndMeasuresByNormalUser(SyncContext syncContext, List<String> dimensions,
+            List<String> measures) {
+        String project = syncContext.getProjectName();
+        String modelId = syncContext.getModelId();
+
+        Set<String> authTables = Sets.newHashSet();
+        Set<String> authColumns = Sets.newHashSet();
+        AclTCRManager aclMgr = getManager(AclTCRManager.class, project);
+        // User-level grants first, then merge in every group-level grant.
+        AclTCRDigest uDigest = aclMgr.getAuthTablesAndColumns(project, aclEvaluate.getCurrentUserName(), true);
+        authTables.addAll(uDigest.getTables());
+        authColumns.addAll(uDigest.getColumns());
+        getCurrentUserGroups().forEach(group -> {
+            AclTCRDigest gDigest = aclMgr.getAuthTablesAndColumns(project, group, false);
+            authTables.addAll(gDigest.getTables());
+            authColumns.addAll(gDigest.getColumns());
+        });
+
+        // Translate authorized canonical column names into alias.column identifiers
+        // for the tables actually referenced by this model.
+        Set<String> authorizedCols = Sets.newHashSet();
+        getModelById(modelId, project).getAllTables().forEach(tableRef -> {
+            List<String> colIdentityList = tableRef.getColumns().stream()
+                    .filter(colRef -> authColumns.contains(colRef.getCanonicalName())) //
+                    .map(TblColRef::getAliasDotName) //
+                    .collect(Collectors.toList());
+            authorizedCols.addAll(colIdentityList);
+        });
+
+        checkTableHasColumnPermission(syncContext.getModelElement(), project, modelId, authorizedCols, dimensions,
+                measures);
+        return new SyncModelBuilder(syncContext).buildHasPermissionSourceSyncModel(authTables, authColumns, dimensions,
+                measures);
+    }
+
+    /**
+     * Admin-path export: checks export and model permission, then builds a sync
+     * model limited to the requested dimensions and measures.
+     */
+    public SyncModel exportTDSDimensionsAndMeasuresByAdmin(SyncContext syncContext, List<String> dimensions,
+            List<String> measures) {
+        String projectName = syncContext.getProjectName();
+        String modelId = syncContext.getModelId();
+        checkModelExportPermission(projectName, modelId);
+        checkModelPermission(projectName, modelId);
+        return new SyncModelBuilder(syncContext).buildSourceSyncModel(dimensions, measures);
+    }
+
+    // Rejects export of a model whose dataflow is in BROKEN state.
+    private void checkBrokenModel(String projectName, String modelId) {
+        NDataflow dataflow = getManager(NDataflowManager.class, projectName).getDataflow(modelId);
+        if (dataflow.getStatus() == RealizationStatusEnum.BROKEN) {
+            throw new KylinException(MODEL_BROKEN, "The model is broken and cannot be exported TDS file");
+        }
+    }
+
+    /**
+     * Assembles a {@link SyncContext} for a TDS export request, after verifying
+     * the model is not broken. Host/port identify the Kylin endpoint embedded in
+     * the generated connector file.
+     */
+    public SyncContext prepareSyncContext(String projectName, String modelId, SyncContext.BI targetBI,
+            SyncContext.ModelElement modelElement, String host, int port) {
+        checkBrokenModel(projectName, modelId);
+        SyncContext syncContext = new SyncContext();
+        syncContext.setProjectName(projectName);
+        syncContext.setModelId(modelId);
+        syncContext.setTargetBI(targetBI);
+        syncContext.setModelElement(modelElement);
+        syncContext.setHost(host);
+        syncContext.setPort(port);
+        syncContext.setAdmin(AclPermissionUtil.isAdmin());
+        syncContext.setDataflow(getManager(NDataflowManager.class, projectName).getDataflow(modelId));
+        syncContext.setKylinConfig(getManager(NProjectManager.class).getProject(projectName).getConfig());
+        return syncContext;
+    }
+
+    /**
+     * Validates a non-admin user's column-level access for the export request:
+     * every join's PK and FK columns must be authorized, at least one model table
+     * must expose an authorized column, and for CUSTOM_COLS requests the selected
+     * dimensions/measures must all be authorized. Admins bypass the check.
+     *
+     * @throws KylinException with INVALID_TABLE_AUTH when any condition fails
+     */
+    public void checkTableHasColumnPermission(SyncContext.ModelElement modelElement, String project, String modelId,
+            Set<String> authColumns, List<String> dimensions, List<String> measures) {
+        if (AclPermissionUtil.isAdmin()) {
+            return;
+        }
+        aclEvaluate.checkProjectReadPermission(project);
+
+        NDataModel model = getModelById(modelId, project);
+        // Count joins whose PK and FK column sets are both fully authorized.
+        long jointCount = model.getJoinTables().stream()
+                .filter(table -> authColumns
+                        .containsAll(Arrays.stream(table.getJoin().getPrimaryKeyColumns())
+                                .map(TblColRef::getAliasDotName).collect(Collectors.toSet()))
+                        && authColumns.containsAll(Arrays.stream(table.getJoin().getForeignKeyColumns())
+                                .map(TblColRef::getAliasDotName).collect(Collectors.toSet())))
+                .count();
+        // Count tables with at least one authorized column.
+        long singleTableCount = model.getAllTables().stream().filter(ref -> ref.getColumns().stream()
+                .map(TblColRef::getAliasDotName).collect(Collectors.toSet()).stream().anyMatch(authColumns::contains))
+                .count();
+
+        if (jointCount != model.getJoinTables().size() || singleTableCount == 0
+                || (modelElement.equals(SyncContext.ModelElement.CUSTOM_COLS)
+                        && !checkColumnPermission(model, authColumns, dimensions, measures))) {
+            throw new KylinException(ServerErrorCode.INVALID_TABLE_AUTH,
+                    MsgPicker.getMsg().getTableNoColumnsPermission());
+        }
+    }
+
+    /**
+     * Returns whether every requested dimension and measure is covered by the
+     * authorized column set. An empty measure list counts as authorized.
+     */
+    public boolean checkColumnPermission(NDataModel model, Set<String> authColumns, List<String> dimensions,
+            List<String> measures) {
+
+        if (!checkDimensionPermission(model, authColumns, dimensions)) {
+            return false;
+        }
+        if (CollectionUtils.isEmpty(measures)) {
+            return true;
+        }
+        // A measure is authorized only if all of its (possibly CC-derived) columns are.
+        List<MeasureDef> authMeasures = model.getEffectiveMeasures().values().stream()
+                .filter(measure -> measures.contains(measure.getName()))
+                .filter(measure -> checkMeasurePermission(authColumns, measure, model)).map(MeasureDef::new)
+                .collect(Collectors.toList());
+        return authMeasures.size() == measures.size();
+
+    }
+
+    // Dimensions backed by computed columns are authorized only when every source
+    // column of the CC expression is authorized; plain dimensions are checked directly.
+    private boolean checkDimensionPermission(NDataModel model, Set<String> authColumns, List<String> dimensions) {
+        if (CollectionUtils.isEmpty(dimensions)) {
+            return true;
+        }
+        List<ComputedColumnDesc> computedColumnDescs = model.getComputedColumnDescs().stream()
+                .filter(cc -> dimensions.contains(cc.getFullName())).collect(Collectors.toList());
+
+        long authComputedCount = computedColumnDescs.stream()
+                .filter(cc -> authColumns.containsAll(convertCCToNormalCols(model, cc))).count();
+
+        if (computedColumnDescs.size() != authComputedCount) {
+            return false;
+        }
+
+        // Remaining (non-CC) dimensions must each be directly authorized.
+        List<String> normalColumns = dimensions.stream().filter(column -> !computedColumnDescs.stream()
+                .map(ComputedColumnDesc::getFullName).collect(Collectors.toList()).contains(column))
+                .collect(Collectors.toList());
+        return authColumns.containsAll(normalColumns);
+    }
+
+    /**
+     * Expands a computed column into the alias.column names of its underlying
+     * source columns. Returns an empty set (after logging a warning) when the CC
+     * expression cannot be parsed — callers then treat the CC as unauthorized.
+     */
+    public Set<String> convertCCToNormalCols(NDataModel model, ComputedColumnDesc computedColumnDesc) {
+        IndexDependencyParser parser = new IndexDependencyParser(model);
+        try {
+            Set<TblColRef> tblColRefList = parser.unwrapComputeColumn(computedColumnDesc.getInnerExpression());
+            return tblColRefList.stream().map(TblColRef::getAliasDotName).collect(Collectors.toSet());
+        } catch (Exception e) {
+            log.warn("UnWrap computed column {} in project {} model {} exception",
+                    computedColumnDesc.getInnerExpression(), model.getProject(), model.getAlias(), e);
+        }
+        return Collections.emptySet();
+    }
+
+    // Mirrors checkDimensionPermission for a single measure: every parameter column,
+    // including columns reached through computed columns, must be authorized.
+    private boolean checkMeasurePermission(Set<String> authColumns, NDataModel.Measure measure, NDataModel model) {
+        Set<String> measureColumns = measure.getFunction().getParameters().stream()
+                .filter(parameterDesc -> parameterDesc.getColRef() != null)
+                .map(parameterDesc -> parameterDesc.getColRef().getAliasDotName()).collect(Collectors.toSet());
+
+        List<ComputedColumnDesc> computedColumnDescs = model.getComputedColumnDescs().stream()
+                .filter(cc -> measureColumns.contains(cc.getFullName())).collect(Collectors.toList());
+
+        long authComputedCount = computedColumnDescs.stream()
+                .filter(cc -> authColumns.containsAll(convertCCToNormalCols(model, cc))).count();
+
+        if (computedColumnDescs.size() != authComputedCount) {
+            return false;
+        }
+
+        List<String> normalColumns = measureColumns.stream().filter(column -> !computedColumnDescs.stream()
+                .map(ComputedColumnDesc::getFullName).collect(Collectors.toList()).contains(column))
+                .collect(Collectors.toList());
+
+        return authColumns.containsAll(normalColumns);
+    }
+
+    /**
+     * Rejects export when the current (non-admin) user lacks full table/column
+     * permission on the model. Computed columns are exempt when all of their
+     * source columns are authorized. Project admins pass unconditionally.
+     *
+     * NOTE(review): parameter name "modeId" looks like a typo for "modelId" —
+     * private method, so renaming would be a safe follow-up.
+     */
+    private void checkModelExportPermission(String project, String modeId) {
+        if (AclPermissionUtil.isAdmin()) {
+            return;
+        }
+        aclEvaluate.checkProjectReadPermission(project);
+
+        NDataModel model = getManager(NDataModelManager.class, project).getDataModelDesc(modeId);
+        Map<String, Set<String>> modelTableColumns = new HashMap<>();
+        for (TableRef tableRef : model.getAllTables()) {
+            modelTableColumns.putIfAbsent(tableRef.getTableIdentity(), new HashSet<>());
+            modelTableColumns.get(tableRef.getTableIdentity())
+                    .addAll(tableRef.getColumns().stream().map(TblColRef::getName).collect(Collectors.toSet()));
+        }
+        AclTCRManager aclManager = AclTCRManager.getInstance(KylinConfig.getInstanceFromEnv(), project);
+
+        String currentUserName = AclPermissionUtil.getCurrentUsername();
+        Set<String> groupsOfExecuteUser = accessService.getGroupsOfExecuteUser(currentUserName);
+        MutableAclRecord acl = AclPermissionUtil.getProjectAcl(project);
+        Set<String> groupsInProject = AclPermissionUtil.filterGroupsInProject(groupsOfExecuteUser, acl);
+        // Project-level admins are also exempt from the column-by-column check.
+        if (AclPermissionUtil.isAdminInProject(project, groupsOfExecuteUser)) {
+            return;
+        }
+        AclTCRDigest digest = aclManager.getAllUnauthorizedTableColumn(currentUserName, groupsInProject,
+                modelTableColumns);
+        // Unauthorized columns that are actually authorized computed columns do not block export.
+        Set<String> authorizedCC = ComputedColumnUtil
+                .getAuthorizedCC(Collections.singletonList(model),
+                        ccSourceCols -> aclManager.isColumnsAuthorized(currentUserName, groupsOfExecuteUser,
+                                ccSourceCols))
+                .stream().map(ComputedColumnDesc::getIdentName).collect(Collectors.toSet());
+        if (digest.getColumns() != null && !digest.getColumns().isEmpty()
+                && digest.getColumns().stream().anyMatch(column -> !authorizedCC.contains(column))) {
+            throw new KylinException(UNAUTHORIZED_ENTITY,
+                    "current user does not have full permission on requesting model");
+        }
+        if (digest.getTables() != null && !digest.getTables().isEmpty()) {
+            throw new KylinException(UNAUTHORIZED_ENTITY,
+                    "current user does not have full permission on requesting model");
+        }
+    }
+}
diff --git a/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java
index 6ed591f405..adad3571f6 100644
--- a/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java
+++ b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java
@@ -51,7 +51,6 @@ import java.io.DataOutputStream;
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.InputStreamReader;
 import java.lang.reflect.Field;
 import java.nio.charset.Charset;
 import java.nio.file.Files;
@@ -123,6 +122,8 @@ import org.apache.kylin.metadata.cube.model.PartitionStatusEnum;
 import org.apache.kylin.metadata.cube.model.PartitionStatusEnumToDisplay;
 import org.apache.kylin.metadata.cube.model.RuleBasedIndex;
 import org.apache.kylin.metadata.cube.optimization.FrequencyMap;
+import org.apache.kylin.metadata.favorite.FavoriteRule;
+import org.apache.kylin.metadata.favorite.FavoriteRuleManager;
 import org.apache.kylin.metadata.model.AutoMergeTimeEnum;
 import org.apache.kylin.metadata.model.BadModelException;
 import org.apache.kylin.metadata.model.BadModelException.CauseType;
@@ -154,6 +155,9 @@ import org.apache.kylin.metadata.model.util.scd2.SimplifiedJoinTableDesc;
 import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
 import org.apache.kylin.metadata.query.QueryTimesResponse;
 import org.apache.kylin.metadata.realization.RealizationStatusEnum;
+import org.apache.kylin.metadata.recommendation.candidate.JdbcRawRecStore;
+import org.apache.kylin.metadata.user.ManagedUser;
+import org.apache.kylin.query.util.KapQueryUtil;
 import org.apache.kylin.rest.config.initialize.ModelBrokenListener;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.constant.ModelStatusToDisplayEnum;
@@ -187,8 +191,6 @@ import org.apache.kylin.rest.util.AclUtil;
 import org.apache.kylin.rest.util.SCD2SimplificationConvertUtil;
 import org.apache.kylin.streaming.jobs.StreamingJobListener;
 import org.apache.kylin.streaming.manager.StreamingJobManager;
-import org.apache.kylin.tool.bisync.SyncContext;
-import org.apache.kylin.tool.bisync.tableau.TableauDatasourceModel;
 import org.apache.kylin.util.BrokenEntityProxy;
 import org.apache.kylin.util.PasswordEncodeFactory;
 import org.hamcrest.BaseMatcher;
@@ -209,19 +211,12 @@ import org.springframework.security.core.context.SecurityContextHolder;
 import org.springframework.security.crypto.password.PasswordEncoder;
 import org.springframework.test.util.ReflectionTestUtils;
 
-import com.google.common.base.Charsets;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
-import com.google.common.io.CharStreams;
 import com.google.common.primitives.Longs;
 
 import io.kyligence.kap.clickhouse.MockSecondStorage;
-import org.apache.kylin.metadata.favorite.FavoriteRule;
-import org.apache.kylin.metadata.favorite.FavoriteRuleManager;
-import org.apache.kylin.metadata.recommendation.candidate.JdbcRawRecStore;
-import org.apache.kylin.metadata.user.ManagedUser;
-import org.apache.kylin.query.util.KapQueryUtil;
 import io.kyligence.kap.secondstorage.SecondStorageNodeHelper;
 import io.kyligence.kap.secondstorage.SecondStorageUtil;
 import io.kyligence.kap.secondstorage.config.Node;
@@ -433,7 +428,6 @@ public class ModelServiceTest extends SourceTestCase {
         Assert.assertEquals("nmodel_basic_inner", models.get(0).getAlias());
     }
 
-//    @Ignore("TODO: re-run to check.")
     @Test
     public void testGetFusionModels() {
         List<NDataModelResponse> models = modelService.getModels("", "streaming_test", false, "", null, "usage", true);
@@ -528,8 +522,7 @@ public class ModelServiceTest extends SourceTestCase {
         Assert.assertEquals(1, models.size());
         NDataModelResponse model = models.get(0);
         Assert.assertTrue(model.getSimpleTables().stream().map(SimplifiedTableResponse::getColumns)
-                .flatMap(List::stream)
-                .anyMatch(SimplifiedColumnResponse::isComputedColumn));
+                .flatMap(List::stream).anyMatch(SimplifiedColumnResponse::isComputedColumn));
     }
 
     @Test
@@ -570,8 +563,7 @@ public class ModelServiceTest extends SourceTestCase {
         NIndexPlanManager indexPlanManager = NIndexPlanManager.getInstance(getTestConfig(), getProject());
         val indexPlan = indexPlanManager.getIndexPlan(modelId);
         indexPlanManager.updateIndexPlan(modelId, copyForWrite -> {
-            copyForWrite.markIndexesToBeDeleted(modelId,
-                    new HashSet<>(indexPlan.getAllLayouts()));
+            copyForWrite.markIndexesToBeDeleted(modelId, new HashSet<>(indexPlan.getAllLayouts()));
             copyForWrite.getIndexes().clear();
         });
         NDataflowManager dataflowManager = NDataflowManager.getInstance(KylinConfig.getInstanceFromEnv(), "default");
@@ -1392,7 +1384,6 @@ public class ModelServiceTest extends SourceTestCase {
                 && models.get(0).getStatus() == ModelStatusToDisplayEnum.OFFLINE);
     }
 
-//    @Ignore("TODO: re-run to check.")
     @Test
     public void testUpdateFusionDataModelStatus() {
         val project = "streaming_test";
@@ -1417,7 +1408,6 @@ public class ModelServiceTest extends SourceTestCase {
         Assert.assertEquals(ModelStatusToDisplayEnum.OFFLINE, models.get(0).getStatus());
     }
 
-//    @Ignore("TODO: re-run to check.")
     @Test
     public void testUpdateFusionDataModelStatus1() {
         val project = "streaming_test";
@@ -1447,7 +1437,6 @@ public class ModelServiceTest extends SourceTestCase {
 
     }
 
-//    @Ignore("TODO: re-run to check.")
     @Test
     public void testUpdateFusionDataModelStatus2() {
         val project = "streaming_test";
@@ -4770,7 +4759,6 @@ public class ModelServiceTest extends SourceTestCase {
         Assert.assertTrue(model.isBroken());
     }
 
-//    @Ignore("TODO: re-run to check.")
     @Test
     public void testGetBrokenFusionModel() {
         String project = "streaming_test";
@@ -5093,394 +5081,6 @@ public class ModelServiceTest extends SourceTestCase {
         }
     }
 
-    @Test
-    public void testExportTDSByAdmin() throws Exception {
-        val project = "default";
-        val modelId = "cb596712-3a09-46f8-aea1-988b43fe9b6c";
-        prepareBasic(project);
-        List<String> dimensions = Lists.newArrayList();
-        dimensions.add("DEFAULT.TEST_MEASURE.FLAG");
-        dimensions.add("DEFAULT.TEST_MEASURE.PRICE1");
-        dimensions.add("DEFAULT.TEST_MEASURE.ID1");
-        List<String> measurs = Lists.newArrayList();
-        measurs.add("COUNT_STAR");
-        measurs.add("SUM_1");
-        SyncContext syncContext = modelService.getADMINSyncContext(project, modelId,
-                SyncContext.BI.TABLEAU_CONNECTOR_TDS, SyncContext.ModelElement.CUSTOM_COLS, "localhost", 8080);
-        TableauDatasourceModel datasource1 = (TableauDatasourceModel) modelService
-                .exportTDSDimensionsAndMeasuresByAdmin(syncContext, dimensions, measurs);
-        ByteArrayOutputStream outStream4 = new ByteArrayOutputStream();
-        datasource1.dump(outStream4);
-        Assert.assertEquals(getExpectedTds("/bisync_tableau/nmodel_full_measure_test.connector_admin.tds"),
-                outStream4.toString(Charset.defaultCharset().name()));
-    }
-
-    @Test
-    public void testExportTDSByUser() throws Exception {
-        val project = "default";
-        val modelId = "cb596712-3a09-46f8-aea1-988b43fe9b6c";
-        prepareBasic(project);
-        List<String> dimensions = Lists.newArrayList();
-        dimensions.add("TEST_MEASURE.ID1");
-        dimensions.add("TEST_MEASURE.ID2");
-        dimensions.add("TEST_MEASURE.ID3");
-        dimensions.add("TEST_MEASURE1.ID1");
-        dimensions.add("TEST_MEASURE1.NAME1");
-        dimensions.add("TEST_MEASURE1.NAME2");
-        dimensions.add("TEST_MEASURE1.NAME3");
-        List<String> measurs = Lists.newArrayList();
-        measurs.add("COUNT_STAR");
-        measurs.add("SUM_1");
-        SecurityContextHolder.getContext()
-                .setAuthentication(new TestingAuthenticationToken("u1", "ANALYST", Constant.ROLE_ANALYST));
-        SyncContext syncContext = modelService.getSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
-                SyncContext.ModelElement.CUSTOM_COLS, "localhost", 8080);
-        TableauDatasourceModel datasource1 = (TableauDatasourceModel) modelService
-                .exportTDSDimensionsAndMeasuresByNormalUser(syncContext, dimensions, measurs);
-        ByteArrayOutputStream outStream4 = new ByteArrayOutputStream();
-        datasource1.dump(outStream4);
-        Assert.assertEquals(getExpectedTds("/bisync_tableau/nmodel_full_measure_test.connector_user.tds"),
-                outStream4.toString(Charset.defaultCharset().name()));
-    }
-
-    @Test
-    public void testExportTDSByUserAndElement() throws Exception {
-        val project = "default";
-        val modelId = "cb596712-3a09-46f8-aea1-988b43fe9b6c";
-        prepareBasic(project);
-        try {
-            SecurityContextHolder.getContext()
-                    .setAuthentication(new TestingAuthenticationToken("u1", "ANALYST", Constant.ROLE_ANALYST));
-            SyncContext syncContext = modelService.getSyncContext(project, modelId,
-                    SyncContext.BI.TABLEAU_CONNECTOR_TDS, SyncContext.ModelElement.AGG_INDEX_COL, "localhost", 8080);
-            TableauDatasourceModel datasource1 = (TableauDatasourceModel) modelService
-                    .exportTDSDimensionsAndMeasuresByNormalUser(syncContext, null, null);
-            ByteArrayOutputStream outStream4 = new ByteArrayOutputStream();
-            datasource1.dump(outStream4);
-            Assert.assertEquals(
-                    getExpectedTds("/bisync_tableau/nmodel_full_measure_test.connector_user_agg_index_col.tds"),
-                    outStream4.toString(Charset.defaultCharset().name()));
-
-            TableauDatasourceModel datasource = (TableauDatasourceModel) modelService
-                    .exportTDSDimensionsAndMeasuresByNormalUser(syncContext, new ArrayList<>(), new ArrayList<>());
-        } finally {
-            SecurityContextHolder.getContext()
-                    .setAuthentication(new TestingAuthenticationToken("ADMIN", "ADMIN", Constant.ROLE_ADMIN));
-        }
-    }
-
-    @Test
-    public void testCheckModelExportPermissionException() {
-        val project = "default";
-        val modelId = "cb596712-3a09-46f8-aea1-988b43fe9b6c";
-        prepareBasic(project);
-        modelService.getADMINSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
-                SyncContext.ModelElement.AGG_INDEX_COL, "localhost", 8080);
-        try {
-            Mockito.when(accessService.getGroupsOfExecuteUser(Mockito.any(String.class)))
-                    .thenReturn(Sets.newHashSet("ROLE_ANALYST"));
-            SecurityContextHolder.getContext()
-                    .setAuthentication(new TestingAuthenticationToken("u1", "ANALYST", Constant.ROLE_ANALYST));
-            thrown.expect(KylinException.class);
-            thrown.expectMessage("current user does not have full permission on requesting model");
-            modelService.getADMINSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
-                    SyncContext.ModelElement.AGG_INDEX_COL, "localhost", 8080);
-        } finally {
-            SecurityContextHolder.getContext()
-                    .setAuthentication(new TestingAuthenticationToken("ADMIN", "ADMIN", Constant.ROLE_ADMIN));
-        }
-    }
-
-    @Test
-    public void testCheckModelExportPermission() {
-        val project = "default";
-        val modelId = "cb596712-3a09-46f8-aea1-988b43fe9b6c";
-        prepareBasic(project);
-        modelService.getADMINSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
-                SyncContext.ModelElement.AGG_INDEX_COL, "localhost", 8080);
-        modelService.getADMINSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
-                SyncContext.ModelElement.AGG_INDEX_COL, "localhost", 8080);
-    }
-
-    @Test
-    public void testCheckModelExportPermissionWithCC() {
-        val project = "cc_test";
-        val modelId = "0d146f1a-bdd3-4548-87ac-21c2c6f9a0da";
-        AclTCRManager manager = AclTCRManager.getInstance(getTestConfig(), project);
-        {
-            AclTCR u1a1 = new AclTCR();
-            manager.updateAclTCR(u1a1, "u1", true);
-            SecurityContextHolder.getContext()
-                    .setAuthentication(new TestingAuthenticationToken("u1", "ANALYST", Constant.ROLE_ANALYST));
-            Mockito.when(accessService.getGroupsOfExecuteUser(Mockito.any(String.class)))
-                    .thenReturn(Sets.newHashSet("ROLE_ANALYST"));
-            modelService.getADMINSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
-                    SyncContext.ModelElement.AGG_INDEX_COL, "localhost", 8080);
-        }
-        {
-            try {
-                AclTCR u1a1 = new AclTCR();
-                AclTCR.Table u1t1 = new AclTCR.Table();
-                AclTCR.ColumnRow u1cr1 = new AclTCR.ColumnRow();
-                AclTCR.Column u1c1 = new AclTCR.Column();
-                u1c1.add("ORDER_ID");
-                u1cr1.setColumn(u1c1);
-                u1t1.put("SSB.LINEORDER", u1cr1);
-                u1a1.setTable(u1t1);
-                manager.updateAclTCR(u1a1, "u1", true);
-                thrown.expect(KylinException.class);
-                thrown.expectMessage("current user does not have full permission on requesting model");
-                modelService.getADMINSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
-                        SyncContext.ModelElement.AGG_INDEX_COL, "localhost", 8080);
-            } finally {
-                SecurityContextHolder.getContext()
-                        .setAuthentication(new TestingAuthenticationToken("ADMIN", "ADMIN", Constant.ROLE_ADMIN));
-            }
-        }
-
-    }
-
-    @Test
-    public void testExportTDSByBroken() {
-        val project = "test_broken_project";
-        val modelId = "4b93b131-824e-6966-c4dd-5a4268d27095";
-        List<String> dimensions = Lists.newArrayList();
-        List<String> measurs = Lists.newArrayList();
-        SyncContext syncContext = modelService.getSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
-                SyncContext.ModelElement.CUSTOM_COLS, "localhost", 8080);
-        Assert.assertThrows(KylinException.class,
-                () -> modelService.exportTDSDimensionsAndMeasuresByNormalUser(syncContext, dimensions, measurs));
-
-        Assert.assertThrows(KylinException.class,
-                () -> modelService.exportTDSDimensionsAndMeasuresByAdmin(syncContext, dimensions, measurs));
-    }
-
-    @Test
-    public void testExportTDSMeasurePermission() {
-        val project = "default";
-        val modelId = "82fa7671-a935-45f5-8779-85703601f49a";
-        prepareBasicByMeasure(project);
-        List<String> dimensions = Lists.newArrayList();
-        //"ORDER_ID", "PRICE", "CAL_DT", "PRICE", "ITEM_COUNT", "LEAF_CATEG_ID"
-        dimensions.add("TEST_KYLIN_FACT.ORDER_ID");
-        dimensions.add("TEST_KYLIN_FACT.PRICE");
-        dimensions.add("TEST_KYLIN_FACT.CAL_DT");
-        dimensions.add("TEST_KYLIN_FACT.PRICE");
-        dimensions.add("TEST_KYLIN_FACT.ITEM_COUNT");
-        dimensions.add("TEST_KYLIN_FACT.LEAF_CATEG_ID");
-        //"ORDER_ID", "TEST_TIME_ENC", "TEST_DATE_ENC"
-        dimensions.add("TEST_ORDER.ORDER_ID");
-        dimensions.add("TEST_ORDER.TEST_TIME_ENC");
-        dimensions.add("TEST_ORDER.TEST_DATE_ENC");
-        //"ORDER_ID", "PRICE", "CAL_DT", "TRANS_ID"
-        dimensions.add("TEST_MEASURE.ORDER_ID");
-        dimensions.add("TEST_MEASURE.PRICE");
-        dimensions.add("TEST_MEASURE.CAL_DT");
-        dimensions.add("TEST_MEASURE.TRANS_ID");
-
-        List<String> measures = Lists.newArrayList();
-        measures.add("TRANS_CNT");
-        measures.add("GMV_SUM");
-        measures.add("GMV_MIN");
-        measures.add("GMV_MAX");
-        measures.add("ITEM_COUNT_SUM");
-        measures.add("ITEM_COUNT_MAX");
-        measures.add("ITEM_COUNT_MIN");
-        measures.add("SELLER_HLL");
-        measures.add("COUNT_DISTINCT");
-        measures.add("TOP_SELLER");
-        measures.add("TEST_COUNT_DISTINCT_BITMAP");
-        measures.add("GVM_PERCENTILE");
-        SecurityContextHolder.getContext()
-                .setAuthentication(new TestingAuthenticationToken("u1", "ANALYST", Constant.ROLE_ANALYST));
-        SyncContext syncContext = modelService.getSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
-                SyncContext.ModelElement.CUSTOM_COLS, "localhost", 8080);
-        Assert.assertThrows(KylinException.class,
-                () -> modelService.exportTDSDimensionsAndMeasuresByNormalUser(syncContext, dimensions, measures));
-    }
-
-    private void prepareBasicByMeasure(String project) {
-        AclTCRManager manager = AclTCRManager.getInstance(getTestConfig(), project);
-
-        AclTCR u1a1 = new AclTCR();
-        AclTCR.Table u1t1 = new AclTCR.Table();
-        AclTCR.ColumnRow u1cr1 = new AclTCR.ColumnRow();
-        AclTCR.Column u1c1 = new AclTCR.Column();
-        u1c1.addAll(Arrays.asList("ORDER_ID", "PRICE", "CAL_DT", "PRICE", "ITEM_COUNT", "LEAF_CATEG_ID"));
-        u1cr1.setColumn(u1c1);
-
-        AclTCR.ColumnRow u1cr2 = new AclTCR.ColumnRow();
-        AclTCR.Column u1c2 = new AclTCR.Column();
-        u1c2.addAll(Arrays.asList("ORDER_ID", "TEST_TIME_ENC", "TEST_DATE_ENC"));
-        u1cr2.setColumn(u1c2);
-        u1t1.put("DEFAULT.TEST_KYLIN_FACT", u1cr1);
-        u1t1.put("DEFAULT.TEST_ORDER", u1cr2);
-        u1a1.setTable(u1t1);
-        manager.updateAclTCR(u1a1, "u1", true);
-    }
-
-    @Test
-    public void testExportModel() throws Exception {
-        val project = "default";
-        val modelId = "cb596712-3a09-46f8-aea1-988b43fe9b6c";
-        prepareBasic(project);
-        TableauDatasourceModel datasource1 = (TableauDatasourceModel) modelService.exportModel(project, modelId,
-                SyncContext.BI.TABLEAU_CONNECTOR_TDS, SyncContext.ModelElement.AGG_INDEX_AND_TABLE_INDEX_COL,
-                "localhost", 8080);
-        ByteArrayOutputStream outStream4 = new ByteArrayOutputStream();
-        datasource1.dump(outStream4);
-        Assert.assertEquals(getExpectedTds("/bisync_tableau/nmodel_full_measure_test.connector.tds"),
-                outStream4.toString(Charset.defaultCharset().name()));
-    }
-
-    private String getExpectedTds(String path) throws IOException {
-        return CharStreams.toString(new InputStreamReader(getClass().getResourceAsStream(path), Charsets.UTF_8));
-    }
-
-    private void prepareBasic(String project) {
-        AclTCRManager manager = AclTCRManager.getInstance(getTestConfig(), project);
-
-        AclTCR u1a1 = new AclTCR();
-        AclTCR.Table u1t1 = new AclTCR.Table();
-        AclTCR.ColumnRow u1cr1 = new AclTCR.ColumnRow();
-        AclTCR.Column u1c1 = new AclTCR.Column();
-        u1c1.addAll(Arrays.asList("ID1", "ID2", "ID3"));
-        u1cr1.setColumn(u1c1);
-
-        AclTCR.ColumnRow u1cr2 = new AclTCR.ColumnRow();
-        AclTCR.Column u1c2 = new AclTCR.Column();
-        u1c2.addAll(Arrays.asList("ID1", "NAME1", "NAME2", "NAME3"));
-        u1cr2.setColumn(u1c2);
-        u1t1.put("DEFAULT.TEST_MEASURE", u1cr1);
-        u1t1.put("DEFAULT.TEST_MEASURE1", u1cr2);
-        u1a1.setTable(u1t1);
-        manager.updateAclTCR(u1a1, "u1", true);
-
-        AclTCR g1a1 = new AclTCR();
-        AclTCR.Table g1t1 = new AclTCR.Table();
-        AclTCR.ColumnRow g1cr1 = new AclTCR.ColumnRow();
-        AclTCR.Column g1c1 = new AclTCR.Column();
-        g1c1.addAll(Arrays.asList("ID1", "ID2", "ID3", "ID4"));
-        g1cr1.setColumn(g1c1);
-        g1t1.put("DEFAULT.TEST_MEASURE", g1cr1);
-        g1a1.setTable(g1t1);
-        manager.updateAclTCR(g1a1, "g1", false);
-    }
-
-    @Test
-    public void testCheckTablePermission() {
-        val project = "default";
-        val modelId = "cb596712-3a09-46f8-aea1-988b43fe9b6c";
-        thrown.expect(KylinException.class);
-        thrown.expectMessage(MsgPicker.getMsg().getTableNoColumnsPermission());
-
-        AclTCRManager manager = AclTCRManager.getInstance(getTestConfig(), project);
-        Set<String> columns = new HashSet<>();
-        columns.add("DEFAULT.TEST_MEASURE1.NAME1");
-        columns.add("DEFAULT.TEST_MEASURE1.NAME2");
-        columns.add("DEFAULT.TEST_MEASURE1.NAME3");
-
-        AclTCR u1a1 = new AclTCR();
-        AclTCR.Table u1t1 = new AclTCR.Table();
-        AclTCR.ColumnRow u1cr1 = new AclTCR.ColumnRow();
-        AclTCR.Column u1c1 = new AclTCR.Column();
-        u1cr1.setColumn(u1c1);
-
-        AclTCR.ColumnRow u1cr2 = new AclTCR.ColumnRow();
-        AclTCR.Column u1c2 = new AclTCR.Column();
-        u1c2.addAll(Arrays.asList("NAME1", "NAME2", "NAME3"));
-        u1cr2.setColumn(u1c2);
-        u1t1.put("DEFAULT.TEST_MEASURE", u1cr1);
-        u1t1.put("DEFAULT.TEST_MEASURE1", u1cr2);
-        u1a1.setTable(u1t1);
-        manager.updateAclTCR(u1a1, "u1", true);
-        SecurityContextHolder.getContext()
-                .setAuthentication(new TestingAuthenticationToken("u1", "ANALYST", Constant.ROLE_ANALYST));
-        List<String> dimensions = Lists.newArrayList();
-        dimensions.add("TEST_MEASURE.FLAG");
-        dimensions.add("TEST_MEASURE.PRICE1");
-        dimensions.add("TEST_MEASURE.ID1");
-        List<String> measurs = Lists.newArrayList();
-        measurs.add("COUNT_STAR");
-        measurs.add("SUM_1");
-        modelService.checkTableHasColumnPermission(SyncContext.ModelElement.CUSTOM_COLS, project, modelId, columns,
-                dimensions, measurs);
-
-        dimensions.add("TEST_MEASURE.ID4");
-        Assert.assertThrows(KylinException.class,
-                () -> modelService.checkTableHasColumnPermission(SyncContext.ModelElement.CUSTOM_COLS, project, modelId,
-                        columns, dimensions, measurs));
-    }
-
-    @Test
-    public void testExportTDSCheckColumnPermission() {
-        val project = "default";
-        val modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa";
-
-        NDataModelManager modelManager = NDataModelManager.getInstance(KylinConfig.getInstanceFromEnv(), project);
-        NDataModel dataModel = modelManager.getDataModelDesc(modelId);
-
-        Set<String> authColumns = Sets.newHashSet();
-        List<String> dimensions = Lists.newArrayList();
-        List<String> measurs = Lists.newArrayList();
-
-        Assert.assertTrue(modelService.checkColumnPermission(dataModel, authColumns, null, measurs));
-        Assert.assertTrue(modelService.checkColumnPermission(dataModel, authColumns, null, null));
-        Assert.assertTrue(modelService.checkColumnPermission(dataModel, authColumns, dimensions, null));
-        Assert.assertTrue(modelService.checkColumnPermission(dataModel, authColumns, dimensions, measurs));
-
-        authColumns.add("DEFAULT.TEST_KYLIN_FACT.PRICE");
-        authColumns.add("DEFAULT.TEST_KYLIN_FACT.ITEM_COUNT");
-        authColumns.add("EDW.TEST_CAL_DT.CAL_DT");
-        authColumns.add("DEFAULT.TEST_ACCOUNT.ACCOUNT_ID");
-
-        Set<String> newAuthColumns = Sets.newHashSet();
-        dataModel.getAllTables().forEach(tableRef -> {
-            List<TblColRef> collect = tableRef.getColumns().stream()
-                    .filter(column -> authColumns.contains(column.getCanonicalName())).collect(Collectors.toList());
-            collect.forEach(x -> newAuthColumns.add(x.getAliasDotName()));
-        });
-
-        dimensions.add("TEST_KYLIN_FACT.DEAL_AMOUNT");
-        dimensions.add("TEST_KYLIN_FACT.TRANS_ID");
-
-        Assert.assertFalse(modelService.checkColumnPermission(dataModel, newAuthColumns, dimensions, measurs));
-
-        newAuthColumns.add("TEST_KYLIN_FACT.TRANS_ID");
-
-        measurs.add("SUM_NEST4");
-        measurs.add("COUNT_CAL_DT");
-        Assert.assertTrue(modelService.checkColumnPermission(dataModel, newAuthColumns, dimensions, measurs));
-
-        Assert.assertTrue(modelService.checkColumnPermission(dataModel, newAuthColumns, dimensions, measurs));
-
-    }
-
-    @Test
-    public void testConvertCCToNormalCols() {
-        val project = "default";
-        val modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa";
-        NDataModelManager modelManager = NDataModelManager.getInstance(KylinConfig.getInstanceFromEnv(), project);
-        NDataModel dataModel = modelManager.getDataModelDesc(modelId);
-        NDataModel.Measure measure = dataModel.getEffectiveMeasures().values().stream()
-                .filter(x -> x.getName().equals("SUM_NEST4")).findFirst().get();
-        Set<String> measureColumns = measure.getFunction().getParameters().stream()
-                .filter(parameterDesc -> parameterDesc.getColRef() != null)
-                .map(parameterDesc -> parameterDesc.getColRef().getCanonicalName()).collect(Collectors.toSet());
-        ComputedColumnDesc sumNest4 = dataModel.getComputedColumnDescs().stream()
-                .filter(x -> measureColumns.contains(x.getIdentName())).findFirst().get();
-        Set<String> strings = modelService.convertCCToNormalCols(dataModel, sumNest4);
-        Assert.assertEquals("TEST_KYLIN_FACT.PRICE, TEST_KYLIN_FACT.ITEM_COUNT", String.join(", ", strings));
-
-        sumNest4.setInnerExpression("1 + 2");
-        Set<String> set = modelService.convertCCToNormalCols(dataModel, sumNest4);
-        Assert.assertEquals(Collections.emptySet(), set);
-
-        HashSet<Object> authColumns = Sets.newHashSet();
-        authColumns.add("DEFAULT.TEST_KYLIN_FACT.PRICE");
-        Assert.assertTrue(authColumns.containsAll(set));
-    }
-
     @Test
     public void testBuildExceptionMessage() {
         NDataModelManager modelManager = NDataModelManager.getInstance(KylinConfig.getInstanceFromEnv(), "default");
@@ -5517,8 +5117,8 @@ public class ModelServiceTest extends SourceTestCase {
     public void testBuildDuplicateCCException() {
         Set<String> set = Sets.newHashSet("test");
         Assert.assertThrows("The computed column name \"test\" has been used in the current model. Please rename it.\n",
-                KylinException.class, () -> ReflectionTestUtils.invokeMethod(modelService, "buildDuplicateCCException",
-                        set));
+                KylinException.class,
+                () -> ReflectionTestUtils.invokeMethod(modelService, "buildDuplicateCCException", set));
     }
 
     @Test
@@ -5612,63 +5212,6 @@ public class ModelServiceTest extends SourceTestCase {
         }
     }
 
-    @Test
-    public void testExportTDSWithDupMeasureDimColumnNames() throws IOException {
-        String projectName = "default";
-        String modelId = "199ee99e-8419-3e7a-7cad-97059999ec0a";
-        val modelRequest = JsonUtil.readValue(
-                new File("src/test/resources/ut_meta/dup_name_test/model_desc/model_dup_mea_dimcol.json"),
-                ModelRequest.class);
-        modelRequest.setProject(projectName);
-        List<NamedColumn> simplifiedDims = modelRequest.getAllNamedColumns().stream().filter(NamedColumn::isDimension)
-                .collect(Collectors.toList());
-        modelRequest.setSimplifiedDimensions(simplifiedDims);
-        modelService.createModel(projectName, modelRequest);
-        NDataModelManager projectInstance = NDataModelManager.getInstance(getTestConfig(), projectName);
-        Assert.assertNotNull(projectInstance.getDataModelDescByAlias("model_dup_mea_dimcol"));
-        Assert.assertThrows(
-                "There are duplicated names among dimension column LO_LINENUMBER and measure name LO_LINENUMBER. Cannot export a valid TDS file. Please correct the duplicated names and try again.",
-                KylinException.class, () -> modelService.validateExport(projectName, modelId));
-    }
-
-    @Test
-    public void testExportTDSWithDupMeasureDimensionNames() throws IOException {
-        String projectName = "default";
-        String modelId = "6f8cd656-9beb-47f6-87f5-89a8c548d17c";
-        val modelRequest = JsonUtil.readValue(
-                new File("src/test/resources/ut_meta/dup_name_test/model_desc/model_dup_mea_dim.json"),
-                ModelRequest.class);
-        modelRequest.setProject(projectName);
-        List<NamedColumn> simplifiedDims = modelRequest.getAllNamedColumns().stream().filter(NamedColumn::isDimension)
-                .collect(Collectors.toList());
-        modelRequest.setSimplifiedDimensions(simplifiedDims);
-        modelService.createModel(projectName, modelRequest);
-        NDataModelManager projectInstance = NDataModelManager.getInstance(getTestConfig(), projectName);
-        Assert.assertNotNull(projectInstance.getDataModelDescByAlias("model_dup_mea_dim"));
-        Assert.assertThrows(
-                "There are duplicated names among dimension name LO_TEST and measure name LO_TEST. Cannot export a valid TDS file. Please correct the duplicated names and try again.",
-                KylinException.class, () -> modelService.validateExport(projectName, modelId));
-    }
-
-    @Test
-    public void testExportTDSWithDupMeasureColumnNames() throws IOException {
-        String projectName = "default";
-        String modelId = "2ed3bf12-ad40-e8a0-73da-8dc3b4c798bb";
-        val modelRequest = JsonUtil.readValue(
-                new File("src/test/resources/ut_meta/dup_name_test/model_desc/model_dup_mea_col.json"),
-                ModelRequest.class);
-        modelRequest.setProject(projectName);
-        List<NamedColumn> simplifiedDims = modelRequest.getAllNamedColumns().stream().filter(NamedColumn::isDimension)
-                .collect(Collectors.toList());
-        modelRequest.setSimplifiedDimensions(simplifiedDims);
-        modelService.createModel(projectName, modelRequest);
-        NDataModelManager projectInstance = NDataModelManager.getInstance(getTestConfig(), projectName);
-        Assert.assertNotNull(projectInstance.getDataModelDescByAlias("model_dup_mea_col"));
-        Assert.assertThrows(
-                "There are duplicated names among model column LO_LINENUMBER and measure name LO_LINENUMBER. Cannot export a valid TDS file. Please correct the duplicated names and try again.",
-                KylinException.class, () -> modelService.validateExport(projectName, modelId));
-    }
-
     @Test
     public void testCheckComputedColumnExprWithSqlKeyword() throws IOException {
         String projectName = "keyword";
@@ -5836,8 +5379,8 @@ public class ModelServiceTest extends SourceTestCase {
         Assert.assertEquals(1, details.size());
         Assert.assertEquals(COMPUTED_COLUMN_CONFLICT_ADJUST_INFO.getErrorCode().getCode(),
                 details.get(0).getDetailCode());
-        Assert.assertEquals(COMPUTED_COLUMN_CONFLICT_ADJUST_INFO.getMsg("CC_1", "CUSTOMER.C_NAME +'USA'",
-                "CC_CNAME", "CUSTOMER.C_NAME +'USA'", "CC_CNAME"), details.get(0).getDetailMsg());
+        Assert.assertEquals(COMPUTED_COLUMN_CONFLICT_ADJUST_INFO.getMsg("CC_1", "CUSTOMER.C_NAME +'USA'", "CC_CNAME",
+                "CUSTOMER.C_NAME +'USA'", "CC_CNAME"), details.get(0).getDetailMsg());
     }
 
     private void testNoCCConflict(ModelRequest originRequest) {
diff --git a/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelTdsServiceTest.java b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelTdsServiceTest.java
new file mode 100644
index 0000000000..f4194a0be4
--- /dev/null
+++ b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelTdsServiceTest.java
@@ -0,0 +1,607 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.rest.service;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.nio.charset.Charset;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.exception.KylinException;
+import org.apache.kylin.common.msg.MsgPicker;
+import org.apache.kylin.common.scheduler.EventBusFactory;
+import org.apache.kylin.common.util.JsonUtil;
+import org.apache.kylin.junit.rule.TransactionExceptedException;
+import org.apache.kylin.metadata.acl.AclTCR;
+import org.apache.kylin.metadata.acl.AclTCRManager;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
+import org.apache.kylin.metadata.model.ComputedColumnDesc;
+import org.apache.kylin.metadata.model.NDataModel;
+import org.apache.kylin.metadata.model.NDataModelManager;
+import org.apache.kylin.metadata.model.TblColRef;
+import org.apache.kylin.metadata.recommendation.candidate.JdbcRawRecStore;
+import org.apache.kylin.rest.constant.Constant;
+import org.apache.kylin.rest.request.ModelRequest;
+import org.apache.kylin.rest.util.AclEvaluate;
+import org.apache.kylin.rest.util.AclUtil;
+import org.apache.kylin.tool.bisync.BISyncTool;
+import org.apache.kylin.tool.bisync.SyncContext;
+import org.apache.kylin.tool.bisync.model.SyncModel;
+import org.apache.kylin.tool.bisync.tableau.TableauDatasourceModel;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.springframework.security.authentication.TestingAuthenticationToken;
+import org.springframework.security.core.context.SecurityContextHolder;
+import org.springframework.test.util.ReflectionTestUtils;
+
+import com.google.common.base.Charsets;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+import com.google.common.io.CharStreams;
+
+import lombok.val;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class ModelTdsServiceTest extends SourceTestCase {
+
+    @InjectMocks
+    private final ModelService modelService = Mockito.spy(new ModelService());
+
+    @InjectMocks
+    private final ModelTdsService tdsService = Mockito.spy(new ModelTdsService());
+
+    @InjectMocks
+    private final ModelSemanticHelper semanticService = Mockito.spy(new ModelSemanticHelper());
+
+    @InjectMocks
+    private final IndexPlanService indexPlanService = Mockito.spy(new IndexPlanService());
+
+    @Mock
+    private final AclUtil aclUtil = Mockito.spy(AclUtil.class);
+
+    @Mock
+    private final AclEvaluate aclEvaluate = Mockito.spy(AclEvaluate.class);
+
+    @Mock
+    protected IUserGroupService userGroupService = Mockito.spy(NUserGroupService.class);
+
+    @Mock
+    private final AccessService accessService = Mockito.spy(AccessService.class);
+
+    @Rule
+    public TransactionExceptedException thrown = TransactionExceptedException.none();
+
+    protected String getProject() {
+        return "default";
+    }
+
+    @Before
+    public void setup() {
+        super.setup();
+        overwriteSystemProp("HADOOP_USER_NAME", "root");
+        ReflectionTestUtils.setField(aclEvaluate, "aclUtil", aclUtil);
+        ReflectionTestUtils.setField(modelService, "aclEvaluate", aclEvaluate);
+        ReflectionTestUtils.setField(modelService, "accessService", accessService);
+        ReflectionTestUtils.setField(modelService, "userGroupService", userGroupService);
+        ReflectionTestUtils.setField(modelService, "userGroupService", userGroupService);
+
+        ReflectionTestUtils.setField(tdsService, "accessService", accessService);
+        ReflectionTestUtils.setField(tdsService, "userGroupService", userGroupService);
+        ReflectionTestUtils.setField(tdsService, "aclEvaluate", aclEvaluate);
+
+        modelService.setSemanticUpdater(semanticService);
+        modelService.setIndexPlanService(indexPlanService);
+
+        try {
+            new JdbcRawRecStore(getTestConfig());
+        } catch (Exception e) {
+            //
+        }
+    }
+
+    @After
+    public void tearDown() {
+        getTestConfig().setProperty("kylin.metadata.semi-automatic-mode", "false");
+        EventBusFactory.getInstance().restart();
+        cleanupTestMetadata();
+    }
+
+    @Test
+    public void testExportTDSWithDupMeasureDimColumnNames() throws IOException {
+        String projectName = "default";
+        String modelId = "199ee99e-8419-3e7a-7cad-97059999ec0a";
+        val modelRequest = JsonUtil.readValue(
+                new File("src/test/resources/ut_meta/dup_name_test/model_desc/model_dup_mea_dimcol.json"),
+                ModelRequest.class);
+        modelRequest.setProject(projectName);
+        List<NDataModel.NamedColumn> simplifiedDims = modelRequest.getAllNamedColumns().stream()
+                .filter(NDataModel.NamedColumn::isDimension).collect(Collectors.toList());
+        modelRequest.setSimplifiedDimensions(simplifiedDims);
+        modelService.createModel(projectName, modelRequest);
+        NDataModelManager projectInstance = NDataModelManager.getInstance(getTestConfig(), projectName);
+        Assert.assertNotNull(projectInstance.getDataModelDescByAlias("model_dup_mea_dimcol"));
+        SyncContext syncContext = new SyncContext();
+        syncContext.setProjectName(projectName);
+        syncContext.setModelId(modelId);
+        syncContext.setModelElement(SyncContext.ModelElement.ALL_COLS);
+        syncContext.setAdmin(true);
+        syncContext.setDataflow(NDataflowManager.getInstance(getTestConfig(), projectName).getDataflow(modelId));
+        syncContext.setKylinConfig(getTestConfig());
+        SyncModel syncModel = tdsService.exportModel(syncContext);
+        Assert.assertThrows(
+                "There are duplicated names among dimension column LO_LINENUMBER and measure name LO_LINENUMBER. Cannot export a valid TDS file. Please correct the duplicated names and try again.",
+                KylinException.class, () -> tdsService.preCheckNameConflict(syncModel));
+    }
+
+    @Test
+    public void testExportTdsWithDupMeasureDimensionNamesNoConflict() throws IOException {
+        String projectName = "default";
+        String modelId = "6f8cd656-9beb-47f6-87f5-89a8c548d17c";
+        val modelRequest = JsonUtil.readValue(
+                new File("src/test/resources/ut_meta/dup_name_test/model_desc/model_dup_mea_dim.json"),
+                ModelRequest.class);
+        modelRequest.setProject(projectName);
+        List<NDataModel.NamedColumn> simplifiedDims = modelRequest.getAllNamedColumns().stream()
+                .filter(NDataModel.NamedColumn::isDimension).collect(Collectors.toList());
+        modelRequest.setSimplifiedDimensions(simplifiedDims);
+        modelService.createModel(projectName, modelRequest);
+        NDataModelManager projectInstance = NDataModelManager.getInstance(getTestConfig(), projectName);
+        Assert.assertNotNull(projectInstance.getDataModelDescByAlias("model_dup_mea_dim"));
+        SyncContext syncContext = new SyncContext();
+        syncContext.setProjectName(projectName);
+        syncContext.setModelId(modelId);
+        syncContext.setModelElement(SyncContext.ModelElement.ALL_COLS);
+        syncContext.setAdmin(true);
+        syncContext.setDataflow(NDataflowManager.getInstance(getTestConfig(), projectName).getDataflow(modelId));
+        syncContext.setKylinConfig(getTestConfig());
+        SyncModel syncModel = tdsService.exportModel(syncContext);
+        Assert.assertTrue(tdsService.preCheckNameConflict(syncModel));
+    }
+
+    @Test
+    public void testExportTDSWithDupMeasureColumnNames() throws IOException {
+        String projectName = "default";
+        String modelId = "2ed3bf12-ad40-e8a0-73da-8dc3b4c798bb";
+        val modelRequest = JsonUtil.readValue(
+                new File("src/test/resources/ut_meta/dup_name_test/model_desc/model_dup_mea_col.json"),
+                ModelRequest.class);
+        modelRequest.setProject(projectName);
+        List<NDataModel.NamedColumn> simplifiedDims = modelRequest.getAllNamedColumns().stream()
+                .filter(NDataModel.NamedColumn::isDimension).collect(Collectors.toList());
+        modelRequest.setSimplifiedDimensions(simplifiedDims);
+        modelService.createModel(projectName, modelRequest);
+        NDataModelManager projectInstance = NDataModelManager.getInstance(getTestConfig(), projectName);
+        Assert.assertNotNull(projectInstance.getDataModelDescByAlias("model_dup_mea_col"));
+        SyncContext syncContext = new SyncContext();
+        syncContext.setProjectName(projectName);
+        syncContext.setModelId(modelId);
+        syncContext.setModelElement(SyncContext.ModelElement.ALL_COLS);
+        syncContext.setDataflow(NDataflowManager.getInstance(getTestConfig(), projectName).getDataflow(modelId));
+        syncContext.setKylinConfig(getTestConfig());
+        syncContext.setAdmin(true);
+        SyncModel syncModel = tdsService.exportModel(syncContext);
+        Assert.assertThrows(
+                "There are duplicated names among model column LO_LINENUMBER and measure name LO_LINENUMBER. Cannot export a valid TDS file. Please correct the duplicated names and try again.",
+                KylinException.class, () -> tdsService.preCheckNameConflict(syncModel));
+    }
+
+    @Test
+    public void testExportTDSByAdmin() throws Exception {
+        val project = "default";
+        val modelId = "cb596712-3a09-46f8-aea1-988b43fe9b6c";
+        prepareBasic(project);
+        List<String> dimensions = Lists.newArrayList();
+        dimensions.add("DEFAULT.TEST_MEASURE.FLAG");
+        dimensions.add("DEFAULT.TEST_MEASURE.PRICE1");
+        dimensions.add("DEFAULT.TEST_MEASURE.ID1");
+        List<String> measurs = Lists.newArrayList();
+        measurs.add("COUNT_STAR");
+        measurs.add("SUM_1");
+        SyncContext syncContext = tdsService.prepareSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
+                SyncContext.ModelElement.CUSTOM_COLS, "localhost", 8080);
+        SyncModel syncModel = tdsService.exportTDSDimensionsAndMeasuresByAdmin(syncContext, dimensions, measurs);
+        TableauDatasourceModel datasource1 = (TableauDatasourceModel) BISyncTool.getBISyncModel(syncContext, syncModel);
+        ByteArrayOutputStream outStream4 = new ByteArrayOutputStream();
+        datasource1.dump(outStream4);
+        Assert.assertEquals(getExpectedTds("/bisync_tableau/nmodel_full_measure_test.connector_admin.tds"),
+                outStream4.toString(Charset.defaultCharset().name()));
+    }
+
+    @Test
+    public void testExportTDSByUser() throws Exception {
+        val project = "default";
+        val modelId = "cb596712-3a09-46f8-aea1-988b43fe9b6c";
+        prepareBasic(project);
+        List<String> dimensions = Lists.newArrayList();
+        dimensions.add("TEST_MEASURE.ID1");
+        dimensions.add("TEST_MEASURE.ID2");
+        dimensions.add("TEST_MEASURE.ID3");
+        dimensions.add("TEST_MEASURE1.ID1");
+        dimensions.add("TEST_MEASURE1.NAME1");
+        dimensions.add("TEST_MEASURE1.NAME2");
+        dimensions.add("TEST_MEASURE1.NAME3");
+        List<String> measurs = Lists.newArrayList();
+        measurs.add("COUNT_STAR");
+        measurs.add("SUM_1");
+        SecurityContextHolder.getContext()
+                .setAuthentication(new TestingAuthenticationToken("u1", "ANALYST", Constant.ROLE_ANALYST));
+        SyncContext syncContext = tdsService.prepareSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
+                SyncContext.ModelElement.CUSTOM_COLS, "localhost", 8080);
+        syncContext.setAdmin(false);
+        SyncModel syncModel = tdsService.exportTDSDimensionsAndMeasuresByNormalUser(syncContext, dimensions, measurs);
+        TableauDatasourceModel datasource1 = (TableauDatasourceModel) BISyncTool.getBISyncModel(syncContext, syncModel);
+        ByteArrayOutputStream outStream4 = new ByteArrayOutputStream();
+        datasource1.dump(outStream4);
+        Assert.assertEquals(getExpectedTds("/bisync_tableau/nmodel_full_measure_test.connector_user.tds"),
+                outStream4.toString(Charset.defaultCharset().name()));
+    }
+
+    /**
+     * Exports a TDS datasource as normal user "u1" with
+     * ModelElement.AGG_INDEX_COL and verifies the dumped Tableau datasource
+     * equals the expected fixture file. ADMIN authentication is restored in
+     * the finally block so the ANALYST security context does not leak into
+     * subsequent tests.
+     */
+    @Test
+    public void testExportTDSByUserAndElement() throws Exception {
+        val project = "default";
+        val modelId = "cb596712-3a09-46f8-aea1-988b43fe9b6c";
+        prepareBasic(project);
+        List<String> dimensions = Lists.newArrayList();
+        dimensions.add("TEST_MEASURE.ID1");
+        try {
+            SecurityContextHolder.getContext()
+                    .setAuthentication(new TestingAuthenticationToken("u1", "ANALYST", Constant.ROLE_ANALYST));
+            SyncContext syncContext = tdsService.prepareSyncContext(project, modelId,
+                    SyncContext.BI.TABLEAU_CONNECTOR_TDS, SyncContext.ModelElement.AGG_INDEX_COL, "localhost", 8080);
+            SyncModel syncModel = tdsService.exportTDSDimensionsAndMeasuresByNormalUser(syncContext, dimensions,
+                    ImmutableList.of());
+            TableauDatasourceModel datasource1 = (TableauDatasourceModel) BISyncTool.getBISyncModel(syncContext,
+                    syncModel);
+            ByteArrayOutputStream outStream4 = new ByteArrayOutputStream();
+            datasource1.dump(outStream4);
+            Assert.assertEquals(
+                    getExpectedTds("/bisync_tableau/nmodel_full_measure_test.connector_user_agg_index_col.tds"),
+                    outStream4.toString(Charset.defaultCharset().name()));
+        } finally {
+            SecurityContextHolder.getContext()
+                    .setAuthentication(new TestingAuthenticationToken("ADMIN", "ADMIN", Constant.ROLE_ADMIN));
+        }
+    }
+
+    /**
+     * Verifies that exporting a model as user "u1" — whose execute-user
+     * groups are stubbed to only ROLE_ANALYST — is rejected with a
+     * KylinException whose message says the user lacks full permission on
+     * the requested model. ADMIN authentication is restored in finally.
+     */
+    @Test
+    public void testCheckModelExportPermissionException() {
+        val project = "default";
+        val modelId = "cb596712-3a09-46f8-aea1-988b43fe9b6c";
+        prepareBasic(project);
+        tdsService.prepareSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
+                SyncContext.ModelElement.AGG_INDEX_COL, "localhost", 8080);
+        try {
+            Mockito.when(accessService.getGroupsOfExecuteUser(Mockito.any(String.class)))
+                    .thenReturn(Sets.newHashSet("ROLE_ANALYST"));
+            SecurityContextHolder.getContext()
+                    .setAuthentication(new TestingAuthenticationToken("u1", "ANALYST", Constant.ROLE_ANALYST));
+            thrown.expect(KylinException.class);
+            thrown.expectMessage("current user does not have full permission on requesting model");
+            SyncContext syncContext = tdsService.prepareSyncContext(project, modelId,
+                    SyncContext.BI.TABLEAU_CONNECTOR_TDS, SyncContext.ModelElement.AGG_INDEX_COL, "localhost", 8080);
+            tdsService.exportModel(syncContext);
+        } finally {
+            SecurityContextHolder.getContext()
+                    .setAuthentication(new TestingAuthenticationToken("ADMIN", "ADMIN", Constant.ROLE_ADMIN));
+        }
+    }
+
+    /**
+     * Prepares the sync context twice under the default ADMIN authentication.
+     * NOTE(review): there are no explicit assertions — the test only passes
+     * as long as prepareSyncContext throws no permission exception.
+     */
+    @Test
+    public void testCheckModelExportPermission() {
+        val project = "default";
+        val modelId = "cb596712-3a09-46f8-aea1-988b43fe9b6c";
+        prepareBasic(project);
+        tdsService.prepareSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
+                SyncContext.ModelElement.AGG_INDEX_COL, "localhost", 8080);
+        tdsService.prepareSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
+                SyncContext.ModelElement.AGG_INDEX_COL, "localhost", 8080);
+    }
+
+    /**
+     * Export-permission check on a model containing computed columns.
+     * First "u1" is given an AclTCR with no table entry set (presumably
+     * unrestricted access — confirm against AclTCRManager semantics) and
+     * context preparation succeeds; then "u1" is restricted to the single
+     * column SSB.LINEORDER.ORDER_ID and exportModel must fail with a
+     * KylinException about missing full model permission.
+     */
+    @Test
+    public void testCheckModelExportPermissionWithCC() {
+        val project = "cc_test";
+        val modelId = "0d146f1a-bdd3-4548-87ac-21c2c6f9a0da";
+        AclTCRManager manager = AclTCRManager.getInstance(getTestConfig(), project);
+        // Phase 1: ACL without table restriction — preparation succeeds.
+        {
+            AclTCR u1a1 = new AclTCR();
+            manager.updateAclTCR(u1a1, "u1", true);
+            SecurityContextHolder.getContext()
+                    .setAuthentication(new TestingAuthenticationToken("u1", "ANALYST", Constant.ROLE_ANALYST));
+            Mockito.when(accessService.getGroupsOfExecuteUser(Mockito.any(String.class)))
+                    .thenReturn(Sets.newHashSet("ROLE_ANALYST"));
+            tdsService.prepareSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
+                    SyncContext.ModelElement.AGG_INDEX_COL, "localhost", 8080);
+        }
+        // Phase 2: ACL narrowed to one column — export must be rejected.
+        {
+            try {
+                AclTCR u1a1 = new AclTCR();
+                AclTCR.Table u1t1 = new AclTCR.Table();
+                AclTCR.ColumnRow u1cr1 = new AclTCR.ColumnRow();
+                AclTCR.Column u1c1 = new AclTCR.Column();
+                u1c1.add("ORDER_ID");
+                u1cr1.setColumn(u1c1);
+                u1t1.put("SSB.LINEORDER", u1cr1);
+                u1a1.setTable(u1t1);
+                manager.updateAclTCR(u1a1, "u1", true);
+                thrown.expect(KylinException.class);
+                thrown.expectMessage("current user does not have full permission on requesting model");
+                SyncContext syncContext = tdsService.prepareSyncContext(project, modelId,
+                        SyncContext.BI.TABLEAU_CONNECTOR_TDS, SyncContext.ModelElement.AGG_INDEX_COL, "localhost",
+                        8080);
+                tdsService.exportModel(syncContext);
+            } finally {
+                SecurityContextHolder.getContext()
+                        .setAuthentication(new TestingAuthenticationToken("ADMIN", "ADMIN", Constant.ROLE_ADMIN));
+            }
+        }
+
+    }
+
+    /**
+     * Preparing a sync context for a broken model (test_broken_project) must
+     * throw a KylinException.
+     * NOTE(review): the local dimensions/measures lists are never used.
+     */
+    @Test
+    public void testExportTDSByBroken() {
+        val project = "test_broken_project";
+        val modelId = "4b93b131-824e-6966-c4dd-5a4268d27095";
+        List<String> dimensions = Lists.newArrayList();
+        List<String> measures = Lists.newArrayList();
+        Assert.assertThrows(KylinException.class, () -> tdsService.prepareSyncContext(project, modelId,
+                SyncContext.BI.TABLEAU_CONNECTOR_TDS, SyncContext.ModelElement.CUSTOM_COLS, "localhost", 8080));
+    }
+
+    /**
+     * Exporting with the full measure list as user "u1", who only holds the
+     * column ACLs granted by prepareBasicByMeasure, must be rejected with a
+     * KylinException when exporting dimensions and measures as a normal user.
+     */
+    @Test
+    public void testExportTDSMeasurePermission() {
+        val project = "default";
+        val modelId = "82fa7671-a935-45f5-8779-85703601f49a";
+        prepareBasicByMeasure(project);
+        List<String> dimensions = Lists.newArrayList();
+        //"ORDER_ID", "PRICE", "CAL_DT", "PRICE", "ITEM_COUNT", "LEAF_CATEG_ID"
+        dimensions.add("TEST_KYLIN_FACT.ORDER_ID");
+        dimensions.add("TEST_KYLIN_FACT.PRICE");
+        dimensions.add("TEST_KYLIN_FACT.CAL_DT");
+        dimensions.add("TEST_KYLIN_FACT.PRICE");
+        dimensions.add("TEST_KYLIN_FACT.ITEM_COUNT");
+        dimensions.add("TEST_KYLIN_FACT.LEAF_CATEG_ID");
+        //"ORDER_ID", "TEST_TIME_ENC", "TEST_DATE_ENC"
+        dimensions.add("TEST_ORDER.ORDER_ID");
+        dimensions.add("TEST_ORDER.TEST_TIME_ENC");
+        dimensions.add("TEST_ORDER.TEST_DATE_ENC");
+        //"ORDER_ID", "PRICE", "CAL_DT", "TRANS_ID"
+        dimensions.add("TEST_MEASURE.ORDER_ID");
+        dimensions.add("TEST_MEASURE.PRICE");
+        dimensions.add("TEST_MEASURE.CAL_DT");
+        dimensions.add("TEST_MEASURE.TRANS_ID");
+
+        // Request every model measure; u1's ACL does not cover them all.
+        List<String> measures = Lists.newArrayList();
+        measures.add("TRANS_CNT");
+        measures.add("GMV_SUM");
+        measures.add("GMV_MIN");
+        measures.add("GMV_MAX");
+        measures.add("ITEM_COUNT_SUM");
+        measures.add("ITEM_COUNT_MAX");
+        measures.add("ITEM_COUNT_MIN");
+        measures.add("SELLER_HLL");
+        measures.add("COUNT_DISTINCT");
+        measures.add("TOP_SELLER");
+        measures.add("TEST_COUNT_DISTINCT_BITMAP");
+        measures.add("GVM_PERCENTILE");
+        SecurityContextHolder.getContext()
+                .setAuthentication(new TestingAuthenticationToken("u1", "ANALYST", Constant.ROLE_ANALYST));
+        SyncContext syncContext = tdsService.prepareSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
+                SyncContext.ModelElement.CUSTOM_COLS, "localhost", 8080);
+        Assert.assertThrows(KylinException.class,
+                () -> tdsService.exportTDSDimensionsAndMeasuresByNormalUser(syncContext, dimensions, measures));
+    }
+
+    /**
+     * Grants user "u1" the column-level ACLs used by the measure-permission
+     * test: selected columns of DEFAULT.TEST_KYLIN_FACT and
+     * DEFAULT.TEST_ORDER.
+     */
+    private void prepareBasicByMeasure(String project) {
+        AclTCRManager manager = AclTCRManager.getInstance(getTestConfig(), project);
+
+        AclTCR u1a1 = new AclTCR();
+        AclTCR.Table u1t1 = new AclTCR.Table();
+        AclTCR.ColumnRow u1cr1 = new AclTCR.ColumnRow();
+        AclTCR.Column u1c1 = new AclTCR.Column();
+        u1c1.addAll(Arrays.asList("ORDER_ID", "PRICE", "CAL_DT", "PRICE", "ITEM_COUNT", "LEAF_CATEG_ID"));
+        u1cr1.setColumn(u1c1);
+
+        AclTCR.ColumnRow u1cr2 = new AclTCR.ColumnRow();
+        AclTCR.Column u1c2 = new AclTCR.Column();
+        u1c2.addAll(Arrays.asList("ORDER_ID", "TEST_TIME_ENC", "TEST_DATE_ENC"));
+        u1cr2.setColumn(u1c2);
+        u1t1.put("DEFAULT.TEST_KYLIN_FACT", u1cr1);
+        u1t1.put("DEFAULT.TEST_ORDER", u1cr2);
+        u1a1.setTable(u1t1);
+        manager.updateAclTCR(u1a1, "u1", true);
+    }
+
+    /**
+     * Exports the full model (AGG_INDEX_AND_TABLE_INDEX_COL) under the
+     * current (admin) context and verifies the dumped Tableau datasource
+     * matches the expected fixture file.
+     */
+    @Test
+    public void testExportModel() throws Exception {
+        val project = "default";
+        val modelId = "cb596712-3a09-46f8-aea1-988b43fe9b6c";
+        prepareBasic(project);
+        SyncContext syncContext = tdsService.prepareSyncContext(project, modelId, SyncContext.BI.TABLEAU_CONNECTOR_TDS,
+                SyncContext.ModelElement.AGG_INDEX_AND_TABLE_INDEX_COL, "localhost", 8080);
+        SyncModel syncModel = tdsService.exportModel(syncContext);
+        TableauDatasourceModel datasource1 = (TableauDatasourceModel) BISyncTool.getBISyncModel(syncContext, syncModel);
+        ByteArrayOutputStream outStream4 = new ByteArrayOutputStream();
+        datasource1.dump(outStream4);
+        Assert.assertEquals(getExpectedTds("/bisync_tableau/nmodel_full_measure_test.connector.tds"),
+                outStream4.toString(Charset.defaultCharset().name()));
+    }
+
+    /** Reads the expected TDS fixture from the test classpath as a UTF-8 string. */
+    private String getExpectedTds(String path) throws IOException {
+        return CharStreams.toString(new InputStreamReader(getClass().getResourceAsStream(path), Charsets.UTF_8));
+    }
+
+    /**
+     * Grants the ACLs shared by most tests in this class: user "u1" gets
+     * column sets on DEFAULT.TEST_MEASURE and DEFAULT.TEST_MEASURE1, and
+     * group "g1" gets a column set on DEFAULT.TEST_MEASURE.
+     */
+    private void prepareBasic(String project) {
+        AclTCRManager manager = AclTCRManager.getInstance(getTestConfig(), project);
+
+        // User-level ACL for "u1" (last argument true = principal is a user).
+        AclTCR u1a1 = new AclTCR();
+        AclTCR.Table u1t1 = new AclTCR.Table();
+        AclTCR.ColumnRow u1cr1 = new AclTCR.ColumnRow();
+        AclTCR.Column u1c1 = new AclTCR.Column();
+        u1c1.addAll(Arrays.asList("ID1", "ID2", "ID3"));
+        u1cr1.setColumn(u1c1);
+
+        AclTCR.ColumnRow u1cr2 = new AclTCR.ColumnRow();
+        AclTCR.Column u1c2 = new AclTCR.Column();
+        u1c2.addAll(Arrays.asList("ID1", "NAME1", "NAME2", "NAME3"));
+        u1cr2.setColumn(u1c2);
+        u1t1.put("DEFAULT.TEST_MEASURE", u1cr1);
+        u1t1.put("DEFAULT.TEST_MEASURE1", u1cr2);
+        u1a1.setTable(u1t1);
+        manager.updateAclTCR(u1a1, "u1", true);
+
+        // Group-level ACL for "g1" (last argument false = principal is a group).
+        AclTCR g1a1 = new AclTCR();
+        AclTCR.Table g1t1 = new AclTCR.Table();
+        AclTCR.ColumnRow g1cr1 = new AclTCR.ColumnRow();
+        AclTCR.Column g1c1 = new AclTCR.Column();
+        g1c1.addAll(Arrays.asList("ID1", "ID2", "ID3", "ID4"));
+        g1cr1.setColumn(g1c1);
+        g1t1.put("DEFAULT.TEST_MEASURE", g1cr1);
+        g1a1.setTable(g1t1);
+        manager.updateAclTCR(g1a1, "g1", false);
+    }
+
+    /**
+     * Expects the "table has no columns permission" KylinException: user
+     * "u1" is given an empty column set on DEFAULT.TEST_MEASURE, so checking
+     * the requested TEST_MEASURE dimensions must fail.
+     * NOTE(review): because this test relies on the ExpectedException rule,
+     * the statements after the first checkTableHasColumnPermission call only
+     * execute if that call does NOT throw — the trailing assertThrows block
+     * appears unreachable when the test passes as intended; confirm intent.
+     */
+    @Test
+    public void testCheckTablePermission() {
+        val project = "default";
+        val modelId = "cb596712-3a09-46f8-aea1-988b43fe9b6c";
+        thrown.expect(KylinException.class);
+        thrown.expectMessage(MsgPicker.getMsg().getTableNoColumnsPermission());
+
+        AclTCRManager manager = AclTCRManager.getInstance(getTestConfig(), project);
+        Set<String> columns = new HashSet<>();
+        columns.add("DEFAULT.TEST_MEASURE1.NAME1");
+        columns.add("DEFAULT.TEST_MEASURE1.NAME2");
+        columns.add("DEFAULT.TEST_MEASURE1.NAME3");
+
+        // u1c1 is intentionally left empty: no columns granted on TEST_MEASURE.
+        AclTCR u1a1 = new AclTCR();
+        AclTCR.Table u1t1 = new AclTCR.Table();
+        AclTCR.ColumnRow u1cr1 = new AclTCR.ColumnRow();
+        AclTCR.Column u1c1 = new AclTCR.Column();
+        u1cr1.setColumn(u1c1);
+
+        AclTCR.ColumnRow u1cr2 = new AclTCR.ColumnRow();
+        AclTCR.Column u1c2 = new AclTCR.Column();
+        u1c2.addAll(Arrays.asList("NAME1", "NAME2", "NAME3"));
+        u1cr2.setColumn(u1cr2 == null ? null : u1c2);
+        u1t1.put("DEFAULT.TEST_MEASURE", u1cr1);
+        u1t1.put("DEFAULT.TEST_MEASURE1", u1cr2);
+        u1a1.setTable(u1t1);
+        manager.updateAclTCR(u1a1, "u1", true);
+        SecurityContextHolder.getContext()
+                .setAuthentication(new TestingAuthenticationToken("u1", "ANALYST", Constant.ROLE_ANALYST));
+        List<String> dimensions = Lists.newArrayList();
+        dimensions.add("TEST_MEASURE.FLAG");
+        dimensions.add("TEST_MEASURE.PRICE1");
+        dimensions.add("TEST_MEASURE.ID1");
+        List<String> measurs = Lists.newArrayList();
+        measurs.add("COUNT_STAR");
+        measurs.add("SUM_1");
+        tdsService.checkTableHasColumnPermission(SyncContext.ModelElement.CUSTOM_COLS, project, modelId, columns,
+                dimensions, measurs);
+
+        dimensions.add("TEST_MEASURE.ID4");
+        Assert.assertThrows(KylinException.class,
+                () -> tdsService.checkTableHasColumnPermission(SyncContext.ModelElement.CUSTOM_COLS, project, modelId,
+                        columns, dimensions, measurs));
+    }
+
+    /**
+     * Exercises checkColumnPermission: empty/null dimension and measure
+     * lists are always allowed; a dimension outside the authorized column
+     * set is rejected; after authorizing TRANS_ID, the same dimensions plus
+     * the SUM_NEST4/COUNT_CAL_DT measures pass.
+     * NOTE(review): the final assertion is an exact duplicate of the one
+     * before it.
+     */
+    @Test
+    public void testExportTDSCheckColumnPermission() {
+        val project = "default";
+        val modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa";
+
+        NDataModelManager modelManager = NDataModelManager.getInstance(KylinConfig.getInstanceFromEnv(), project);
+        NDataModel dataModel = modelManager.getDataModelDesc(modelId);
+
+        Set<String> authColumns = Sets.newHashSet();
+        List<String> dimensions = Lists.newArrayList();
+        List<String> measurs = Lists.newArrayList();
+
+        // Empty or null requested dimensions/measures are always permitted.
+        Assert.assertTrue(tdsService.checkColumnPermission(dataModel, authColumns, null, measurs));
+        Assert.assertTrue(tdsService.checkColumnPermission(dataModel, authColumns, null, null));
+        Assert.assertTrue(tdsService.checkColumnPermission(dataModel, authColumns, dimensions, null));
+        Assert.assertTrue(tdsService.checkColumnPermission(dataModel, authColumns, dimensions, measurs));
+
+        authColumns.add("DEFAULT.TEST_KYLIN_FACT.PRICE");
+        authColumns.add("DEFAULT.TEST_KYLIN_FACT.ITEM_COUNT");
+        authColumns.add("EDW.TEST_CAL_DT.CAL_DT");
+        authColumns.add("DEFAULT.TEST_ACCOUNT.ACCOUNT_ID");
+
+        // Translate canonical names (DB.TABLE.COL) to alias-dot names (ALIAS.COL).
+        Set<String> newAuthColumns = Sets.newHashSet();
+        dataModel.getAllTables().forEach(tableRef -> {
+            List<TblColRef> collect = tableRef.getColumns().stream()
+                    .filter(column -> authColumns.contains(column.getCanonicalName())).collect(Collectors.toList());
+            collect.forEach(x -> newAuthColumns.add(x.getAliasDotName()));
+        });
+
+        dimensions.add("TEST_KYLIN_FACT.DEAL_AMOUNT");
+        dimensions.add("TEST_KYLIN_FACT.TRANS_ID");
+
+        // TRANS_ID is not yet authorized, so the check must fail.
+        Assert.assertFalse(tdsService.checkColumnPermission(dataModel, newAuthColumns, dimensions, measurs));
+
+        newAuthColumns.add("TEST_KYLIN_FACT.TRANS_ID");
+
+        measurs.add("SUM_NEST4");
+        measurs.add("COUNT_CAL_DT");
+        Assert.assertTrue(tdsService.checkColumnPermission(dataModel, newAuthColumns, dimensions, measurs));
+
+        Assert.assertTrue(tdsService.checkColumnPermission(dataModel, newAuthColumns, dimensions, measurs));
+    }
+
+    /**
+     * convertCCToNormalCols resolves the SUM_NEST4 computed column to its
+     * underlying source columns (TEST_KYLIN_FACT.PRICE and ITEM_COUNT); a CC
+     * whose inner expression references no columns ("1 + 2") yields an empty
+     * set.
+     * NOTE(review): the final containsAll assertion is trivially true because
+     * {@code set} was just asserted to be empty — confirm it tests anything.
+     */
+    @Test
+    public void testConvertCCToNormalCols() {
+        val project = "default";
+        val modelId = "89af4ee2-2cdb-4b07-b39e-4c29856309aa";
+        NDataModelManager modelManager = NDataModelManager.getInstance(KylinConfig.getInstanceFromEnv(), project);
+        NDataModel dataModel = modelManager.getDataModelDesc(modelId);
+        NDataModel.Measure measure = dataModel.getEffectiveMeasures().values().stream()
+                .filter(x -> x.getName().equals("SUM_NEST4")).findFirst().get();
+        Set<String> measureColumns = measure.getFunction().getParameters().stream()
+                .filter(parameterDesc -> parameterDesc.getColRef() != null)
+                .map(parameterDesc -> parameterDesc.getColRef().getCanonicalName()).collect(Collectors.toSet());
+        ComputedColumnDesc sumNest4 = dataModel.getComputedColumnDescs().stream()
+                .filter(x -> measureColumns.contains(x.getIdentName())).findFirst().get();
+        Set<String> strings = tdsService.convertCCToNormalCols(dataModel, sumNest4);
+        Assert.assertEquals("TEST_KYLIN_FACT.PRICE, TEST_KYLIN_FACT.ITEM_COUNT", String.join(", ", strings));
+
+        // A constant expression references no source columns.
+        sumNest4.setInnerExpression("1 + 2");
+        Set<String> set = tdsService.convertCCToNormalCols(dataModel, sumNest4);
+        Assert.assertEquals(Collections.emptySet(), set);
+
+        HashSet<Object> authColumns = Sets.newHashSet();
+        authColumns.add("DEFAULT.TEST_KYLIN_FACT.PRICE");
+        Assert.assertTrue(authColumns.containsAll(set));
+    }
+}
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/BISyncTool.java b/src/query-common/src/main/java/org/apache/kylin/tool/bisync/BISyncTool.java
index 9449dee777..4834af6595 100644
--- a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/BISyncTool.java
+++ b/src/query-common/src/main/java/org/apache/kylin/tool/bisync/BISyncTool.java
@@ -24,26 +24,30 @@ import java.util.Set;
 import org.apache.kylin.tool.bisync.model.SyncModel;
 import org.apache.kylin.tool.bisync.tableau.TableauDataSourceConverter;
 
+import com.google.common.annotations.VisibleForTesting;
+
 public class BISyncTool {
 
     private BISyncTool() {
     }
 
+    @VisibleForTesting
     public static BISyncModel dumpToBISyncModel(SyncContext syncContext) {
         SyncModel syncModel = new SyncModelBuilder(syncContext).buildSourceSyncModel();
         return getBISyncModel(syncContext, syncModel);
     }
 
-    private static BISyncModel getBISyncModel(SyncContext syncContext, SyncModel syncModel) {
+    public static BISyncModel getBISyncModel(SyncContext syncContext, SyncModel syncModel) {
         switch (syncContext.getTargetBI()) {
-            case TABLEAU_ODBC_TDS:
-            case TABLEAU_CONNECTOR_TDS:
-                return new TableauDataSourceConverter().convert(syncModel, syncContext);
-            default:
-                throw new IllegalArgumentException();
+        case TABLEAU_ODBC_TDS:
+        case TABLEAU_CONNECTOR_TDS:
+            return new TableauDataSourceConverter().convert(syncModel, syncContext);
+        default:
+            throw new IllegalArgumentException();
         }
     }
 
+    @VisibleForTesting
     public static BISyncModel dumpHasPermissionToBISyncModel(SyncContext syncContext, Set<String> authTables,
             Set<String> authColumns, List<String> dimensions, List<String> measures) {
         SyncModel syncModel = new SyncModelBuilder(syncContext).buildHasPermissionSourceSyncModel(authTables,
@@ -51,8 +55,8 @@ public class BISyncTool {
         return getBISyncModel(syncContext, syncModel);
     }
 
-    public static BISyncModel dumpBISyncModel(SyncContext syncContext,
-            List<String> dimensions, List<String> measures) {
+    @VisibleForTesting
+    public static BISyncModel dumpBISyncModel(SyncContext syncContext, List<String> dimensions, List<String> measures) {
         SyncModel syncModel = new SyncModelBuilder(syncContext).buildSourceSyncModel(dimensions, measures);
         return getBISyncModel(syncContext, syncModel);
     }
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/SyncContext.java b/src/query-common/src/main/java/org/apache/kylin/tool/bisync/SyncContext.java
index b43a898726..895897e031 100644
--- a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/SyncContext.java
+++ b/src/query-common/src/main/java/org/apache/kylin/tool/bisync/SyncContext.java
@@ -53,4 +53,6 @@ public class SyncContext {
     private NDataflow dataflow;
 
     private KylinConfig kylinConfig;
+
+    private boolean isAdmin;
 }
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/SyncModelBuilder.java b/src/query-common/src/main/java/org/apache/kylin/tool/bisync/SyncModelBuilder.java
index cb444fe016..74b402bc29 100644
--- a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/SyncModelBuilder.java
+++ b/src/query-common/src/main/java/org/apache/kylin/tool/bisync/SyncModelBuilder.java
@@ -20,13 +20,13 @@ package org.apache.kylin.tool.bisync;
 
 import java.util.Arrays;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
 
+import org.apache.commons.lang3.ArrayUtils;
 import org.apache.kylin.common.util.ImmutableBitSet;
 import org.apache.kylin.cube.model.SelectRule;
 import org.apache.kylin.metadata.cube.cuboid.NAggregationGroup;
@@ -44,6 +44,8 @@ import org.apache.kylin.tool.bisync.model.JoinTreeNode;
 import org.apache.kylin.tool.bisync.model.MeasureDef;
 import org.apache.kylin.tool.bisync.model.SyncModel;
 
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 
@@ -56,22 +58,7 @@ public class SyncModelBuilder {
     }
 
     public SyncModel buildSourceSyncModel() {
-        NDataModel dataModelDesc = syncContext.getDataflow().getModel();
-        IndexPlan indexPlan = syncContext.getDataflow().getIndexPlan();
-
-        // init joinTree, dimension cols, measure cols, hierarchies
-        Map<String, ColumnDef> columnDefMap = getAllColumns(dataModelDesc);
-
-        List<MeasureDef> measureDefs = dataModelDesc.getEffectiveMeasures().values().stream().map(MeasureDef::new)
-                .collect(Collectors.toList());
-        markHasPermissionIndexedColumnsAndMeasures(columnDefMap, measureDefs, indexPlan, null, null,
-                syncContext.getModelElement());
-        markComputedColumnVisibility(columnDefMap, measureDefs, syncContext.getKylinConfig().exposeComputedColumn());
-
-        Set<String[]> hierarchies = getHierarchies(indexPlan);
-        JoinTreeNode joinTree = generateJoinTree(dataModelDesc.getJoinTables(), dataModelDesc.getRootFactTableName());
-
-        return getSyncModel(dataModelDesc, columnDefMap, measureDefs, hierarchies, joinTree);
+        return buildSourceSyncModel(ImmutableList.of(), ImmutableList.of());
     }
 
     public SyncModel buildSourceSyncModel(List<String> dimensions, List<String> measures) {
@@ -79,16 +66,17 @@ public class SyncModelBuilder {
         IndexPlan indexPlan = syncContext.getDataflow().getIndexPlan();
 
         // init joinTree, dimension cols, measure cols, hierarchies
-        Map<String, ColumnDef> columnDefMap = authColumns(dataModelDesc);
+        Map<String, ColumnDef> columnDefMap = authColumns(dataModelDesc, syncContext.isAdmin(), ImmutableSet.of(),
+                ImmutableSet.of());
+        List<MeasureDef> measureDefs = dataModelDesc.getEffectiveMeasures().values().stream() //
+                .map(MeasureDef::new).collect(Collectors.toList());
 
-        List<MeasureDef> measureDefs = dataModelDesc.getEffectiveMeasures().values().stream().map(MeasureDef::new)
-                .collect(Collectors.toList());
-        markHasPermissionIndexedColumnsAndMeasures(columnDefMap, measureDefs, indexPlan, dimensions, measures,
-                syncContext.getModelElement());
+        markHasPermissionIndexedColumnsAndMeasures(columnDefMap, measureDefs, indexPlan, ImmutableSet.of(), dimensions,
+                measures, syncContext.getModelElement());
         markComputedColumnVisibility(columnDefMap, measureDefs, syncContext.getKylinConfig().exposeComputedColumn());
+
         Set<String[]> hierarchies = getHierarchies(indexPlan);
         JoinTreeNode joinTree = generateJoinTree(dataModelDesc.getJoinTables(), dataModelDesc.getRootFactTableName());
-
         return getSyncModel(dataModelDesc, columnDefMap, measureDefs, hierarchies, joinTree);
     }
 
@@ -99,20 +87,22 @@ public class SyncModelBuilder {
 
         Set<String> allAuthColumns = addHasPermissionCCColumn(dataModelDesc, authColumns);
         // init joinTree, dimension cols, measure cols, hierarchies
-        Map<String, ColumnDef> columnDefMap = authColumns(dataModelDesc, authTables, allAuthColumns);
+        Map<String, ColumnDef> columnDefMap = authColumns(dataModelDesc, syncContext.isAdmin(), authTables,
+                allAuthColumns);
+        List<MeasureDef> measureDefs = dataModelDesc.getEffectiveMeasures().values().stream() //
+                .filter(measure -> checkMeasurePermission(allAuthColumns, measure)) //
+                .map(MeasureDef::new).collect(Collectors.toList());
 
-        List<MeasureDef> measureDefs = dataModelDesc.getEffectiveMeasures().values().stream()
-                .filter(measure -> checkMeasurePermission(allAuthColumns, measure)).map(MeasureDef::new)
-                .collect(Collectors.toList());
         markHasPermissionIndexedColumnsAndMeasures(columnDefMap, measureDefs, indexPlan, allAuthColumns, dimensions,
                 measures, syncContext.getModelElement());
         markComputedColumnVisibility(columnDefMap, measureDefs, syncContext.getKylinConfig().exposeComputedColumn());
+
+        Set<String> omitDbColSet = renameColumnName(allAuthColumns);
         Set<String[]> hierarchies = getHierarchies(indexPlan).stream()
-                .map(hierarchyArray -> Arrays.stream(hierarchyArray).filter(renameColumnName(allAuthColumns)::contains)
+                .map(hierarchyArray -> Arrays.stream(hierarchyArray).filter(omitDbColSet::contains)
                         .collect(Collectors.toSet()).toArray(new String[0]))
                 .collect(Collectors.toSet()).stream().filter(x -> !Arrays.asList(x).isEmpty())
                 .collect(Collectors.toSet());
-
         JoinTreeNode joinTree = generateJoinTree(dataModelDesc.getJoinTables(), dataModelDesc.getRootFactTableName());
         return getSyncModel(dataModelDesc, columnDefMap, measureDefs, hierarchies, joinTree);
     }
@@ -125,7 +115,7 @@ public class SyncModelBuilder {
         syncModel.setJoinTree(joinTree);
         syncModel.setMetrics(measureDefs);
         syncModel.setHierarchies(hierarchies);
-        syncModel.setProjectName(syncContext.getProjectName());
+        syncModel.setProject(syncContext.getProjectName());
         syncModel.setModelName(dataModelDesc.getAlias());
         syncModel.setHost(syncContext.getHost());
         syncModel.setPort(String.valueOf(syncContext.getPort()));
@@ -141,135 +131,105 @@ public class SyncModelBuilder {
 
     private void markComputedColumnVisibility(Map<String, ColumnDef> columnDefMap, List<MeasureDef> measureDefs,
             boolean exposeComputedColumns) {
-        if (!exposeComputedColumns) {
-            // hide all CC cols and related measures
-            for (ColumnDef columnDef : columnDefMap.values()) {
-                if (columnDef.isComputedColumn()) {
-                    columnDef.setHidden(true);
-                }
+        if (exposeComputedColumns) {
+            return;
+        }
+        // hide all CC cols and related measures
+        for (ColumnDef columnDef : columnDefMap.values()) {
+            if (columnDef.isComputedColumn()) {
+                columnDef.setHidden(true);
             }
-            for (MeasureDef measureDef : measureDefs) {
-                for (TblColRef paramColRef : measureDef.getMeasure().getFunction().getColRefs()) {
-                    if (columnDefMap.get(paramColRef.getAliasDotName()).isComputedColumn()) {
-                        measureDef.setHidden(true);
-                        break;
-                    }
+        }
+        for (MeasureDef measureDef : measureDefs) {
+            for (TblColRef paramColRef : measureDef.getMeasure().getFunction().getColRefs()) {
+                ColumnDef columnDef = columnDefMap.get(paramColRef.getAliasDotName());
+                if (columnDef != null && columnDef.isComputedColumn()) {
+                    measureDef.setHidden(true);
+                    break;
                 }
             }
         }
     }
 
     private void markHasPermissionIndexedColumnsAndMeasures(Map<String, ColumnDef> columnDefMap,
-            List<MeasureDef> measureDefs, IndexPlan indexPlan, Set<String> columns, List<String> dimensions,
+            List<MeasureDef> measureDefs, IndexPlan indexPlan, Set<String> authorizedCols, List<String> dimensions,
             List<String> measures, SyncContext.ModelElement modelElement) {
         Set<String> colsToShow = Sets.newHashSet();
         Set<String> measuresToShow = Sets.newHashSet();
         switch (modelElement) {
         case AGG_INDEX_COL:
-            ImmutableBitSet aggDimBitSet = indexPlan.getAllIndexes().stream().filter(index -> !index.isTableIndex())
-                    .map(IndexEntity::getDimensionBitset).reduce(ImmutableBitSet.EMPTY, ImmutableBitSet::or);
-            Set<TblColRef> tblColRefs = indexPlan.getEffectiveDimCols().entrySet().stream()
-                    .filter(entry -> aggDimBitSet.get(entry.getKey())).map(Map.Entry::getValue)
+            ImmutableBitSet aggDimBitSet = indexPlan.getAllIndexes().stream() //
+                    .filter(index -> !index.isTableIndex()) //
+                    .map(IndexEntity::getDimensionBitset) //
+                    .reduce(ImmutableBitSet.EMPTY, ImmutableBitSet::or);
+            Set<TblColRef> tblColRefs = indexPlan.getEffectiveDimCols().entrySet().stream() //
+                    .filter(entry -> aggDimBitSet.get(entry.getKey())) //
+                    .map(Map.Entry::getValue) //
                     .collect(Collectors.toSet());
-            colsToShow = tblColRefs.stream().filter(column -> columns.contains(column.getAliasDotName()))
-                    .map(TblColRef::getAliasDotName).collect(Collectors.toSet());
-            measuresToShow = indexPlan.getEffectiveMeasures().values().stream()
-                    .filter(measureDef -> checkMeasurePermission(columns, measureDef)).map(MeasureDesc::getName)
+            colsToShow = tblColRefs.stream() //
+                    .filter(colRef -> testAuthorizedCols(authorizedCols, colRef)) //
+                    .map(TblColRef::getAliasDotName) //
+                    .collect(Collectors.toSet());
+            measuresToShow = indexPlan.getEffectiveMeasures().values().stream() //
+                    .filter(measureDef -> testAuthorizedMeasures(authorizedCols, measureDef)) //
+                    .map(MeasureDesc::getName) //
                     .collect(Collectors.toSet());
             break;
         case AGG_INDEX_AND_TABLE_INDEX_COL:
-            colsToShow = indexPlan.getEffectiveDimCols().values().stream()
-                    .filter(column -> columns.contains(column.getAliasDotName())).map(TblColRef::getAliasDotName)
+            colsToShow = indexPlan.getEffectiveDimCols().values().stream() //
+                    .filter(colRef -> testAuthorizedCols(authorizedCols, colRef)) //
+                    .map(TblColRef::getAliasDotName) //
                     .collect(Collectors.toSet());
             measuresToShow = indexPlan.getEffectiveMeasures().values().stream()
-                    .filter(measureDef -> checkMeasurePermission(columns, measureDef)).map(MeasureDesc::getName)
+                    .filter(measureDef -> testAuthorizedMeasures(authorizedCols, measureDef)) //
+                    .map(MeasureDesc::getName) //
                     .collect(Collectors.toSet());
             break;
         case ALL_COLS:
-            colsToShow = indexPlan.getModel().getDimensionNameIdMap().keySet().stream()
-                    .filter(renameColumnName(columns)::contains).collect(Collectors.toSet());
-            measuresToShow = indexPlan.getModel().getEffectiveMeasures().values().stream()
-                    .filter(measureDef -> checkMeasurePermission(columns, measureDef)).map(MeasureDesc::getName)
+            colsToShow = indexPlan.getModel().getEffectiveDimensions().values().stream()
+                    .filter(colRef -> testAuthorizedCols(authorizedCols, colRef)) //
+                    .map(TblColRef::getAliasDotName) //
+                    .collect(Collectors.toSet());
+            measuresToShow = indexPlan.getModel().getEffectiveMeasures().values().stream() //
+                    .filter(measureDef -> testAuthorizedMeasures(authorizedCols, measureDef)) //
+                    .map(MeasureDesc::getName) //
                     .collect(Collectors.toSet());
-            for (MeasureDef measureDef : measureDefs) {
-                measureDef.setHidden(false);
-            }
             break;
         case CUSTOM_COLS:
-            colsToShow = indexPlan.getModel().getDimensionNameIdMap().keySet().stream()
-                    .filter(renameColumnName(new HashSet<>(dimensions))::contains).collect(Collectors.toSet());
-            measuresToShow = indexPlan.getModel().getEffectiveMeasures().values().stream()
-                    .filter(measureDef -> measures.contains(measureDef.getName())).map(MeasureDesc::getName)
+            Set<String> dimensionSet = Sets.newHashSet(dimensions);
+            colsToShow = indexPlan.getModel().getEffectiveDimensions().values().stream()
+                    .filter(colRef -> testAuthorizedDimensions(dimensionSet, colRef)) //
+                    .map(TblColRef::getAliasDotName) //
+                    .collect(Collectors.toSet());
+            measuresToShow = indexPlan.getModel().getEffectiveMeasures().values().stream() //
+                    .map(MeasureDesc::getName) //
+                    .filter(measures::contains) //
                     .collect(Collectors.toSet());
-
-            for (MeasureDef measureDef : measureDefs) {
-                if (measuresToShow.contains(measureDef.getMeasure().getName())
-                        && (measures != null && measures.contains(measureDef.getMeasure().getName()))) {
-                    measureDef.setHidden(false);
-                }
-            }
             break;
         default:
             break;
         }
-        setDimensionAndMeasureHidden(columnDefMap, measureDefs, modelElement, colsToShow, measuresToShow);
+        showDimsAndMeasures(columnDefMap, measureDefs, colsToShow, measuresToShow);
     }
 
-    private void markHasPermissionIndexedColumnsAndMeasures(Map<String, ColumnDef> columnDefMap,
-            List<MeasureDef> measureDefs, IndexPlan indexPlan, List<String> dimensions, List<String> measures,
-            SyncContext.ModelElement modelElement) {
-        Set<String> colsToShow = new HashSet<>();
-        Set<String> measuresToShow = new HashSet<>();
-        switch (modelElement) {
-        case AGG_INDEX_COL:
-            ImmutableBitSet aggDimBitSet = indexPlan.getAllIndexes().stream().filter(index -> !index.isTableIndex())
-                    .map(IndexEntity::getDimensionBitset).reduce(ImmutableBitSet.EMPTY, ImmutableBitSet::or);
-            Set<TblColRef> tblColRefs = indexPlan.getEffectiveDimCols().entrySet().stream()
-                    .filter(entry -> aggDimBitSet.get(entry.getKey())).map(Map.Entry::getValue)
-                    .collect(Collectors.toSet());
-            colsToShow = tblColRefs.stream().map(TblColRef::getAliasDotName).collect(Collectors.toSet());
-            measuresToShow = indexPlan.getEffectiveMeasures().values().stream().map(MeasureDesc::getName)
-                    .collect(Collectors.toSet());
-            break;
-        case AGG_INDEX_AND_TABLE_INDEX_COL:
-            colsToShow = indexPlan.getEffectiveDimCols().values().stream().map(TblColRef::getAliasDotName)
-                    .collect(Collectors.toSet());
-            measuresToShow = indexPlan.getEffectiveMeasures().values().stream().map(MeasureDesc::getName)
-                    .collect(Collectors.toSet());
-            break;
-        case ALL_COLS:
-            colsToShow = indexPlan.getModel().getDimensionNameIdMap().keySet();
-            measuresToShow = indexPlan.getModel().getEffectiveMeasures().values().stream().map(MeasureDesc::getName)
-                    .collect(Collectors.toSet());
-            for (MeasureDef measureDef : measureDefs) {
-                measureDef.setHidden(false);
-            }
-            break;
-        case CUSTOM_COLS:
-            colsToShow = indexPlan.getModel().getDimensionNameIdMap().keySet().stream()
-                    .filter(renameColumnName(new HashSet<>(dimensions))::contains).collect(Collectors.toSet());
-            measuresToShow = indexPlan.getModel().getEffectiveMeasures().values().stream()
-                    .filter(measureDef -> measures != null && measures.contains(measureDef.getName()))
-                    .map(MeasureDesc::getName).collect(Collectors.toSet());
-            for (MeasureDef measureDef : measureDefs) {
-                if (measuresToShow.contains(measureDef.getMeasure().getName())
-                        && (measures != null && measures.contains(measureDef.getMeasure().getName()))) {
-                    measureDef.setHidden(false);
-                }
-            }
-            break;
-        default:
-            break;
-        }
+    private boolean testAuthorizedCols(Set<String> authorizedCols, TblColRef colRef) {
+        return syncContext.isAdmin() || authorizedCols.contains(colRef.getColumnWithTableAndSchema())
+                || authorizedCols.contains(colRef.getAliasDotName());
+    }
 
-        setDimensionAndMeasureHidden(columnDefMap, measureDefs, modelElement, colsToShow, measuresToShow);
+    private boolean testAuthorizedDimensions(Set<String> dimensions, TblColRef colRef) {
+        return dimensions.contains(colRef.getColumnWithTableAndSchema())
+                || dimensions.contains(colRef.getAliasDotName());
     }
 
-    private void setDimensionAndMeasureHidden(Map<String, ColumnDef> columnDefMap, List<MeasureDef> measureDefs,
-            SyncContext.ModelElement modelElement, Set<String> colsToShow, Set<String> measuresToShow) {
-        colsToShow.forEach(colToShow -> columnDefMap.get(colToShow).setHidden(false));
-        if (modelElement.equals(SyncContext.ModelElement.CUSTOM_COLS)) {
-            return;
+    private boolean testAuthorizedMeasures(Set<String> authorizedCols, NDataModel.Measure measureDef) {
+        return syncContext.isAdmin() || checkMeasurePermission(authorizedCols, measureDef);
+    }
+
+    private void showDimsAndMeasures(Map<String, ColumnDef> columnDefMap, List<MeasureDef> measureDefs,
+            Set<String> colsToShow, Set<String> measuresToShow) {
+        for (String colToShow : colsToShow) {
+            columnDefMap.get(colToShow).setHidden(false);
         }
         for (MeasureDef measureDef : measureDefs) {
             if (measuresToShow.contains(measureDef.getMeasure().getName())) {
@@ -288,60 +248,38 @@ public class SyncModelBuilder {
         }).collect(Collectors.toSet());
     }
 
-    private Map<String, ColumnDef> getAllColumns(NDataModel modelDesc) {
-        Map<String, ColumnDef> modelColsMap = new HashMap<>();
-        for (TableRef tableRef : modelDesc.getAllTables()) {
-            for (TblColRef column : tableRef.getColumns()) {
-                ColumnDef columnDef = new ColumnDef("dimension", tableRef.getAlias(), null, column.getName(),
-                        column.getDatatype(), true, column.getColumnDesc().isComputedColumn());
-                String colName = tableRef.getAlias() + "." + column.getName();
-                modelColsMap.put(colName, columnDef);
+    private Map<String, ColumnDef> authColumns(NDataModel model, boolean isAdmin, Set<String> tables,
+            Set<String> columns) {
+        Map<String, ColumnDef> modelColsMap = Maps.newHashMap();
+        for (TableRef tableRef : model.getAllTables()) {
+            if (!isAdmin && !tables.contains(tableRef.getTableIdentity())) {
+                continue;
             }
-        }
-
-        // sync col alias
-        for (NDataModel.NamedColumn namedColumn : modelDesc.getAllNamedColumns()) {
-            if (modelColsMap.get(namedColumn.getAliasDotColumn()) != null) {
-                modelColsMap.get(namedColumn.getAliasDotColumn()).setColumnAlias(namedColumn.getName());
+            for (TblColRef colRef : tableRef.getColumns()) {
+                if (isAdmin || columns.contains(colRef.getAliasDotName())
+                        || columns.contains(colRef.getColumnWithTableAndSchema())) {
+                    ColumnDef columnDef = ColumnDef.builder() //
+                            .role("dimension") //
+                            .tableAlias(tableRef.getAlias()) //
+                            .columnName(colRef.getName()) //
+                            .columnType(colRef.getDatatype()) //
+                            .isHidden(true) //
+                            .isComputedColumn(colRef.getColumnDesc().isComputedColumn()) //
+                            .build();
+                    modelColsMap.put(colRef.getIdentity(), columnDef);
+                }
             }
         }
-        return modelColsMap;
-    }
-
-    private Map<String, ColumnDef> authColumns(NDataModel modelDesc) {
-        Map<String, ColumnDef> modelColsMap = Maps.newHashMap();
-        modelDesc.getAllTables().stream().forEach(tableRef -> tableRef.getColumns().stream().forEach(column -> {
-            ColumnDef columnDef = new ColumnDef("dimension", tableRef.getAlias(), null, column.getName(),
-                    column.getDatatype(), true, column.getColumnDesc().isComputedColumn());
-            String colName = tableRef.getAlias() + "." + column.getName();
-            modelColsMap.put(colName, columnDef);
-        }));
 
         // sync col alias
-        modelDesc.getAllNamedColumns().stream()
-                .filter(namedColumn -> modelColsMap.get(namedColumn.getAliasDotColumn()) != null)
-                .forEach(namedColumn -> modelColsMap.get(namedColumn.getAliasDotColumn())
-                        .setColumnAlias(namedColumn.getName()));
-        return modelColsMap;
-    }
-
-    private Map<String, ColumnDef> authColumns(NDataModel modelDesc, Set<String> tables, Set<String> columns) {
-        Map<String, ColumnDef> modelColsMap = Maps.newHashMap();
-        modelDesc.getAllTables().stream().filter(table -> tables.contains(table.getTableIdentity()))
-                .forEach(tableRef -> tableRef.getColumns().stream()
-                        .filter(column -> columns.contains(column.getAliasDotName())).forEach(column -> {
-                            ColumnDef columnDef = new ColumnDef("dimension", tableRef.getAlias(), null,
-                                    column.getName(), column.getDatatype(), true,
-                                    column.getColumnDesc().isComputedColumn());
-                            String colName = tableRef.getAlias() + "." + column.getName();
-                            modelColsMap.put(colName, columnDef);
-                        }));
-
-        // sync col alias
-        modelDesc.getAllNamedColumns().stream()
-                .filter(namedColumn -> modelColsMap.get(namedColumn.getAliasDotColumn()) != null)
-                .forEach(namedColumn -> modelColsMap.get(namedColumn.getAliasDotColumn())
-                        .setColumnAlias(namedColumn.getName()));
+        model.getAllNamedColumns().stream() //
+                .filter(NDataModel.NamedColumn::isExist) //
+                .forEach(namedColumn -> {
+                    ColumnDef columnDef = modelColsMap.get(namedColumn.getAliasDotColumn());
+                    if (columnDef != null) {
+                        columnDef.setColumnAlias(namedColumn.getName());
+                    }
+                });
         return modelColsMap;
     }
 
@@ -394,17 +332,17 @@ public class SyncModelBuilder {
         Set<String> hierarchyNameSet = Sets.newHashSet();
         for (NAggregationGroup group : indexPlan.getRuleBasedIndex().getAggregationGroups()) {
             SelectRule rule = group.getSelectRule();
-            if (rule != null) {
-                for (Integer[] hierarchyIds : rule.hierarchyDims) {
-                    if (hierarchyIds != null && hierarchyIds.length != 0) {
-
-                        String[] hierarchyNames = Arrays.stream(hierarchyIds)
-                                .map(id -> indexPlan.getModel().getColumnNameByColumnId(id)).toArray(String[]::new);
-                        String hierarchyNamesJoined = String.join(",", hierarchyNames);
-                        if (!hierarchyNameSet.contains(hierarchyNamesJoined)) {
-                            hierarchies.add(hierarchyNames);
-                            hierarchyNameSet.add(hierarchyNamesJoined);
-                        }
+            if (rule == null) {
+                continue;
+            }
+            for (Integer[] hierarchyIds : rule.hierarchyDims) {
+                if (ArrayUtils.isNotEmpty(hierarchyIds)) {
+                    String[] hierarchyNames = Arrays.stream(hierarchyIds)
+                            .map(id -> indexPlan.getModel().getColumnNameByColumnId(id)).toArray(String[]::new);
+                    String hierarchyNamesJoined = String.join(",", hierarchyNames);
+                    if (!hierarchyNameSet.contains(hierarchyNamesJoined)) {
+                        hierarchies.add(hierarchyNames);
+                        hierarchyNameSet.add(hierarchyNamesJoined);
                     }
                 }
             }
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/model/ColumnDef.java b/src/query-common/src/main/java/org/apache/kylin/tool/bisync/model/ColumnDef.java
index 0871f6ea5b..c67f56a432 100644
--- a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/model/ColumnDef.java
+++ b/src/query-common/src/main/java/org/apache/kylin/tool/bisync/model/ColumnDef.java
@@ -17,6 +17,17 @@
  */
 package org.apache.kylin.tool.bisync.model;
 
+import lombok.AllArgsConstructor;
+import lombok.Builder;
+import lombok.Getter;
+import lombok.NoArgsConstructor;
+import lombok.Setter;
+
+@Builder
+@NoArgsConstructor
+@AllArgsConstructor
+@Getter
+@Setter
 public class ColumnDef {
 
     private String role;
@@ -29,74 +40,11 @@ public class ColumnDef {
 
     private String columnType;
 
-    private boolean isHidden;
+    private boolean isHidden = true;
 
     private boolean isComputedColumn;
 
-    public ColumnDef(String role, String tableAlias, String columnAlias, String columnName, String columnType,
-            boolean isHidden, boolean isComputedColumn) {
-        this.role = role;
-        this.tableAlias = tableAlias;
-        this.columnAlias = columnAlias;
-        this.columnName = columnName;
-        this.columnType = columnType;
-        this.isHidden = isHidden;
-        this.isComputedColumn = isComputedColumn;
-    }
-
-    public String getRole() {
-        return role;
-    }
-
-    public void setRole(String role) {
-        this.role = role;
-    }
-
-    public String getTableAlias() {
-        return tableAlias;
-    }
-
-    public void setTableAlias(String tableAlias) {
-        this.tableAlias = tableAlias;
-    }
-
-    public boolean isHidden() {
-        return isHidden;
-    }
-
-    public void setHidden(boolean hidden) {
-        isHidden = hidden;
-    }
-
-    public String getColumnAlias() {
-        return columnAlias;
-    }
-
-    public void setColumnAlias(String columnAlias) {
-        this.columnAlias = columnAlias;
-    }
-
-    public String getColumnName() {
-        return columnName;
-    }
-
-    public void setColumnName(String columnName) {
-        this.columnName = columnName;
-    }
-
-    public String getColumnType() {
-        return columnType;
-    }
-
-    public void setColumnType(String columnType) {
-        this.columnType = columnType;
-    }
-
-    public boolean isComputedColumn() {
-        return isComputedColumn;
-    }
-
-    public void setComputedColumn(boolean computedColumn) {
-        isComputedColumn = computedColumn;
+    public boolean isDimension() {
+        return columnType.equalsIgnoreCase("nominal");
     }
 }
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/model/SyncModel.java b/src/query-common/src/main/java/org/apache/kylin/tool/bisync/model/SyncModel.java
index b8c8ddfafa..60151e17ef 100644
--- a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/model/SyncModel.java
+++ b/src/query-common/src/main/java/org/apache/kylin/tool/bisync/model/SyncModel.java
@@ -21,9 +21,14 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import lombok.Getter;
+import lombok.Setter;
+
+@Getter
+@Setter
 public class SyncModel {
 
-    private String projectName;
+    private String project;
 
     private String modelName;
 
@@ -38,69 +43,4 @@ public class SyncModel {
     private List<MeasureDef> metrics;
 
     private Set<String[]> hierarchies;
-
-    public JoinTreeNode getJoinTree() {
-        return joinTree;
-    }
-
-    public void setJoinTree(JoinTreeNode joinTree) {
-        this.joinTree = joinTree;
-    }
-
-    public String getProjectName() {
-        return projectName;
-    }
-
-    public void setProjectName(String projectName) {
-        this.projectName = projectName;
-    }
-
-    public String getModelName() {
-        return modelName;
-    }
-
-    public void setModelName(String modelName) {
-        this.modelName = modelName;
-    }
-
-    public Map<String, ColumnDef> getColumnDefMap() {
-        return columnDefMap;
-    }
-
-    public void setColumnDefMap(Map<String, ColumnDef> columnDefMap) {
-        this.columnDefMap = columnDefMap;
-    }
-
-    public Set<String[]> getHierarchies() {
-        return hierarchies;
-    }
-
-    public void setHierarchies(Set<String[]> hierarchies) {
-        this.hierarchies = hierarchies;
-    }
-
-    public List<MeasureDef> getMetrics() {
-        return metrics;
-    }
-
-    public void setMetrics(List<MeasureDef> metrics) {
-        this.metrics = metrics;
-    }
-
-    public String getHost() {
-        return host;
-    }
-
-    public void setHost(String host) {
-        this.host = host;
-    }
-
-    public String getPort() {
-        return port;
-    }
-
-    public void setPort(String port) {
-        this.port = port;
-    }
-
 }
diff --git a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDataSourceConverter.java b/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDataSourceConverter.java
index ced8616fa0..6540df9238 100644
--- a/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDataSourceConverter.java
+++ b/src/query-common/src/main/java/org/apache/kylin/tool/bisync/tableau/TableauDataSourceConverter.java
@@ -126,7 +126,7 @@ public class TableauDataSourceConverter implements BISyncModelConverter {
     }
 
     protected void fillTemplate(TableauDatasource tds, SyncModel syncModel) {
-        fillConnectionProperties(tds, syncModel.getHost(), syncModel.getPort(), syncModel.getProjectName(),
+        fillConnectionProperties(tds, syncModel.getHost(), syncModel.getPort(), syncModel.getProject(),
                 syncModel.getModelName());
         Map<String, Pair<Col, ColumnDef>> colMap = fillCols(tds, syncModel.getColumnDefMap());
         fillColumns(tds, colMap);
diff --git a/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageLockTest.java b/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageLockTest.java
index 026a056965..03c191ab16 100644
--- a/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageLockTest.java
+++ b/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageLockTest.java
@@ -77,6 +77,7 @@ import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.RandomUtil;
 import org.apache.kylin.common.util.Unsafe;
 import org.apache.kylin.engine.spark.IndexDataConstructor;
+import org.apache.kylin.engine.spark.job.NSparkCubingJob;
 import org.apache.kylin.job.SecondStorageCleanJobBuildParams;
 import org.apache.kylin.job.SecondStorageJobParamUtil;
 import org.apache.kylin.job.execution.AbstractExecutable;
@@ -92,6 +93,7 @@ import org.apache.kylin.metadata.cube.model.NDataSegment;
 import org.apache.kylin.metadata.cube.model.NDataflow;
 import org.apache.kylin.metadata.cube.model.NDataflowManager;
 import org.apache.kylin.metadata.cube.model.NIndexPlanManager;
+import org.apache.kylin.metadata.epoch.EpochManager;
 import org.apache.kylin.metadata.epoch.EpochOrchestrator;
 import org.apache.kylin.metadata.model.ColumnDesc;
 import org.apache.kylin.metadata.model.ManagementType;
@@ -191,10 +193,8 @@ import io.kyligence.kap.clickhouse.job.LoadContext;
 import io.kyligence.kap.clickhouse.job.S3TableSource;
 import io.kyligence.kap.clickhouse.management.ClickHouseConfigLoader;
 import io.kyligence.kap.clickhouse.parser.ShowDatabasesParser;
-import org.apache.kylin.engine.spark.job.NSparkCubingJob;
 import io.kyligence.kap.guava20.shaded.common.collect.ImmutableSet;
 import io.kyligence.kap.guava20.shaded.common.collect.Lists;
-import org.apache.kylin.metadata.epoch.EpochManager;
 import io.kyligence.kap.newten.clickhouse.ClickHouseSimpleITTestUtils;
 import io.kyligence.kap.newten.clickhouse.ClickHouseUtils;
 import io.kyligence.kap.newten.clickhouse.EmbeddedHttpServer;
@@ -349,6 +349,8 @@ public class SecondStorageLockTest implements JobWaiter {
         ReflectionTestUtils.setField(modelBuildService, "modelService", modelService);
         ReflectionTestUtils.setField(modelBuildService, "segmentHelper", segmentHelper);
         ReflectionTestUtils.setField(modelBuildService, "aclEvaluate", aclEvaluate);
+        ReflectionTestUtils.setField(modelBuildService, "accessService", accessService);
+        ReflectionTestUtils.setField(modelBuildService, "userGroupService", userGroupService);
 
         ReflectionTestUtils.setField(nModelController, "modelService", modelService);
         ReflectionTestUtils.setField(nModelController, "fusionModelService", fusionModelService);
diff --git a/src/tool/src/test/java/org/apache/kylin/tool/bisync/SyncModelBuilderTest.java b/src/tool/src/test/java/org/apache/kylin/tool/bisync/SyncModelBuilderTest.java
index 5a5b635d90..d45eb7b97c 100644
--- a/src/tool/src/test/java/org/apache/kylin/tool/bisync/SyncModelBuilderTest.java
+++ b/src/tool/src/test/java/org/apache/kylin/tool/bisync/SyncModelBuilderTest.java
@@ -19,16 +19,19 @@
 package org.apache.kylin.tool.bisync;
 
 import java.io.ByteArrayOutputStream;
+import java.io.File;
 import java.io.IOException;
-import java.io.InputStreamReader;
+import java.net.URL;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Objects;
 import java.util.Set;
 import java.util.stream.Collectors;
 
+import org.apache.commons.io.FileUtils;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
 import org.apache.kylin.metadata.acl.AclTCR;
@@ -44,10 +47,9 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.common.base.Charsets;
+import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
-import com.google.common.io.CharStreams;
 
 import lombok.val;
 
@@ -63,17 +65,22 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
         this.cleanupTestMetadata();
     }
 
+    private String getProject() {
+        return "default";
+    }
+
     @Test
     public void testBuildSyncModel() {
         val project = "default";
         val modelId = "cb596712-3a09-46f8-aea1-988b43fe9b6c";
         val syncContext = SyncModelTestUtil.createSyncContext(project, modelId, KylinConfig.getInstanceFromEnv());
         syncContext.setModelElement(SyncContext.ModelElement.ALL_COLS);
+        syncContext.setAdmin(true);
         val syncModel = new SyncModelBuilder(syncContext).buildSourceSyncModel();
         val df = NDataflowManager.getInstance(KylinConfig.getInstanceFromEnv(), project).getDataflow(modelId);
         val model = df.getModel();
 
-        Assert.assertEquals(project, syncModel.getProjectName());
+        Assert.assertEquals(project, syncModel.getProject());
         Assert.assertEquals(model.getAlias(), syncModel.getModelName());
         Assert.assertEquals("localhost", syncModel.getHost());
         Assert.assertEquals("7070", syncModel.getPort());
@@ -98,9 +105,9 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
 
         Assert.assertEquals(model.getAllMeasures().size(), syncModel.getMetrics().size());
         val syncMeasure = syncModel.getMetrics().get(0).getMeasure();
-        val modelMeasure = model.getAllMeasures().stream().filter(m -> m.getId() == syncMeasure.getId()).findFirst()
-                .get();
-        Assert.assertEquals(modelMeasure, syncMeasure);
+        val modelMeasure = model.getAllMeasures().stream().filter(m -> m.getId() == syncMeasure.getId()).findFirst();
+        Assert.assertTrue(modelMeasure.isPresent());
+        Assert.assertEquals(modelMeasure.get(), syncMeasure);
     }
 
     @Test
@@ -110,7 +117,7 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
         val project = "default";
         val modelId = "82fa7671-a935-45f5-8779-85703601f49a";
         val syncContext = SyncModelTestUtil.createSyncContext(project, modelId, KylinConfig.getInstanceFromEnv());
-        prepareBasic(project);
+        prepareBasic();
 
         Set<String> allAuthTables = Sets.newHashSet();
         Set<String> allAuthColumns = Sets.newHashSet();
@@ -157,6 +164,7 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
         measures.add("TEST_COUNT_DISTINCT_BITMAP");
         measures.add("GVM_PERCENTILE");
 
+        syncContext.setAdmin(false);
         syncContext.setModelElement(SyncContext.ModelElement.CUSTOM_COLS);
         TableauDatasourceModel datasource = (TableauDatasourceModel) BISyncTool
                 .dumpHasPermissionToBISyncModel(syncContext, allAuthTables, newAuthColumns, dimensions, measures);
@@ -165,35 +173,40 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
         Assert.assertEquals(getExpectedTds("/bisync_tableau/nmodel_full_measure_test.connector_permission.tds"),
                 outStream.toString(Charset.defaultCharset().name()));
 
+        syncContext.setAdmin(true);
+        syncContext.setModelElement(SyncContext.ModelElement.CUSTOM_COLS);
         TableauDatasourceModel datasource1 = (TableauDatasourceModel) BISyncTool.dumpBISyncModel(syncContext,
-                dimensions, null);
+                dimensions, ImmutableList.of());
         ByteArrayOutputStream outStream1 = new ByteArrayOutputStream();
         datasource1.dump(outStream1);
         Assert.assertEquals(
                 getExpectedTds("/bisync_tableau/nmodel_full_measure_test.connector_permission_no_measure.tds"),
                 outStream1.toString(Charset.defaultCharset().name()));
 
+        syncContext.setAdmin(true);
         syncContext.setModelElement(SyncContext.ModelElement.AGG_INDEX_COL);
-        TableauDatasourceModel datasource2 = (TableauDatasourceModel) BISyncTool.dumpBISyncModel(syncContext, null,
-                null);
+        TableauDatasourceModel datasource2 = (TableauDatasourceModel) BISyncTool.dumpBISyncModel(syncContext,
+                ImmutableList.of(), ImmutableList.of());
         ByteArrayOutputStream outStream2 = new ByteArrayOutputStream();
         datasource2.dump(outStream2);
         Assert.assertEquals(
                 getExpectedTds("/bisync_tableau/nmodel_full_measure_test.connector_permission_agg_index_col.tds"),
                 outStream2.toString(Charset.defaultCharset().name()));
 
+        syncContext.setAdmin(true);
         syncContext.setModelElement(SyncContext.ModelElement.AGG_INDEX_AND_TABLE_INDEX_COL);
-        TableauDatasourceModel datasource3 = (TableauDatasourceModel) BISyncTool.dumpBISyncModel(syncContext, null,
-                null);
+        TableauDatasourceModel datasource3 = (TableauDatasourceModel) BISyncTool.dumpBISyncModel(syncContext,
+                ImmutableList.of(), ImmutableList.of());
         ByteArrayOutputStream outStream3 = new ByteArrayOutputStream();
         datasource3.dump(outStream3);
         Assert.assertEquals(
                 getExpectedTds("/bisync_tableau/nmodel_full_measure_test.connector_permission_agg_index_col.tds"),
                 outStream3.toString(Charset.defaultCharset().name()));
 
+        syncContext.setAdmin(true);
         syncContext.setModelElement(SyncContext.ModelElement.ALL_COLS);
-        TableauDatasourceModel datasource4 = (TableauDatasourceModel) BISyncTool.dumpBISyncModel(syncContext, null,
-                null);
+        TableauDatasourceModel datasource4 = (TableauDatasourceModel) BISyncTool.dumpBISyncModel(syncContext,
+                ImmutableList.of(), ImmutableList.of());
         ByteArrayOutputStream outStream4 = new ByteArrayOutputStream();
         datasource4.dump(outStream4);
         Assert.assertEquals(getExpectedTds("/bisync_tableau/nmodel_full_measure_test.connector_permission_all_col.tds"),
@@ -206,7 +219,8 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
         val modelId = "cb596712-3a09-46f8-aea1-988b43fe9b6c";
         val syncContext = SyncModelTestUtil.createSyncContext(project, modelId, KylinConfig.getInstanceFromEnv());
         syncContext.setModelElement(SyncContext.ModelElement.ALL_COLS);
-        prepareBasic(project);
+        syncContext.setAdmin(true);
+        prepareBasic();
 
         TableauDatasourceModel datasource = (TableauDatasourceModel) BISyncTool.dumpToBISyncModel(syncContext);
         ByteArrayOutputStream outStream = new ByteArrayOutputStream();
@@ -230,6 +244,7 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
 
         val syncContext1 = SyncModelTestUtil.createSyncContext(project, "89af4ee2-2cdb-4b07-b39e-4c29856309aa",
                 KylinConfig.getInstanceFromEnv());
+        syncContext1.setAdmin(true);
         syncContext1.setModelElement(SyncContext.ModelElement.AGG_INDEX_COL);
         TableauDatasourceModel datasource3 = (TableauDatasourceModel) BISyncTool.dumpToBISyncModel(syncContext1);
         ByteArrayOutputStream outStream3 = new ByteArrayOutputStream();
@@ -254,7 +269,7 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
         groups.add("g1");
         val project = "default";
         val modelId = "741ca86a-1f13-46da-a59f-95fb68615e3a";
-        prepareBasic(project);
+        prepareBasic();
         Set<String> allAuthTables = Sets.newHashSet();
         Set<String> allAuthColumns = Sets.newHashSet();
         AclTCRManager aclTCRManager = AclTCRManager.getInstance(KylinConfig.getInstanceFromEnv(), project);
@@ -301,6 +316,7 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
         measures.add("GVM_PERCENTILE");
 
         cc_syncContext.setModelElement(SyncContext.ModelElement.CUSTOM_COLS);
+        cc_syncContext.setAdmin(false);
         TableauDatasourceModel datasource3 = (TableauDatasourceModel) BISyncTool
                 .dumpHasPermissionToBISyncModel(cc_syncContext, allAuthTables, newAuthColumns, dimensions, measures);
         ByteArrayOutputStream outStream3 = new ByteArrayOutputStream();
@@ -309,6 +325,7 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
                 outStream3.toString(Charset.defaultCharset().name()));
 
         cc_syncContext.setModelElement(SyncContext.ModelElement.CUSTOM_COLS);
+        cc_syncContext.setAdmin(true);
         TableauDatasourceModel datasource1 = (TableauDatasourceModel) BISyncTool.dumpBISyncModel(cc_syncContext,
                 dimensions, measures);
         ByteArrayOutputStream outStream1 = new ByteArrayOutputStream();
@@ -323,7 +340,7 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
         groups.add("g1");
         val project = "default";
         val modelId = "82fa7671-a935-45f5-8779-85703601f49a";
-        prepareBasic(project);
+        prepareBasic();
         Set<String> allAuthTables = Sets.newHashSet();
         Set<String> allAuthColumns = Sets.newHashSet();
         AclTCRManager aclTCRManager = AclTCRManager.getInstance(KylinConfig.getInstanceFromEnv(), project);
@@ -337,6 +354,7 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
         }
         val cc_syncContext = SyncModelTestUtil.createSyncContext(project, modelId, KylinConfig.getInstanceFromEnv());
         cc_syncContext.setModelElement(SyncContext.ModelElement.ALL_COLS);
+        cc_syncContext.setAdmin(true);
 
         Set<String> newAuthColumns = convertColumns(cc_syncContext.getDataflow().getModel(), allAuthColumns);
         List<String> dimensions = Lists.newArrayList();
@@ -372,6 +390,7 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
         measures.add("GVM_PERCENTILE");
 
         cc_syncContext.setModelElement(SyncContext.ModelElement.CUSTOM_COLS);
+        cc_syncContext.setAdmin(false);
         TableauDatasourceModel datasource3 = (TableauDatasourceModel) BISyncTool
                 .dumpHasPermissionToBISyncModel(cc_syncContext, allAuthTables, newAuthColumns, dimensions, measures);
         ByteArrayOutputStream outStream3 = new ByteArrayOutputStream();
@@ -380,6 +399,7 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
                 outStream3.toString(Charset.defaultCharset().name()));
 
         cc_syncContext.setModelElement(SyncContext.ModelElement.AGG_INDEX_COL);
+        cc_syncContext.setAdmin(false);
         TableauDatasourceModel datasource4 = (TableauDatasourceModel) BISyncTool.dumpHasPermissionToBISyncModel(
                 cc_syncContext, allAuthTables, newAuthColumns, new ArrayList<>(), new ArrayList<>());
         ByteArrayOutputStream outStream4 = new ByteArrayOutputStream();
@@ -388,6 +408,7 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
                 outStream4.toString(Charset.defaultCharset().name()));
 
         cc_syncContext.setModelElement(SyncContext.ModelElement.AGG_INDEX_AND_TABLE_INDEX_COL);
+        cc_syncContext.setAdmin(false);
         TableauDatasourceModel datasource5 = (TableauDatasourceModel) BISyncTool.dumpHasPermissionToBISyncModel(
                 cc_syncContext, allAuthTables, newAuthColumns, new ArrayList<>(), new ArrayList<>());
         ByteArrayOutputStream outStream5 = new ByteArrayOutputStream();
@@ -396,6 +417,7 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
                 outStream5.toString(Charset.defaultCharset().name()));
 
         cc_syncContext.setModelElement(SyncContext.ModelElement.ALL_COLS);
+        cc_syncContext.setAdmin(false);
         TableauDatasourceModel datasource6 = (TableauDatasourceModel) BISyncTool.dumpHasPermissionToBISyncModel(
                 cc_syncContext, allAuthTables, newAuthColumns, new ArrayList<>(), new ArrayList<>());
         ByteArrayOutputStream outStream6 = new ByteArrayOutputStream();
@@ -410,7 +432,7 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
         groups.add("g1");
         val project = "default";
         val modelId = "82fa7671-a935-45f5-8779-85703601f49a";
-        prepareBasicNoHierarchies(project);
+        prepareBasicNoHierarchies();
         Set<String> allAuthTables = Sets.newHashSet();
         Set<String> allAuthColumns = Sets.newHashSet();
         AclTCRManager aclTCRManager = AclTCRManager.getInstance(KylinConfig.getInstanceFromEnv(), project);
@@ -423,7 +445,6 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
             allAuthColumns.addAll(auths.getColumns());
         }
         val cc_syncContext = SyncModelTestUtil.createSyncContext(project, modelId, KylinConfig.getInstanceFromEnv());
-        cc_syncContext.setModelElement(SyncContext.ModelElement.ALL_COLS);
         Set<String> newAuthColumns = convertColumns(cc_syncContext.getDataflow().getModel(), allAuthColumns);
         List<String> dimensions = Lists.newArrayList();
         //"ORDER_ID", "PRICE", "CAL_DT", "PRICE", "ITEM_COUNT"
@@ -457,6 +478,7 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
         measures.add("TEST_COUNT_DISTINCT_BITMAP");
         measures.add("GVM_PERCENTILE");
         cc_syncContext.setModelElement(SyncContext.ModelElement.CUSTOM_COLS);
+        cc_syncContext.setAdmin(false);
         TableauDatasourceModel datasource3 = (TableauDatasourceModel) BISyncTool
                 .dumpHasPermissionToBISyncModel(cc_syncContext, allAuthTables, newAuthColumns, dimensions, measures);
         ByteArrayOutputStream outStream3 = new ByteArrayOutputStream();
@@ -466,11 +488,13 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
     }
 
     private String getExpectedTds(String path) throws IOException {
-        return CharStreams.toString(new InputStreamReader(getClass().getResourceAsStream(path), Charsets.UTF_8));
+        URL resource = getClass().getResource(path);
+        String fullPath = Objects.requireNonNull(resource).getPath();
+        return FileUtils.readFileToString(new File(fullPath), Charset.defaultCharset());
     }
 
-    private void prepareBasic(String project) {
-        AclTCRManager manager = AclTCRManager.getInstance(getTestConfig(), project);
+    private void prepareBasic() {
+        AclTCRManager manager = AclTCRManager.getInstance(getTestConfig(), getProject());
 
         AclTCR u1a1 = new AclTCR();
         AclTCR.Table u1t1 = new AclTCR.Table();
@@ -499,8 +523,8 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
         manager.updateAclTCR(g1a1, "g1", false);
     }
 
-    private void prepareBasicNoHierarchies(String project) {
-        AclTCRManager manager = AclTCRManager.getInstance(getTestConfig(), project);
+    private void prepareBasicNoHierarchies() {
+        AclTCRManager manager = AclTCRManager.getInstance(getTestConfig(), getProject());
 
         AclTCR u1a1 = new AclTCR();
         AclTCR.Table u1t1 = new AclTCR.Table();
@@ -554,8 +578,10 @@ public class SyncModelBuilderTest extends NLocalFileMetadataTestCase {
         val syncContext1 = SyncModelTestUtil.createSyncContext(project, model1Id, KylinConfig.getInstanceFromEnv());
         val syncContext2 = SyncModelTestUtil.createSyncContext(project, model2Id, KylinConfig.getInstanceFromEnv());
         syncContext1.setModelElement(SyncContext.ModelElement.AGG_INDEX_AND_TABLE_INDEX_COL);
+        syncContext1.setAdmin(true);
         syncContext2.setModelElement(SyncContext.ModelElement.AGG_INDEX_AND_TABLE_INDEX_COL);
-        prepareBasic(project);
+        syncContext2.setAdmin(true);
+        prepareBasic();
 
         TableauDatasourceModel datasource1 = (TableauDatasourceModel) BISyncTool.dumpToBISyncModel(syncContext1);
         ByteArrayOutputStream outStream1 = new ByteArrayOutputStream();
diff --git a/src/tool/src/test/java/org/apache/kylin/tool/bisync/SyncModelTestUtil.java b/src/tool/src/test/java/org/apache/kylin/tool/bisync/SyncModelTestUtil.java
index 91e3894873..28cf1cf270 100644
--- a/src/tool/src/test/java/org/apache/kylin/tool/bisync/SyncModelTestUtil.java
+++ b/src/tool/src/test/java/org/apache/kylin/tool/bisync/SyncModelTestUtil.java
@@ -33,6 +33,7 @@ public class SyncModelTestUtil {
         syncContext.setPort(7070);
         syncContext.setDataflow(NDataflowManager.getInstance(kylinConfig, project).getDataflow(modelId));
         syncContext.setKylinConfig(kylinConfig);
+        syncContext.setAdmin(true);
         return syncContext;
     }
 }


[kylin] 13/22: KYLIN-5318 adjust CC names Adjust dimensions measure and filter condition simultaneously

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 8b31539dab85375937597d7bfff8ef17a89a27bc
Author: Jiale He <35...@users.noreply.github.com>
AuthorDate: Fri Oct 14 14:08:25 2022 +0800

    KYLIN-5318 adjust CC names Adjust dimensions measure and filter condition simultaneously
---
 .../resources/kylin_error_msg_conf_cn.properties   |   2 +-
 .../resources/kylin_error_msg_conf_en.properties   |   2 +-
 .../metadata/model/util/ComputedColumnUtil.java    |   8 +-
 .../apache/kylin/rest/service/ModelService.java    |  55 ++++++++++-
 .../kylin/rest/service/ModelServiceTest.java       | 102 ++++++++++++++++++---
 5 files changed, 149 insertions(+), 20 deletions(-)

diff --git a/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties b/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties
index 327c28d18d..759514ee40 100644
--- a/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties
+++ b/src/core-common/src/main/resources/kylin_error_msg_conf_cn.properties
@@ -98,7 +98,7 @@ KE-010031201=因为查询结果行数超过最大值 ”%s”,无法完成查
 KE-010031202=SQL 语法或格式异常,请检查并修正后重试。
 
 ## 100102XX computed column
-KE-010010201=模型中定义的可计算列的名和表达式与其它模型存在冲突,请修改名称以保持一致,或使用其他的表达式。
+KE-010010201=模型中定义的可计算列的名和表达式与其它模型存在冲突。
 KE-010010202=重复的可计算列名,名为 “%s” 表达式为 “%s” 的可计算列,与模型 “%s” 中的可计算列名存在冲突。
 KE-010010203=重复的可计算列表达式,名为 “%s” 表达式为 “%s” 的可计算列,与模型 “%s” 中的可计算列表达式存在冲突。
 KE-010010204=名为 “%s” 表达式为 “%s” 的可计算列,与项目中名为 “%s” 表达式为 “%s” 的可计算列表达式存在冲突,将当前可计算列重命名为 “%s” 。
diff --git a/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties b/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties
index 5a5f9ea5fb..02ecccdb25 100644
--- a/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties
+++ b/src/core-common/src/main/resources/kylin_error_msg_conf_en.properties
@@ -96,7 +96,7 @@ KE-010031201=Can't get query result, as the rows of query result exceeds the max
 KE-010031202=SQL syntax or format is abnormal, please check and fix and try again.
 
 ## 100102XX computed column
-KE-010010201=The name and expression of the computed column defined in the model conflict with other models. Please modify the name to be consistent, or use another expression.
+KE-010010201=The name and expression of the computed column defined in the model conflict with other models.
 KE-010010202=Duplicate computed column name, defined computed column named "%s" with expression "%s", conflicts with a computed column name in model "%s".
 KE-010010203=Duplicate computed column expression, defined computed column named "%s" with expression "%s", conflicts with a computed column expression in model "%s".
 KE-010010204=Defined computed column named "%s" with expression "%s" is inconsistent with the name of the computed column named "%s" with the expression "%s" in the project. Renamed to "%s".
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/util/ComputedColumnUtil.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/util/ComputedColumnUtil.java
index b07c7cf5b0..ead8a3e8fd 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/util/ComputedColumnUtil.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/model/util/ComputedColumnUtil.java
@@ -626,10 +626,10 @@ public class ComputedColumnUtil {
             return exceptionList;
         }
 
-        public Pair<List<ComputedColumnDesc>, List<KylinException>> getAdjustedCCList(
+        public Pair<List<ComputedColumnDesc>, List<CCConflictDetail>> getAdjustedCCList(
                 List<ComputedColumnDesc> inputCCDescList) {
             List<ComputedColumnDesc> resultCCDescList = Lists.newArrayList();
-            List<KylinException> adjustExceptionList = Lists.newArrayList();
+            List<CCConflictDetail> adjustDetails = Lists.newArrayList();
 
             for (ComputedColumnDesc ccDesc : inputCCDescList) {
                 for (CCConflictDetail detail : this.sameExprDiffNameDetails) {
@@ -638,13 +638,13 @@ public class ComputedColumnUtil {
                     if (newCC.equals(ccDesc)) {
                         logger.info("adjust cc name {} to {}", newCC.getColumnName(), existingCC.getColumnName());
                         ccDesc.setColumnName(existingCC.getColumnName());
-                        adjustExceptionList.add(detail.getAdjustKylinException());
+                        adjustDetails.add(detail);
                         break;
                     }
                 }
                 resultCCDescList.add(ccDesc);
             }
-            return Pair.newPair(resultCCDescList, adjustExceptionList);
+            return Pair.newPair(resultCCDescList, adjustDetails);
         }
     }
 
diff --git a/src/modeling-service/src/main/java/org/apache/kylin/rest/service/ModelService.java b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/ModelService.java
index 7082c4890b..7fcf9f610c 100644
--- a/src/modeling-service/src/main/java/org/apache/kylin/rest/service/ModelService.java
+++ b/src/modeling-service/src/main/java/org/apache/kylin/rest/service/ModelService.java
@@ -70,6 +70,7 @@ import static org.apache.kylin.job.execution.JobTypeEnum.INC_BUILD;
 import static org.apache.kylin.job.execution.JobTypeEnum.INDEX_BUILD;
 import static org.apache.kylin.job.execution.JobTypeEnum.INDEX_MERGE;
 import static org.apache.kylin.job.execution.JobTypeEnum.INDEX_REFRESH;
+import static org.apache.kylin.metadata.model.FunctionDesc.PARAMETER_TYPE_COLUMN;
 
 import java.io.IOException;
 import java.math.BigDecimal;
@@ -190,6 +191,7 @@ import org.apache.kylin.metadata.model.UpdateImpact;
 import org.apache.kylin.metadata.model.VolatileRange;
 import org.apache.kylin.metadata.model.schema.AffectedModelContext;
 import org.apache.kylin.metadata.model.tool.CalciteParser;
+import org.apache.kylin.metadata.model.util.ComputedColumnUtil;
 import org.apache.kylin.metadata.model.util.MultiPartitionUtil;
 import org.apache.kylin.metadata.model.util.scd2.SCD2CondChecker;
 import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
@@ -4242,8 +4244,59 @@ public class ModelService extends AbstractModelService implements TableModelSupp
         List<ComputedColumnDesc> inputCCDescList = Lists.newArrayList(modelRequest.getComputedColumnDescs());
         // deal with conflicts
         val pair = ccConflictInfo.getAdjustedCCList(inputCCDescList);
+        val adjustExceptions = pair.getSecond().stream() //
+                .map(ComputedColumnUtil.CCConflictDetail::getAdjustKylinException).collect(Collectors.toList());
+        ModelRequest resultModelRequest = adjustModelRequestCCName(modelRequest, pair);
+
+        return Pair.newPair(resultModelRequest, handleOnConflictResponse(adjustExceptions));
+    }
+
+    public ModelRequest adjustModelRequestCCName(ModelRequest modelRequest,
+            Pair<List<ComputedColumnDesc>, List<ComputedColumnUtil.CCConflictDetail>> pair) {
+        val adjustDetails = pair.getSecond();
+        // adjust cc name
         modelRequest.setComputedColumnDescs(pair.getFirst());
-        return Pair.newPair(modelRequest, handleOnConflictResponse(pair.getSecond()));
+
+        val dimensions = modelRequest.getSimplifiedDimensions();
+        val measures = modelRequest.getSimplifiedMeasures();
+        for (val detail : adjustDetails) {
+            String newCCFullName = detail.getNewCC().getFullName();
+            String existingCCFullName = detail.getExistingCC().getFullName();
+
+            // adjust dimensions
+            dimensions.stream() //
+                    .filter(NDataModel.NamedColumn::isExist) //
+                    // column equals
+                    .filter(d -> StringUtils.equalsIgnoreCase(d.getAliasDotColumn(), newCCFullName))
+                    .forEach(d -> d.setAliasDotColumn(existingCCFullName));
+
+            // adjust measures
+            measures.forEach(m -> m.getParameterValue().stream() //
+                    // type = column
+                    .filter(pr -> StringUtils.equalsIgnoreCase(pr.getType(), PARAMETER_TYPE_COLUMN))
+                    // value equals
+                    .filter(pr -> StringUtils.equalsIgnoreCase(pr.getValue(), newCCFullName))
+                    .forEach(pr -> pr.setValue(existingCCFullName)));
+        }
+
+        // adjust filter condition
+        String filterCondition = modelRequest.getFilterCondition();
+        if (StringUtils.isEmpty(filterCondition)) {
+            return modelRequest;
+        }
+        for (val detail : adjustDetails) {
+            String newCCFullName = detail.getNewCC().getFullName();
+            String existingCCFullName = detail.getExistingCC().getFullName();
+            if (StringUtils.containsIgnoreCase(filterCondition, newCCFullName)) {
+                filterCondition = replaceAllIgnoreCase(filterCondition, newCCFullName, existingCCFullName);
+            }
+        }
+        modelRequest.setFilterCondition(filterCondition);
+        return modelRequest;
+    }
+
+    public String replaceAllIgnoreCase(String input, String regex, String replacement) {
+        return Pattern.compile(regex, Pattern.CASE_INSENSITIVE).matcher(input).replaceAll(replacement);
     }
 
     public ComputedColumnConflictResponse handleOnConflictResponse(List<KylinException> exceptionList) {
diff --git a/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java
index adad3571f6..e9dc0f22c3 100644
--- a/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java
+++ b/src/modeling-service/src/test/java/org/apache/kylin/rest/service/ModelServiceTest.java
@@ -5276,8 +5276,8 @@ public class ModelServiceTest extends SourceTestCase {
         testCheckCCConflictAllExprConflict(originRequest);
         testCheckCCConflictExprAndNameConflict(originRequest);
         testCheckCCConflictExprAndNameConflict2(originRequest);
-        testCheckCCConflictAdjust(originRequest);
         testNoCCConflict(originRequest);
+        testCheckCCConflictAdjust(originRequest);
     }
 
     private void testCheckCCConflictAllExprConflict(ModelRequest originRequest) {
@@ -5369,18 +5369,78 @@ public class ModelServiceTest extends SourceTestCase {
     }
 
     private void testCheckCCConflictAdjust(ModelRequest originRequest) {
-        val ccList = Lists.newArrayList(//
-                getComputedColumnDesc("CC_1", "CUSTOMER.C_NAME +'USA'", "DOUBLE"),
-                getComputedColumnDesc("CC_LTAX", "LINEORDER.LO_TAX + 1", "BIGINT"));
-        originRequest.setComputedColumnDescs(ccList);
-        originRequest.setComputedColumnNameAutoAdjust(true);
-        val pair = modelService.checkCCConflict(originRequest);
-        val details = pair.getSecond().getConflictDetails();
-        Assert.assertEquals(1, details.size());
-        Assert.assertEquals(COMPUTED_COLUMN_CONFLICT_ADJUST_INFO.getErrorCode().getCode(),
-                details.get(0).getDetailCode());
-        Assert.assertEquals(COMPUTED_COLUMN_CONFLICT_ADJUST_INFO.getMsg("CC_1", "CUSTOMER.C_NAME +'USA'", "CC_CNAME",
-                "CUSTOMER.C_NAME +'USA'", "CC_CNAME"), details.get(0).getDetailMsg());
+        {
+            val ccList = Lists.newArrayList(//
+                    getComputedColumnDesc("CC_1", "CUSTOMER.C_NAME +'USA'", "DOUBLE"),
+                    getComputedColumnDesc("CC_LTAX", "LINEORDER.LO_TAX + 1", "BIGINT"));
+            originRequest.setComputedColumnDescs(ccList);
+            originRequest.setComputedColumnNameAutoAdjust(true);
+            val pair = modelService.checkCCConflict(originRequest);
+            val details = pair.getSecond().getConflictDetails();
+            Assert.assertEquals(1, details.size());
+            Assert.assertEquals(COMPUTED_COLUMN_CONFLICT_ADJUST_INFO.getErrorCode().getCode(),
+                    details.get(0).getDetailCode());
+            Assert.assertEquals(COMPUTED_COLUMN_CONFLICT_ADJUST_INFO.getMsg("CC_1", "CUSTOMER.C_NAME +'USA'",
+                    "CC_CNAME", "CUSTOMER.C_NAME +'USA'", "CC_CNAME"), details.get(0).getDetailMsg());
+        }
+
+        {
+            val ccList = Lists.newArrayList(//
+                    getComputedColumnDesc("CC_1", "CUSTOMER.C_NAME +'USA'", "DOUBLE"),
+                    getComputedColumnDesc("CC_LTAX", "LINEORDER.LO_TAX + 1", "BIGINT"));
+            originRequest.setComputedColumnDescs(ccList);
+            originRequest.setComputedColumnNameAutoAdjust(true);
+            originRequest.setFilterCondition("LINEORDER.LO_TAX = 'Kylin' or LINEORDER.LO_TAX = 'Kylin2'");
+            val pair = modelService.checkCCConflict(originRequest);
+            val details = pair.getSecond().getConflictDetails();
+            Assert.assertEquals(1, details.size());
+            Assert.assertEquals(COMPUTED_COLUMN_CONFLICT_ADJUST_INFO.getErrorCode().getCode(),
+                    details.get(0).getDetailCode());
+            Assert.assertEquals(COMPUTED_COLUMN_CONFLICT_ADJUST_INFO.getMsg("CC_1", "CUSTOMER.C_NAME +'USA'",
+                    "CC_CNAME", "CUSTOMER.C_NAME +'USA'", "CC_CNAME"), details.get(0).getDetailMsg());
+            Assert.assertEquals("LINEORDER.LO_TAX = 'Kylin' or LINEORDER.LO_TAX = 'Kylin2'",
+                    pair.getFirst().getFilterCondition());
+        }
+
+        {
+            val dimList = Lists.newArrayList(getNamedColumn("CC_1", "LINEORDER.CC_1"));
+            val measureList = Lists.newArrayList(//
+                    getSimplifiedMeasure("cc_count", "COUNT", "column", "LINEORDER.CC_1"),
+                    getSimplifiedMeasure("COUNT_ALL", "COUNT", "constant", "1"));
+            val ccList = Lists.newArrayList(//
+                    getComputedColumnDesc("CC_1", "CUSTOMER.C_NAME +'USA'", "DOUBLE"),
+                    getComputedColumnDesc("CC_LTAX", "LINEORDER.LO_TAX + 1", "BIGINT"));
+            originRequest.setComputedColumnDescs(ccList);
+            originRequest.setComputedColumnNameAutoAdjust(true);
+            originRequest.setSimplifiedDimensions(dimList);
+            originRequest.setSimplifiedMeasures(measureList);
+            originRequest.setFilterCondition("LINEORDER.Cc_1 = 'Kylin' or LINEORDER.cC_1 = 'Kylin2'");
+            val pair = modelService.checkCCConflict(originRequest);
+            val details = pair.getSecond().getConflictDetails();
+            Assert.assertEquals(1, details.size());
+            Assert.assertEquals(COMPUTED_COLUMN_CONFLICT_ADJUST_INFO.getErrorCode().getCode(),
+                    details.get(0).getDetailCode());
+            Assert.assertEquals(COMPUTED_COLUMN_CONFLICT_ADJUST_INFO.getMsg("CC_1", "CUSTOMER.C_NAME +'USA'",
+                    "CC_CNAME", "CUSTOMER.C_NAME +'USA'", "CC_CNAME"), details.get(0).getDetailMsg());
+
+            ModelRequest modelRequest = pair.getFirst();
+            val simplifiedDimensions = modelRequest.getSimplifiedDimensions();
+            Assert.assertEquals(1, simplifiedDimensions.size());
+            Assert.assertEquals("LINEORDER.CC_CNAME", simplifiedDimensions.get(0).getAliasDotColumn());
+            Assert.assertEquals("CC_1", simplifiedDimensions.get(0).getName());
+
+            List<SimplifiedMeasure> simplifiedMeasures = modelRequest.getSimplifiedMeasures();
+            Assert.assertEquals(2, simplifiedMeasures.size());
+            simplifiedMeasures = simplifiedMeasures.stream().filter(measure -> measure.getName().equals("cc_count"))
+                    .collect(Collectors.toList());
+            Assert.assertEquals(1, simplifiedMeasures.size());
+            Assert.assertEquals("COUNT", simplifiedMeasures.get(0).getExpression());
+            Assert.assertEquals("column", simplifiedMeasures.get(0).getParameterValue().get(0).getType());
+            Assert.assertEquals("LINEORDER.CC_CNAME", simplifiedMeasures.get(0).getParameterValue().get(0).getValue());
+
+            Assert.assertEquals("LINEORDER.CC_CNAME = 'Kylin' or LINEORDER.CC_CNAME = 'Kylin2'",
+                    modelRequest.getFilterCondition());
+        }
     }
 
     private void testNoCCConflict(ModelRequest originRequest) {
@@ -5438,4 +5498,20 @@ public class ModelServiceTest extends SourceTestCase {
         return ccDesc;
     }
 
+    private NamedColumn getNamedColumn(String name, String aliasDotName) {
+        NamedColumn namedColumn = new NamedColumn();
+        namedColumn.setName(name);
+        namedColumn.setAliasDotColumn(aliasDotName);
+        namedColumn.setStatus(NDataModel.ColumnStatus.DIMENSION);
+        return namedColumn;
+    }
+
+    private SimplifiedMeasure getSimplifiedMeasure(String name, String expr, String type, String value) {
+        ParameterResponse parameterResponse = new ParameterResponse(type, value);
+        SimplifiedMeasure measure = new SimplifiedMeasure();
+        measure.setName(name);
+        measure.setExpression(expr);
+        measure.setParameterValue(Lists.newArrayList(parameterResponse));
+        return measure;
+    }
 }


[kylin] 16/22: KYLIN-5320 check and update dataflow lastQueryTime

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 294895e4eec2a99dd22635bf41e51fac7d509465
Author: Pengfei Zhan <pe...@kyligence.io>
AuthorDate: Tue Sep 27 21:29:12 2022 +0800

    KYLIN-5320 check and update dataflow lastQueryTime
---
 .../service/task/QueryHistoryTaskScheduler.java    | 19 +++++----
 .../kylin/rest/service/UserAclServiceTest.java     |  7 ++--
 .../task/QueryHistoryTaskSchedulerRunnerTest.java  | 34 +++++++--------
 .../task/QueryHistoryTaskSchedulerTest.java        | 48 ++++++++++++++++++----
 4 files changed, 71 insertions(+), 37 deletions(-)

diff --git a/src/common-service/src/main/java/org/apache/kylin/rest/service/task/QueryHistoryTaskScheduler.java b/src/common-service/src/main/java/org/apache/kylin/rest/service/task/QueryHistoryTaskScheduler.java
index 21764634a9..1d3664e4ec 100644
--- a/src/common-service/src/main/java/org/apache/kylin/rest/service/task/QueryHistoryTaskScheduler.java
+++ b/src/common-service/src/main/java/org/apache/kylin/rest/service/task/QueryHistoryTaskScheduler.java
@@ -36,6 +36,13 @@ import org.apache.kylin.common.util.NamedThreadFactory;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.metadata.cube.model.NDataflowManager;
 import org.apache.kylin.metadata.cube.optimization.FrequencyMap;
+import org.apache.kylin.metadata.epoch.EpochManager;
+import org.apache.kylin.metadata.favorite.AbstractAsyncTask;
+import org.apache.kylin.metadata.favorite.AccelerateRuleUtil;
+import org.apache.kylin.metadata.favorite.AsyncAccelerationTask;
+import org.apache.kylin.metadata.favorite.AsyncTaskManager;
+import org.apache.kylin.metadata.favorite.QueryHistoryIdOffset;
+import org.apache.kylin.metadata.favorite.QueryHistoryIdOffsetManager;
 import org.apache.kylin.metadata.model.NTableMetadataManager;
 import org.apache.kylin.metadata.model.TableExtDesc;
 import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
@@ -53,13 +60,6 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
-import org.apache.kylin.metadata.epoch.EpochManager;
-import org.apache.kylin.metadata.favorite.AbstractAsyncTask;
-import org.apache.kylin.metadata.favorite.AccelerateRuleUtil;
-import org.apache.kylin.metadata.favorite.AsyncAccelerationTask;
-import org.apache.kylin.metadata.favorite.AsyncTaskManager;
-import org.apache.kylin.metadata.favorite.QueryHistoryIdOffset;
-import org.apache.kylin.metadata.favorite.QueryHistoryIdOffsetManager;
 import lombok.Data;
 import lombok.Getter;
 import lombok.val;
@@ -254,7 +254,7 @@ public class QueryHistoryTaskScheduler {
                 }
                 val snapshotsInRealization = queryHistory.getQueryHistoryInfo().getQuerySnapshots();
                 for (val snapshots : snapshotsInRealization) {
-                    snapshots.stream().forEach(tableIdentify -> {
+                    snapshots.forEach(tableIdentify -> {
                         results.merge(tableManager.getOrCreateTableExt(tableIdentify), 1, Integer::sum);
                     });
                 }
@@ -305,6 +305,9 @@ public class QueryHistoryTaskScheduler {
             for (Map.Entry<String, Long> entry : modelsLastQueryTime.entrySet()) {
                 String dataflowId = entry.getKey();
                 Long lastQueryTime = entry.getValue();
+                if (dfManager.getDataflow(dataflowId) == null) {
+                    continue;
+                }
                 dfManager.updateDataflow(dataflowId, copyForWrite -> copyForWrite.setLastQueryTime(lastQueryTime));
             }
         }
diff --git a/src/common-service/src/test/java/org/apache/kylin/rest/service/UserAclServiceTest.java b/src/common-service/src/test/java/org/apache/kylin/rest/service/UserAclServiceTest.java
index 9ac59fb40c..35c7e70657 100644
--- a/src/common-service/src/test/java/org/apache/kylin/rest/service/UserAclServiceTest.java
+++ b/src/common-service/src/test/java/org/apache/kylin/rest/service/UserAclServiceTest.java
@@ -29,6 +29,8 @@ import java.util.Locale;
 import org.apache.commons.collections4.CollectionUtils;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.msg.MsgPicker;
+import org.apache.kylin.metadata.epoch.EpochManager;
+import org.apache.kylin.metadata.user.ManagedUser;
 import org.apache.kylin.rest.constant.Constant;
 import org.apache.kylin.rest.request.GlobalAccessRequest;
 import org.apache.kylin.rest.request.GlobalBatchAccessRequest;
@@ -38,6 +40,7 @@ import org.apache.kylin.rest.util.AclEvaluate;
 import org.apache.kylin.rest.util.SpringContext;
 import org.junit.Assert;
 import org.junit.Before;
+import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -52,9 +55,6 @@ import org.springframework.security.core.context.SecurityContextHolder;
 import org.springframework.security.core.userdetails.UserDetails;
 import org.springframework.test.util.ReflectionTestUtils;
 
-import org.apache.kylin.metadata.epoch.EpochManager;
-import org.apache.kylin.metadata.user.ManagedUser;
-
 public class UserAclServiceTest extends ServiceTestBase {
 
     @Mock
@@ -120,6 +120,7 @@ public class UserAclServiceTest extends ServiceTestBase {
         userAclService.grantUserAclPermission("admin", "DATA_QUERY");
     }
 
+    @Ignore("very unstable")
     @Test
     public void testGetAllUsersHasGlobalPermission() {
         KylinUserService kylinUserService = new KylinUserService() {
diff --git a/src/query-service/src/test/java/org/apache/kylin/rest/service/task/QueryHistoryTaskSchedulerRunnerTest.java b/src/common-service/src/test/java/org/apache/kylin/rest/service/task/QueryHistoryTaskSchedulerRunnerTest.java
similarity index 82%
rename from src/query-service/src/test/java/org/apache/kylin/rest/service/task/QueryHistoryTaskSchedulerRunnerTest.java
rename to src/common-service/src/test/java/org/apache/kylin/rest/service/task/QueryHistoryTaskSchedulerRunnerTest.java
index b6fa055350..64a6aa44b9 100644
--- a/src/query-service/src/test/java/org/apache/kylin/rest/service/task/QueryHistoryTaskSchedulerRunnerTest.java
+++ b/src/common-service/src/test/java/org/apache/kylin/rest/service/task/QueryHistoryTaskSchedulerRunnerTest.java
@@ -18,13 +18,15 @@
 
 package org.apache.kylin.rest.service.task;
 
+import static org.awaitility.Awaitility.await;
+
 import java.util.concurrent.Executors;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
 import org.apache.kylin.common.util.NamedThreadFactory;
 import org.apache.kylin.rest.util.SpringContext;
-import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -69,29 +71,20 @@ public class QueryHistoryTaskSchedulerRunnerTest extends NLocalFileMetadataTestC
         val queryHistoryAccelerateRunnerMock = qhAccelerateScheduler.new QueryHistoryAccelerateRunner(false) {
             @Override
             public void work() {
-                try {
-                    TimeUnit.SECONDS.sleep(mockSleepTimeSecs);
+                await().pollDelay(mockSleepTimeSecs, TimeUnit.SECONDS).until(() -> {
                     internalExecute.add((System.currentTimeMillis() - startTime) / 1000);
 
                     //mock exception
                     throw new RuntimeException("test for exception");
-                } catch (InterruptedException e) {
-                    log.error("queryHistoryAccelerateRunnerMock is interrupted", e);
-                }
+                });
             }
-
         };
 
         val queryHistoryMetaUpdateRunnerMock = qhAccelerateScheduler.new QueryHistoryMetaUpdateRunner() {
             @Override
             public void work() {
-                try {
-                    TimeUnit.SECONDS.sleep(mockSleepTimeSecs);
-                } catch (InterruptedException e) {
-                    log.error("queryHistoryMetaUpdateRunner is interrupted", e);
-                }
+                await().pollDelay(mockSleepTimeSecs, TimeUnit.SECONDS);
             }
-
         };
 
         ReflectionTestUtils.setField(qhAccelerateScheduler, "taskScheduler", Executors.newScheduledThreadPool(1,
@@ -101,20 +94,23 @@ public class QueryHistoryTaskSchedulerRunnerTest extends NLocalFileMetadataTestC
             val schedulerService = (ScheduledExecutorService) ReflectionTestUtils.getField(qhAccelerateScheduler,
                     "taskScheduler");
 
+            Assert.assertNotNull(schedulerService);
             schedulerService.scheduleWithFixedDelay(queryHistoryAccelerateRunnerMock, 0, mockSchedulerDelay,
                     TimeUnit.SECONDS);
             schedulerService.scheduleWithFixedDelay(queryHistoryMetaUpdateRunnerMock, 0, mockSchedulerDelay,
                     TimeUnit.SECONDS);
 
             val schedulerNum = 10;
+            await().pollDelay(schedulerNum, TimeUnit.SECONDS).until(() -> {
+                Assert.assertEquals(internalExecute.size(), schedulerNum / (mockSchedulerDelay + mockSleepTimeSecs));
 
-            TimeUnit.SECONDS.sleep(schedulerNum);
-
-            Assert.assertEquals(internalExecute.size(), schedulerNum / (mockSchedulerDelay + mockSleepTimeSecs));
+                for (int i = 0; i < internalExecute.size(); i++) {
+                    Assert.assertEquals(internalExecute.get(i), i * mockSchedulerDelay + mockSleepTimeSecs * (i + 1),
+                            1);
+                }
+                return null;
+            });
 
-            for (int i = 0; i < internalExecute.size(); i++) {
-                Assert.assertEquals(internalExecute.get(i), i * mockSchedulerDelay + mockSleepTimeSecs * (i + 1), 1);
-            }
         } catch (Exception e) {
             log.error("test qhAccelerateScheduler error :", e);
         } finally {
diff --git a/src/query-service/src/test/java/org/apache/kylin/rest/service/task/QueryHistoryTaskSchedulerTest.java b/src/common-service/src/test/java/org/apache/kylin/rest/service/task/QueryHistoryTaskSchedulerTest.java
similarity index 93%
rename from src/query-service/src/test/java/org/apache/kylin/rest/service/task/QueryHistoryTaskSchedulerTest.java
rename to src/common-service/src/test/java/org/apache/kylin/rest/service/task/QueryHistoryTaskSchedulerTest.java
index c026e85c34..dc4dba454e 100644
--- a/src/query-service/src/test/java/org/apache/kylin/rest/service/task/QueryHistoryTaskSchedulerTest.java
+++ b/src/common-service/src/test/java/org/apache/kylin/rest/service/task/QueryHistoryTaskSchedulerTest.java
@@ -18,17 +18,15 @@
 
 package org.apache.kylin.rest.service.task;
 
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
 import java.util.List;
 import java.util.Map;
 
-
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.common.util.TimeUtil;
-import org.apache.kylin.metadata.model.TableExtDesc;
-import org.apache.kylin.rest.service.IUserGroupService;
-import org.apache.kylin.rest.util.SpringContext;
 import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
+import org.apache.kylin.common.util.TimeUtil;
 import org.apache.kylin.junit.TimeZoneTestRunner;
 import org.apache.kylin.metadata.cube.model.NDataflow;
 import org.apache.kylin.metadata.cube.model.NDataflowManager;
@@ -38,11 +36,15 @@ import org.apache.kylin.metadata.favorite.AsyncTaskManager;
 import org.apache.kylin.metadata.favorite.QueryHistoryIdOffset;
 import org.apache.kylin.metadata.favorite.QueryHistoryIdOffsetManager;
 import org.apache.kylin.metadata.model.NTableMetadataManager;
+import org.apache.kylin.metadata.model.TableExtDesc;
 import org.apache.kylin.metadata.query.QueryHistory;
 import org.apache.kylin.metadata.query.QueryHistoryInfo;
 import org.apache.kylin.metadata.query.QueryMetrics;
 import org.apache.kylin.metadata.query.RDBMSQueryHistoryDAO;
+import org.apache.kylin.rest.service.IUserGroupService;
 import org.apache.kylin.rest.service.NUserGroupService;
+import org.apache.kylin.rest.service.task.QueryHistoryTaskScheduler.QueryHistoryMetaUpdateRunner;
+import org.apache.kylin.rest.util.SpringContext;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -60,9 +62,12 @@ import org.springframework.security.acls.domain.PermissionFactory;
 import org.springframework.security.acls.model.PermissionGrantingStrategy;
 import org.springframework.test.util.ReflectionTestUtils;
 
+import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 
+import lombok.val;
+
 @RunWith(PowerMockRunner.class)
 @PowerMockRunnerDelegate(TimeZoneTestRunner.class)
 @PrepareForTest({ SpringContext.class, UserGroupInformation.class })
@@ -73,10 +78,11 @@ public class QueryHistoryTaskSchedulerTest extends NLocalFileMetadataTestCase {
     private static final String LAYOUT1 = "20000000001";
     private static final String LAYOUT2 = "1000001";
     private static final Long QUERY_TIME = 1586760398338L;
+
+    private QueryHistoryTaskScheduler qhAccelerateScheduler;
+
     @Mock
     private final IUserGroupService userGroupService = Mockito.spy(NUserGroupService.class);
-    int startOffset = 0;
-    private QueryHistoryTaskScheduler qhAccelerateScheduler;
 
     @Before
     public void setUp() throws Exception {
@@ -214,6 +220,32 @@ public class QueryHistoryTaskSchedulerTest extends NLocalFileMetadataTestCase {
         Assert.assertEquals(8, idOffsetManager.get().getStatMetaUpdateOffset());
     }
 
+    @Test
+    public void testUpdateLastQueryTime()
+            throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
+
+        // before update dataflow usage, layout usage and last query time
+        NDataflow dataflow = NDataflowManager.getInstance(KylinConfig.getInstanceFromEnv(), PROJECT)
+                .getDataflow(DATAFLOW);
+        Assert.assertEquals(3, dataflow.getQueryHitCount());
+        Assert.assertNull(dataflow.getLayoutHitCount().get(20000000001L));
+        Assert.assertNull(dataflow.getLayoutHitCount().get(1000001L));
+        Assert.assertEquals(0L, dataflow.getLastQueryTime());
+
+        val queryHistoryAccelerateRunner = qhAccelerateScheduler.new QueryHistoryMetaUpdateRunner();
+        Class<? extends QueryHistoryMetaUpdateRunner> clazz = queryHistoryAccelerateRunner.getClass();
+        Method method = clazz.getDeclaredMethod("updateLastQueryTime", Map.class, String.class);
+        method.setAccessible(true);
+        method.invoke(queryHistoryAccelerateRunner, ImmutableMap.of("aaa", 100L), PROJECT);
+        method.invoke(queryHistoryAccelerateRunner, ImmutableMap.of(DATAFLOW, 100L), PROJECT);
+        method.setAccessible(false);
+
+        NDataflow dataflow1 = NDataflowManager.getInstance(KylinConfig.getInstanceFromEnv(), PROJECT)
+                .getDataflow(DATAFLOW);
+        long lastQueryTime = dataflow1.getLastQueryTime();
+        Assert.assertEquals(100L, lastQueryTime);
+    }
+
     @Test
     public void testUpdateMetadataWithStringRealization() {
         qhAccelerateScheduler.queryHistoryDAO = Mockito.mock(RDBMSQueryHistoryDAO.class);
@@ -485,4 +517,6 @@ public class QueryHistoryTaskSchedulerTest extends NLocalFileMetadataTestCase {
         return histories;
     }
 
+    int startOffset = 0;
+
 }


[kylin] 21/22: KYLIN-5325 Fix the number type conversion problem caused by writing JSON files

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit c35921431951f41bb5c4a645835d0fd10747c786
Author: Guoliang Sun <gu...@kyligence.io>
AuthorDate: Tue Oct 18 11:02:37 2022 +0800

    KYLIN-5325 Fix the number type conversion problem caused by writing JSON files
---
 .../src/main/java/org/apache/kylin/metrics/HdfsCapacityMetrics.java   | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metrics/HdfsCapacityMetrics.java b/src/core-metadata/src/main/java/org/apache/kylin/metrics/HdfsCapacityMetrics.java
index 9b114779ea..236fb5c556 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metrics/HdfsCapacityMetrics.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metrics/HdfsCapacityMetrics.java
@@ -155,7 +155,9 @@ public class HdfsCapacityMetrics {
      */
     public static Long getHdfsCapacityByProject(String project) {
         if (hdfsMetricsPeriodicCalculationEnabled) {
-            return workingDirCapacity.getOrDefault(project, 0L);
+            // Writing numbers in JSON may be read as integer
+            Object orDefault = workingDirCapacity.getOrDefault(project, 0L);
+            return Long.parseLong(orDefault.toString());
         }
         return -1L;
     }


[kylin] 02/22: KYLIN-5311 Improve performance of getSubstitutor

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit c583f433242f482168db596abd1183bac23f4d72
Author: Junqing Cai <ca...@163.com>
AuthorDate: Thu Oct 13 15:08:03 2022 +0800

    KYLIN-5311 Improve performance of getSubstitutor
---
 .../kylin/common/ICachedExternalConfigLoader.java  |   2 +-
 .../org/apache/kylin/common/KylinConfigBase.java   |   9 +-
 .../kylin/common/KylinExternalConfigLoader.java    |   9 +-
 .../apache/kylin/common/PropertiesDelegate.java    |  34 ++---
 .../apache/kylin/common/KylinConfigBaseTest.java   |   8 +
 .../kylin/common/PropertiesDelegateTest.java       | 116 ++++++++++++++
 .../kylin/common/util/CompositeMapViewTest.java    | 167 +++++++++++++++++++++
 7 files changed, 316 insertions(+), 29 deletions(-)

diff --git a/src/core-common/src/main/java/org/apache/kylin/common/ICachedExternalConfigLoader.java b/src/core-common/src/main/java/org/apache/kylin/common/ICachedExternalConfigLoader.java
index 9af569f8d1..6d805eb85c 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/ICachedExternalConfigLoader.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/ICachedExternalConfigLoader.java
@@ -23,5 +23,5 @@ import com.google.common.collect.ImmutableMap;
 import io.kyligence.config.core.loader.IExternalConfigLoader;
 
 public interface ICachedExternalConfigLoader extends IExternalConfigLoader {
-    ImmutableMap getPropertyEntries();
+    ImmutableMap<Object, Object> getPropertyEntries();
 }
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 8498eba0e6..d0b4fda104 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -179,8 +179,8 @@ public abstract class KylinConfigBase implements Serializable {
     /**
      * only reload properties
      */
-    volatile PropertiesDelegate properties;
-    volatile StrSubstitutor substitutor;
+    final PropertiesDelegate properties;
+    final transient StrSubstitutor substitutor;
 
     protected KylinConfigBase(IExternalConfigLoader configLoader) {
         this(new Properties(), configLoader);
@@ -190,6 +190,7 @@ public abstract class KylinConfigBase implements Serializable {
         this(props, false, configLoader);
     }
 
+    @SuppressWarnings("rawtypes")
     protected KylinConfigBase(Properties props, boolean force, IExternalConfigLoader configLoader) {
         if (props instanceof PropertiesDelegate) {
             this.properties = (PropertiesDelegate) props;
@@ -220,12 +221,12 @@ public abstract class KylinConfigBase implements Serializable {
      * @return
      */
     protected Properties getProperties(Collection<String> propertyKeys) {
-        final StrSubstitutor substitutor = getSubstitutor();
+        val subStitutorTmp = getSubstitutor();
 
         Properties result = new Properties();
         for (Entry<Object, Object> entry : this.properties.entrySet()) {
             if (propertyKeys == null || propertyKeys.contains(entry.getKey())) {
-                result.put(entry.getKey(), substitutor.replace((String) entry.getValue()));
+                result.put(entry.getKey(), subStitutorTmp.replace((String) entry.getValue()));
             }
         }
 
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/KylinExternalConfigLoader.java b/src/core-common/src/main/java/org/apache/kylin/common/KylinExternalConfigLoader.java
index d4fe301060..206539dd7a 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/KylinExternalConfigLoader.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/KylinExternalConfigLoader.java
@@ -33,11 +33,11 @@ import java.net.URL;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Properties;
 
 import javax.annotation.Nonnull;
 
-import com.google.common.collect.Maps;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.util.OrderedProperties;
 import org.slf4j.Logger;
@@ -45,6 +45,7 @@ import org.slf4j.LoggerFactory;
 
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
 
 public class KylinExternalConfigLoader implements ICachedExternalConfigLoader {
 
@@ -153,7 +154,7 @@ public class KylinExternalConfigLoader implements ICachedExternalConfigLoader {
     @Override
     public String getConfig() {
         StringWriter writer = new StringWriter();
-        for (Map.Entry<String, String> entry: properties.entrySet()) {
+        for (Map.Entry<String, String> entry : properties.entrySet()) {
             writer.append(entry.getKey() + "=" + entry.getValue()).append("\n");
         }
         return writer.toString();
@@ -161,7 +162,7 @@ public class KylinExternalConfigLoader implements ICachedExternalConfigLoader {
 
     @Override
     public String getProperty(String key) {
-        return properties.get(key);
+        return Objects.toString(properties.get(key), null);
     }
 
     /**
@@ -176,7 +177,7 @@ public class KylinExternalConfigLoader implements ICachedExternalConfigLoader {
     }
 
     @Override
-    public ImmutableMap getPropertyEntries() {
+    public ImmutableMap<Object, Object> getPropertyEntries() {
         return propertyEntries;
     }
 }
diff --git a/src/core-common/src/main/java/org/apache/kylin/common/PropertiesDelegate.java b/src/core-common/src/main/java/org/apache/kylin/common/PropertiesDelegate.java
index 24f28a79cc..54dbf6d096 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/PropertiesDelegate.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/PropertiesDelegate.java
@@ -29,12 +29,13 @@ import java.util.function.BiConsumer;
 import java.util.function.BiFunction;
 import java.util.function.Function;
 
-import io.kyligence.config.core.loader.IExternalConfigLoader;
+import javax.validation.constraints.NotNull;
+
 import org.apache.kylin.common.util.CompositeMapView;
 
 import com.google.common.collect.Maps;
 
-import io.kyligence.config.external.loader.NacosExternalConfigLoader;
+import io.kyligence.config.core.loader.IExternalConfigLoader;
 import lombok.EqualsAndHashCode;
 
 /**
@@ -42,28 +43,28 @@ import lombok.EqualsAndHashCode;
  * A few functions of hashtable are disabled.
  * In the future, we should replace the java.util.Properties
  */
-@EqualsAndHashCode
+@EqualsAndHashCode(callSuper = false)
+@SuppressWarnings("sync-override")
 public class PropertiesDelegate extends Properties {
 
     @EqualsAndHashCode.Include
-    private final ConcurrentMap<Object, Object> properties = Maps.newConcurrentMap();
+    private final transient ConcurrentMap<Object, Object> properties = Maps.newConcurrentMap();
 
     @EqualsAndHashCode.Include
     private final transient IExternalConfigLoader configLoader;
 
-    private final Map delegation;
+    private final transient Map<Object, Object> delegation;
 
     public PropertiesDelegate(Properties properties, IExternalConfigLoader configLoader) {
         this.properties.putAll(properties);
         this.configLoader = configLoader;
         if (configLoader == null) {
             this.delegation = this.properties;
-        } else if (configLoader instanceof KylinExternalConfigLoader) {
-            this.delegation = new CompositeMapView(((ICachedExternalConfigLoader)this.configLoader).getPropertyEntries(), this.properties);
-        } else if (configLoader instanceof NacosExternalConfigLoader) {
-            this.delegation = new CompositeMapView((this.configLoader).getProperties(), this.properties);
+        } else if (configLoader instanceof ICachedExternalConfigLoader) {
+            this.delegation = new CompositeMapView<>(
+                    ((ICachedExternalConfigLoader) this.configLoader).getPropertyEntries(), this.properties);
         } else {
-            this.delegation = new CompositeMapView((this.configLoader).getProperties(), this.properties);
+            this.delegation = new CompositeMapView<>((this.configLoader).getProperties(), this.properties);
         }
     }
 
@@ -110,7 +111,6 @@ public class PropertiesDelegate extends Properties {
         return delegation.size();
     }
 
-
     @Override
     public boolean isEmpty() {
         return delegation.isEmpty();
@@ -142,7 +142,7 @@ public class PropertiesDelegate extends Properties {
     }
 
     @Override
-    public void putAll(Map<?, ?> t) {
+    public void putAll(@NotNull Map<?, ?> t) {
         properties.putAll(t);
     }
 
@@ -151,11 +151,6 @@ public class PropertiesDelegate extends Properties {
         properties.clear();
     }
 
-    @Override
-    public Object clone() {
-        throw new UnsupportedOperationException();
-    }
-
     @Override
     public String toString() {
         throw new UnsupportedOperationException();
@@ -213,13 +208,12 @@ public class PropertiesDelegate extends Properties {
     }
 
     @Override
-    public synchronized Object compute(Object key,
-            BiFunction<? super Object, ? super Object, ? extends Object> remappingFunction) {
+    public Object compute(Object key, BiFunction<? super Object, ? super Object, ? extends Object> remappingFunction) {
         throw new UnsupportedOperationException();
     }
 
     @Override
-    public synchronized Object merge(Object key, Object value,
+    public Object merge(Object key, Object value,
             BiFunction<? super Object, ? super Object, ? extends Object> remappingFunction) {
         throw new UnsupportedOperationException();
     }
diff --git a/src/core-common/src/test/java/org/apache/kylin/common/KylinConfigBaseTest.java b/src/core-common/src/test/java/org/apache/kylin/common/KylinConfigBaseTest.java
index 903fb8f6a0..3630c7bbd1 100644
--- a/src/core-common/src/test/java/org/apache/kylin/common/KylinConfigBaseTest.java
+++ b/src/core-common/src/test/java/org/apache/kylin/common/KylinConfigBaseTest.java
@@ -1371,6 +1371,14 @@ class KylinConfigBaseTest {
         config.setProperty("kylin.server.leader-race.heart-beat-timeout-rate", "1");
         Assertions.assertEquals(1.0, config.getEpochRenewTimeoutRate());
     }
+
+    @Test
+    void testGetSubstitutor() {
+        KylinConfig config = KylinConfig.getInstanceFromEnv();
+        val sub1 = config.getSubstitutor();
+        val sub2 = config.getSubstitutor();
+        Assertions.assertSame(sub1, sub2);
+    }
 }
 
 class EnvironmentUpdateUtils {
diff --git a/src/core-common/src/test/java/org/apache/kylin/common/PropertiesDelegateTest.java b/src/core-common/src/test/java/org/apache/kylin/common/PropertiesDelegateTest.java
index bea2ab9321..f515659fd2 100644
--- a/src/core-common/src/test/java/org/apache/kylin/common/PropertiesDelegateTest.java
+++ b/src/core-common/src/test/java/org/apache/kylin/common/PropertiesDelegateTest.java
@@ -22,6 +22,7 @@ import java.util.ArrayList;
 import java.util.Enumeration;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Properties;
 import java.util.Set;
 import java.util.stream.Collectors;
@@ -30,6 +31,11 @@ import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.BeforeEach;
 import org.junit.jupiter.api.Test;
 
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Maps;
+
+import lombok.val;
+
 class PropertiesDelegateTest {
 
     private PropertiesDelegate delegate;
@@ -83,6 +89,19 @@ class PropertiesDelegateTest {
         Assertions.assertEquals("update_v2", delegate.getProperty("key_in_external"));
     }
 
+    @Test
+    void testPutAll() {
+        val originSize = delegate.size();
+        val map1 = Maps.<String, String> newHashMap();
+        map1.put("k1", "v1");
+        map1.put("k2", "v2");
+        map1.put("k3", "v3");
+        delegate.putAll(map1);
+        Assertions.assertEquals(originSize + map1.size(), delegate.size());
+
+        Assertions.assertEquals("v1", delegate.getProperty("k1"));
+    }
+
     @Test
     void testSetProperty() {
         delegate.setProperty("key_in_prop", "update_v0");
@@ -95,6 +114,17 @@ class PropertiesDelegateTest {
         Assertions.assertEquals("update_v2", delegate.getProperty("key_in_external"));
     }
 
+    @Test
+    void testSize() {
+        Assertions.assertEquals(3, delegate.size());
+    }
+
+    @Test
+    void testEntrySet() {
+        Set<Map.Entry<Object, Object>> entries = delegate.entrySet();
+        Assertions.assertEquals(3, entries.size());
+    }
+
     @Test
     void testKeys() {
         List<String> keys = new ArrayList<>();
@@ -135,4 +165,90 @@ class PropertiesDelegateTest {
         sets.add(properties);
         Assertions.assertEquals(4, sets.size());
     }
+
+    @Test
+    void testContainsKey() {
+        Assertions.assertTrue(delegate.containsKey("key_in_prop"));
+        Assertions.assertTrue(delegate.containsKey("key_override_external"));
+        Assertions.assertTrue(delegate.containsKey("key_in_external"));
+
+        Assertions.assertFalse(delegate.containsKey("not_key"));
+    }
+
+    @Test
+    void testContainsValue() {
+        Assertions.assertTrue(delegate.containsValue("v0"));
+        Assertions.assertTrue(delegate.containsValue("v11"));
+        Assertions.assertTrue(delegate.containsValue("v2"));
+
+        Assertions.assertFalse(delegate.containsValue("not_value"));
+    }
+
+    @Test
+    void testIsEmpty() {
+        Assertions.assertFalse(delegate.isEmpty());
+
+        val emptyDelegate = new PropertiesDelegate(new Properties(), null);
+        Assertions.assertTrue(emptyDelegate.isEmpty());
+    }
+
+    @Test
+    void testClear() {
+        Assertions.assertEquals(3, delegate.size());
+        delegate.clear();
+        Assertions.assertEquals(2, delegate.size());
+    }
+
+    @Test
+    void testConstruct() {
+        Properties properties = new Properties();
+        properties.put("key_in_prop", "v0");
+        {
+            val p = new PropertiesDelegate(properties, null);
+            Assertions.assertEquals(1, p.size());
+        }
+
+        {
+            TestExternalConfigLoader testExternalConfigLoader = new TestExternalConfigLoader(properties);
+            val p = new PropertiesDelegate(new Properties(), testExternalConfigLoader);
+            Assertions.assertEquals(1, p.size());
+        }
+
+        {
+            ICachedExternalConfigLoader iCachedExternalConfigLoader = new ICachedExternalConfigLoader() {
+                @Override
+                public ImmutableMap<Object, Object> getPropertyEntries() {
+                    return ImmutableMap.of("key_in_prop", "v0");
+                }
+
+                @Override
+                public String getConfig() {
+                    return null;
+                }
+
+                @Override
+                public String getProperty(String s) {
+                    return null;
+                }
+            };
+            val p = new PropertiesDelegate(new Properties(), iCachedExternalConfigLoader);
+            Assertions.assertEquals(1, p.size());
+        }
+
+    }
+
+    @Test
+    void testNotSupport() {
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> delegate.remove("a", "b"));
+        Assertions.assertThrows(UnsupportedOperationException.class, delegate::toString);
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> delegate.forEach((k, v) -> {
+        }));
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> delegate.replace("a", "b"));
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> delegate.replace("a", "b", "b2"));
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> delegate.computeIfAbsent("a", (k) -> ""));
+        Assertions.assertThrows(UnsupportedOperationException.class,
+                () -> delegate.computeIfPresent("a", (k, v) -> ""));
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> delegate.compute("a", (k, v) -> ""));
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> delegate.merge("a", "b", (k, v) -> ""));
+    }
 }
diff --git a/src/core-common/src/test/java/org/apache/kylin/common/util/CompositeMapViewTest.java b/src/core-common/src/test/java/org/apache/kylin/common/util/CompositeMapViewTest.java
new file mode 100644
index 0000000000..f3623fe996
--- /dev/null
+++ b/src/core-common/src/test/java/org/apache/kylin/common/util/CompositeMapViewTest.java
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.kylin.common.util;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
+
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+
+import lombok.val;
+
+class CompositeMapViewTest {
+
+    private CompositeMapView<String, String> compositeMapView;
+
+    @BeforeEach
+    void setup() {
+        compositeMapView = prepareData();
+    }
+
+    private CompositeMapView<String, String> prepareData() {
+        val map1 = Maps.<String, String> newHashMap();
+        map1.put("k1", "v1");
+        map1.put("k2", "v2");
+        map1.put("k3", "v3");
+
+        val map2 = Maps.<String, String> newHashMap();
+        map2.put("k1", "vv1");
+        map2.put("kk2", "vv2");
+        map2.put("kk3", "v3");
+        return new CompositeMapView<>(map1, map2);
+    }
+
+    @Test
+    void testSize() {
+        Assertions.assertEquals(5, compositeMapView.size());
+    }
+
+    @Test
+    void testIsEmpty() {
+        {
+            Assertions.assertFalse(compositeMapView.isEmpty());
+        }
+
+        {
+            CompositeMapView<String, String> compositeMapView = new CompositeMapView<>(Maps.newHashMap(),
+                    Maps.newHashMap());
+            Assertions.assertTrue(compositeMapView.isEmpty());
+        }
+    }
+
+    @Test
+    void testIsEmpty_Null() {
+        val emptyMap = Collections.emptyMap();
+        Assertions.assertThrows(NullPointerException.class, () -> new CompositeMapView<>(null, null));
+
+        Assertions.assertThrows(NullPointerException.class, () -> new CompositeMapView<>(emptyMap, null));
+
+        Assertions.assertThrows(NullPointerException.class, () -> new CompositeMapView<>(null, emptyMap));
+    }
+
+    @Test
+    void testContainsKey() {
+
+        Assertions.assertTrue(compositeMapView.containsKey("k1"));
+        Assertions.assertTrue(compositeMapView.containsKey("kk3"));
+        Assertions.assertTrue(compositeMapView.containsKey("k2"));
+
+        Assertions.assertFalse(compositeMapView.containsKey("noKey"));
+    }
+
+    @Test
+    void testContainsValue() {
+
+        Assertions.assertTrue(compositeMapView.containsValue("v1"));
+        Assertions.assertTrue(compositeMapView.containsValue("vv2"));
+        Assertions.assertTrue(compositeMapView.containsValue("v2"));
+
+        Assertions.assertFalse(compositeMapView.containsValue("noValue"));
+    }
+
+    @Test
+    void testGetKey() {
+
+        //both
+        Assertions.assertEquals("vv1", compositeMapView.get("k1"));
+        //left only
+        Assertions.assertEquals("v2", compositeMapView.get("k2"));
+        //right only
+        Assertions.assertEquals("v3", compositeMapView.get("kk3"));
+
+        Assertions.assertNull(compositeMapView.get("notKey"));
+    }
+
+    @Test
+    void testKeySet() {
+
+        val expectedKeySet = Sets.newHashSet(Arrays.asList("k1", "k2", "k3", "kk2", "kk3"));
+        Assertions.assertEquals(expectedKeySet, compositeMapView.keySet());
+    }
+
+    @Test
+    void testValues() {
+
+        val expectedValueSet = Sets.newHashSet(Arrays.asList("v1", "v2", "v3", "vv1", "vv2", "v3"));
+        Assertions.assertTrue(expectedValueSet.containsAll(compositeMapView.values()));
+        Assertions.assertEquals(expectedValueSet.size(), compositeMapView.values().size());
+    }
+
+    @Test
+    void testEntrySet() {
+
+        val entryList = compositeMapView.entrySet().stream().sorted(Map.Entry.comparingByKey(String::compareTo))
+                .map(e -> e.getKey() + "," + e.getValue()).collect(Collectors.toList());
+        val expectedEntryList = Arrays.asList("k1,vv1", "k2,v2", "k3,v3", "kk2,vv2", "kk3,v3");
+
+        Assertions.assertEquals(expectedEntryList, entryList);
+    }
+
+    @Test
+    void testNotSupport() {
+
+        val emptyMap = Collections.emptyMap();
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> compositeMapView.put("a", "b"));
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> compositeMapView.putIfAbsent("a", "b"));
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> compositeMapView.remove("a"));
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> compositeMapView.remove("a", "b"));
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> compositeMapView.putAll(emptyMap));
+        Assertions.assertThrows(UnsupportedOperationException.class, compositeMapView::clear);
+        Assertions.assertThrows(UnsupportedOperationException.class, compositeMapView::toString);
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> compositeMapView.forEach((k, v) -> {
+        }));
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> compositeMapView.replaceAll((k, v) -> ""));
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> compositeMapView.replace("a", "b"));
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> compositeMapView.replace("a", "b", "b2"));
+        Assertions.assertThrows(UnsupportedOperationException.class,
+                () -> compositeMapView.computeIfAbsent("a", (k) -> ""));
+        Assertions.assertThrows(UnsupportedOperationException.class,
+                () -> compositeMapView.computeIfPresent("a", (k, v) -> ""));
+        Assertions.assertThrows(UnsupportedOperationException.class, () -> compositeMapView.compute("a", (k, v) -> ""));
+        Assertions.assertThrows(UnsupportedOperationException.class,
+                () -> compositeMapView.merge("a", "b", (k, v) -> ""));
+
+    }
+}


[kylin] 01/22: fix secondstorage index refresh locked

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 7cab52b1f03102d43bfac700f3b7cddc3de1cd96
Author: Shuai li <lo...@live.cn>
AuthorDate: Wed Oct 12 19:54:30 2022 +0800

    fix secondstorage index refresh locked
---
 .../kap/secondstorage/SecondStorageIndexTest.java  |  2 +-
 .../job/ClickhouseRefreshSecondaryIndex.java       | 47 +++--------------
 .../kap/clickhouse/job/RefreshSecondaryIndex.java  | 60 ++++++++++------------
 .../management/SecondStorageService.java           |  8 +--
 4 files changed, 39 insertions(+), 78 deletions(-)

diff --git a/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageIndexTest.java b/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageIndexTest.java
index b5473c9f1f..767b045172 100644
--- a/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageIndexTest.java
+++ b/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageIndexTest.java
@@ -349,7 +349,7 @@ public class SecondStorageIndexTest implements JobWaiter {
         String jobId = updatePrimaryIndexAndSecondaryIndex(modelName, null, Sets.newHashSet());
         waitJobEnd(getProject(), jobId);
 
-        assertThrows(String.format(Locale.ROOT, MsgPicker.getMsg().getSecondStorageProjectJobExists(), getProject()),
+        assertThrows(String.format(Locale.ROOT, MsgPicker.getMsg().getSecondStorageConcurrentOperate(), getProject()),
                 KylinException.class, () -> updatePrimaryIndexAndSecondaryIndex(modelName, null, secondaryIndex));
         clickhouse[0].start();
         ClickHouseUtils.internalConfigClickHouse(clickhouse, replica);
diff --git a/src/second-storage/clickhouse/src/main/java/io/kyligence/kap/clickhouse/job/ClickhouseRefreshSecondaryIndex.java b/src/second-storage/clickhouse/src/main/java/io/kyligence/kap/clickhouse/job/ClickhouseRefreshSecondaryIndex.java
index 6f818af516..632cf60a88 100644
--- a/src/second-storage/clickhouse/src/main/java/io/kyligence/kap/clickhouse/job/ClickhouseRefreshSecondaryIndex.java
+++ b/src/second-storage/clickhouse/src/main/java/io/kyligence/kap/clickhouse/job/ClickhouseRefreshSecondaryIndex.java
@@ -22,14 +22,12 @@ import static io.kyligence.kap.secondstorage.SecondStorageConstants.STEP_SECOND_
 import static io.kyligence.kap.secondstorage.SecondStorageUtil.getTableFlow;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.List;
 import java.util.Set;
 import java.util.concurrent.Future;
 import java.util.concurrent.LinkedBlockingQueue;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;
 
 import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.kylin.common.persistence.transaction.UnitOfWork;
@@ -39,9 +37,7 @@ import org.apache.kylin.job.exception.ExecuteException;
 import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableContext;
 import org.apache.kylin.job.execution.ExecuteResult;
-import org.apache.kylin.metadata.cube.model.LayoutEntity;
 import org.apache.kylin.metadata.cube.model.NDataflowManager;
-import org.apache.kylin.metadata.cube.model.NIndexPlanManager;
 import org.apache.kylin.metadata.project.EnhancedUnitOfWork;
 
 import com.fasterxml.jackson.core.type.TypeReference;
@@ -102,14 +98,15 @@ public class ClickhouseRefreshSecondaryIndex extends AbstractExecutable {
                 }
             }
 
+            val dataflow = NDataflowManager.getInstance(getConfig(), getProject()).getDataflow(modelId);
+            String database = NameUtil.getDatabase(getConfig(), getProject());
+            String table = NameUtil.getTable(dataflow, layoutId);
+            List<Future<?>> results = Lists.newArrayList();
             List<SecondStorageNode> nodes = SecondStorageUtil.listProjectNodes(getProject());
-            List<RefreshSecondaryIndex> allJob = getAddIndexJob(nodes, newIndexes, layoutId);
-            allJob.addAll(getToBeDeleteIndexJob(nodes, toBeDeleteIndexed, layoutId));
-
-            List<Future<?>> results = new ArrayList<>();
             val taskPool = new ThreadPoolExecutor(nodes.size(), nodes.size(), 0L, TimeUnit.MILLISECONDS,
                     new LinkedBlockingQueue<>(), new NamedThreadFactory("Refresh Tiered Storage Index"));
-            allJob.forEach(job -> results.add(taskPool.submit(job::refresh)));
+            nodes.forEach(node -> results.add(taskPool.submit(() -> new RefreshSecondaryIndex(node.getName(), database,
+                    table, newIndexes, toBeDeleteIndexed, dataflow).refresh())));
 
             try {
                 for (Future<?> result : results) {
@@ -128,36 +125,4 @@ public class ClickhouseRefreshSecondaryIndex extends AbstractExecutable {
             return ExecuteResult.createSucceed();
         });
     }
-
-    private List<RefreshSecondaryIndex> getAddIndexJob(List<SecondStorageNode> nodes, Set<Integer> newIndexes,
-            long layoutId) {
-        String modelId = getTargetSubject();
-        val indexPlan = NIndexPlanManager.getInstance(getConfig(), project).getIndexPlan(modelId);
-
-        if (indexPlan == null || indexPlan.getLayoutEntity(layoutId) == null) {
-            return Lists.newArrayList();
-        }
-
-        LayoutEntity layout = indexPlan.getLayoutEntity(layoutId);
-        String database = NameUtil.getDatabase(getConfig(), getProject());
-        String table = NameUtil.getTable(NDataflowManager.getInstance(getConfig(), getProject()).getDataflow(modelId),
-                layoutId);
-        return nodes.stream()
-                .flatMap(node -> newIndexes.stream().map(column -> new RefreshSecondaryIndex(node.getName(), database,
-                        table, column, layout, RefreshSecondaryIndex.Type.ADD)))
-                .collect(Collectors.toList());
-    }
-
-    private List<RefreshSecondaryIndex> getToBeDeleteIndexJob(List<SecondStorageNode> nodes,
-            Set<Integer> toBeDeleteIndexed, long layoutId) {
-        String modelId = getTargetSubject();
-        String database = NameUtil.getDatabase(getConfig(), getProject());
-        String table = NameUtil.getTable(NDataflowManager.getInstance(getConfig(), getProject()).getDataflow(modelId),
-                layoutId);
-
-        return nodes.stream()
-                .flatMap(node -> toBeDeleteIndexed.stream().map(column -> new RefreshSecondaryIndex(node.getName(),
-                        database, table, column, null, RefreshSecondaryIndex.Type.DELETE)))
-                .collect(Collectors.toList());
-    }
 }
diff --git a/src/second-storage/clickhouse/src/main/java/io/kyligence/kap/clickhouse/job/RefreshSecondaryIndex.java b/src/second-storage/clickhouse/src/main/java/io/kyligence/kap/clickhouse/job/RefreshSecondaryIndex.java
index 6c6c71193c..8b021828cc 100644
--- a/src/second-storage/clickhouse/src/main/java/io/kyligence/kap/clickhouse/job/RefreshSecondaryIndex.java
+++ b/src/second-storage/clickhouse/src/main/java/io/kyligence/kap/clickhouse/job/RefreshSecondaryIndex.java
@@ -26,9 +26,7 @@ import java.util.Set;
 
 import org.apache.commons.lang3.exception.ExceptionUtils;
 import org.apache.kylin.common.KylinConfig;
-import org.apache.kylin.metadata.cube.model.LayoutEntity;
-import org.apache.kylin.metadata.cube.model.NDataflowManager;
-import org.apache.kylin.metadata.model.NDataModel;
+import org.apache.kylin.metadata.cube.model.NDataflow;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
@@ -49,33 +47,32 @@ import lombok.extern.slf4j.Slf4j;
 @Getter
 @Slf4j
 public class RefreshSecondaryIndex {
-
     @JsonProperty("node")
     private String node;
     @JsonProperty("database")
     private String database;
     @JsonProperty("table")
     private String table;
-    @JsonProperty("column_id")
-    private Integer columnId;
-    @JsonProperty("type")
-    private Type type;
+    @JsonProperty("add_indexes")
+    private Set<Integer> addIndexes;
+    @JsonProperty("delete_indexes")
+    private Set<Integer> deleteIndexes;
 
     @JsonIgnore
-    private LayoutEntity layoutEntity;
+    private NDataflow dataflow;
 
     public RefreshSecondaryIndex() {
         // empty
     }
 
-    public RefreshSecondaryIndex(String node, String database, String table, Integer columnId,
-            LayoutEntity layoutEntity, Type type) {
+    public RefreshSecondaryIndex(String node, String database, String table, Set<Integer> addIndexes,
+            Set<Integer> deleteIndexes, NDataflow dataflow) {
         this.node = node;
         this.database = database;
         this.table = table;
-        this.columnId = columnId;
-        this.layoutEntity = layoutEntity;
-        this.type = type;
+        this.dataflow = dataflow;
+        this.addIndexes = addIndexes;
+        this.deleteIndexes = deleteIndexes;
     }
 
     public void refresh() {
@@ -88,29 +85,31 @@ public class RefreshSecondaryIndex {
                 return;
             }
             Set<String> existSkipIndex = existSkippingIndex(clickHouse, database, table);
-            String column = getPrefixColumn(String.valueOf(columnId));
-            String indexName = ClickHouseNameUtil.getSkippingIndexName(table, column);
-            if (type == Type.ADD) {
-                addSkippingIndex(clickHouse, tableIdentifier, column, indexName, existSkipIndex);
-            } else if (type == Type.DELETE) {
-                deleteSkippingIndex(clickHouse, tableIdentifier, indexName, existSkipIndex);
+
+            for (Integer deleteIndexColumnId : deleteIndexes) {
+                deleteSkippingIndex(clickHouse, tableIdentifier, deleteIndexColumnId, existSkipIndex);
+            }
+
+            for (Integer addIndexColumnId : addIndexes) {
+                addSkippingIndex(clickHouse, tableIdentifier, addIndexColumnId, existSkipIndex);
             }
         } catch (SQLException e) {
-            log.error("node {} clean index {}.{} failed", node, database, table);
+            log.error("node {} update index {}.{} failed", node, database, table);
             ExceptionUtils.rethrow(e);
         }
     }
 
-    private void addSkippingIndex(ClickHouse clickHouse, TableIdentifier tableIdentifier, String column,
-            String indexName, Set<String> existSkipIndex) throws SQLException {
-        NDataModel model = layoutEntity.getModel();
-        KylinConfig modelConfig = NDataflowManager.getInstance(KylinConfig.getInstanceFromEnv(), model.getProject())
-                .getDataflow(model.getId()).getConfig();
+    private void addSkippingIndex(ClickHouse clickHouse, TableIdentifier tableIdentifier, int columnId,
+            Set<String> existSkipIndex) throws SQLException {
+        String column = getPrefixColumn(String.valueOf(columnId));
+        String indexName = ClickHouseNameUtil.getSkippingIndexName(table, column);
+        KylinConfig modelConfig = dataflow.getConfig();
         int granularity = modelConfig.getSecondStorageSkippingIndexGranularity();
         val render = new ClickHouseRender();
 
         String expr = SkippingIndexChooser
-                .getSkippingIndexType(layoutEntity.getOrderedDimensions().get(columnId).getType()).toSql(modelConfig);
+                .getSkippingIndexType(dataflow.getModel().getEffectiveDimensions().get(columnId).getType())
+                .toSql(modelConfig);
         AlterTable alterTable = new AlterTable(tableIdentifier,
                 new AlterTable.ManipulateIndex(indexName, column, expr, granularity));
         AlterTable materializeTable = new AlterTable(tableIdentifier,
@@ -122,8 +121,9 @@ public class RefreshSecondaryIndex {
         clickHouse.apply(materializeTable.toSql(render));
     }
 
-    private void deleteSkippingIndex(ClickHouse clickHouse, TableIdentifier tableIdentifier, String indexName,
+    private void deleteSkippingIndex(ClickHouse clickHouse, TableIdentifier tableIdentifier, int columnId,
             Set<String> existSkipIndex) throws SQLException {
+        String indexName = ClickHouseNameUtil.getSkippingIndexName(table, getPrefixColumn(String.valueOf(columnId)));
         if (!existSkipIndex.contains(indexName)) {
             return;
         }
@@ -150,8 +150,4 @@ public class RefreshSecondaryIndex {
 
         return Sets.newHashSet();
     }
-
-    enum Type {
-        ADD, DELETE;
-    }
 }
diff --git a/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageService.java b/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageService.java
index eb43fddf83..b0886204aa 100644
--- a/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageService.java
+++ b/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageService.java
@@ -1158,14 +1158,14 @@ public class SecondStorageService extends BasicService implements SecondStorageU
         SecondStorageUtil.validateProjectLock(project, Collections.singletonList(LockTypeEnum.LOAD.name()));
         List<AbstractExecutable> jobs = getRelationJobsWithoutFinish(project, modelId);
         if (!jobs.isEmpty()) {
-            throw new KylinException(JobErrorCode.SECOND_STORAGE_PROJECT_JOB_EXISTS,
-                    String.format(Locale.ROOT, MsgPicker.getMsg().getSecondStorageProjectJobExists(), project));
+            throw new KylinException(JobErrorCode.SECOND_STORAGE_JOB_EXISTS,
+                    String.format(Locale.ROOT, MsgPicker.getMsg().getSecondStorageConcurrentOperate(), project));
         }
         jobs = getJobs(project, modelId, Sets.newHashSet(ExecutableState.ERROR),
                 Sets.newHashSet(JobTypeEnum.SECOND_STORAGE_REFRESH_SECONDARY_INDEXES));
         if (!jobs.isEmpty()) {
-            throw new KylinException(JobErrorCode.SECOND_STORAGE_PROJECT_JOB_EXISTS,
-                    String.format(Locale.ROOT, MsgPicker.getMsg().getSecondStorageProjectJobExists(), project));
+            throw new KylinException(JobErrorCode.SECOND_STORAGE_JOB_EXISTS,
+                    String.format(Locale.ROOT, MsgPicker.getMsg().getSecondStorageConcurrentOperate(), project));
         }
     }
 


[kylin] 18/22: KYLIN-5323 fix segment matched to wrong model

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 5c8bb0101796c48091a7e9faecea9e479c4ca6a9
Author: binbin.zheng <bi...@kyligence.io>
AuthorDate: Thu Sep 29 18:44:26 2022 +0800

    KYLIN-5323 fix segment matched to wrong model
---
 .../cube/realization/HybridRealization.java        |  1 -
 .../metadata/realization/CapabilityResult.java     |  6 ++-
 .../kylin/query/routing/CandidateSortTest.java     | 59 ++++++++++++++++++++++
 3 files changed, 64 insertions(+), 2 deletions(-)

diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/realization/HybridRealization.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/realization/HybridRealization.java
index 0b3354bcf8..a40f4537d4 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/realization/HybridRealization.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/realization/HybridRealization.java
@@ -133,7 +133,6 @@ public class HybridRealization implements IRealization {
     public CapabilityResult isCapable(SQLDigest digest, List<NDataSegment> prunedSegments,
             List<NDataSegment> prunedStreamingSegments, Map<String, Set<Long>> secondStorageSegmentLayoutMap) {
         CapabilityResult result = new CapabilityResult();
-        result.cost = Integer.MAX_VALUE;
 
         resolveSegmentsOverlap(prunedStreamingSegments);
         for (IRealization realization : getRealizations()) {
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/realization/CapabilityResult.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/realization/CapabilityResult.java
index 2368812646..6cad1412b7 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/realization/CapabilityResult.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/realization/CapabilityResult.java
@@ -48,10 +48,14 @@ public class CapabilityResult {
     @Setter
     private IRealizationCandidate selectedStreamingCandidate;
 
+    @Getter
+    @Setter
+    private int layoutUnmatchedColsSize;
+
     /**
      * The smaller the cost, the more capable the realization
      */
-    public int cost;
+    public int cost = Integer.MAX_VALUE;
 
     /**
      * reason of incapable
diff --git a/src/query/src/test/java/org/apache/kylin/query/routing/CandidateSortTest.java b/src/query/src/test/java/org/apache/kylin/query/routing/CandidateSortTest.java
index f0d093addb..bdb79596e4 100644
--- a/src/query/src/test/java/org/apache/kylin/query/routing/CandidateSortTest.java
+++ b/src/query/src/test/java/org/apache/kylin/query/routing/CandidateSortTest.java
@@ -25,6 +25,7 @@ import java.util.Set;
 
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.QueryContext;
+import org.apache.kylin.metadata.cube.cuboid.NLayoutCandidate;
 import org.apache.kylin.metadata.cube.model.NDataSegment;
 import org.apache.kylin.metadata.model.FunctionDesc;
 import org.apache.kylin.metadata.model.MeasureDesc;
@@ -100,6 +101,31 @@ public class CandidateSortTest {
             val model3 = mockCandidate("model0003", "modelC", 4, 4);
             sort(model1, model2, model3).assertFirst(model1);
         }
+
+        {
+            val model1 = mockCandidate("model0001", "modelA", 1, 1);
+            val model2 = mockEmptyCandidate("model0002", "modelB", 1);
+            sort(model1, model2).assertFirst(model1);
+        }
+
+        {
+            val model1 = mockStreamingCandidate("model0001", "modelA", 1, 1);
+            val model2 = mockEmptyCandidate("model0002", "modelB", 1);
+            sort(model1, model2).assertFirst(model1);
+        }
+
+        {
+            val model1 = mockHybridCandidate("model0001", "modelA", 1, 1, 2);
+            val model2 = mockEmptyCandidate("model0002", "modelB", 1);
+            sort(model1, model2).assertFirst(model1);
+        }
+
+        {
+            val model1 = mockCandidate("model0001", "modelA", 1, 3);
+            val model2 = mockStreamingCandidate("model0002", "modelB", 1, 2);
+            val model3 = mockHybridCandidate("model0003", "modelC", 1, 4, 2);
+            sort(model1, model2, model3).assertFirst(model2);
+        }
     }
 
     private interface SortedCandidate {
@@ -120,6 +146,39 @@ public class CandidateSortTest {
         candidate.realization = mockRealization(modelId, modelName, modelCost);
         val cap = new CapabilityResult();
         cap.setSelectedCandidate(() -> candidateCost);
+        cap.cost = (int) cap.getSelectedCandidate().getCost();
+        candidate.setCapability(cap);
+        return candidate;
+    }
+
+    private Candidate mockStreamingCandidate(String modelId, String modelName, int modelCost, double candidateCost) {
+        val candidate = new Candidate();
+        candidate.realization = mockRealization(modelId, modelName, modelCost);
+        val cap = new CapabilityResult();
+        cap.setSelectedStreamingCandidate(() -> candidateCost);
+        cap.cost = (int) cap.getSelectedStreamingCandidate().getCost();
+        candidate.setCapability(cap);
+        return candidate;
+    }
+
+    private Candidate mockHybridCandidate(String modelId, String modelName, int modelCost, double candidateCost,
+            double streamingCandidateCost) {
+        val candidate = new Candidate();
+        candidate.realization = mockRealization(modelId, modelName, modelCost);
+        val cap = new CapabilityResult();
+        cap.setSelectedCandidate(() -> candidateCost);
+        cap.setSelectedStreamingCandidate(() -> streamingCandidateCost);
+        cap.cost = (int) Math.min(cap.getSelectedCandidate().getCost(), cap.getSelectedStreamingCandidate().getCost());
+        candidate.setCapability(cap);
+        return candidate;
+    }
+
+    private Candidate mockEmptyCandidate(String modelId, String modelName, int modelCost) {
+        val candidate = new Candidate();
+        candidate.realization = mockRealization(modelId, modelName, modelCost);
+        val cap = new CapabilityResult();
+        cap.setSelectedCandidate(NLayoutCandidate.EMPTY);
+        cap.setSelectedStreamingCandidate(NLayoutCandidate.EMPTY);
         candidate.setCapability(cap);
         return candidate;
     }


[kylin] 14/22: check project admin permission

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 2902a64d43600bfae46e81890df751f37748c8da
Author: Zhixiong Chen <ch...@apache.org>
AuthorDate: Sat Oct 15 22:40:20 2022 +0800

    check project admin permission
---
 .../io/kyligence/kap/secondstorage/management/SecondStorageService.java  | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageService.java b/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageService.java
index b62b4e2376..c10c6c36ea 100644
--- a/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageService.java
+++ b/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageService.java
@@ -1174,6 +1174,7 @@ public class SecondStorageService extends BasicService implements SecondStorageU
     }
 
     public void modifyColumn(String project, String model, String column, String datatype) {
+        isProjectAdmin(project);
         logger.info("Start to modify second storage low cardinality on model {}.", model);
         if (!SecondStorageUtil.isProjectEnable(project) || !SecondStorageUtil.isModelEnable(project, model)) {
             throw new KylinException(INVALID_PARAMETER, String.format("The model does not have tiered storage enabled on project %s.", project));


[kylin] 07/22: KYLIN-5315 update AutoRefreshSnapshotScheduler afterPropertiesSet

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 275d57dea54a3752cf621e86b7660d4a7eddb13e
Author: jlf <lo...@kyligence.io>
AuthorDate: Fri Oct 14 10:01:24 2022 +0800

    KYLIN-5315 update AutoRefreshSnapshotScheduler afterPropertiesSet
---
 .../rest/scheduler/AutoRefreshSnapshotRunner.java  | 64 ++++++++-------------
 .../scheduler/AutoRefreshSnapshotScheduler.java    | 38 +++++++++++--
 ...pshotThread.java => BuildSnapshotRunnable.java} |  4 +-
 ...leThread.java => CheckSourceTableRunnable.java} |  2 +-
 .../scheduler/AutoRefreshSnapshotConfigTest.java   | 58 +++++++++++++++++++
 .../scheduler/AutoRefreshSnapshotRunnerTest.java   | 66 ++++++----------------
 ...eadTest.java => BuildSnapshotRunnableTest.java} | 20 +++----
 ...Test.java => CheckSourceTableRunnableTest.java} | 11 ++--
 .../SnapshotSourceTableStatsServiceTest.scala      |  7 +--
 .../service/SnapshotSourceTableStatsService.java   | 21 ++++---
 .../TestSnapshotSourceTableStatsService.java       | 36 ------------
 11 files changed, 166 insertions(+), 161 deletions(-)

diff --git a/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotRunner.java b/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotRunner.java
index c3e7ce5046..294b6ddb75 100644
--- a/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotRunner.java
+++ b/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotRunner.java
@@ -87,8 +87,6 @@ public class AutoRefreshSnapshotRunner implements Runnable {
     @Getter
     private Map<Future<String>, Long> checkSourceTableFutures = Maps.newConcurrentMap();
     @Getter
-    private Map<Future<String>, Long> buildSnapshotFutures = Maps.newConcurrentMap();
-    @Getter
     private final String project;
     @Setter
     @Getter
@@ -141,6 +139,8 @@ public class AutoRefreshSnapshotRunner implements Runnable {
                         poolExecutor.getPoolSize(), poolExecutor.getCorePoolSize(), poolExecutor.getActiveCount(),
                         poolExecutor.getMaximumPoolSize());
             }
+            projectConfig = NProjectManager.getInstance(KylinConfig.readSystemKylinConfig()).getProject(project)
+                    .getConfig();
             saveSnapshotViewMapping(project, restTemplate);
             val tables = SnapshotJobUtils.getSnapshotTables(projectConfig, project);
             val viewTableMapping = readViewTableMapping();
@@ -152,8 +152,6 @@ public class AutoRefreshSnapshotRunner implements Runnable {
 
             waitCheckSourceTableTaskDone();
 
-            waitBuildSnapshotTaskDone();
-
             log.info("Project[{}] stop check and refresh snapshot", project);
         } catch (InterruptedException ie) {
             Thread.currentThread().interrupt();
@@ -163,9 +161,7 @@ public class AutoRefreshSnapshotRunner implements Runnable {
         } finally {
             checkSourceTableQueue = new LinkedBlockingQueue<>();
             cancelFuture(checkSourceTableFutures);
-            cancelFuture(buildSnapshotFutures);
             checkSourceTableFutures = Maps.newConcurrentMap();
-            buildSnapshotFutures = Maps.newConcurrentMap();
             sourceTableSnapshotMapping = Maps.newHashMap();
             buildSnapshotCount = Maps.newConcurrentMap();
         }
@@ -214,10 +210,12 @@ public class AutoRefreshSnapshotRunner implements Runnable {
             }
         }
         for (TableDesc tableDesc : tables) {
-            val source = tableDesc.getIdentity().toLowerCase(Locale.ROOT);
-            val snapshots = result.getOrDefault(source, Lists.newArrayList());
-            snapshots.add(tableDesc);
-            result.put(source, snapshots.stream().distinct().collect(Collectors.toList()));
+            if (!tableDesc.isView()) {
+                val source = tableDesc.getIdentity().toLowerCase(Locale.ROOT);
+                val snapshots = result.getOrDefault(source, Lists.newArrayList());
+                snapshots.add(tableDesc);
+                result.put(source, snapshots.stream().distinct().collect(Collectors.toList()));
+            }
         }
         return result;
     }
@@ -259,16 +257,16 @@ public class AutoRefreshSnapshotRunner implements Runnable {
 
     public void checkSourceTable(Set<String> allSourceTable) {
         for (String table : allSourceTable) {
-            val thread = new CheckSourceTableThread();
-            thread.setProject(project);
-            thread.setConfig(projectConfig);
-            thread.setTableIdentity(table);
-            thread.setRestTemplate(restTemplate);
-            thread.setCheckSourceTableQueue(checkSourceTableQueue);
+            val runnable = new CheckSourceTableRunnable();
+            runnable.setProject(project);
+            runnable.setConfig(projectConfig);
+            runnable.setTableIdentity(table);
+            runnable.setRestTemplate(restTemplate);
+            runnable.setCheckSourceTableQueue(checkSourceTableQueue);
             sourceTableSnapshotMapping.get(table).stream()
                     .filter(tableDesc -> StringUtils.equalsIgnoreCase(table, tableDesc.getIdentity())).findFirst()
-                    .ifPresent(tableDesc -> thread.setPartitionColumn(tableDesc.getSelectedSnapshotPartitionCol()));
-            val submit = jobPool.submit(thread, "success");
+                    .ifPresent(tableDesc -> runnable.setPartitionColumn(tableDesc.getSelectedSnapshotPartitionCol()));
+            val submit = jobPool.submit(runnable, "success");
             checkSourceTableFutures.put(submit, System.currentTimeMillis());
         }
     }
@@ -304,33 +302,21 @@ public class AutoRefreshSnapshotRunner implements Runnable {
         }
     }
 
-    public void waitBuildSnapshotTaskDone() throws InterruptedException {
-        while (true) {
-            val doneCount = buildSnapshotFutures.keySet().stream().filter(Future::isDone).count();
-            if (buildSnapshotFutures.size() == doneCount) {
-                break;
-            }
-            cancelTimeoutFuture(buildSnapshotFutures);
-            TimeUnit.SECONDS.sleep(10);
-        }
-    }
-
     public void buildSnapshot(CheckSourceTableResult result) {
         val needBuildSnapshots = sourceTableSnapshotMapping.get(result.getTableIdentity());
         for (TableDesc tableDesc : needBuildSnapshots) {
             val sourceTableCount = buildSnapshotCount.getOrDefault(tableDesc.getIdentity(), new AtomicInteger(0));
             log.info("buildSnapshotCount is [{}], tableIdentity is [{}]", sourceTableCount, tableDesc.getIdentity());
             if (sourceTableCount.getAndIncrement() == 0) {
-                val thread = new BuildSnapshotThread();
-                thread.setProject(project);
-                thread.setConfig(projectConfig);
-                thread.setRestTemplate(restTemplate);
-                thread.setNeedRefresh(result.getNeedRefresh());
-                thread.setNeedRefreshPartitionsValue(result.getNeedRefreshPartitionsValue());
-                thread.setTableIdentity(tableDesc.getIdentity());
-                thread.setPartitionColumn(tableDesc.getSelectedSnapshotPartitionCol());
-                val submit = jobPool.submit(thread, "success");
-                buildSnapshotFutures.put(submit, System.currentTimeMillis());
+                val runnable = new BuildSnapshotRunnable();
+                runnable.setProject(project);
+                runnable.setConfig(projectConfig);
+                runnable.setRestTemplate(restTemplate);
+                runnable.setNeedRefresh(result.getNeedRefresh());
+                runnable.setNeedRefreshPartitionsValue(result.getNeedRefreshPartitionsValue());
+                runnable.setTableIdentity(tableDesc.getIdentity());
+                runnable.setPartitionColumn(tableDesc.getSelectedSnapshotPartitionCol());
+                runnable.run();
             }
             buildSnapshotCount.put(tableDesc.getIdentity(), sourceTableCount);
         }
diff --git a/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotScheduler.java b/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotScheduler.java
index 539bf6ac08..8639c09d30 100644
--- a/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotScheduler.java
+++ b/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotScheduler.java
@@ -26,17 +26,23 @@ import java.util.concurrent.ScheduledFuture;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import javax.annotation.PostConstruct;
+
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.scheduler.EpochStartedNotifier;
+import org.apache.kylin.common.scheduler.EventBusFactory;
 import org.apache.kylin.common.util.AddressUtil;
 import org.apache.kylin.common.util.HadoopUtil;
 import org.apache.kylin.common.util.Pair;
+import org.apache.kylin.metadata.epoch.EpochManager;
 import org.apache.kylin.metadata.project.NProjectManager;
 import org.apache.kylin.metadata.project.ProjectInstance;
-import org.springframework.beans.factory.InitializingBean;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.core.annotation.Order;
 import org.springframework.scheduling.TaskScheduler;
 import org.springframework.scheduling.annotation.Scheduled;
 import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
@@ -46,7 +52,7 @@ import org.springframework.web.client.RestTemplate;
 
 import com.google.common.collect.Maps;
 
-import org.apache.kylin.metadata.epoch.EpochManager;
+import io.kyligence.kap.guava20.shaded.common.eventbus.Subscribe;
 import lombok.Getter;
 import lombok.val;
 import lombok.extern.slf4j.Slf4j;
@@ -66,7 +72,7 @@ import lombok.extern.slf4j.Slf4j;
  */
 @Slf4j
 @Component
-public class AutoRefreshSnapshotScheduler implements InitializingBean {
+public class AutoRefreshSnapshotScheduler {
     private static final Integer THREAD_POOL_TASK_SCHEDULER_DEFAULT_POOL_SIZE = 20;
     @Autowired
     @Qualifier("projectScheduler")
@@ -228,7 +234,6 @@ public class AutoRefreshSnapshotScheduler implements InitializingBean {
         }
     }
 
-    @Override
     public void afterPropertiesSet() throws Exception {
         log.info("AutoRefreshSnapshotScheduler init...");
         val fs = HadoopUtil.getWorkingFileSystem();
@@ -257,3 +262,28 @@ public class AutoRefreshSnapshotScheduler implements InitializingBean {
         }
     }
 }
+
+@Slf4j
+@Configuration
+@Order
+class AutoRefreshSnapshotConfig {
+    @Autowired
+    private AutoRefreshSnapshotScheduler scheduler;
+
+    @PostConstruct
+    public void init() {
+        val kylinConfig = KylinConfig.getInstanceFromEnv();
+        if (kylinConfig.isJobNode()) {
+            EventBusFactory.getInstance().register(this, false);
+        }
+    }
+
+    @Subscribe
+    public void registerScheduler(EpochStartedNotifier notifier) {
+        try {
+            scheduler.afterPropertiesSet();
+        } catch (Exception e) {
+            log.error(e.getMessage(), e);
+        }
+    }
+}
diff --git a/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/BuildSnapshotThread.java b/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/BuildSnapshotRunnable.java
similarity index 99%
rename from src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/BuildSnapshotThread.java
rename to src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/BuildSnapshotRunnable.java
index 4759260d54..e9ebd6c67a 100644
--- a/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/BuildSnapshotThread.java
+++ b/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/BuildSnapshotRunnable.java
@@ -42,6 +42,7 @@ import org.apache.kylin.common.exception.KylinRuntimeException;
 import org.apache.kylin.common.response.RestResponse;
 import org.apache.kylin.common.util.HadoopUtil;
 import org.apache.kylin.common.util.JsonUtil;
+import org.apache.kylin.engine.spark.job.NSparkSnapshotJob;
 import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableState;
 import org.apache.kylin.job.execution.NExecutableManager;
@@ -53,7 +54,6 @@ import org.springframework.http.HttpMethod;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.type.TypeReference;
 
-import org.apache.kylin.engine.spark.job.NSparkSnapshotJob;
 import io.kyligence.kap.guava20.shaded.common.collect.Lists;
 import io.kyligence.kap.guava20.shaded.common.collect.Maps;
 import io.kyligence.kap.guava20.shaded.common.collect.Sets;
@@ -64,7 +64,7 @@ import lombok.val;
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
-public class BuildSnapshotThread extends AbstractSchedulerRunnable {
+public class BuildSnapshotRunnable extends AbstractSchedulerRunnable {
     private static final String BUILD_SNAPSHOT_ERROR_MESSAGE = "Project[%s] Snapshot[%s] buildSnapshot failed";
 
     @Override
diff --git a/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/CheckSourceTableThread.java b/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/CheckSourceTableRunnable.java
similarity index 98%
rename from src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/CheckSourceTableThread.java
rename to src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/CheckSourceTableRunnable.java
index d983fec99f..9fc0fd9d1d 100644
--- a/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/CheckSourceTableThread.java
+++ b/src/data-loading-service/src/main/java/org/apache/kylin/rest/scheduler/CheckSourceTableRunnable.java
@@ -46,7 +46,7 @@ import lombok.val;
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
-public class CheckSourceTableThread extends AbstractSchedulerRunnable {
+public class CheckSourceTableRunnable extends AbstractSchedulerRunnable {
 
     private static final String SNAPSHOT_TABLE_CHECK_ERROR_MESSAGE = "Project[%s] Snapshot source table[%s] check table stats Failed";
 
diff --git a/src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotConfigTest.java b/src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotConfigTest.java
new file mode 100644
index 0000000000..450f0bc088
--- /dev/null
+++ b/src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotConfigTest.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.rest.scheduler;
+
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.common.scheduler.EpochStartedNotifier;
+import org.apache.kylin.common.scheduler.EventBusFactory;
+import org.junit.jupiter.api.Test;
+import org.mockito.Mockito;
+import org.springframework.test.util.ReflectionTestUtils;
+
+import lombok.val;
+
+class AutoRefreshSnapshotConfigTest {
+    @Test
+    void testRegisterScheduler() throws Exception {
+        registerScheduler(true);
+        registerScheduler(false);
+    }
+
+    void registerScheduler(Boolean isJobNode) throws Exception {
+        try (val mockStatic = Mockito.mockStatic(EventBusFactory.class);
+                val configStatic = Mockito.mockStatic(KylinConfig.class)) {
+            val config = Mockito.mock(KylinConfig.class);
+            Mockito.when(config.isJobNode()).thenReturn(isJobNode);
+            configStatic.when(KylinConfig::getInstanceFromEnv).thenReturn(config);
+
+            val buildConfig = new AutoRefreshSnapshotConfig();
+            val eventBus = Mockito.mock(EventBusFactory.class);
+            mockStatic.when(EventBusFactory::getInstance).thenReturn(eventBus);
+            buildConfig.init();
+            val scheduler = Mockito.mock(AutoRefreshSnapshotScheduler.class);
+            ReflectionTestUtils.setField(buildConfig, "scheduler", scheduler);
+
+            Mockito.doNothing().when(scheduler).afterPropertiesSet();
+            buildConfig.registerScheduler(new EpochStartedNotifier());
+
+            Mockito.doThrow(new Exception("test")).when(scheduler).afterPropertiesSet();
+            buildConfig.registerScheduler(new EpochStartedNotifier());
+        }
+    }
+}
diff --git a/src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotRunnerTest.java b/src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotRunnerTest.java
index ae62d5c7a4..5dd01bf60a 100644
--- a/src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotRunnerTest.java
+++ b/src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/AutoRefreshSnapshotRunnerTest.java
@@ -35,7 +35,6 @@ import java.util.Set;
 import java.util.concurrent.Future;
 import java.util.concurrent.FutureTask;
 import java.util.concurrent.ThreadPoolExecutor;
-import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.stream.Collectors;
 
 import org.apache.commons.collections4.CollectionUtils;
@@ -67,7 +66,6 @@ import io.kyligence.kap.guava20.shaded.common.collect.Lists;
 import io.kyligence.kap.guava20.shaded.common.collect.Maps;
 import io.kyligence.kap.guava20.shaded.common.collect.Sets;
 import lombok.val;
-import lombok.var;
 import lombok.extern.slf4j.Slf4j;
 
 @Slf4j
@@ -116,7 +114,6 @@ class AutoRefreshSnapshotRunnerTest {
             runner.doRun();
             assertTrue(CollectionUtils.isEmpty(runner.getCheckSourceTableQueue()));
             assertTrue(MapUtils.isEmpty(runner.getBuildSnapshotCount()));
-            assertTrue(MapUtils.isEmpty(runner.getBuildSnapshotFutures()));
             assertTrue(MapUtils.isEmpty(runner.getCheckSourceTableFutures()));
             assertTrue(MapUtils.isEmpty(runner.getSourceTableSnapshotMapping()));
         } finally {
@@ -247,6 +244,16 @@ class AutoRefreshSnapshotRunnerTest {
                 if (i < 14) {
                     tables.add(allTables.get(i));
                 }
+                if (allTables.get(i).isView()) {
+                    tables.add(allTables.get(i));
+                    val sourceTables = Sets.<String> newHashSet();
+                    for (int j = 0; j < 7; j++) {
+                        sourceTables.add("default.table_" + j);
+                        excepted.add("default.table_" + j);
+                    }
+                    sourceTables.add(allTables.get(i).getIdentity().toLowerCase(Locale.ROOT));
+                    viewTableMapping.put(allTables.get(i).getIdentity(), sourceTables);
+                }
                 if (i > 7) {
                     val sourceTables = Sets.<String> newHashSet();
                     for (int j = 0; j < 7; j++) {
@@ -307,9 +314,9 @@ class AutoRefreshSnapshotRunnerTest {
             val sourceTables = sourceTableSnapshotMapping.keySet();
             runner.getSourceTableSnapshotMapping().putAll(sourceTableSnapshotMapping);
 
-            try (val ignored = Mockito.mockConstruction(CheckSourceTableThread.class,
+            try (val ignored = Mockito.mockConstruction(CheckSourceTableRunnable.class,
                     (mock, context) -> Mockito.doNothing().when(mock).checkTable())) {
-                try (val ignored2 = Mockito.mockConstruction(BuildSnapshotThread.class,
+                try (val ignored2 = Mockito.mockConstruction(BuildSnapshotRunnable.class,
                         (mock, context) -> Mockito.doNothing().when(mock).buildSnapshot())) {
                     runner.checkSourceTable(sourceTables);
 
@@ -334,9 +341,6 @@ class AutoRefreshSnapshotRunnerTest {
                     exceptedTmp.addAll(tableDescs);
                     val excepted = exceptedTmp.stream().distinct().collect(Collectors.toList());
                     assertEquals(excepted.size(), buildSnapshotCount.size());
-
-                    val buildSnapshotFutures = runner.getBuildSnapshotFutures();
-                    assertEquals(excepted.size(), buildSnapshotFutures.size());
                 }
             }
         } finally {
@@ -344,34 +348,6 @@ class AutoRefreshSnapshotRunnerTest {
         }
     }
 
-    @Test
-    void waitBuildSnapshotTaskDone() {
-        val project = "default";
-        try {
-            val runner = AutoRefreshSnapshotRunner.getInstance(project);
-            val tasks = Lists.<Future<String>> newArrayList();
-            for (int i = 0; i < 5; i++) {
-                val futureTask = new FutureTask<String>(() -> null);
-                tasks.add(futureTask);
-                runner.getBuildSnapshotFutures().put(futureTask, System.currentTimeMillis());
-            }
-            val result = new AtomicBoolean(false);
-            val thread = new Thread(() -> {
-                try {
-                    runner.waitBuildSnapshotTaskDone();
-                    result.set(true);
-                } catch (InterruptedException e) {
-                    log.error(e.getMessage(), e);
-                }
-            });
-            thread.start();
-            tasks.forEach(task -> task.cancel(true));
-            await().atMost(new Duration(12, SECONDS)).untilAsserted(() -> assertTrue(result.get()));
-        } finally {
-            AutoRefreshSnapshotRunner.shutdown(project);
-        }
-    }
-
     @Test
     void cancelTimeoutFuture() {
         val project = RandomUtil.randomUUIDStr();
@@ -385,11 +361,11 @@ class AutoRefreshSnapshotRunnerTest {
             for (int i = 0; i < 5; i++) {
                 val futureTask = new FutureTask<String>(() -> null);
                 tasks.add(futureTask);
-                runner.getBuildSnapshotFutures().put(futureTask, System.currentTimeMillis());
+                runner.getCheckSourceTableFutures().put(futureTask, System.currentTimeMillis());
             }
             await().pollDelay(new Duration(2, SECONDS)).until(() -> true);
-            runner.cancelTimeoutFuture(runner.getBuildSnapshotFutures());
-            runner.getBuildSnapshotFutures().keySet().forEach(future -> {
+            runner.cancelTimeoutFuture(runner.getCheckSourceTableFutures());
+            runner.getCheckSourceTableFutures().keySet().forEach(future -> {
                 assertTrue(future.isCancelled());
                 assertTrue(future.isDone());
             });
@@ -457,16 +433,6 @@ class AutoRefreshSnapshotRunnerTest {
             val overrideProps = Maps.<String, String> newLinkedHashMap();
             projectManager.createProject(project, "test", "", overrideProps);
             val runner = AutoRefreshSnapshotRunner.getInstance(project);
-            for (int i = 0; i < 5; i++) {
-                val futureTask = new FutureTask<String>(() -> null);
-                runner.getBuildSnapshotFutures().put(futureTask, System.currentTimeMillis());
-                if (i % 2 == 0) {
-                    futureTask.cancel(true);
-                }
-            }
-            runner.cancelFuture(runner.getBuildSnapshotFutures());
-            var actual = runner.getBuildSnapshotFutures().keySet().stream().filter(Future::isDone).count();
-            assertEquals(runner.getBuildSnapshotFutures().size(), actual);
 
             for (int i = 0; i < 5; i++) {
                 val futureTask = new FutureTask<String>(() -> null);
@@ -476,7 +442,7 @@ class AutoRefreshSnapshotRunnerTest {
                 }
             }
             runner.cancelFuture(runner.getCheckSourceTableFutures());
-            actual = runner.getCheckSourceTableFutures().keySet().stream().filter(Future::isDone).count();
+            val actual = runner.getCheckSourceTableFutures().keySet().stream().filter(Future::isDone).count();
             assertEquals(runner.getCheckSourceTableFutures().size(), actual);
         } finally {
             AutoRefreshSnapshotRunner.shutdown(project);
diff --git a/src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/BuildSnapshotThreadTest.java b/src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/BuildSnapshotRunnableTest.java
similarity index 97%
rename from src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/BuildSnapshotThreadTest.java
rename to src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/BuildSnapshotRunnableTest.java
index 214ac7954d..b2d856887e 100644
--- a/src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/BuildSnapshotThreadTest.java
+++ b/src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/BuildSnapshotRunnableTest.java
@@ -36,6 +36,7 @@ import org.apache.kylin.common.exception.KylinRuntimeException;
 import org.apache.kylin.common.response.RestResponse;
 import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.common.util.RandomUtil;
+import org.apache.kylin.engine.spark.job.NSparkSnapshotJob;
 import org.apache.kylin.job.execution.AbstractExecutable;
 import org.apache.kylin.job.execution.ExecutableState;
 import org.apache.kylin.job.execution.NExecutableManager;
@@ -51,7 +52,6 @@ import org.springframework.web.client.RestTemplate;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 
-import org.apache.kylin.engine.spark.job.NSparkSnapshotJob;
 import io.kyligence.kap.guava20.shaded.common.collect.Lists;
 import io.kyligence.kap.guava20.shaded.common.collect.Maps;
 import io.kyligence.kap.guava20.shaded.common.collect.Sets;
@@ -59,12 +59,12 @@ import lombok.val;
 import lombok.var;
 
 @MetadataInfo
-class BuildSnapshotThreadTest {
+class BuildSnapshotRunnableTest {
     private final RestTemplate restTemplate = Mockito.mock(RestTemplate.class);
 
     @Test
     void buildSnapshot() throws JsonProcessingException {
-        val thread = new BuildSnapshotThread();
+        val thread = new BuildSnapshotRunnable();
         thread.setProject("project");
         thread.setConfig(KylinConfig.readSystemKylinConfig());
         thread.setRestTemplate(restTemplate);
@@ -112,7 +112,7 @@ class BuildSnapshotThreadTest {
 
     @Test
     void buildSnapshotFailed() throws JsonProcessingException {
-        val thread = new BuildSnapshotThread();
+        val thread = new BuildSnapshotRunnable();
         thread.setProject("project");
         thread.setConfig(KylinConfig.readSystemKylinConfig());
         thread.setRestTemplate(restTemplate);
@@ -149,7 +149,7 @@ class BuildSnapshotThreadTest {
 
     @Test
     void checkSnapshotJobFile() {
-        val thread = new BuildSnapshotThread();
+        val thread = new BuildSnapshotRunnable();
         thread.setConfig(KylinConfig.getInstanceFromEnv());
         thread.setTableIdentity("default.table_" + RandomUtil.randomUUIDStr().replace("-", "_"));
         val jobId = RandomUtil.randomUUIDStr();
@@ -182,7 +182,7 @@ class BuildSnapshotThreadTest {
     @Test
     void checkAutoRefreshJobSuccessOrRunning() {
         val jobId = RandomUtil.randomUUIDStr();
-        val thread = new BuildSnapshotThread();
+        val thread = new BuildSnapshotRunnable();
         thread.setConfig(KylinConfig.getInstanceFromEnv());
         assertFalse(thread.checkAutoRefreshJobSuccessOrRunning(jobId));
 
@@ -205,7 +205,7 @@ class BuildSnapshotThreadTest {
 
     @Test
     void snapshotJobFile() {
-        val thread = new BuildSnapshotThread();
+        val thread = new BuildSnapshotRunnable();
         thread.setConfig(KylinConfig.getInstanceFromEnv());
         thread.setTableIdentity("default.table_" + RandomUtil.randomUUIDStr().replace("-", "_"));
         val jobId = RandomUtil.randomUUIDStr();
@@ -219,7 +219,7 @@ class BuildSnapshotThreadTest {
 
     @Test
     void snapshotJobFileNotExists() {
-        val thread = new BuildSnapshotThread();
+        val thread = new BuildSnapshotRunnable();
         thread.setConfig(KylinConfig.getInstanceFromEnv());
         thread.setTableIdentity("default.table_" + RandomUtil.randomUUIDStr().replace("-", "_"));
         val snapshotJob = thread.readSnapshotJobFile();
@@ -228,7 +228,7 @@ class BuildSnapshotThreadTest {
 
     @Test
     void checkNeedBuildPartitionAndSetTableOption() throws JsonProcessingException {
-        val thread = new BuildSnapshotThread();
+        val thread = new BuildSnapshotRunnable();
         thread.setTableIdentity("default.table");
         val req = Maps.newHashMap();
         val runningJobs = Lists.<NSparkSnapshotJob> newArrayList();
@@ -279,7 +279,7 @@ class BuildSnapshotThreadTest {
             Mockito.when(executableManager.listExecByJobTypeAndStatus(ExecutableState::isRunning, SNAPSHOT_BUILD,
                     SNAPSHOT_REFRESH)).thenReturn(runningJobs);
 
-            val thread = new BuildSnapshotThread();
+            val thread = new BuildSnapshotRunnable();
             thread.setTableIdentity("default.table");
             thread.setProject("default");
             try {
diff --git a/src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/CheckSourceTableThreadTest.java b/src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/CheckSourceTableRunnableTest.java
similarity index 93%
rename from src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/CheckSourceTableThreadTest.java
rename to src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/CheckSourceTableRunnableTest.java
index 920e89bd76..db82b60d66 100644
--- a/src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/CheckSourceTableThreadTest.java
+++ b/src/data-loading-service/src/test/java/org/apache/kylin/rest/scheduler/CheckSourceTableRunnableTest.java
@@ -42,12 +42,12 @@ import io.kyligence.kap.guava20.shaded.common.collect.Sets;
 import lombok.val;
 
 @MetadataInfo
-class CheckSourceTableThreadTest {
+class CheckSourceTableRunnableTest {
     private final RestTemplate restTemplate = Mockito.mock(RestTemplate.class);
 
     @Test
     void checkTable() throws JsonProcessingException {
-        val thread = new CheckSourceTableThread();
+        val thread = new CheckSourceTableRunnable();
         thread.setProject("project");
         thread.setConfig(KylinConfig.readSystemKylinConfig());
         thread.setTableIdentity("default.table");
@@ -72,7 +72,7 @@ class CheckSourceTableThreadTest {
     @Test
     void checkTableFailed() {
         try {
-            val thread = new CheckSourceTableThread();
+            val thread = new CheckSourceTableRunnable();
             thread.setProject("project");
             thread.setConfig(KylinConfig.readSystemKylinConfig());
             thread.setTableIdentity("default.table");
@@ -83,9 +83,8 @@ class CheckSourceTableThreadTest {
             thread.checkTable();
         } catch (Exception e) {
             assertTrue(e instanceof KylinRuntimeException);
-            assertEquals(
-                    "Project[project] Snapshot source table[default.table] check table stats Failed",
+            assertEquals("Project[project] Snapshot source table[default.table] check table stats Failed",
                     e.getMessage());
         }
     }
-}
\ No newline at end of file
+}
diff --git a/src/query-server/src/test/scala/org/apache/kylin/rest/service/SnapshotSourceTableStatsServiceTest.scala b/src/query-server/src/test/scala/org/apache/kylin/rest/service/SnapshotSourceTableStatsServiceTest.scala
index 635b6e54f7..6663a71041 100644
--- a/src/query-server/src/test/scala/org/apache/kylin/rest/service/SnapshotSourceTableStatsServiceTest.scala
+++ b/src/query-server/src/test/scala/org/apache/kylin/rest/service/SnapshotSourceTableStatsServiceTest.scala
@@ -194,9 +194,8 @@ class SnapshotSourceTableStatsServiceTest extends SparderBaseFunSuite with Local
         val tableIdentity = table.qualifiedName.toLowerCase(Locale.ROOT)
         val locationPath = table.location.getPath
         val locationFilesStatus: util.List[FileStatus] = snapshotSourceTableStatsService.getLocationFileStatus(locationPath)
-        val snapshotTablesLocationsJson = Maps.newHashMap[String, SnapshotSourceTableStats]()
-        snapshotSourceTableStatsService.createSnapshotSourceTableStats(locationPath, config,
-          locationFilesStatus, snapshotTablesLocationsJson)
+        val snapshotTablesLocationsJson = snapshotSourceTableStatsService.createSnapshotSourceTableStats(locationPath, config,
+          locationFilesStatus)
         snapshotSourceTableStatsService.writeSourceTableStats(DEFAULT_PROJECT, tableIdentity, snapshotTablesLocationsJson)
 
         val fromJson = snapshotSourceTableStatsService.getSnapshotSourceTableStatsJsonFromHDFS(DEFAULT_PROJECT, tableIdentity).getSecond
@@ -522,4 +521,4 @@ class SnapshotSourceTableStatsServiceTest extends SparderBaseFunSuite with Local
       assertFalse(checkStatsFile)
     })
   }
-}
\ No newline at end of file
+}
diff --git a/src/query-service/src/main/java/org/apache/kylin/rest/service/SnapshotSourceTableStatsService.java b/src/query-service/src/main/java/org/apache/kylin/rest/service/SnapshotSourceTableStatsService.java
index 360ecad17f..8dcc09980d 100644
--- a/src/query-service/src/main/java/org/apache/kylin/rest/service/SnapshotSourceTableStatsService.java
+++ b/src/query-service/src/main/java/org/apache/kylin/rest/service/SnapshotSourceTableStatsService.java
@@ -141,6 +141,7 @@ public class SnapshotSourceTableStatsService extends BasicService {
         } catch (Exception e) {
             log.info("Project[{}] [{}.{}] refresh check and save snapshot table location files failed", project,
                     database, table);
+            log.error(e.getMessage(), e);
             return new SnapshotSourceTableStatsResponse(false);
         }
     }
@@ -197,8 +198,8 @@ public class SnapshotSourceTableStatsService extends BasicService {
         }
         val needRefresh = checkLocation(location, filesStatus, snapshotSourceTableStatsJson, projectConfig);
         if (Boolean.FALSE.equals(snapshotSourceTableStatsJsonExist) || Boolean.TRUE.equals(needRefresh)) {
-            createSnapshotSourceTableStats(location, projectConfig, filesStatus, snapshotSourceTableStatsJson);
-            writeSourceTableStats(project, tableIdentity, snapshotSourceTableStatsJson);
+            val newSnapshotSourceTableStatsJson = createSnapshotSourceTableStats(location, projectConfig, filesStatus);
+            writeSourceTableStats(project, tableIdentity, newSnapshotSourceTableStatsJson);
         }
         if (Boolean.FALSE.equals(snapshotSourceTableStatsJsonExist)) {
             return projectConfig.isSnapshotFirstAutoRefreshEnabled();
@@ -279,10 +280,10 @@ public class SnapshotSourceTableStatsService extends BasicService {
                         tableFilesModifyTimesAndSize.get(FILES_SIZE));
     }
 
-    public void createSnapshotSourceTableStats(String location, KylinConfig config,
-            List<FileStatus> locationFilesStatus, Map<String, SnapshotSourceTableStats> snapshotSourceTableStatsJson) {
-        val sourceTableStats = snapshotSourceTableStatsJson.computeIfAbsent(location,
-                key -> new SnapshotSourceTableStats());
+    public Map<String, SnapshotSourceTableStats> createSnapshotSourceTableStats(String location, KylinConfig config,
+            List<FileStatus> locationFilesStatus) {
+        Map<String, SnapshotSourceTableStats> newSnapshotSourceTableStatsJson = Maps.newHashMap();
+        val sourceTableStats = new SnapshotSourceTableStats();
         val filesSize = Lists.<Long> newArrayList();
         val filesModificationTime = Lists.<Long> newArrayList();
         locationFilesStatus.stream().limit(config.getSnapshotAutoRefreshFetchFilesCount()).forEach(fileStatus -> {
@@ -293,7 +294,8 @@ public class SnapshotSourceTableStatsService extends BasicService {
         sourceTableStats.setFilesModificationTime(filesModificationTime);
         sourceTableStats.setFilesCount(locationFilesStatus.size());
 
-        snapshotSourceTableStatsJson.put(location, sourceTableStats);
+        newSnapshotSourceTableStatsJson.put(location, sourceTableStats);
+        return newSnapshotSourceTableStatsJson;
     }
 
     public void writeSourceTableStats(String project, String tableIdentity,
@@ -332,11 +334,12 @@ public class SnapshotSourceTableStatsService extends BasicService {
         val needRefresh = checkPartitionsLocation(partitions, snapshotSourceTableStatsJson, needRefreshPartitions,
                 needSavePartitionsFilesStatus, projectConfig);
         if (Boolean.FALSE.equals(snapshotSourceTableStatsJsonExist) || Boolean.TRUE.equals(needRefresh)) {
+            Map<String, SnapshotSourceTableStats> newSnapshotSourceTableStatsJson = Maps.newHashMap();
             for (CatalogTablePartition partition : partitions) {
                 createPartitionSnapshotSourceTableStats(partition, needSavePartitionsFilesStatus,
-                        snapshotSourceTableStatsJson, projectConfig);
+                        newSnapshotSourceTableStatsJson, projectConfig);
             }
-            writeSourceTableStats(project, tableIdentity, snapshotSourceTableStatsJson);
+            writeSourceTableStats(project, tableIdentity, newSnapshotSourceTableStatsJson);
         }
         if (Boolean.FALSE.equals(snapshotSourceTableStatsJsonExist)) {
             return projectConfig.isSnapshotFirstAutoRefreshEnabled();
diff --git a/src/query-service/src/test/java/org/apache/kylin/rest/service/TestSnapshotSourceTableStatsService.java b/src/query-service/src/test/java/org/apache/kylin/rest/service/TestSnapshotSourceTableStatsService.java
deleted file mode 100644
index 45b6850d92..0000000000
--- a/src/query-service/src/test/java/org/apache/kylin/rest/service/TestSnapshotSourceTableStatsService.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.kylin.rest.service;
-
-import org.apache.kylin.junit.annotation.MetadataInfo;
-import org.junit.Test;
-import org.mockito.InjectMocks;
-import org.mockito.Mockito;
-
-@MetadataInfo
-class TestSnapshotSourceTableStatsService {
-    private static final String DEFAULT_PROJECT = "default";
-    @InjectMocks
-    private final SnapshotSourceTableStatsService locationService = Mockito.spy(SnapshotSourceTableStatsService.class);
-
-    @Test
-    void saveSnapshotViewMapping() {
-
-    }
-}


[kylin] 11/22: KYLIN-5317 Change parameter kylin.metrics.hdfs-periodic-calculation-enabled to default true

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit a79a9b487df43b2f6b8acf1a476d94520d0532e9
Author: Guoliang Sun <gu...@kyligence.io>
AuthorDate: Fri Oct 14 13:52:29 2022 +0800

    KYLIN-5317 Change parameter kylin.metrics.hdfs-periodic-calculation-enabled to default true
---
 .../org/apache/kylin/common/KylinConfigBase.java   |  2 +-
 .../apache/kylin/common/KylinConfigBaseTest.java   |  1 +
 .../apache/kylin/metrics/HdfsCapacityMetrics.java  | 34 ++++++++++++----------
 .../kylin/metrics/HdfsCapacityMetricsTest.java     | 26 +++++++++--------
 4 files changed, 34 insertions(+), 29 deletions(-)

diff --git a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index acfe61c924..5fe045c115 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -3616,7 +3616,7 @@ public abstract class KylinConfigBase implements Serializable {
     }
 
     public boolean isHdfsMetricsPeriodicCalculationEnabled() {
-        return Boolean.parseBoolean(getOptional("kylin.metrics.hdfs-periodic-calculation-enabled", FALSE));
+        return Boolean.parseBoolean(getOptional("kylin.metrics.hdfs-periodic-calculation-enabled", TRUE));
     }
 
     public long getHdfsMetricsPeriodicCalculationInterval() {
diff --git a/src/core-common/src/test/java/org/apache/kylin/common/KylinConfigBaseTest.java b/src/core-common/src/test/java/org/apache/kylin/common/KylinConfigBaseTest.java
index 3630c7bbd1..16f653d46b 100644
--- a/src/core-common/src/test/java/org/apache/kylin/common/KylinConfigBaseTest.java
+++ b/src/core-common/src/test/java/org/apache/kylin/common/KylinConfigBaseTest.java
@@ -1258,6 +1258,7 @@ class KylinConfigBaseTest {
     @Test
     void testIsHdfsMetricsPeriodicCalculationEnabled() {
         KylinConfig config = KylinConfig.getInstanceFromEnv();
+        config.setProperty("kylin.metrics.hdfs-periodic-calculation-enabled", "false");
         assertFalse(config.isHdfsMetricsPeriodicCalculationEnabled());
         config.setProperty("kylin.metrics.hdfs-periodic-calculation-enabled", "true");
         assertTrue(config.isHdfsMetricsPeriodicCalculationEnabled());
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metrics/HdfsCapacityMetrics.java b/src/core-metadata/src/main/java/org/apache/kylin/metrics/HdfsCapacityMetrics.java
index d26d509155..9b114779ea 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metrics/HdfsCapacityMetrics.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metrics/HdfsCapacityMetrics.java
@@ -18,7 +18,15 @@
 
 package org.apache.kylin.metrics;
 
-import lombok.extern.slf4j.Slf4j;
+import java.io.IOException;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -32,19 +40,12 @@ import org.apache.kylin.common.util.NamedThreadFactory;
 import org.apache.kylin.metadata.project.NProjectManager;
 import org.apache.kylin.metadata.project.ProjectInstance;
 
-import java.io.IOException;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;
+import lombok.extern.slf4j.Slf4j;
 
 /**
  * 1. Unify the entry point for all calculation calls to obtain the capacity of the WorkingDir through scheduled threads
  * 2. Expose two configurations externally:
- * - function enable switch: kylin.metrics.hdfs-periodic-calculation-enabled  - default false
+ * - function enable switch: kylin.metrics.hdfs-periodic-calculation-enabled  - default true
  * - polling time parameter: kylin.metrics.hdfs-periodic-calculation-interval - default 5min
  */
 @Slf4j
@@ -68,7 +69,8 @@ public class HdfsCapacityMetrics {
         SERVICE_INFO = AddressUtil.getLocalInstance();
         WORKING_FS = HadoopUtil.getWorkingFileSystem();
         HDFS_CAPACITY_METRICS_PATH = new Path(KYLIN_CONFIG.getHdfsMetricsDir("hdfsCapacity.json"));
-        HDFS_METRICS_SCHEDULED_EXECUTOR = Executors.newScheduledThreadPool(1, new NamedThreadFactory("HdfsMetricsChecker"));
+        HDFS_METRICS_SCHEDULED_EXECUTOR = Executors.newScheduledThreadPool(1,
+                new NamedThreadFactory("HdfsMetricsChecker"));
         registerHdfsMetrics();
     }
 
@@ -85,8 +87,8 @@ public class HdfsCapacityMetrics {
         hdfsMetricsPeriodicCalculationEnabled = KYLIN_CONFIG.isHdfsMetricsPeriodicCalculationEnabled();
         if (hdfsMetricsPeriodicCalculationEnabled) {
             log.info("HDFS metrics periodic calculation is enabled, path: {}", HDFS_CAPACITY_METRICS_PATH);
-            HDFS_METRICS_SCHEDULED_EXECUTOR.scheduleAtFixedRate(HdfsCapacityMetrics::handleNodeHdfsMetrics,
-                    0, KYLIN_CONFIG.getHdfsMetricsPeriodicCalculationInterval(), TimeUnit.MILLISECONDS);
+            HDFS_METRICS_SCHEDULED_EXECUTOR.scheduleAtFixedRate(HdfsCapacityMetrics::handleNodeHdfsMetrics, 0,
+                    KYLIN_CONFIG.getHdfsMetricsPeriodicCalculationInterval(), TimeUnit.MILLISECONDS);
         }
     }
 
@@ -104,8 +106,8 @@ public class HdfsCapacityMetrics {
     public static void writeHdfsMetrics() {
         prepareForWorkingDirCapacity.clear();
         // All WorkingDir capacities involved are calculated here
-        Set<String> allProjects = NProjectManager.getInstance(KYLIN_CONFIG).listAllProjects()
-                .stream().map(ProjectInstance::getName).collect(Collectors.toSet());
+        Set<String> allProjects = NProjectManager.getInstance(KYLIN_CONFIG).listAllProjects().stream()
+                .map(ProjectInstance::getName).collect(Collectors.toSet());
         try {
             for (String project : allProjects) {
                 // Should not initialize projectTotalStorageSize outside the loop, otherwise it may affect the next calculation
@@ -157,4 +159,4 @@ public class HdfsCapacityMetrics {
         }
         return -1L;
     }
-}
\ No newline at end of file
+}
diff --git a/src/core-metadata/src/test/java/org/apache/kylin/metrics/HdfsCapacityMetricsTest.java b/src/core-metadata/src/test/java/org/apache/kylin/metrics/HdfsCapacityMetricsTest.java
index b35736e3cc..0c4332707a 100644
--- a/src/core-metadata/src/test/java/org/apache/kylin/metrics/HdfsCapacityMetricsTest.java
+++ b/src/core-metadata/src/test/java/org/apache/kylin/metrics/HdfsCapacityMetricsTest.java
@@ -18,23 +18,23 @@
 
 package org.apache.kylin.metrics;
 
-import org.apache.kylin.metadata.epoch.EpochManager;
+import static org.awaitility.Awaitility.await;
+
+import java.io.IOException;
+import java.util.concurrent.TimeUnit;
+
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.HadoopUtil;
 import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
+import org.apache.kylin.metadata.epoch.EpochManager;
 import org.awaitility.Duration;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import java.io.IOException;
-import java.util.concurrent.TimeUnit;
-
-import static org.awaitility.Awaitility.await;
-
 public class HdfsCapacityMetricsTest extends NLocalFileMetadataTestCase {
 
     @Before
@@ -51,12 +51,13 @@ public class HdfsCapacityMetricsTest extends NLocalFileMetadataTestCase {
     public void testRegisterHdfsMetricsFailed() {
         HdfsCapacityMetrics.registerHdfsMetrics();
         // scheduledExecutor may like this
-        // java.util.concurrent.ScheduledThreadPoolExecutor@2d9caaeb[Running, pool size = 0, active threads = 0, queued tasks = 0, completed tasks = 0]
+        // java.util.concurrent.ScheduledThreadPoolExecutor@5bf61e67[Running, pool size = 1, active threads = 1, queued tasks = 1, completed tasks = 0]
         String scheduledExecutor = HdfsCapacityMetrics.HDFS_METRICS_SCHEDULED_EXECUTOR.toString();
-        String activeThreadStr = "active threads = ";
-        int activeThreadIdx = scheduledExecutor.indexOf(activeThreadStr);
-        String thread = scheduledExecutor.substring(activeThreadIdx + activeThreadStr.length(), activeThreadIdx + activeThreadStr.length() + 1);
-        Assert.assertEquals(0, Integer.parseInt(thread));
+        String poolSizeStr = "pool size = ";
+        int activePoolSizeIdx = scheduledExecutor.indexOf(poolSizeStr);
+        String poolSize = scheduledExecutor.substring(activePoolSizeIdx + poolSizeStr.length(),
+                activePoolSizeIdx + poolSizeStr.length() + 1);
+        Assert.assertEquals(1, Integer.parseInt(poolSize));
     }
 
     @Test
@@ -68,7 +69,8 @@ public class HdfsCapacityMetricsTest extends NLocalFileMetadataTestCase {
         String scheduledExecutor = HdfsCapacityMetrics.HDFS_METRICS_SCHEDULED_EXECUTOR.toString();
         String activeThreadStr = "active threads = ";
         int activeThreadIdx = scheduledExecutor.indexOf(activeThreadStr);
-        String thread = scheduledExecutor.substring(activeThreadIdx + activeThreadStr.length(), activeThreadIdx + activeThreadStr.length() + 1);
+        String thread = scheduledExecutor.substring(activeThreadIdx + activeThreadStr.length(),
+                activeThreadIdx + activeThreadStr.length() + 1);
         Assert.assertEquals(1, Integer.parseInt(thread));
     }
 


[kylin] 10/22: skip secondary index while modify column

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 13bd871bdf9d18bb2431cba466b80b6244ba607f
Author: Zhixiong Chen <ch...@apache.org>
AuthorDate: Fri Oct 14 12:56:10 2022 +0800

    skip secondary index while modify column
---
 .../kyligence/kap/secondstorage/SecondStorageLockTest.java | 14 +++++++++++++-
 .../kap/clickhouse/database/ClickHouseQueryOperator.java   |  8 ++++++--
 .../management/SecondStorageScheduleService.java           |  7 ++++++-
 .../kap/secondstorage/management/SecondStorageService.java |  6 ++++++
 .../kap/secondstorage/database/QueryOperator.java          |  3 ++-
 5 files changed, 33 insertions(+), 5 deletions(-)

diff --git a/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageLockTest.java b/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageLockTest.java
index 03c191ab16..ffb4340233 100644
--- a/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageLockTest.java
+++ b/src/second-storage/clickhouse-it/src/test/java/io/kyligence/kap/secondstorage/SecondStorageLockTest.java
@@ -2867,7 +2867,7 @@ public class SecondStorageLockTest implements JobWaiter {
             int replica = 1;
             configClickhouseWith(clickhouse, replica, catalog, () -> {
                 QueryOperator queryOperator = SecondStorageFactoryUtils.createQueryMetricOperator(getProject());
-                queryOperator.modifyColumnByCardinality("default", "table");
+                queryOperator.modifyColumnByCardinality("default", "table", Sets.newHashSet());
 
                 buildIncrementalLoadQuery("2012-01-02", "2012-01-03");
                 waitAllJobFinish();
@@ -2938,6 +2938,18 @@ public class SecondStorageLockTest implements JobWaiter {
                     }
                 }
                 assertEquals(LOW_CARDINALITY_STRING, rows);
+
+                queryOperator.modifyColumnByCardinality(database, destTableName, Sets.newHashSet(4));
+                try (Connection connection = DriverManager.getConnection(clickhouse1.getJdbcUrl());
+                     val stmt = connection.createStatement()) {
+                    val rs = stmt.executeQuery(String.format(Locale.ROOT, "desc %s.%s", database, destTableName));
+                    while (rs.next()) {
+                        if ("c4".equals(rs.getString(1))) {
+                            rows = rs.getString(2);
+                        }
+                    }
+                }
+                assertEquals(LOW_CARDINALITY_STRING, rows);
                 return true;
             });
         }
diff --git a/src/second-storage/clickhouse/src/main/java/io/kyligence/kap/clickhouse/database/ClickHouseQueryOperator.java b/src/second-storage/clickhouse/src/main/java/io/kyligence/kap/clickhouse/database/ClickHouseQueryOperator.java
index 7e0b37d3bd..ab214644ba 100644
--- a/src/second-storage/clickhouse/src/main/java/io/kyligence/kap/clickhouse/database/ClickHouseQueryOperator.java
+++ b/src/second-storage/clickhouse/src/main/java/io/kyligence/kap/clickhouse/database/ClickHouseQueryOperator.java
@@ -28,6 +28,7 @@ import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
 
+import io.kyligence.kap.secondstorage.ColumnMapping;
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.commons.lang3.exception.ExceptionUtils;
@@ -166,7 +167,7 @@ public class ClickHouseQueryOperator implements QueryOperator {
                 QueryMetrics.SOURCE_RESULT_COUNT, -1L);
     }
 
-    public void modifyColumnByCardinality(String database, String destTableName) {
+    public void modifyColumnByCardinality(String database, String destTableName, Set<Integer> secondaryIndex) {
         KylinConfig config = KylinConfig.getInstanceFromEnv();
         List<NodeGroup> nodeGroups = SecondStorageUtil.listNodeGroup(config, project);
         Set<String> nodes = nodeGroups.stream()
@@ -178,7 +179,10 @@ public class ClickHouseQueryOperator implements QueryOperator {
             return;
 
         ProjectInstance projectInstance = NProjectManager.getInstance(config).getProject(project);
-        List<ClickHouseSystemQuery.DescTable> modifyColumns = getFilterDescTable(maxRowsNode, database, destTableName, projectInstance.getConfig());
+        val tableColumns = getFilterDescTable(maxRowsNode, database, destTableName, projectInstance.getConfig());
+        val modifyColumns = tableColumns.stream()
+                .filter(col -> !secondaryIndex.contains(Integer.valueOf(ColumnMapping.secondStorageColumnToKapColumn(col.getColumn()))))
+                .collect(Collectors.toList());
         if (CollectionUtils.isEmpty(modifyColumns))
             return;
 
diff --git a/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageScheduleService.java b/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageScheduleService.java
index 97ac724418..eec15691e2 100644
--- a/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageScheduleService.java
+++ b/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageScheduleService.java
@@ -19,6 +19,8 @@
 package io.kyligence.kap.secondstorage.management;
 
 import com.google.common.collect.Maps;
+import io.kyligence.kap.secondstorage.metadata.TableEntity;
+import io.kyligence.kap.secondstorage.metadata.TablePlan;
 import org.apache.kylin.metadata.cube.model.NDataflowManager;
 import org.apache.kylin.metadata.epoch.EpochManager;
 import org.apache.kylin.metadata.model.NDataModel;
@@ -98,7 +100,10 @@ public class SecondStorageScheduleService {
                     try {
                         val database = NameUtil.getDatabase(df);
                         val destTableName = NameUtil.getTable(df, SecondStorageUtil.getBaseIndex(df).getId());
-                        queryOperator.modifyColumnByCardinality(database, destTableName);
+                        val tablePlanManager = SecondStorageUtil.tablePlanManager(config, project);
+                        TablePlan tablePlan = tablePlanManager.get().get(model.getId()).get();
+                        TableEntity tableEntity = tablePlan.getEntity(SecondStorageUtil.getBaseIndex(df).getId()).orElse(null);
+                        queryOperator.modifyColumnByCardinality(database, destTableName, tableEntity.getSecondaryIndexColumns());
                     } catch (Exception exception) {
                         log.error("Failed to modify second storage low cardinality on model {}.", model.getId(), exception);
                     } finally {
diff --git a/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageService.java b/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageService.java
index 460e40faed..b62b4e2376 100644
--- a/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageService.java
+++ b/src/second-storage/core-ui/src/main/java/io/kyligence/kap/secondstorage/management/SecondStorageService.java
@@ -1196,6 +1196,12 @@ public class SecondStorageService extends BasicService implements SecondStorageU
             if (StringUtils.isEmpty(colPrefix.get()))
                 throw new KylinException(INVALID_PARAMETER, String.format("There is no column %s in model %s", column, df.getModel().getAlias()));
 
+            val tablePlanManager = SecondStorageUtil.tablePlanManager(config, project);
+            TablePlan tablePlan = tablePlanManager.get().get(model).get();
+            TableEntity tableEntity = tablePlan.getEntity(SecondStorageUtil.getBaseIndex(df).getId()).orElse(null);
+            if (tableEntity.getSecondaryIndexColumns().contains(Integer.valueOf(ColumnMapping.secondStorageColumnToKapColumn(colPrefix.get()))))
+                throw new KylinException(INVALID_PARAMETER, String.format("The column %s is Secondary Index Column.", column));
+
             val destTableName = NameUtil.getTable(df, layout.getId());
             queryOperator.modifyColumnByCardinality(database, destTableName, colPrefix.get(), datatype);
         } catch (Exception exception) {
diff --git a/src/second-storage/core/src/main/java/io/kyligence/kap/secondstorage/database/QueryOperator.java b/src/second-storage/core/src/main/java/io/kyligence/kap/secondstorage/database/QueryOperator.java
index a723e6f337..c0f04b65b6 100644
--- a/src/second-storage/core/src/main/java/io/kyligence/kap/secondstorage/database/QueryOperator.java
+++ b/src/second-storage/core/src/main/java/io/kyligence/kap/secondstorage/database/QueryOperator.java
@@ -19,11 +19,12 @@
 package io.kyligence.kap.secondstorage.database;
 
 import java.util.Map;
+import java.util.Set;
 
 public interface QueryOperator {
     Map<String, Object> getQueryMetric(String queryId);
 
-    void modifyColumnByCardinality(String database, String destTableName);
+    void modifyColumnByCardinality(String database, String destTableName, Set<Integer> secondaryIndex);
 
     void modifyColumnByCardinality(String database, String destTableName, String column, String datatype);
 }


[kylin] 20/22: Revert Fix QueryHistory Clean

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit fb7f07c680b95e6999aff3ff02ccad7889ae7993
Author: He Xu <34...@users.noreply.github.com>
AuthorDate: Sun Oct 16 20:10:59 2022 +0800

    Revert Fix QueryHistory Clean
---
 .../metadata/query/JdbcQueryHistoryStore.java      | 73 +++++++---------------
 .../kylin/metadata/query/RDBMSQueryHistoryDAO.java | 44 +++++--------
 .../metadata/query/RDBMSQueryHistoryDaoTest.java   | 68 --------------------
 3 files changed, 38 insertions(+), 147 deletions(-)

diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/query/JdbcQueryHistoryStore.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/query/JdbcQueryHistoryStore.java
index 094ad722b2..f5175e2619 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/query/JdbcQueryHistoryStore.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/query/JdbcQueryHistoryStore.java
@@ -154,7 +154,7 @@ public class JdbcQueryHistoryStore {
             insertQhRealProviderList.forEach(qhRealizationMapper::insert);
 
             session.commit();
-            if (!queryMetricsList.isEmpty()) {
+            if (queryMetricsList.size() > 0) {
                 log.info("Insert {} query history into database takes {} ms", queryMetricsList.size(),
                         System.currentTimeMillis() - startTime);
             }
@@ -183,7 +183,7 @@ public class JdbcQueryHistoryStore {
             return mapper.selectDaily(qhTableName, startTime, endTime);
         }
     }
-
+    
     public List<QueryHistory> queryQueryHistoriesSubmitters(QueryHistoryRequest request, int size) {
         try (SqlSession session = sqlSessionFactory.openSession()) {
             QueryHistoryMapper mapper = session.getMapper(QueryHistoryMapper.class);
@@ -198,23 +198,22 @@ public class JdbcQueryHistoryStore {
             SelectStatementProvider statementProvider = selectDistinct(queryHistoryRealizationTable.queryId)
                     .from(queryHistoryRealizationTable).where(queryHistoryRealizationTable.model, isIn(modelIds))
                     .build().render(RenderingStrategies.MYBATIS3);
-            return mapper.selectMany(statementProvider).stream().map(QueryHistory::getQueryId)
-                    .collect(Collectors.toList());
+            return mapper.selectMany(statementProvider).stream().map(QueryHistory::getQueryId).collect(Collectors.toList());
         }
     }
 
     public List<QueryStatistics> queryQueryHistoriesModelIds(QueryHistoryRequest request, int size) {
         try (SqlSession session = sqlSessionFactory.openSession()) {
             QueryStatisticsMapper mapper = session.getMapper(QueryStatisticsMapper.class);
-            SelectStatementProvider statementProvider1 = selectDistinct(queryHistoryTable.engineType)
-                    .from(queryHistoryTable).where(queryHistoryTable.engineType, isNotEqualTo("NATIVE"))
-                    .and(queryHistoryTable.projectName, isEqualTo(request.getProject())).build()
-                    .render(RenderingStrategies.MYBATIS3);
+            SelectStatementProvider statementProvider1 = selectDistinct(queryHistoryTable.engineType).from(queryHistoryTable)
+                    .where(queryHistoryTable.engineType, isNotEqualTo("NATIVE"))
+                    .and(queryHistoryTable.projectName, isEqualTo(request.getProject()))
+                    .build().render(RenderingStrategies.MYBATIS3);
             List<QueryStatistics> engineTypes = mapper.selectMany(statementProvider1);
 
-            SelectStatementProvider statementProvider2 = selectDistinct(queryHistoryRealizationTable.model)
-                    .from(queryHistoryRealizationTable)
-                    .where(queryHistoryRealizationTable.projectName, isEqualTo(request.getProject())).limit(size)
+            SelectStatementProvider statementProvider2 = selectDistinct(queryHistoryRealizationTable.model).from(queryHistoryRealizationTable)
+                    .where(queryHistoryRealizationTable.projectName, isEqualTo(request.getProject()))
+                    .limit(size)
                     .build().render(RenderingStrategies.MYBATIS3);
             List<QueryStatistics> modelIds = mapper.selectMany(statementProvider2);
             engineTypes.addAll(modelIds);
@@ -222,36 +221,16 @@ public class JdbcQueryHistoryStore {
         }
     }
 
-    public long getMaxId() {
-        try (SqlSession session = sqlSessionFactory.openSession()) {
-            QueryHistoryMapper mapper = session.getMapper(QueryHistoryMapper.class);
-            SelectStatementProvider statementProvider = select(max(queryHistoryTable.id))
-                    .from(queryHistoryTable)
-                    .build().render(RenderingStrategies.MYBATIS3);
-            Long maxId = mapper.selectAsLong(statementProvider);
-            return maxId == null ? 0 : maxId;
-        }
-    }
-
-    public QueryHistory queryOldestQueryHistory(long retainMinId) {
+    public QueryHistory queryOldestQueryHistory(long maxSize) {
         try (SqlSession session = sqlSessionFactory.openSession()) {
             QueryHistoryMapper mapper = session.getMapper(QueryHistoryMapper.class);
             SelectStatementProvider statementProvider = select(getSelectFields(queryHistoryTable))
-                    .from(queryHistoryTable)
-                    .where(queryHistoryTable.id, isEqualTo(retainMinId))
-                    .build().render(RenderingStrategies.MYBATIS3);
-            return mapper.selectOne(statementProvider);
-        }
-    }
-
-    public long getProjectCount(String project) {
-        try (SqlSession session = sqlSessionFactory.openSession()) {
-            QueryHistoryMapper mapper = session.getMapper(QueryHistoryMapper.class);
-            SelectStatementProvider statementProvider = select(count(queryHistoryTable.id)) //
                     .from(queryHistoryTable) //
-                    .where(queryHistoryTable.projectName, isEqualTo(project)) //
+                    .orderBy(queryHistoryTable.id.descending()) //
+                    .limit(1) //
+                    .offset(maxSize - 1) //
                     .build().render(RenderingStrategies.MYBATIS3);
-            return mapper.selectAsLong(statementProvider);
+            return mapper.selectOne(statementProvider);
         }
     }
 
@@ -260,12 +239,11 @@ public class JdbcQueryHistoryStore {
             QueryHistoryMapper mapper = session.getMapper(QueryHistoryMapper.class);
             SelectStatementProvider statementProvider = select(getSelectFields(queryHistoryTable)) //
                     .from(queryHistoryTable) //
-                    .where(queryHistoryTable.id, //
-                            isEqualTo(select(queryHistoryTable.id) //
-                                    .from(queryHistoryTable) //
-                                    .where(queryHistoryTable.projectName, isEqualTo(project)) //
-                                    .orderBy(queryHistoryTable.id.descending()).limit(1).offset(maxSize - 1)) //
-                    ).build().render(RenderingStrategies.MYBATIS3);
+                    .where(queryHistoryTable.projectName, isEqualTo(project)) //
+                    .orderBy(queryHistoryTable.id.descending()) //
+                    .limit(1) //
+                    .offset(maxSize - 1) //
+                    .build().render(RenderingStrategies.MYBATIS3);
             return mapper.selectOne(statementProvider);
         }
     }
@@ -520,7 +498,7 @@ public class JdbcQueryHistoryStore {
             idToQHInfoList.forEach(pair -> providers.add(changeQHInfoProvider(pair.getFirst(), pair.getSecond())));
             providers.forEach(mapper::update);
             session.commit();
-            if (!idToQHInfoList.isEmpty()) {
+            if (idToQHInfoList.size() > 0) {
                 log.info("Update {} query history info takes {} ms", idToQHInfoList.size(),
                         System.currentTimeMillis() - start);
             }
@@ -642,8 +620,7 @@ public class JdbcQueryHistoryStore {
             if (request.isSubmitterExactlyMatch()) {
                 filterSql = filterSql.and(queryHistoryTable.querySubmitter, isIn(request.getFilterSubmitter()));
             } else if (request.getFilterSubmitter().size() == 1) {
-                filterSql = filterSql.and(queryHistoryTable.querySubmitter,
-                        isLikeCaseInsensitive("%" + request.getFilterSubmitter().get(0) + "%"));
+                filterSql = filterSql.and(queryHistoryTable.querySubmitter, isLikeCaseInsensitive("%" + request.getFilterSubmitter().get(0) + "%"));
             }
         }
 
@@ -667,12 +644,10 @@ public class JdbcQueryHistoryStore {
             }
         } else if (selectAllModels) {
             // Process CONSTANTS, HIVE, RDBMS and all model
-            filterSql = filterSql.and(queryHistoryTable.engineType, isIn(realizations),
-                    or(queryHistoryTable.indexHit, isEqualTo(true)));
+            filterSql = filterSql.and(queryHistoryTable.engineType, isIn(realizations), or(queryHistoryTable.indexHit, isEqualTo(true)));
         } else if (request.getFilterModelIds() != null && !request.getFilterModelIds().isEmpty()) {
             // Process CONSTANTS, HIVE, RDBMS and model1, model2, model3...
-            filterSql = filterSql.and(queryHistoryTable.engineType, isIn(realizations),
-                    or(queryHistoryTable.queryId,
+            filterSql = filterSql.and(queryHistoryTable.engineType, isIn(realizations), or(queryHistoryTable.queryId,
                             isIn(selectDistinct(queryHistoryRealizationTable.queryId).from(queryHistoryRealizationTable)
                                     .where(queryHistoryRealizationTable.model, isIn(request.getFilterModelIds())))));
         } else {
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/query/RDBMSQueryHistoryDAO.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/query/RDBMSQueryHistoryDAO.java
index 31990f27d4..73124496de 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/query/RDBMSQueryHistoryDAO.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/query/RDBMSQueryHistoryDAO.java
@@ -45,8 +45,8 @@ public class RDBMSQueryHistoryDAO implements QueryHistoryDAO {
     private static final Logger logger = LoggerFactory.getLogger(RDBMSQueryHistoryDAO.class);
     @Setter
     private String queryMetricMeasurement;
-    private final String realizationMetricMeasurement;
-    private final JdbcQueryHistoryStore jdbcQueryHisStore;
+    private String realizationMetricMeasurement;
+    private JdbcQueryHistoryStore jdbcQueryHisStore;
 
     public static final String WEEK = "week";
     public static final String DAY = "day";
@@ -105,21 +105,13 @@ public class RDBMSQueryHistoryDAO implements QueryHistoryDAO {
     }
 
     public void deleteQueryHistoriesIfMaxSizeReached() {
-        int globalMaxSize = KylinConfig.getInstanceFromEnv().getQueryHistoryMaxSize();
-        long globalMaxId = jdbcQueryHisStore.getMaxId();
-        long retainMinId = globalMaxId - globalMaxSize + 1;
-        logger.info("Clean QueryHistory Global MaxId: {}, MaxSize: {}, RetainMinId: {}", globalMaxId, globalMaxSize, retainMinId);
-        if (retainMinId <= 1) {
-            // no need to delete
-            return;
-        }
-        QueryHistory queryHistory = jdbcQueryHisStore.queryOldestQueryHistory(retainMinId);
-        if (Objects.isNull(queryHistory)) {
-            return;
+        QueryHistory queryHistory = jdbcQueryHisStore
+                .queryOldestQueryHistory(KylinConfig.getInstanceFromEnv().getQueryHistoryMaxSize());
+        if (Objects.nonNull(queryHistory)) {
+            long time = queryHistory.getQueryTime();
+            jdbcQueryHisStore.deleteQueryHistory(time);
+            jdbcQueryHisStore.deleteQueryHistoryRealization(time);
         }
-        long time = queryHistory.getQueryTime();
-        jdbcQueryHisStore.deleteQueryHistory(time);
-        jdbcQueryHisStore.deleteQueryHistoryRealization(time);
     }
 
     public QueryHistory getByQueryId(String queryId) {
@@ -127,21 +119,13 @@ public class RDBMSQueryHistoryDAO implements QueryHistoryDAO {
     }
 
     public void deleteQueryHistoriesIfProjectMaxSizeReached(String project) {
-        int projectMaxSize = KylinConfig.getInstanceFromEnv().getQueryHistoryProjectMaxSize();
-        long projectCount = jdbcQueryHisStore.getProjectCount(project);
-        logger.info("Clean QueryHistory Project: {}, Count: {}, MaxSize: {}", project, projectCount, projectMaxSize);
-        if (projectCount <= projectMaxSize) {
-            // no need to delete
-            return;
-        }
-
-        QueryHistory queryHistory = jdbcQueryHisStore.queryOldestQueryHistory(projectMaxSize, project);
-        if (Objects.isNull(queryHistory)) {
-            return;
+        QueryHistory queryHistory = jdbcQueryHisStore
+                .queryOldestQueryHistory(KylinConfig.getInstanceFromEnv().getQueryHistoryProjectMaxSize(), project);
+        if (Objects.nonNull(queryHistory)) {
+            long time = queryHistory.getQueryTime();
+            jdbcQueryHisStore.deleteQueryHistory(time, project);
+            jdbcQueryHisStore.deleteQueryHistoryRealization(time, project);
         }
-        long time = queryHistory.getQueryTime();
-        jdbcQueryHisStore.deleteQueryHistory(time, project);
-        jdbcQueryHisStore.deleteQueryHistoryRealization(time, project);
     }
 
     public void deleteQueryHistoriesIfRetainTimeReached() {
diff --git a/src/core-metadata/src/test/java/org/apache/kylin/metadata/query/RDBMSQueryHistoryDaoTest.java b/src/core-metadata/src/test/java/org/apache/kylin/metadata/query/RDBMSQueryHistoryDaoTest.java
index 70baeed914..18443017dc 100644
--- a/src/core-metadata/src/test/java/org/apache/kylin/metadata/query/RDBMSQueryHistoryDaoTest.java
+++ b/src/core-metadata/src/test/java/org/apache/kylin/metadata/query/RDBMSQueryHistoryDaoTest.java
@@ -23,7 +23,6 @@ import static org.apache.kylin.metadata.query.RDBMSQueryHistoryDAO.fillZeroForQu
 import java.util.List;
 
 import org.apache.kylin.common.util.NLocalFileMetadataTestCase;
-import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.util.Pair;
 import org.apache.kylin.common.util.TimeUtil;
 import org.apache.kylin.junit.TimeZoneTestRunner;
@@ -766,71 +765,4 @@ public class RDBMSQueryHistoryDaoTest extends NLocalFileMetadataTestCase {
         queryMetrics.setQueryHistoryInfo(queryHistoryInfo);
         return queryMetrics;
     }
-
-    @Test
-    public void testDeleteQueryHistoryMaxSizeForGlobal() {
-        KylinConfig config = getTestConfig();
-        config.setProperty("kylin.query.queryhistory.max-size", "2");
-        Assert.assertEquals(2, config.getQueryHistoryMaxSize());
-
-        // before delete
-        Assert.assertEquals(0, queryHistoryDAO.getAllQueryHistories().size());
-
-        // delete empty
-        queryHistoryDAO.deleteQueryHistoriesIfMaxSizeReached();
-        Assert.assertEquals(0, queryHistoryDAO.getAllQueryHistories().size());
-
-        // insert
-        queryHistoryDAO.insert(createQueryMetrics(1580311512000L, 1L, true, PROJECT, true));
-        queryHistoryDAO.insert(createQueryMetrics(1580397912000L, 2L, false, PROJECT, true));
-        Assert.assertEquals(2, queryHistoryDAO.getAllQueryHistories().size());
-
-        // delete equals max size
-        queryHistoryDAO.deleteQueryHistoriesIfMaxSizeReached();
-        Assert.assertEquals(2, queryHistoryDAO.getAllQueryHistories().size());
-
-        // delete > max size
-        queryHistoryDAO.insert(createQueryMetrics(1580397912000L, 2L, false, PROJECT, true));
-        Assert.assertEquals(3, queryHistoryDAO.getAllQueryHistories().size());
-        queryHistoryDAO.deleteQueryHistoriesIfMaxSizeReached();
-        Assert.assertEquals(2, queryHistoryDAO.getAllQueryHistories().size());
-
-    }
-
-    @Test
-    public void testDeleteQueryHistoryMaxSizeForProject() {
-        String otherProject = "other_project";
-        KylinConfig config = getTestConfig();
-        config.setProperty("kylin.query.queryhistory.project-max-size", "2");
-        Assert.assertEquals(2, config.getQueryHistoryProjectMaxSize());
-
-        // before delete
-        Assert.assertEquals(0, queryHistoryDAO.getAllQueryHistories().size());
-
-        // delete empty
-        queryHistoryDAO.deleteQueryHistoriesIfProjectMaxSizeReached(PROJECT);
-        Assert.assertEquals(0, queryHistoryDAO.getAllQueryHistories().size());
-
-        // insert
-        queryHistoryDAO.insert(createQueryMetrics(1580311512000L, 1L, true, PROJECT, true));
-        queryHistoryDAO.insert(createQueryMetrics(1580397912000L, 2L, false, PROJECT, true));
-        queryHistoryDAO.insert(createQueryMetrics(1580311512000L, 1L, true, otherProject, true));
-        queryHistoryDAO.insert(createQueryMetrics(1580397912000L, 2L, false, otherProject, true));
-        Assert.assertEquals(4, queryHistoryDAO.getAllQueryHistories().size());
-
-        // delete equals max size
-        queryHistoryDAO.deleteQueryHistoriesIfProjectMaxSizeReached(PROJECT);
-        queryHistoryDAO.deleteQueryHistoriesIfProjectMaxSizeReached(otherProject);
-        Assert.assertEquals(4, queryHistoryDAO.getAllQueryHistories().size());
-
-        // delete > max size
-        queryHistoryDAO.insert(createQueryMetrics(1580397912000L, 2L, false, PROJECT, true));
-        queryHistoryDAO.insert(createQueryMetrics(1580397912000L, 2L, false, otherProject, true));
-        Assert.assertEquals(6, queryHistoryDAO.getAllQueryHistories().size());
-        queryHistoryDAO.deleteQueryHistoriesIfProjectMaxSizeReached(PROJECT);
-        queryHistoryDAO.deleteQueryHistoriesIfProjectMaxSizeReached(otherProject);
-        Assert.assertEquals(4, queryHistoryDAO.getAllQueryHistories().size());
-
-    }
-
 }


[kylin] 19/22: KYLIN-5324 tableindex answer select star

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit a5875f29c1d168d447d41f68622ed78045ea133f
Author: songzhxlh-max <59...@users.noreply.github.com>
AuthorDate: Sat Oct 8 09:53:54 2022 +0800

    KYLIN-5324 tableindex answer select star
---
 .../org/apache/kylin/common/KylinConfigBase.java   | 11 ++++++--
 .../metadata/cube/cuboid/NQueryLayoutChooser.java  | 33 ++++++++++++++--------
 .../metadata/cube/cuboid/TableIndexMatcher.java    | 24 ++++++++++++++--
 .../kylin/metadata/cube/model/IndexPlan.java       | 22 +++++++++++++--
 .../kylin/metadata/cube/model/NDataflow.java       |  4 +++
 .../org/apache/kylin/query/schema/OLAPTable.java   | 32 +++++++++++++++++++--
 .../apache/kylin/query/util/RuntimeHelper.scala    | 17 +++++++++--
 7 files changed, 118 insertions(+), 25 deletions(-)

diff --git a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
index 5fe045c115..dec30004b8 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/KylinConfigBase.java
@@ -617,7 +617,7 @@ public abstract class KylinConfigBase implements Serializable {
         String uuid = RandomUtil.randomUUIDStr().toUpperCase(Locale.ROOT).substring(0, 6);
         String packageName = DIAG_ID_PREFIX
                 + new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss", Locale.getDefault(Locale.Category.FORMAT))
-                        .format(new Date())
+                .format(new Date())
                 + "_" + uuid;
         String workDir = KylinConfigBase.getKylinHomeWithoutWarn();
         String diagPath = "diag_dump/" + packageName;
@@ -2036,7 +2036,7 @@ public abstract class KylinConfigBase implements Serializable {
         String value = getOptional("kylin.query.table-detect-transformers");
         return value == null
                 ? new String[] { POWER_BI_CONVERTER, "org.apache.kylin.query.util.DefaultQueryTransformer",
-                        "org.apache.kylin.query.util.EscapeTransformer" }
+                "org.apache.kylin.query.util.EscapeTransformer" }
                 : getOptionalStringArray("kylin.query.table-detect-transformers", new String[0]);
     }
 
@@ -2973,7 +2973,7 @@ public abstract class KylinConfigBase implements Serializable {
     }
 
     private double getConfigItemDoubleValue(String configItem, double defaultDoubleValue, double rangeStart,
-            double rangeEnd) {
+                                            double rangeEnd) {
         double resultValue = defaultDoubleValue;
         try {
             resultValue = Integer.parseInt(getOptional(configItem, String.valueOf(defaultDoubleValue)));
@@ -3632,6 +3632,11 @@ public abstract class KylinConfigBase implements Serializable {
         return Boolean.parseBoolean(getOptional("kylin.build.resource.skip-resource-check", FALSE));
     }
 
+    public boolean useTableIndexAnswerSelectStarEnabled() {
+        return Boolean.parseBoolean(getOptional("kylin.query.use-tableindex-answer-select-star.enabled", FALSE));
+    }
+
+
     public int getSecondStorageSkippingIndexGranularity() {
         int granularity = Integer.parseInt(getOptional("kylin.second-storage.skipping-index.granularity", "3"));
         return granularity <= 0 ? 3 : granularity;
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/NQueryLayoutChooser.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/NQueryLayoutChooser.java
index 5af671f842..0b32223e33 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/NQueryLayoutChooser.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/NQueryLayoutChooser.java
@@ -67,7 +67,8 @@ public class NQueryLayoutChooser {
         List<NDataSegment> toRemovedSegments = Lists.newArrayList();
         for (NDataSegment segment : prunedSegments) {
             if (candidate == null) {
-                candidate = selectLayoutCandidate(dataflow, Lists.newArrayList(segment), sqlDigest, secondStorageSegmentLayoutMap);
+                candidate = selectLayoutCandidate(dataflow, Lists.newArrayList(segment), sqlDigest,
+                        secondStorageSegmentLayoutMap);
                 if (candidate == null) {
                     toRemovedSegments.add(segment);
                 }
@@ -87,8 +88,7 @@ public class NQueryLayoutChooser {
             return NLayoutCandidate.EMPTY;
         }
         List<NLayoutCandidate> candidates = new ArrayList<>();
-        val commonLayouts = getLayoutsFromSegments(prunedSegments, dataflow,
-                secondStorageSegmentLayoutMap);
+        val commonLayouts = getLayoutsFromSegments(prunedSegments, dataflow, secondStorageSegmentLayoutMap);
         val model = dataflow.getModel();
         log.info("Matching dataflow with seg num: {} layout num: {}", prunedSegments.size(), commonLayouts.size());
         KylinConfig config = KylinConfig.getInstanceFromEnv();
@@ -108,6 +108,9 @@ public class NQueryLayoutChooser {
         if (!aggIndexMatcher.valid() && !tableIndexMatcher.valid()) {
             return null;
         }
+        val projectInstance = NProjectManager.getInstance(KylinConfig.getInstanceFromEnv())
+                .getProject(dataflow.getProject());
+        double influenceFactor = 1.0;
         for (NDataLayout dataLayout : commonLayouts) {
             log.trace("Matching layout {}", dataLayout);
             CapabilityResult tempResult = new CapabilityResult();
@@ -119,6 +122,8 @@ public class NQueryLayoutChooser {
             var matchResult = tableIndexMatcher.match(layout);
             if (!matchResult.isMatched()) {
                 matchResult = aggIndexMatcher.match(layout);
+            } else if (projectInstance.getConfig().useTableIndexAnswerSelectStarEnabled()) {
+                influenceFactor += influenceFactor + tableIndexMatcher.getLayoutUnmatchedColsSize();
             }
             if (!matchResult.isMatched()) {
                 log.trace("Matching failed");
@@ -127,7 +132,7 @@ public class NQueryLayoutChooser {
 
             NLayoutCandidate candidate = new NLayoutCandidate(layout);
             tempResult.influences = matchResult.getInfluences();
-            candidate.setCost(dataLayout.getRows() * (tempResult.influences.size() + 1.0));
+            candidate.setCost(dataLayout.getRows() * (tempResult.influences.size() + influenceFactor));
             if (!matchResult.getNeedDerive().isEmpty()) {
                 candidate.setDerivedToHostMap(matchResult.getNeedDerive());
                 candidate.setDerivedTableSnapshots(candidate.getDerivedToHostMap().keySet().stream()
@@ -151,7 +156,7 @@ public class NQueryLayoutChooser {
     }
 
     private static Collection<NDataLayout> getLayoutsFromSegments(List<NDataSegment> segments, NDataflow dataflow,
-                                                                  Map<String, Set<Long>> secondStorageSegmentLayoutMap) {
+            Map<String, Set<Long>> secondStorageSegmentLayoutMap) {
         KylinConfig config = KylinConfig.getInstanceFromEnv();
         val projectInstance = NProjectManager.getInstance(config).getProject(dataflow.getProject());
         if (!projectInstance.getConfig().isHeterogeneousSegmentEnabled()) {
@@ -166,10 +171,13 @@ public class NQueryLayoutChooser {
         for (int i = 0; i < segments.size(); i++) {
             val dataSegment = segments.get(i);
             var layoutIdMapToDataLayout = dataSegment.getLayoutsMap();
-            if (SegmentOnlineMode.ANY.toString().equalsIgnoreCase(projectInstance.getConfig().getKylinEngineSegmentOnlineMode())
+            if (SegmentOnlineMode.ANY.toString()
+                    .equalsIgnoreCase(projectInstance.getConfig().getKylinEngineSegmentOnlineMode())
                     && MapUtils.isNotEmpty(secondStorageSegmentLayoutMap)) {
-                Set<Long> chLayouts = secondStorageSegmentLayoutMap.getOrDefault(dataSegment.getId(), Sets.newHashSet());
-                Map<Long, NDataLayout> nDataLayoutMap = chLayouts.stream().map(id -> NDataLayout.newDataLayout(dataflow, dataSegment.getId(), id))
+                Set<Long> chLayouts = secondStorageSegmentLayoutMap.getOrDefault(dataSegment.getId(),
+                        Sets.newHashSet());
+                Map<Long, NDataLayout> nDataLayoutMap = chLayouts.stream()
+                        .map(id -> NDataLayout.newDataLayout(dataflow, dataSegment.getId(), id))
                         .collect(Collectors.toMap(NDataLayout::getLayoutId, nDataLayout -> nDataLayout));
 
                 nDataLayoutMap.putAll(layoutIdMapToDataLayout);
@@ -202,8 +210,7 @@ public class NQueryLayoutChooser {
                 .collect(Collectors.toList());
 
         Ordering<NLayoutCandidate> ordering = Ordering //
-                .from(priorityLayoutComparator()).compound(derivedLayoutComparator())
-                .compound(rowSizeComparator()) // L1 comparator, compare cuboid rows
+                .from(priorityLayoutComparator()).compound(derivedLayoutComparator()).compound(rowSizeComparator()) // L1 comparator, compare cuboid rows
                 .compound(filterColumnComparator(filterColIds, chooserContext)) // L2 comparator, order filter columns
                 .compound(dimensionSizeComparator()) // the lower dimension the best
                 .compound(measureSizeComparator()) // L3 comparator, order size of cuboid columns
@@ -216,9 +223,11 @@ public class NQueryLayoutChooser {
             if (!KylinConfig.getInstanceFromEnv().isPreferAggIndex()) {
                 return 0;
             }
-            if (!layoutCandidate1.getLayoutEntity().getIndex().isTableIndex() && layoutCandidate2.getLayoutEntity().getIndex().isTableIndex()) {
+            if (!layoutCandidate1.getLayoutEntity().getIndex().isTableIndex()
+                    && layoutCandidate2.getLayoutEntity().getIndex().isTableIndex()) {
                 return -1;
-            } else if (layoutCandidate1.getLayoutEntity().getIndex().isTableIndex() && !layoutCandidate2.getLayoutEntity().getIndex().isTableIndex()) {
+            } else if (layoutCandidate1.getLayoutEntity().getIndex().isTableIndex()
+                    && !layoutCandidate2.getLayoutEntity().getIndex().isTableIndex()) {
                 return 1;
             }
             return 0;
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/TableIndexMatcher.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/TableIndexMatcher.java
index 9253df0b73..f3eaee1b36 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/TableIndexMatcher.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/cuboid/TableIndexMatcher.java
@@ -23,11 +23,16 @@ import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
 
+import org.apache.kylin.common.KylinConfig;
+import org.apache.kylin.metadata.cube.model.IndexEntity;
+import org.apache.kylin.metadata.cube.model.LayoutEntity;
+import org.apache.kylin.metadata.cube.model.NDataflow;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
 import org.apache.kylin.metadata.model.DeriveInfo;
+import org.apache.kylin.metadata.project.NProjectManager;
+import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.metadata.realization.CapabilityResult;
 import org.apache.kylin.metadata.realization.SQLDigest;
-import org.apache.kylin.metadata.cube.model.IndexEntity;
-import org.apache.kylin.metadata.cube.model.LayoutEntity;
 
 import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
@@ -41,12 +46,14 @@ public class TableIndexMatcher extends IndexMatcher {
     private final boolean isUseTableIndexAnswerNonRawQuery;
     private Set<Integer> sqlColumns;
     private final boolean valid;
+    private int layoutUnmatchedColsSize;
 
     public TableIndexMatcher(SQLDigest sqlDigest, ChooserContext chooserContext, Set<String> excludedTables,
             boolean isUseTableIndexAnswerNonRawQuery) {
         super(sqlDigest, chooserContext, excludedTables);
         this.isUseTableIndexAnswerNonRawQuery = isUseTableIndexAnswerNonRawQuery;
         valid = init();
+        this.layoutUnmatchedColsSize = 0;
     }
 
     private boolean init() {
@@ -72,6 +79,15 @@ public class TableIndexMatcher extends IndexMatcher {
             unmatchedCols.removeAll(layout.getStreamingColumns().keySet());
         }
         unmatchedCols.removeAll(layout.getOrderedDimensions().keySet());
+        ProjectInstance projectInstance = NProjectManager.getInstance(KylinConfig.getInstanceFromEnv())
+                .getProject(model.getProject());
+        if (projectInstance.getConfig().useTableIndexAnswerSelectStarEnabled()) {
+            layoutUnmatchedColsSize = unmatchedCols.size();
+            NDataflowManager dataflowManager = NDataflowManager.getInstance(KylinConfig.getInstanceFromEnv(),
+                    model.getProject());
+            NDataflow dataflow = dataflowManager.getDataflow(layout.getModel().getId());
+            unmatchedCols.removeAll(dataflow.getAllColumnsIndex());
+        }
         goThruDerivedDims(layout.getIndex(), needDerive, unmatchedCols);
         if (!unmatchedCols.isEmpty()) {
             if (log.isDebugEnabled()) {
@@ -88,4 +104,8 @@ public class TableIndexMatcher extends IndexMatcher {
         boolean isUseTableIndex = isUseTableIndexAnswerNonRawQuery && !nonSupportFunTableIndex(sqlDigest.aggregations);
         return index.isTableIndex() && (sqlDigest.isRawQuery || isUseTableIndex);
     }
+
+    public int getLayoutUnmatchedColsSize() {
+        return layoutUnmatchedColsSize;
+    }
 }
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/IndexPlan.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/IndexPlan.java
index 1df0411e62..c004d53135 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/IndexPlan.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/IndexPlan.java
@@ -29,6 +29,7 @@ import java.util.BitSet;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
+import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
 import java.util.List;
@@ -50,12 +51,12 @@ import org.apache.kylin.common.util.JsonUtil;
 import org.apache.kylin.metadata.MetadataConstants;
 import org.apache.kylin.metadata.model.IEngineAware;
 import org.apache.kylin.metadata.model.JoinTableDesc;
-import org.apache.kylin.metadata.model.SegmentStatusEnum;
-import org.apache.kylin.metadata.model.TblColRef;
-import org.apache.kylin.metadata.project.ProjectInstance;
 import org.apache.kylin.metadata.model.NDataModel;
 import org.apache.kylin.metadata.model.NDataModelManager;
+import org.apache.kylin.metadata.model.SegmentStatusEnum;
+import org.apache.kylin.metadata.model.TblColRef;
 import org.apache.kylin.metadata.project.NProjectManager;
+import org.apache.kylin.metadata.project.ProjectInstance;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonInclude;
@@ -65,6 +66,7 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.BiMap;
+import com.google.common.collect.ImmutableBiMap;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
@@ -159,6 +161,8 @@ public class IndexPlan extends RootPersistentEntity implements Serializable, IEn
 
     private final LinkedHashSet<TblColRef> allColumns = Sets.newLinkedHashSet();
 
+    private Set<Integer> allColumnsIndex = new HashSet<>();
+
     private List<LayoutEntity> ruleBasedLayouts = Lists.newArrayList();
     @Setter
     @Getter
@@ -276,6 +280,14 @@ public class IndexPlan extends RootPersistentEntity implements Serializable, IEn
             //all lookup tables are automatically derived
             allColumns.addAll(join.getTableRef().getColumns());
         }
+        initAllColumnsIndex();
+    }
+
+    private void initAllColumnsIndex() {
+        Map<TblColRef, Integer> tblColMap = Maps.newHashMap();
+        ImmutableBiMap<Integer, TblColRef> effectiveCols = getModel().getEffectiveCols();
+        effectiveCols.forEach((key, value) -> tblColMap.put(value, key));
+        allColumnsIndex = allColumns.stream().map(tblColMap::get).collect(Collectors.toSet());
     }
 
     private void initDictionaryDesc() {
@@ -367,6 +379,10 @@ public class IndexPlan extends RootPersistentEntity implements Serializable, IEn
         return allColumns;
     }
 
+    public Set<Integer> listAllTblColRefsIndex() {
+        return allColumnsIndex;
+    }
+
     private void addLayout2TargetIndex(LayoutEntity sourceLayout, IndexEntity targetIndex) {
         addLayout2TargetIndex(sourceLayout, targetIndex, false);
     }
diff --git a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflow.java b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflow.java
index 58e3c3abb2..c72b4842e5 100644
--- a/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflow.java
+++ b/src/core-metadata/src/main/java/org/apache/kylin/metadata/cube/model/NDataflow.java
@@ -247,6 +247,10 @@ public class NDataflow extends RootPersistentEntity implements Serializable, IRe
         return getIndexPlan().listAllTblColRefs();
     }
 
+    public Set<Integer> getAllColumnsIndex() {
+        return getIndexPlan().listAllTblColRefsIndex();
+    }
+
     @Override
     public List<TblColRef> getAllDimensions() {
         return Lists.newArrayList(getIndexPlan().getEffectiveDimCols().values());
diff --git a/src/query-common/src/main/java/org/apache/kylin/query/schema/OLAPTable.java b/src/query-common/src/main/java/org/apache/kylin/query/schema/OLAPTable.java
index dfd9368873..96e8fa02f4 100644
--- a/src/query-common/src/main/java/org/apache/kylin/query/schema/OLAPTable.java
+++ b/src/query-common/src/main/java/org/apache/kylin/query/schema/OLAPTable.java
@@ -74,6 +74,8 @@ import org.apache.kylin.common.KylinConfig;
 import org.apache.kylin.common.QueryContext;
 import org.apache.kylin.common.util.CollectionUtil;
 import org.apache.kylin.measure.topn.TopNMeasureType;
+import org.apache.kylin.metadata.cube.model.NDataflow;
+import org.apache.kylin.metadata.cube.model.NDataflowManager;
 import org.apache.kylin.metadata.datatype.DataType;
 import org.apache.kylin.metadata.model.ColumnDesc;
 import org.apache.kylin.metadata.model.ComputedColumnDesc;
@@ -83,6 +85,8 @@ import org.apache.kylin.metadata.model.NDataModel;
 import org.apache.kylin.metadata.model.TableDesc;
 import org.apache.kylin.metadata.model.util.ComputedColumnUtil;
 import org.apache.kylin.metadata.project.NProjectManager;
+import org.apache.kylin.metadata.project.ProjectInstance;
+import org.apache.kylin.metadata.realization.RealizationStatusEnum;
 import org.apache.kylin.query.QueryExtension;
 import org.apache.kylin.query.enumerator.OLAPQuery;
 import org.apache.kylin.query.relnode.OLAPTableScan;
@@ -268,10 +272,34 @@ public class OLAPTable extends AbstractQueryableTable implements TranslatableTab
     }
 
     private List<ColumnDesc> listTableColumnsIncludingCC() {
-        val allColumns = Lists.newArrayList(sourceTable.getColumns());
+        List<ColumnDesc> allColumns = Lists.newArrayList(sourceTable.getColumns());
 
-        if (!modelsMap.containsKey(sourceTable.getIdentity()))
+        if (!modelsMap.containsKey(sourceTable.getIdentity())) {
             return allColumns;
+        }
+
+        ProjectInstance projectInstance = NProjectManager.getInstance(olapSchema.getConfig())
+                .getProject(sourceTable.getProject());
+        NDataflowManager dataflowManager = NDataflowManager.getInstance(olapSchema.getConfig(),
+                sourceTable.getProject());
+        if (projectInstance.getConfig().useTableIndexAnswerSelectStarEnabled()) {
+            Set<ColumnDesc> exposeColumnDescSet = new HashSet<>();
+            String tableName = sourceTable.getIdentity();
+            List<NDataModel> modelList = modelsMap.get(tableName);
+            for (NDataModel dataModel : modelList) {
+                NDataflow dataflow = dataflowManager.getDataflow(dataModel.getId());
+                if (dataflow.getStatus() == RealizationStatusEnum.ONLINE) {
+                    dataflow.getAllColumns().forEach(tblColRef -> {
+                        if (tblColRef.getTable().equalsIgnoreCase(tableName)) {
+                            exposeColumnDescSet.add(tblColRef.getColumnDesc());
+                        }
+                    });
+                }
+            }
+            if (!exposeColumnDescSet.isEmpty()) {
+                allColumns = Lists.newArrayList(exposeColumnDescSet);
+            }
+        }
 
         val authorizedCC = getAuthorizedCC();
         if (CollectionUtils.isNotEmpty(authorizedCC)) {
diff --git a/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/util/RuntimeHelper.scala b/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/util/RuntimeHelper.scala
index 4fc0c31cff..3e6d0b1933 100644
--- a/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/util/RuntimeHelper.scala
+++ b/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/util/RuntimeHelper.scala
@@ -18,11 +18,12 @@
 
 package org.apache.kylin.query.util
 
+import org.apache.kylin.common.KylinConfig
 import org.apache.kylin.common.util.ImmutableBitSet
-import org.apache.kylin.query.util.SparderDerivedUtil
 import org.apache.kylin.metadata.datatype.DataType
 import org.apache.kylin.metadata.model.DeriveInfo.DeriveType
 import org.apache.kylin.metadata.model.TblColRef
+import org.apache.kylin.metadata.project.NProjectManager
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.Column
 import org.apache.spark.sql.catalyst.expressions.Literal
@@ -35,8 +36,9 @@ import scala.collection.JavaConverters._
 import scala.collection.mutable
 
 // scalastyle:off
-object RuntimeHelper  extends Logging {
+object RuntimeHelper extends Logging {
 
+  final val literalZero = new Column(Literal(0, DataTypes.IntegerType))
   final val literalOne = new Column(Literal(1, DataTypes.IntegerType))
   final val literalTs = new Column(Literal(null, DataTypes.TimestampType))
   final val literalString = new Column(Literal(null, DataTypes.StringType))
@@ -95,6 +97,8 @@ object RuntimeHelper  extends Logging {
       }.toMap
     }
 
+    val projectInstance = NProjectManager.getInstance(KylinConfig.getInstanceFromEnv)
+      .getProject(derivedUtil.model.getProject)
     // may have multi TopN measures.
     val topNIndexs = sourceSchema.fields.map(_.dataType).zipWithIndex.filter(_._1.isInstanceOf[ArrayType])
     allColumns.indices
@@ -116,6 +120,13 @@ object RuntimeHelper  extends Logging {
               if (hasTopN && topNIndexs.map(_._2).contains(gTInfoIndex)) {
                 // topn measure will be erase when calling inline
                 literalOne.as(s"${factTableName}_${columnName}")
+              } else if (projectInstance.getConfig.useTableIndexAnswerSelectStarEnabled()
+                && gTInfoIndex < 0) {
+                if (column.getColumnDesc.getType.isNumberFamily) {
+                  literalZero.as(s"${factTableName}_${columnName}")
+                } else {
+                  literalString.as(s"${factTableName}_${columnName}")
+                }
               } else if (primaryKey.get(gTInfoIndex)) {
                 //  primary key
                 col(gTInfoNames.apply(gTInfoIndex))
@@ -132,7 +143,7 @@ object RuntimeHelper  extends Logging {
             }
           } else if (deriveMap.contains(index)) {
             deriveMap.apply(index)
-          } else if( DataType.DATETIME_FAMILY.contains(column.getType.getName)) {
+          } else if (DataType.DATETIME_FAMILY.contains(column.getType.getName)) {
             // https://github.com/Kyligence/KAP/issues/14561
             literalTs.as(s"${factTableName}_${columnName}")
           } else if (DataType.STRING_FAMILY.contains(column.getType.getName)) {


[kylin] 17/22: KYLIN-5322 fix select count when out of segment range

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 6cba5e63396e1e2e56703b4c334e7a4df7a23dda
Author: fanshu.kong <17...@qq.com>
AuthorDate: Tue Sep 27 17:09:08 2022 +0800

    KYLIN-5322 fix select count when out of segment range
---
 .../engine/exec/sparder/SparderQueryPlanExec.java  | 10 ++---
 .../kylin/query/runtime/plan/TableScanPlan.scala   | 43 ++++++++++++++-----
 .../query/runtime/plan/SegmentEmptyTest.scala      | 50 ++++++++++++++++++++++
 .../org/apache/spark/sql/SparderTypeUtil.scala     | 13 +++---
 4 files changed, 94 insertions(+), 22 deletions(-)

diff --git a/src/query/src/main/java/org/apache/kylin/query/engine/exec/sparder/SparderQueryPlanExec.java b/src/query/src/main/java/org/apache/kylin/query/engine/exec/sparder/SparderQueryPlanExec.java
index a9a9963ae4..bbe7d25192 100644
--- a/src/query/src/main/java/org/apache/kylin/query/engine/exec/sparder/SparderQueryPlanExec.java
+++ b/src/query/src/main/java/org/apache/kylin/query/engine/exec/sparder/SparderQueryPlanExec.java
@@ -32,8 +32,6 @@ import org.apache.kylin.common.debug.BackdoorToggles;
 import org.apache.kylin.common.exception.KylinException;
 import org.apache.kylin.common.exception.QueryErrorCode;
 import org.apache.kylin.common.msg.MsgPicker;
-import org.apache.kylin.query.relnode.OLAPContext;
-import org.apache.kylin.query.relnode.OLAPRel;
 import org.apache.kylin.metadata.cube.cuboid.NLayoutCandidate;
 import org.apache.kylin.metadata.cube.model.IndexEntity;
 import org.apache.kylin.query.engine.exec.ExecuteResult;
@@ -43,6 +41,8 @@ import org.apache.kylin.query.engine.meta.SimpleDataContext;
 import org.apache.kylin.query.relnode.ContextUtil;
 import org.apache.kylin.query.relnode.KapContext;
 import org.apache.kylin.query.relnode.KapRel;
+import org.apache.kylin.query.relnode.OLAPContext;
+import org.apache.kylin.query.relnode.OLAPRel;
 import org.apache.kylin.query.runtime.SparkEngine;
 import org.apache.kylin.query.util.QueryContextCutter;
 import org.apache.spark.SparkException;
@@ -77,7 +77,7 @@ public class SparderQueryPlanExec implements QueryPlanExec {
                 || KapConfig.wrap(((SimpleDataContext) dataContext).getKylinConfig()).runConstantQueryLocally()) {
             val contexts = ContextUtil.listContexts();
             for (OLAPContext context : contexts) {
-                if (context.olapSchema != null && context.storageContext.isEmptyLayout()) {
+                if (context.olapSchema != null && context.storageContext.isEmptyLayout() && !context.isHasAgg()) {
                     QueryContext.fillEmptyResultSetMetrics();
                     return new ExecuteResult(Lists.newArrayList(), 0);
                 }
@@ -134,7 +134,7 @@ public class SparderQueryPlanExec implements QueryPlanExec {
                 QueryContext.current().getSecondStorageUsageMap().clear();
             } else if (e instanceof SQLException) {
                 handleForceToTieredStorage(e);
-            }else {
+            } else {
                 return ExceptionUtils.rethrow(e);
             }
         }
@@ -186,7 +186,7 @@ public class SparderQueryPlanExec implements QueryPlanExec {
     }
 
     private void handleForceToTieredStorage(final Exception e) {
-        if (e.getMessage().equals(QueryContext.ROUTE_USE_FORCEDTOTIEREDSTORAGE)){
+        if (e.getMessage().equals(QueryContext.ROUTE_USE_FORCEDTOTIEREDSTORAGE)) {
             ForceToTieredStorage forcedToTieredStorage = QueryContext.current().getForcedToTieredStorage();
             boolean forceTableIndex = QueryContext.current().isForceTableIndex();
             QueryContext.current().setLastFailed(true);
diff --git a/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/runtime/plan/TableScanPlan.scala b/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/runtime/plan/TableScanPlan.scala
index a10d0a16f3..5c79e0f113 100644
--- a/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/runtime/plan/TableScanPlan.scala
+++ b/src/spark-project/sparder/src/main/scala/org/apache/kylin/query/runtime/plan/TableScanPlan.scala
@@ -25,23 +25,21 @@ import org.apache.kylin.metadata.cube.cuboid.NLayoutCandidate
 import org.apache.kylin.metadata.cube.gridtable.NLayoutToGridTableMapping
 import org.apache.kylin.metadata.cube.model.{LayoutEntity, NDataSegment, NDataflow}
 import org.apache.kylin.metadata.cube.realization.HybridRealization
-import org.apache.kylin.metadata.model.NTableMetadataManager
-import org.apache.kylin.query.util.{RuntimeHelper, SparderDerivedUtil}
 import org.apache.kylin.metadata.model._
+import org.apache.kylin.metadata.realization.IRealization
 import org.apache.kylin.metadata.tuple.TupleInfo
+import org.apache.kylin.query.implicits.sessionToQueryContext
+import org.apache.kylin.query.relnode.{KapRel, OLAPContext}
+import org.apache.kylin.query.util.{RuntimeHelper, SparderDerivedUtil}
 import org.apache.spark.sql.execution.utils.SchemaProcessor
 import org.apache.spark.sql.functions.col
 import org.apache.spark.sql.manager.SparderLookupManager
 import org.apache.spark.sql.types.{ArrayType, DoubleType, StructField, StructType}
 import org.apache.spark.sql.util.SparderTypeUtil
-import org.apache.spark.sql.{DataFrame, _}
+import org.apache.spark.sql._
 
 import java.util.concurrent.ConcurrentHashMap
 import java.{lang, util}
-import org.apache.kylin.metadata.realization.IRealization
-import org.apache.kylin.query.implicits.sessionToQueryContext
-import org.apache.kylin.query.relnode.{KapRel, OLAPContext}
-
 import scala.collection.JavaConverters._
 
 // scalastyle:off
@@ -71,7 +69,8 @@ object TableScanPlan extends LogEx {
     val realizations = olapContext.realization.getRealizations.asScala.toList
     realizations.map(_.asInstanceOf[NDataflow])
       .filter(dataflow => (!dataflow.isStreaming && !context.isBatchCandidateEmpty) ||
-        (dataflow.isStreaming && !context.isStreamCandidateEmpty))
+        (dataflow.isStreaming && !context.isStreamCandidateEmpty) ||
+        isSegmentsEmpty(prunedSegments, prunedStreamingSegments))
       .map(dataflow => {
         if (dataflow.isStreaming) {
           tableScan(rel, dataflow, olapContext, session, prunedStreamingSegments, context.getStreamingCandidate)
@@ -81,13 +80,33 @@ object TableScanPlan extends LogEx {
       }).reduce(_.union(_))
   }
 
+  // prunedSegments is null or empty (see the guard at the call site and isSegmentsEmpty)
+  def tableScanEmptySegment(rel: KapRel): DataFrame = {
+    logInfo("prunedSegments is null")
+    val df = SparkOperation.createEmptyDataFrame(
+      StructType(
+        rel.getColumnRowType.getAllColumns.asScala
+          .map(column =>
+            StructField(column.toString.replaceAll("\\.", "_"), SparderTypeUtil.toSparkType(column.getType)))))
+    val cols = df.schema.map(structField => {
+      col(structField.name)
+    })
+    df.select(cols: _*)
+  }
+
+  def isSegmentsEmpty(prunedSegments: util.List[NDataSegment], prunedStreamingSegments: util.List[NDataSegment]): Boolean = {
+    val isPrunedSegmentsEmpty = prunedSegments == null || prunedSegments.size() == 0
+    val isPrunedStreamingSegmentsEmpty = prunedStreamingSegments == null || prunedStreamingSegments.size() == 0
+    isPrunedSegmentsEmpty && isPrunedStreamingSegmentsEmpty
+  }
+
   def tableScan(rel: KapRel, dataflow: NDataflow, olapContext: OLAPContext,
                 session: SparkSession, prunedSegments: util.List[NDataSegment], candidate: NLayoutCandidate): DataFrame = {
     val prunedPartitionMap = olapContext.storageContext.getPrunedPartitions
     olapContext.resetSQLDigest()
     //TODO: refactor
     val cuboidLayout = candidate.getLayoutEntity
-    if (cuboidLayout.getIndex.isTableIndex) {
+    if (cuboidLayout.getIndex != null && cuboidLayout.getIndex.isTableIndex) {
       QueryContext.current().getQueryTagInfo.setTableIndex(true)
     }
     val tableName = olapContext.firstTableScan.getBackupAlias
@@ -97,6 +116,9 @@ object TableScanPlan extends LogEx {
     /////////////////////////////////////////////
     val kapConfig = KapConfig.wrap(dataflow.getConfig)
     val basePath = kapConfig.getReadParquetStoragePath(dataflow.getProject)
+    if (prunedSegments == null || prunedSegments.size() == 0) {
+      return tableScanEmptySegment(rel: KapRel)
+    }
     val fileList = prunedSegments.asScala.map(
       seg => toLayoutPath(dataflow, cuboidLayout.getId, basePath, seg, prunedPartitionMap)
     )
@@ -366,8 +388,7 @@ object TableScanPlan extends LogEx {
     val session = SparderEnv.getSparkSession
     val olapContext = rel.getContext
     var instance: IRealization = null
-    if (olapContext.realization.isInstanceOf[NDataflow])
-    {
+    if (olapContext.realization.isInstanceOf[NDataflow]) {
       instance = olapContext.realization.asInstanceOf[NDataflow]
     } else {
       instance = olapContext.realization.asInstanceOf[HybridRealization]
diff --git a/src/spark-project/sparder/src/test/scala/org/apache/kylin/query/runtime/plan/SegmentEmptyTest.scala b/src/spark-project/sparder/src/test/scala/org/apache/kylin/query/runtime/plan/SegmentEmptyTest.scala
new file mode 100644
index 0000000000..03e0577013
--- /dev/null
+++ b/src/spark-project/sparder/src/test/scala/org/apache/kylin/query/runtime/plan/SegmentEmptyTest.scala
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.query.runtime.plan
+
+import org.apache.kylin.metadata.cube.model.NDataSegment
+import org.apache.spark.sql.common.{LocalMetadata, SharedSparkSession, SparderBaseFunSuite}
+import org.junit.Assert
+
+import java.util
+
+class SegmentEmptyTest extends SparderBaseFunSuite with SharedSparkSession with LocalMetadata {
+
+    val prunedSegment1 = null
+    val prunedSegment2 = new util.LinkedList[NDataSegment]
+    val prunedSegment3 = new util.LinkedList[NDataSegment]
+    prunedSegment3.add(new NDataSegment())
+
+    val prunedStreamingSegment1 = null
+    val prunedStreamingSegment2 = new util.LinkedList[NDataSegment]
+    val prunedStreamingSegment3 = new util.LinkedList[NDataSegment]
+    prunedStreamingSegment3.add(new NDataSegment())
+
+    Assert.assertTrue(TableScanPlan.isSegmentsEmpty(prunedSegment1, prunedStreamingSegment1))
+    Assert.assertTrue(TableScanPlan.isSegmentsEmpty(prunedSegment1, prunedStreamingSegment2))
+    Assert.assertFalse(TableScanPlan.isSegmentsEmpty(prunedSegment1, prunedStreamingSegment3))
+
+    Assert.assertTrue(TableScanPlan.isSegmentsEmpty(prunedSegment2, prunedStreamingSegment1))
+    Assert.assertTrue(TableScanPlan.isSegmentsEmpty(prunedSegment2, prunedStreamingSegment2))
+    Assert.assertFalse(TableScanPlan.isSegmentsEmpty(prunedSegment2, prunedStreamingSegment3))
+
+    Assert.assertFalse(TableScanPlan.isSegmentsEmpty(prunedSegment3, prunedStreamingSegment1))
+    Assert.assertFalse(TableScanPlan.isSegmentsEmpty(prunedSegment3, prunedStreamingSegment2))
+    Assert.assertFalse(TableScanPlan.isSegmentsEmpty(prunedSegment3, prunedStreamingSegment3))
+}
diff --git a/src/spark-project/spark-common/src/main/scala/org/apache/spark/sql/SparderTypeUtil.scala b/src/spark-project/spark-common/src/main/scala/org/apache/spark/sql/SparderTypeUtil.scala
index 2a2d1d02ab..6791718885 100644
--- a/src/spark-project/spark-common/src/main/scala/org/apache/spark/sql/SparderTypeUtil.scala
+++ b/src/spark-project/spark-common/src/main/scala/org/apache/spark/sql/SparderTypeUtil.scala
@@ -18,11 +18,6 @@
 
 package org.apache.spark.sql.util
 
-import java.lang.{Boolean => JBoolean, Byte => JByte, Double => JDouble, Float => JFloat, Long => JLong, Short => JShort}
-import java.math.BigDecimal
-import java.sql.{Date, Timestamp, Types}
-import java.time.ZoneId
-import java.util.{GregorianCalendar, Locale, TimeZone}
 import org.apache.calcite.avatica.util.TimeUnitRange
 import org.apache.calcite.rel.`type`.RelDataType
 import org.apache.calcite.rex.RexLiteral
@@ -33,7 +28,7 @@ import org.apache.kylin.common.util.DateFormat
 import org.apache.kylin.metadata.datatype.DataType
 import org.apache.spark.internal.Logging
 import org.apache.spark.sql.Column
-import org.apache.spark.sql.catalyst.expressions.{Base64, Cast}
+import org.apache.spark.sql.catalyst.expressions.Cast
 import org.apache.spark.sql.catalyst.parser.ParserUtils
 import org.apache.spark.sql.catalyst.util.DateTimeUtils
 import org.apache.spark.sql.functions._
@@ -41,6 +36,11 @@ import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.types.UTF8String
 import org.springframework.util.Base64Utils
 
+import java.lang.{Boolean => JBoolean, Byte => JByte, Double => JDouble, Float => JFloat, Long => JLong, Short => JShort}
+import java.math.BigDecimal
+import java.sql.{Date, Timestamp, Types}
+import java.time.ZoneId
+import java.util.{GregorianCalendar, Locale, TimeZone}
 import scala.collection.{immutable, mutable}
 
 object SparderTypeUtil extends Logging {
@@ -117,6 +117,7 @@ object SparderTypeUtil extends Logging {
       case tp if tp.startsWith("extendedcolumn") => BinaryType
       case tp if tp.startsWith("percentile") => BinaryType
       case tp if tp.startsWith("raw") => BinaryType
+      case "any" => StringType
       case _ => throw new IllegalArgumentException(dataTp.toString)
     }
   }


[kylin] 22/22: KYLIN-5326 Fix request parameter json deserializer

Posted by xx...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

xxyu pushed a commit to branch kylin5
in repository https://gitbox.apache.org/repos/asf/kylin.git

commit 21a6b9f7f611d8d4a23a8003abbcc139e687c394
Author: Jiale He <35...@users.noreply.github.com>
AuthorDate: Wed Oct 19 08:40:18 2022 +0800

    KYLIN-5326 Fix request parameter json deserializer
---
 .../common/util/ArgsTypeJsonDeserializer.java      |  35 ++++-
 .../common/util/ArgsTypeJsonDeserializerTest.java  | 156 +++++++++++++++++++++
 .../controller/open/OpenTableControllerTest.java   |  50 ++++---
 3 files changed, 214 insertions(+), 27 deletions(-)

diff --git a/src/core-common/src/main/java/org/apache/kylin/common/util/ArgsTypeJsonDeserializer.java b/src/core-common/src/main/java/org/apache/kylin/common/util/ArgsTypeJsonDeserializer.java
index e9509b1314..6b47bc12f1 100644
--- a/src/core-common/src/main/java/org/apache/kylin/common/util/ArgsTypeJsonDeserializer.java
+++ b/src/core-common/src/main/java/org/apache/kylin/common/util/ArgsTypeJsonDeserializer.java
@@ -23,11 +23,13 @@ import static org.apache.kylin.common.exception.code.ErrorCodeServer.ARGS_TYPE_C
 import java.io.IOException;
 import java.util.List;
 
+import org.apache.commons.lang3.StringUtils;
 import org.apache.kylin.common.exception.KylinException;
 
 import com.fasterxml.jackson.core.JsonParser;
 import com.fasterxml.jackson.databind.DeserializationContext;
 import com.fasterxml.jackson.databind.JsonDeserializer;
+import com.google.common.collect.Lists;
 
 public class ArgsTypeJsonDeserializer {
 
@@ -36,12 +38,27 @@ public class ArgsTypeJsonDeserializer {
     }
 
     public static class BooleanJsonDeserializer extends JsonDeserializer<Boolean> {
+
+        private final List<String> boolList = Lists.newArrayList("true", "false", "TRUE", "FALSE", "null");
+
+        @Override
+        public Boolean getNullValue(DeserializationContext ctxt) {
+            return Boolean.FALSE;
+        }
+
         @Override
         public Boolean deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
             try {
+                String text = p.getText();
+                if (StringUtils.isEmpty(text)) {
+                    return Boolean.FALSE;
+                }
+                if (boolList.contains(text)) {
+                    return Boolean.parseBoolean(text);
+                }
                 return p.getBooleanValue();
             } catch (Exception e) {
-                throw new KylinException(ARGS_TYPE_CHECK, p.getText(), "Boolean");
+                throw new KylinException(ARGS_TYPE_CHECK, e, p.getText(), "Boolean");
             }
         }
     }
@@ -52,18 +69,28 @@ public class ArgsTypeJsonDeserializer {
             try {
                 return p.readValueAs(List.class);
             } catch (Exception e) {
-                throw new KylinException(ARGS_TYPE_CHECK, p.getText(), "List");
+                throw new KylinException(ARGS_TYPE_CHECK, e, p.getText(), "List");
             }
         }
     }
 
     public static class IntegerJsonDeserializer extends JsonDeserializer<Integer> {
+
+        @Override
+        public Integer getNullValue(DeserializationContext ctxt) {
+            return 0;
+        }
+
         @Override
         public Integer deserialize(JsonParser p, DeserializationContext ctxt) throws IOException {
             try {
-                return p.getIntValue();
+                String text = p.getText();
+                if (StringUtils.isEmpty(text) || StringUtils.equals("null", text)) {
+                    return 0;
+                }
+                return Integer.parseInt(text);
             } catch (Exception e) {
-                throw new KylinException(ARGS_TYPE_CHECK, p.getText(), "Integer");
+                throw new KylinException(ARGS_TYPE_CHECK, e, p.getText(), "Integer");
             }
         }
     }
diff --git a/src/core-common/src/test/java/org/apache/kylin/common/util/ArgsTypeJsonDeserializerTest.java b/src/core-common/src/test/java/org/apache/kylin/common/util/ArgsTypeJsonDeserializerTest.java
new file mode 100644
index 0000000000..dd412883c5
--- /dev/null
+++ b/src/core-common/src/test/java/org/apache/kylin/common/util/ArgsTypeJsonDeserializerTest.java
@@ -0,0 +1,156 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.kylin.common.util;
+
+import static org.apache.kylin.common.exception.code.ErrorCodeServer.ARGS_TYPE_CHECK;
+
+import java.util.HashMap;
+
+import org.apache.kylin.common.exception.KylinException;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Test;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.JsonMappingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
+import com.google.common.collect.Maps;
+
+import lombok.Data;
+
+class ArgsTypeJsonDeserializerTest {
+
+    private static final ObjectMapper mapper = new ObjectMapper();
+
+    @Test
+    void testDeserialize() throws Exception {
+        {
+            // "null" -> false
+            // "null" -> 0
+            HashMap<Object, Object> map = Maps.newHashMap();
+            map.put("bol_value", "null");
+            map.put("int_value", "null");
+            String jsonStr = mapper.writeValueAsString(map);
+            MockRequest request = mapper.readValue(jsonStr, MockRequest.class);
+            Assertions.assertEquals(false, request.getBolValue());
+            Assertions.assertEquals(0, request.getIntValue());
+        }
+
+        {
+            // "" -> false
+            // "" -> 0
+            HashMap<Object, Object> map = Maps.newHashMap();
+            map.put("bol_value", "");
+            map.put("int_value", "");
+            String jsonStr = mapper.writeValueAsString(map);
+            MockRequest request = mapper.readValue(jsonStr, MockRequest.class);
+            Assertions.assertEquals(false, request.getBolValue());
+            Assertions.assertEquals(0, request.getIntValue());
+        }
+
+        {
+            // null -> false
+            // null -> 0
+            HashMap<Object, Object> map = Maps.newHashMap();
+            map.put("bol_value", null);
+            map.put("int_value", null);
+            String jsonStr = mapper.writeValueAsString(map);
+            MockRequest request = mapper.readValue(jsonStr, MockRequest.class);
+            Assertions.assertEquals(false, request.getBolValue());
+            Assertions.assertEquals(0, request.getIntValue());
+        }
+
+        {
+            // field absent -> default true kept
+            // field absent -> default 1 kept
+            HashMap<Object, Object> map = Maps.newHashMap();
+            String jsonStr = mapper.writeValueAsString(map);
+            MockRequest request = mapper.readValue(jsonStr, MockRequest.class);
+            Assertions.assertEquals(true, request.getBolValue());
+            Assertions.assertEquals(1, request.getIntValue());
+        }
+
+        {
+            // "true" -> true
+            // "99" -> 99
+            HashMap<Object, Object> map = Maps.newHashMap();
+            map.put("bol_value", "true");
+            map.put("int_value", "99");
+            String jsonStr = mapper.writeValueAsString(map);
+            MockRequest request = mapper.readValue(jsonStr, MockRequest.class);
+            Assertions.assertEquals(true, request.getBolValue());
+            Assertions.assertEquals(99, request.getIntValue());
+        }
+
+        {
+            // "true" -> true (NOTE: value duplicates the previous case; "TRUE" was likely intended — verify)
+            // "99" -> 99
+            HashMap<Object, Object> map = Maps.newHashMap();
+            map.put("bol_value", "true");
+            map.put("int_value", "99");
+            String jsonStr = mapper.writeValueAsString(map);
+            MockRequest request = mapper.readValue(jsonStr, MockRequest.class);
+            Assertions.assertEquals(true, request.getBolValue());
+            Assertions.assertEquals(99, request.getIntValue());
+        }
+
+        {
+            // "abc" -> exception
+            HashMap<Object, Object> map = Maps.newHashMap();
+            map.put("bol_value", "abc");
+            String jsonStr = mapper.writeValueAsString(map);
+            try {
+                mapper.readValue(jsonStr, MockRequest.class);
+            } catch (Exception e) {
+                Assertions.assertTrue(e instanceof JsonMappingException);
+                Assertions.assertTrue(e.getCause() instanceof KylinException);
+                KylinException kylinException = (KylinException) e.getCause();
+                Assertions.assertEquals(ARGS_TYPE_CHECK.getErrorCode().getCode(),
+                        kylinException.getErrorCode().getCodeString());
+            }
+        }
+
+        {
+            // "abc" -> exception
+            HashMap<Object, Object> map = Maps.newHashMap();
+            map.put("int_value", "abc");
+            String jsonStr = mapper.writeValueAsString(map);
+            try {
+                mapper.readValue(jsonStr, MockRequest.class);
+            } catch (Exception e) {
+                Assertions.assertTrue(e instanceof JsonMappingException);
+                Assertions.assertTrue(e.getCause() instanceof KylinException);
+                KylinException kylinException = (KylinException) e.getCause();
+                Assertions.assertEquals(ARGS_TYPE_CHECK.getErrorCode().getCode(),
+                        kylinException.getErrorCode().getCodeString());
+            }
+        }
+    }
+
+    @Data
+    static class MockRequest {
+        @JsonDeserialize(using = ArgsTypeJsonDeserializer.BooleanJsonDeserializer.class)
+        @JsonProperty("bol_value")
+        private Boolean bolValue = true;
+
+        @JsonDeserialize(using = ArgsTypeJsonDeserializer.IntegerJsonDeserializer.class)
+        @JsonProperty("int_value")
+        private Integer intValue = 1;
+    }
+}
diff --git a/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/open/OpenTableControllerTest.java b/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/open/OpenTableControllerTest.java
index 9cafc5e6c5..1ba31ca693 100644
--- a/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/open/OpenTableControllerTest.java
+++ b/src/metadata-server/src/test/java/org/apache/kylin/rest/controller/open/OpenTableControllerTest.java
@@ -148,6 +148,7 @@ public class OpenTableControllerTest extends NLocalFileMetadataTestCase {
         tableLoadRequest.setTables(new String[] { "hh.kk" });
         tableLoadRequest.setNeedSampling(false);
         tableLoadRequest.setProject("default");
+        tableLoadRequest.setSamplingRows(0);
         Mockito.doNothing().when(openTableController).updateDataSourceType("default", 9);
         Mockito.doAnswer(x -> null).when(nTableController).loadTables(tableLoadRequest);
         mockMvc.perform(MockMvcRequestBuilders.post("/api/tables") //
@@ -203,27 +204,27 @@ public class OpenTableControllerTest extends NLocalFileMetadataTestCase {
         Mockito.doNothing().when(openTableController).updateDataSourceType("default", 9);
         Mockito.doAnswer(x -> null).when(nTableController).loadAWSTablesCompatibleCrossAccount(tableLoadRequest);
         mockMvc.perform(MockMvcRequestBuilders.post("/api/tables/compatibility/aws") //
-                        .contentType(MediaType.APPLICATION_JSON) //
-                        .content(JsonUtil.writeValueAsString(tableLoadRequest)) //
-                        .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) //
+                .contentType(MediaType.APPLICATION_JSON) //
+                .content(JsonUtil.writeValueAsString(tableLoadRequest)) //
+                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) //
                 .andExpect(MockMvcResultMatchers.status().isOk());
         Mockito.verify(openTableController).loadAWSTablesCompatibleCrossAccount(tableLoadRequest);
 
         tableLoadRequest.setNeedSampling(true);
         tableLoadRequest.setSamplingRows(10000);
         mockMvc.perform(MockMvcRequestBuilders.post("/api/tables/compatibility/aws") //
-                        .contentType(MediaType.APPLICATION_JSON) //
-                        .content(JsonUtil.writeValueAsString(tableLoadRequest)) //
-                        .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) //
+                .contentType(MediaType.APPLICATION_JSON) //
+                .content(JsonUtil.writeValueAsString(tableLoadRequest)) //
+                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) //
                 .andExpect(MockMvcResultMatchers.status().isOk());
         Mockito.verify(openTableController).loadAWSTablesCompatibleCrossAccount(tableLoadRequest);
 
         tableLoadRequest.setNeedSampling(true);
         tableLoadRequest.setSamplingRows(1000);
         mockMvc.perform(MockMvcRequestBuilders.post("/api/tables/compatibility/aws") //
-                        .contentType(MediaType.APPLICATION_JSON) //
-                        .content(JsonUtil.writeValueAsString(tableLoadRequest)) //
-                        .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) //
+                .contentType(MediaType.APPLICATION_JSON) //
+                .content(JsonUtil.writeValueAsString(tableLoadRequest)) //
+                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) //
                 .andExpect(MockMvcResultMatchers.status().isInternalServerError());
         Mockito.verify(openTableController).loadAWSTablesCompatibleCrossAccount(tableLoadRequest);
 
@@ -248,11 +249,12 @@ public class OpenTableControllerTest extends NLocalFileMetadataTestCase {
         request.setTables(tableExtInfoList);
 
         mockMvc.perform(MockMvcRequestBuilders.put("/api/tables/ext/prop/aws") //
-                        .contentType(MediaType.APPLICATION_JSON) //
-                        .content(JsonUtil.writeValueAsString(request)) //
-                        .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) //
+                .contentType(MediaType.APPLICATION_JSON) //
+                .content(JsonUtil.writeValueAsString(request)) //
+                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) //
                 .andExpect(MockMvcResultMatchers.status().isOk());
-        Mockito.verify(openTableController).updateLoadedAWSTableExtProp(Mockito.any(UpdateAWSTableExtDescRequest.class));
+        Mockito.verify(openTableController)
+                .updateLoadedAWSTableExtProp(Mockito.any(UpdateAWSTableExtDescRequest.class));
     }
 
     @Test
@@ -349,12 +351,13 @@ public class OpenTableControllerTest extends NLocalFileMetadataTestCase {
         request.setNeedSampling(false);
         request.setS3TableExtInfo(s3TableExtInfo);
 
-        Mockito.doReturn(new Pair<String, List<String>>()).when(tableService).reloadAWSTableCompatibleCrossAccount(request.getProject(),
-                request.getS3TableExtInfo(), request.getNeedSampling(), 0, false, ExecutablePO.DEFAULT_PRIORITY, null);
+        Mockito.doReturn(new Pair<String, List<String>>()).when(tableService).reloadAWSTableCompatibleCrossAccount(
+                request.getProject(), request.getS3TableExtInfo(), request.getNeedSampling(), 0, false,
+                ExecutablePO.DEFAULT_PRIORITY, null);
         mockMvc.perform(MockMvcRequestBuilders.post("/api/tables/reload/compatibility/aws") //
-                        .contentType(MediaType.APPLICATION_JSON) //
-                        .content(JsonUtil.writeValueAsString(request)) //
-                        .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) //
+                .contentType(MediaType.APPLICATION_JSON) //
+                .content(JsonUtil.writeValueAsString(request)) //
+                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) //
                 .andExpect(MockMvcResultMatchers.status().isOk());
         Mockito.verify(openTableController).reloadAWSTablesCompatibleCrossAccount(request);
 
@@ -364,12 +367,13 @@ public class OpenTableControllerTest extends NLocalFileMetadataTestCase {
         request2.setS3TableExtInfo(s3TableExtInfo);
         request2.setNeedSampling(true);
         request2.setSamplingRows(10000);
-        Mockito.doReturn(new Pair<String, List<String>>()).when(tableService).reloadAWSTableCompatibleCrossAccount(request2.getProject(),
-                request2.getS3TableExtInfo(), request2.getNeedSampling(), request2.getSamplingRows(), false, ExecutablePO.DEFAULT_PRIORITY, null);
+        Mockito.doReturn(new Pair<String, List<String>>()).when(tableService).reloadAWSTableCompatibleCrossAccount(
+                request2.getProject(), request2.getS3TableExtInfo(), request2.getNeedSampling(),
+                request2.getSamplingRows(), false, ExecutablePO.DEFAULT_PRIORITY, null);
         mockMvc.perform(MockMvcRequestBuilders.post("/api/tables/reload/compatibility/aws") //
-                        .contentType(MediaType.APPLICATION_JSON) //
-                        .content(JsonUtil.writeValueAsString(request2)) //
-                        .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) //
+                .contentType(MediaType.APPLICATION_JSON) //
+                .content(JsonUtil.writeValueAsString(request2)) //
+                .accept(MediaType.parseMediaType(HTTP_VND_APACHE_KYLIN_V4_PUBLIC_JSON))) //
                 .andExpect(MockMvcResultMatchers.status().isOk());
         Mockito.verify(openTableController).reloadAWSTablesCompatibleCrossAccount(request2);
     }