Posted to commits@linkis.apache.org by ca...@apache.org on 2022/07/14 14:36:01 UTC

[incubator-linkis] branch dev-1.2.0 updated: [ISSUE-2344] Opt metadata sql (#2358)

This is an automated email from the ASF dual-hosted git repository.

casion pushed a commit to branch dev-1.2.0
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git


The following commit(s) were added to refs/heads/dev-1.2.0 by this push:
     new b26cff72f [ISSUE-2344] Opt metadata sql (#2358)
b26cff72f is described below

commit b26cff72f95e6939f66ff14654b1be55c6f23cec
Author: Jack Xu <xu...@126.com>
AuthorDate: Thu Jul 14 22:35:56 2022 +0800

    [ISSUE-2344] Opt metadata sql (#2358)
    
    * feat(metadata): let the db owner see his own dbs
    * let the db owner see his own dbs and refactor the permission-related sqls
    * refactor: add DatabaseQueryParam class and refactor the metadata project (sketched below, after the diffstat)
    * refactor(linkis-metadata): rename DatabaseQueryParam to MetadataQueryParam
    * fix(typo): fix the role typo, renaming rose to role
---
 .../linkis/metadata/hive/dao/HiveMetaDao.java      |  54 ++++--
 .../linkis/metadata/hive/dao/impl/HiveMetaDao.xml  | 200 +++++++++++----------
 .../metadata/hive/dto/MetadataQueryParam.java      | 144 +++++++++++++++
 .../metadata/restful/api/DataSourceRestfulApi.java |  30 ++--
 .../metadata/restful/api/MdqTableRestfulApi.java   |  17 +-
 .../linkis/metadata/service/DataSourceService.java |  15 +-
 .../service/HiveMetaWithPermissionService.java     |   6 +-
 .../apache/linkis/metadata/service/MdqService.java |  14 +-
 .../service/impl/DataSourceServiceImpl.java        |  84 ++++-----
 .../impl/HiveMetaWithPermissionServiceImpl.java    |  43 +++--
 .../metadata/service/impl/MdqServiceImpl.java      |  80 ++++-----
 .../org/apache/linkis/metadata/util/DWSConfig.java |   2 +-
 .../linkis/metadata/utils/MdqConstants.scala       |   6 -
 .../apache/linkis/metadata/utils/MdqUtils.scala    |   9 +-
 14 files changed, 435 insertions(+), 269 deletions(-)
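
For orientation before the diff: the core of this change replaces the loosely-typed Map<String, String> parameters the metadata DAO used to take with a typed MetadataQueryParam object. A minimal before/after sketch, using only signatures that appear in the diff below (the surrounding service wiring is assumed):

    // Before: stringly-typed map parameters
    Map<String, String> map = new HashMap<>();
    map.put("dbName", database);
    map.put("tableName", table);
    map.put("userName", userName);
    List<Map<String, Object>> columns = hiveMetaDao.getColumns(map);

    // After: a typed, fluent parameter object
    MetadataQueryParam queryParam =
            MetadataQueryParam.of(userName).withDbName(database).withTableName(table);
    List<Map<String, Object>> typedColumns = hiveMetaDao.getColumns(queryParam);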

diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/hive/dao/HiveMetaDao.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/hive/dao/HiveMetaDao.java
index 805846f90..92e560881 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/hive/dao/HiveMetaDao.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/hive/dao/HiveMetaDao.java
@@ -17,6 +17,8 @@
 
 package org.apache.linkis.metadata.hive.dao;
 
+import org.apache.linkis.metadata.hive.dto.MetadataQueryParam;
+
 import org.apache.ibatis.annotations.Param;
 
 import java.util.List;
@@ -24,28 +26,54 @@ import java.util.Map;
 
 public interface HiveMetaDao {
 
-    String getLocationByDbAndTable(Map<String, String> map);
-
-    List<String> getDbsByUser(String userName);
-
-    /** @return get all list of DBS NAME without filtering by userName */
+    String getLocationByDbAndTable(MetadataQueryParam queryParam);
+
+    /**
+     * get user's roles by username
+     *
+     * @param userName user's username
+     * @return the role name list
+     */
+    List<String> getRolesByUser(String userName);
+
+    /**
+     * get dbs by user's username and user's roles
+     *
+     * @param userName user's username
+     * @param roles user's roles
+     * @return the db name list
+     */
+    List<String> getDbsByUserAndRoles(
+            @Param("userName") String userName, @Param("roles") List<String> roles);
+
+    /**
+     * get the full list of db names, without filtering by user
+     *
+     * @return the db name list
+     */
     List<String> getAllDbs();
 
-    List<Map<String, Object>> getTablesByDbNameAndUser(Map<String, String> map);
+    List<Map<String, Object>> getTablesByDbNameAndUserAndRoles(MetadataQueryParam queryParam);
 
-    List<Map<String, Object>> getTablesByDbName(Map<String, String> map);
+    List<Map<String, Object>> getTablesByDbName(MetadataQueryParam queryParam);
 
-    Long getPartitionSize(Map<String, String> map);
+    /**
+     * get the table partition's size
+     *
+     * @param queryParam the database search properties
+     * @return the size
+     */
+    Long getPartitionSize(MetadataQueryParam queryParam);
 
-    List<String> getPartitions(Map<String, String> map);
+    List<String> getPartitions(MetadataQueryParam queryParam);
 
-    List<Map<String, Object>> getColumns(Map<String, String> map);
+    List<Map<String, Object>> getColumns(MetadataQueryParam queryParam);
 
-    Map<String, Object> getStorageDescriptionIDByDbTableNameAndUser(Map<String, String> map);
+    Map<String, Object> getStorageDescriptionIDByDbTableNameAndUser(MetadataQueryParam queryParam);
 
-    List<Map<String, Object>> getColumnsByStorageDescriptionID(Map<String, String> map);
+    List<Map<String, Object>> getColumnsByStorageDescriptionID(MetadataQueryParam queryParam);
 
-    List<Map<String, Object>> getPartitionKeys(Map<String, String> map);
+    List<Map<String, Object>> getPartitionKeys(MetadataQueryParam queryParam);
 
     String getTableComment(@Param("DbName") String DbName, @Param("tableName") String tableName);
 }
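
The permission lookup is now split into two DAO calls: resolve the user's roles once, then pass them to the db query. A hedged usage sketch (it mirrors HiveMetaWithPermissionServiceImpl further down; hiveMetaDao is an assumed injected mapper):

    // Resolve roles once, then fetch the dbs visible to the user or his roles.
    List<String> roles = hiveMetaDao.getRolesByUser(userName);
    List<String> dbs = hiveMetaDao.getDbsByUserAndRoles(userName, roles);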
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/hive/dao/impl/HiveMetaDao.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/hive/dao/impl/HiveMetaDao.xml
index f9c705704..496b4b093 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/hive/dao/impl/HiveMetaDao.xml
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/hive/dao/impl/HiveMetaDao.xml
@@ -22,27 +22,54 @@
 
     <select id="getLocationByDbAndTable" resultType="java.lang.String" parameterType="map">
         select LOCATION from SDS where SD_ID in (
-        select SD_ID from `TBLS`
+        select SD_ID from TBLS
         where TBL_NAME = #{tableName,jdbcType=VARCHAR}
         and DB_ID in (select DB_ID from `DBS` where NAME = #{dbName,jdbcType=VARCHAR})
         )
     </select>
 
-    <select id="getDbsByUser" resultType="java.lang.String" parameterType="java.lang.String">
-        select NAME from(
-        select t2.NAME  as NAME
-        from DB_PRIVS t1, DBS t2
-        where (lcase(t1.PRINCIPAL_NAME) = #{userName,jdbcType=VARCHAR}
-        OR t1.PRINCIPAL_NAME IN (SELECT ROLE FROM(SELECT r.ROLE_NAME AS ROLE, u.PRINCIPAL_NAME AS USER FROM ROLES r LEFT JOIN (SELECT * FROM ROLE_MAP WHERE PRINCIPAL_TYPE = 'USER') u ON r.ROLE_ID = u.ROLE_ID)AS T where T.USER = #{userName,jdbcType=VARCHAR}))
-        and lcase(t1.DB_PRIV) in ('select','all') and t1.DB_ID =t2.DB_ID
-        union all
+    <select id="getRolesByUser" resultType="java.lang.String" parameterType="map">
+        SELECT ROLE_NAME
+        FROM ROLES r INNER JOIN ROLE_MAP rm
+            ON r.ROLE_ID = rm.ROLE_ID
+            and rm.PRINCIPAL_TYPE = 'USER'
+            and rm.PRINCIPAL_NAME = #{userName,jdbcType=VARCHAR}
+    </select>
+
+    <select id="getDbsByUserAndRoles" resultType="java.lang.String">
+        select NAME
+        from DBS
+        where (OWNER_TYPE = 'USER' AND OWNER_NAME = #{userName,jdbcType=VARCHAR})
+        <if test="roles != null and roles.size() > 0">
+        OR (OWNER_TYPE = 'ROLE' AND OWNER_NAME IN
+            <foreach collection="roles" item="id" index="index" open="(" close=")" separator=",">
+               #{id}
+            </foreach>)
+        </if>
+        union
+        select t2.NAME as NAME
+        from DB_PRIVS t1
+            INNER JOIN DBS t2 ON t1.DB_ID = t2.DB_ID and t1.DB_PRIV in ('SELECT', 'ALL')
+        where (t1.PRINCIPAL_TYPE = 'USER' AND t1.PRINCIPAL_NAME = #{userName,jdbcType=VARCHAR})
+        <if test="roles != null and roles.size() > 0">
+            OR (t1.PRINCIPAL_TYPE = 'ROLE' AND t1.PRINCIPAL_NAME IN
+            <foreach collection="roles" item="id" index="index" open="(" close=")" separator=",">
+                #{id}
+            </foreach>)
+        </if>
+        union
         select t3.NAME as NAME
-        from TBL_PRIVS t1, TBLS t2 , DBS t3
-        where t1.TBL_ID=t2.TBL_ID and lcase(t1.TBL_PRIV) in ('select','all') and (
-        lcase(t1.PRINCIPAL_NAME) = #{userName,jdbcType=VARCHAR} or lcase(t1.PRINCIPAL_NAME) in (SELECT ROLE FROM(SELECT r.ROLE_NAME AS ROLE, u.PRINCIPAL_NAME AS USER FROM ROLES r LEFT JOIN (SELECT * FROM ROLE_MAP WHERE PRINCIPAL_TYPE = 'USER') u ON r.ROLE_ID = u.ROLE_ID)AS T where T.USER = #{userName,jdbcType=VARCHAR}))
-        and t2.DB_ID=t3.DB_ID) a
-        GROUP BY NAME
-        order by NAME
+        from TBL_PRIVS t1
+                INNER JOIN TBLS t2 ON t1.TBL_ID = t2.TBL_ID and t1.TBL_PRIV in ('SELECT', 'ALL')
+                INNER JOIN DBS t3 ON t2.DB_ID = t3.DB_ID
+        where (t1.PRINCIPAL_TYPE = 'USER' AND t1.PRINCIPAL_NAME = #{userName,jdbcType=VARCHAR})
+        <if test="roles != null and roles.size() > 0">
+            OR (t1.PRINCIPAL_TYPE = 'ROLE' AND t1.PRINCIPAL_NAME IN
+            <foreach collection="roles" item="id" index="index" open="(" close=")" separator=",">
+                #{id}
+            </foreach>)
+        </if>
+        ORDER BY NAME
     </select>
 
     <select id="getAllDbs" resultType="java.lang.String">
@@ -50,128 +77,105 @@
         GROUP BY NAME
         order by NAME
     </select>
-    <select id="getTablesByDbNameAndUser" resultType="map"  parameterType="map">
+    <select id="getTablesByDbNameAndUserAndRoles" resultType="map" parameterType="map">
         select t2.TBL_NAME as NAME, t2.TBL_TYPE as TYPE, t2.CREATE_TIME as CREATE_TIME, t2.LAST_ACCESS_TIME as LAST_ACCESS_TIME, t2.OWNER as OWNER
-        from DB_PRIVS t1,TBLS t2, DBS t3
-        where  t1.DB_ID =t3.DB_ID
-        and t2.DB_ID=t3.DB_ID
-        and lcase(t1.DB_PRIV) in ('select','all')
-        and lcase(t1.PRINCIPAL_NAME) = #{userName,jdbcType=VARCHAR}
-        and t3.NAME = #{dbName,jdbcType=VARCHAR}
+        from DB_PRIVS t1
+            inner join TBLS t2 on t1.DB_ID = t2.DB_ID and t1.DB_PRIV in ('SELECT','ALL')
+            inner join DBS t3 on t1.DB_ID= t3.DB_ID and t3.NAME = #{dbName,jdbcType=VARCHAR}
+        where  (t1.PRINCIPAL_TYPE = 'USER' and t1.PRINCIPAL_NAME = #{userName,jdbcType=VARCHAR})
+        <if test="roles != null and roles.size() > 0">
+            OR (t1.PRINCIPAL_TYPE = 'ROLE' AND t1.PRINCIPAL_NAME IN
+            <foreach collection="roles" item="id" index="index" open="(" close=")" separator=",">
+                #{id}
+            </foreach>)
+        </if>
         union
         select t2.TBL_NAME as NAME, t2.TBL_TYPE as TYPE, t2.CREATE_TIME as CREATE_TIME, t2.LAST_ACCESS_TIME as LAST_ACCESS_TIME, t2.OWNER as OWNER
-        from DB_PRIVS t1,TBLS t2, DBS t3
-        where  t1.DB_ID =t3.DB_ID
-        and t2.DB_ID=t3.DB_ID
-        and lcase(t1.DB_PRIV) in ('select','all')
-        and lcase(t1.PRINCIPAL_NAME) in (select ROLE_NAME from ROLES where ROLE_ID in (select ROLE_ID from ROLE_MAP where PRINCIPAL_NAME = #{userName,jdbcType=VARCHAR}))
-        and t3.NAME = #{dbName,jdbcType=VARCHAR}
-        union
-        select t2.TBL_NAME as NAME, t2.TBL_TYPE as TYPE, t2.CREATE_TIME as CREATE_TIME, t2.LAST_ACCESS_TIME as LAST_ACCESS_TIME, t2.OWNER as OWNER
-        from TBL_PRIVS t1, TBLS t2 , DBS t3
-        where t1.TBL_ID=t2.TBL_ID
-        and t2.DB_ID=t3.DB_ID
-        and lcase(t1.TBL_PRIV) in ('select','all')
-        and t1.PRINCIPAL_NAME = #{userName,jdbcType=VARCHAR}
-        and t3.NAME = #{dbName,jdbcType=VARCHAR}
-        union
-        select t2.TBL_NAME as NAME, t2.TBL_TYPE as TYPE, t2.CREATE_TIME as CREATE_TIME, t2.LAST_ACCESS_TIME as LAST_ACCESS_TIME, t2.OWNER as OWNER
-        from TBL_PRIVS t1, TBLS t2 , DBS t3
-        where t1.TBL_ID=t2.TBL_ID
-        and t2.DB_ID=t3.DB_ID
-        and lcase(t1.TBL_PRIV) in ('select','all')
-        and t1.PRINCIPAL_NAME in (select ROLE_NAME from ROLES where ROLE_ID in (select ROLE_ID from ROLE_MAP where PRINCIPAL_NAME = #{userName,jdbcType=VARCHAR}))
-        and t3.NAME = #{dbName,jdbcType=VARCHAR}
+        from TBL_PRIVS t1
+            inner join TBLS t2 on t1.TBL_ID=t2.TBL_ID and t1.TBL_PRIV in ('SELECT','ALL')
+            inner join DBS t3 on t2.DB_ID=t3.DB_ID and t3.NAME = #{dbName,jdbcType=VARCHAR}
+        where  (t1.PRINCIPAL_TYPE = 'USER' and t1.PRINCIPAL_NAME = #{userName,jdbcType=VARCHAR})
+        <if test="roles != null and roles.size() > 0">
+            OR (t1.PRINCIPAL_TYPE = 'ROLE' AND t1.PRINCIPAL_NAME IN
+            <foreach collection="roles" item="id" index="index" open="(" close=")" separator=",">
+                #{id}
+            </foreach>)
+        </if>
         order by NAME;
     </select>
 
-
     <select id="getTablesByDbName" resultType="map"  parameterType="map">
         select t2.TBL_NAME as NAME, t2.TBL_TYPE as TYPE, t2.CREATE_TIME as CREATE_TIME, t2.LAST_ACCESS_TIME as LAST_ACCESS_TIME, t2.OWNER as OWNER
-        from TBLS t2 , DBS t3
-        where
-        t2.DB_ID=t3.DB_ID
-        and t3.NAME = #{dbName,jdbcType=VARCHAR}
-        order by NAME;
+        from TBLS t2 inner join DBS t3 on t2.DB_ID = t3.DB_ID
+        where t3.NAME = #{dbName,jdbcType=VARCHAR}
+        order by t2.TBL_NAME;
     </select>
 
     <select id="getPartitionSize" resultType="java.lang.Long"  parameterType="map">
         select PARAM_VALUE from PARTITION_PARAMS
         where PARAM_KEY = 'totalSize'
         and PART_ID in (
-        select PART_ID from PARTITIONS
-        where PART_NAME = #{partitionName,jdbcType=VARCHAR}
-        and TBL_ID in(
-        select TBL_ID from `TBLS`
-        where TBL_NAME = #{tableName,jdbcType=VARCHAR}
-        and DB_ID in (select DB_ID from `DBS` where NAME = #{dbName,jdbcType=VARCHAR})
-        )
+            select PART_ID from PARTITIONS
+            where PART_NAME = #{partitionName,jdbcType=VARCHAR}
+            and TBL_ID in(
+                select TBL_ID from `TBLS`
+                where TBL_NAME = #{tableName,jdbcType=VARCHAR}
+                and DB_ID in (select DB_ID from `DBS` where NAME = #{dbName,jdbcType=VARCHAR})
+            )
         );
     </select>
 
     <select id="getPartitions" resultType="java.lang.String"  parameterType="map">
         select PART_NAME from PARTITIONS
         where TBL_ID in(
-        select TBL_ID from `TBLS`
-        where TBL_NAME = #{tableName,jdbcType=VARCHAR}
-        and DB_ID in (select DB_ID from `DBS` where NAME = #{dbName,jdbcType=VARCHAR})
+            select TBL_ID from `TBLS`
+            where TBL_NAME = #{tableName,jdbcType=VARCHAR}
+            and DB_ID in (select DB_ID from `DBS` where NAME = #{dbName,jdbcType=VARCHAR})
         );
     </select>
 
     <select id="getColumns" resultType="map"  parameterType="map">
         SELECT COMMENT, COLUMN_NAME, TYPE_NAME FROM COLUMNS_V2
         where CD_ID in(
-        select CD_ID from SDS where SD_ID in (
-        select SD_ID from `TBLS`
-        where TBL_NAME = #{tableName,jdbcType=VARCHAR}
-        and DB_ID in (select DB_ID from `DBS` where NAME = #{dbName,jdbcType=VARCHAR})
-        )
+            select CD_ID from SDS where SD_ID in (
+                select SD_ID from `TBLS`
+                where TBL_NAME = #{tableName,jdbcType=VARCHAR}
+                and DB_ID in (select DB_ID from `DBS` where NAME = #{dbName,jdbcType=VARCHAR})
+            )
         ) order by INTEGER_IDX asc;
     </select>
 
     <select id="getStorageDescriptionIDByDbTableNameAndUser" resultType="map"  parameterType="map">
         select t2.TBL_NAME as NAME, t2.TBL_TYPE as TYPE, t2.CREATE_TIME as CREATE_TIME, t2.LAST_ACCESS_TIME as LAST_ACCESS_TIME, t2.OWNER as OWNER, t2.SD_ID as SD_ID
-        from DB_PRIVS t1,TBLS t2, DBS t3
-        where  t1.DB_ID =t3.DB_ID
-          and t2.DB_ID=t3.DB_ID
-          and lcase(t1.DB_PRIV) in ('select','all')
-          and lcase(t1.PRINCIPAL_NAME) = #{userName,jdbcType=VARCHAR}
-          and t3.NAME = #{dbName,jdbcType=VARCHAR}
-          and t2.TBL_NAME = #{tableName,jdbcType=VARCHAR}
-        union
-        select t2.TBL_NAME as NAME, t2.TBL_TYPE as TYPE, t2.CREATE_TIME as CREATE_TIME, t2.LAST_ACCESS_TIME as LAST_ACCESS_TIME, t2.OWNER as OWNER, t2.SD_ID as SD_ID
-        from DB_PRIVS t1,TBLS t2, DBS t3
-        where  t1.DB_ID =t3.DB_ID
-          and t2.DB_ID=t3.DB_ID
-          and lcase(t1.DB_PRIV) in ('select','all')
-          and lcase(t1.PRINCIPAL_NAME) in (select ROLE_NAME from ROLES where ROLE_ID in (select ROLE_ID from ROLE_MAP where PRINCIPAL_NAME = #{userName,jdbcType=VARCHAR}))
-          and t3.NAME = #{dbName,jdbcType=VARCHAR}
-          and t2.TBL_NAME = #{tableName,jdbcType=VARCHAR}
-        union
-        select t2.TBL_NAME as NAME, t2.TBL_TYPE as TYPE, t2.CREATE_TIME as CREATE_TIME, t2.LAST_ACCESS_TIME as LAST_ACCESS_TIME, t2.OWNER as OWNER, t2.SD_ID as SD_ID
-        from TBL_PRIVS t1, TBLS t2 , DBS t3
-        where t1.TBL_ID=t2.TBL_ID
-          and t2.DB_ID=t3.DB_ID
-          and lcase(t1.TBL_PRIV) in ('select','all')
-          and t1.PRINCIPAL_NAME = #{userName,jdbcType=VARCHAR}
-          and t3.NAME = #{dbName,jdbcType=VARCHAR}
-          and t2.TBL_NAME = #{tableName,jdbcType=VARCHAR}
+        from DB_PRIVS t1
+            inner join TBLS t2 on t1.DB_ID = t2.DB_ID and t1.DB_PRIV in ('SELECT','ALL') and t2.TBL_NAME = #{tableName,jdbcType=VARCHAR}
+            inner join DBS t3 on t1.DB_ID = t3.DB_ID and t3.NAME = #{dbName,jdbcType=VARCHAR}
+        where  (t1.PRINCIPAL_TYPE = 'USER' and t1.PRINCIPAL_NAME = #{userName,jdbcType=VARCHAR})
+        <if test="roles != null and roles.size() > 0">
+            OR (t1.PRINCIPAL_TYPE = 'ROLE' AND t1.PRINCIPAL_NAME IN
+            <foreach collection="roles" item="id" index="index" open="(" close=")" separator=",">
+                #{id}
+            </foreach>)
+        </if>
         union
         select t2.TBL_NAME as NAME, t2.TBL_TYPE as TYPE, t2.CREATE_TIME as CREATE_TIME, t2.LAST_ACCESS_TIME as LAST_ACCESS_TIME, t2.OWNER as OWNER, t2.SD_ID as SD_ID
-        from TBL_PRIVS t1, TBLS t2 , DBS t3
-        where t1.TBL_ID=t2.TBL_ID
-          and t2.DB_ID=t3.DB_ID
-          and lcase(t1.TBL_PRIV) in ('select','all')
-          and t1.PRINCIPAL_NAME in (select ROLE_NAME from ROLES where ROLE_ID in (select ROLE_ID from ROLE_MAP where PRINCIPAL_NAME = #{userName,jdbcType=VARCHAR}))
-          and t3.NAME = #{dbName,jdbcType=VARCHAR}
-          and t2.TBL_NAME = #{tableName,jdbcType=VARCHAR}
+        from TBL_PRIVS t1
+            inner join TBLS t2 on t1.TBL_ID = t2.TBL_ID and t1.TBL_PRIV in ('SELECT','ALL') and t2.TBL_NAME = #{tableName,jdbcType=VARCHAR}
+            inner join DBS t3 on t2.DB_ID=t3.DB_ID and t3.NAME = #{dbName,jdbcType=VARCHAR}
+        where  (t1.PRINCIPAL_TYPE = 'USER' and t1.PRINCIPAL_NAME = #{userName,jdbcType=VARCHAR})
+        <if test="roles != null and roles.size() > 0">
+            OR (t1.PRINCIPAL_TYPE = 'ROLE' AND t1.PRINCIPAL_NAME IN
+            <foreach collection="roles" item="id" index="index" open="(" close=")" separator=",">
+                #{id}
+            </foreach>)
+        </if>
         order by NAME;
     </select>
 
     <select id="getColumnsByStorageDescriptionID" resultType="map"  parameterType="map">
         SELECT COMMENT, COLUMN_NAME, TYPE_NAME FROM COLUMNS_V2
         where CD_ID in(
-            select CD_ID from SDS where SD_ID = #{SD_ID,jdbcType=VARCHAR}
+            select CD_ID from SDS where SD_ID = #{sdId,jdbcType=VARCHAR}
         ) order by INTEGER_IDX asc;
     </select>
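
A note on the <if test="roles != null and roles.size() > 0"> guards above: MyBatis renders an empty <foreach> as an invalid "IN ()" clause, so each role predicate is emitted only when the caller actually resolved some roles. A sketch of the calling side under that assumption:

    // Roles default to an empty list, so the generated SQL keeps only the
    // USER branch; once withRoles(...) adds entries, the ROLE branch and its
    // IN (...) list are rendered as well.
    MetadataQueryParam queryParam = MetadataQueryParam.of(userName).withDbName(dbName);
    queryParam.withRoles(hiveMetaDao.getRolesByUser(userName));
    hiveMetaDao.getTablesByDbNameAndUserAndRoles(queryParam);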
 
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/hive/dto/MetadataQueryParam.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/hive/dto/MetadataQueryParam.java
new file mode 100644
index 000000000..0a3d9d71d
--- /dev/null
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/hive/dto/MetadataQueryParam.java
@@ -0,0 +1,144 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.metadata.hive.dto;
+
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public class MetadataQueryParam {
+
+    /** the query user's username */
+    private String userName;
+
+    /** the query db name */
+    private String dbName;
+
+    /** the query table name */
+    private String tableName;
+
+    /** the query table's partition name */
+    private String partitionName;
+
+    /** the query storage description id */
+    private String sdId;
+
+    /** the user's roles */
+    private List<String> roles;
+
+    public static MetadataQueryParam of(String userName) {
+        return new MetadataQueryParam(userName);
+    }
+
+    public MetadataQueryParam() {
+        this.roles = new ArrayList<>();
+    }
+
+    public MetadataQueryParam(String username) {
+        this.userName = username;
+        this.roles = new ArrayList<>();
+    }
+
+    public String getUserName() {
+        return userName;
+    }
+
+    public void setUserName(String userName) {
+        this.userName = userName;
+    }
+
+    public MetadataQueryParam withUserName(String userName) {
+        this.userName = userName;
+        return this;
+    }
+
+    public String getDbName() {
+        return dbName;
+    }
+
+    public void setDbName(String dbName) {
+        this.dbName = dbName;
+    }
+
+    public MetadataQueryParam withDbName(String dbName) {
+        this.dbName = dbName;
+        return this;
+    }
+
+    public String getTableName() {
+        return tableName;
+    }
+
+    public void setTableName(String tableName) {
+        this.tableName = tableName;
+    }
+
+    public MetadataQueryParam withTableName(String tableName) {
+        this.tableName = tableName;
+        return this;
+    }
+
+    public List<String> getRoles() {
+        return roles;
+    }
+
+    public void setRoles(List<String> roles) {
+        this.roles = roles;
+    }
+
+    public MetadataQueryParam withRoles(List<String> roles) {
+        if (roles != null && !roles.isEmpty()) {
+            this.roles.addAll(roles);
+        }
+        return this;
+    }
+
+    public MetadataQueryParam withRole(String role) {
+        if (StringUtils.isNotBlank(role)) {
+            this.roles.add(role);
+        }
+        return this;
+    }
+
+    public String getPartitionName() {
+        return partitionName;
+    }
+
+    public void setPartitionName(String partitionName) {
+        this.partitionName = partitionName;
+    }
+
+    public MetadataQueryParam withPartitionName(String partitionName) {
+        this.partitionName = partitionName;
+        return this;
+    }
+
+    public String getSdId() {
+        return sdId;
+    }
+
+    public void setSdId(String sdId) {
+        this.sdId = sdId;
+    }
+
+    public MetadataQueryParam withSdId(String sdId) {
+        this.sdId = sdId;
+        return this;
+    }
+}
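
Construction is meant to read fluently, as the restful layer below shows. Note that withRole/withRoles append to the internal roles list, which is why the constructors above initialize it with a mutable ArrayList rather than an immutable Collections.emptyList(). A usage sketch:

    MetadataQueryParam queryParam =
            MetadataQueryParam.of(userName)
                    .withDbName(database)
                    .withTableName(table)
                    .withPartitionName(partition);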
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/DataSourceRestfulApi.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/DataSourceRestfulApi.java
index 97f7c7c91..c1fdba5f6 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/DataSourceRestfulApi.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/DataSourceRestfulApi.java
@@ -17,10 +17,10 @@
 
 package org.apache.linkis.metadata.restful.api;
 
+import org.apache.linkis.metadata.hive.dto.MetadataQueryParam;
 import org.apache.linkis.metadata.restful.remote.DataSourceRestfulRemote;
 import org.apache.linkis.metadata.service.DataSourceService;
 import org.apache.linkis.metadata.service.HiveMetaWithPermissionService;
-import org.apache.linkis.metadata.utils.MdqConstants;
 import org.apache.linkis.server.Message;
 import org.apache.linkis.server.utils.ModuleUserUtils;
 
@@ -38,9 +38,6 @@ import com.fasterxml.jackson.databind.JsonNode;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.HashMap;
-import java.util.Map;
-
 @RestController
 @RequestMapping(path = "/datasource")
 public class DataSourceRestfulApi implements DataSourceRestfulRemote {
@@ -83,8 +80,9 @@ public class DataSourceRestfulApi implements DataSourceRestfulRemote {
             @RequestParam(value = "database", required = false) String database,
             HttpServletRequest req) {
         String userName = ModuleUserUtils.getOperationUser(req, "get tables");
+        MetadataQueryParam queryParam = MetadataQueryParam.of(userName).withDbName(database);
         try {
-            JsonNode tables = dataSourceService.queryTables(database, userName);
+            JsonNode tables = dataSourceService.queryTables(queryParam);
             return Message.ok("").data("tables", tables);
         } catch (Exception e) {
             logger.error("Failed to queryTables", e);
@@ -99,13 +97,12 @@ public class DataSourceRestfulApi implements DataSourceRestfulRemote {
             @RequestParam(value = "table", required = false) String table,
             HttpServletRequest req) {
         String userName = ModuleUserUtils.getOperationUser(req, "get columns of table " + table);
+        MetadataQueryParam queryParam =
+                MetadataQueryParam.of(userName).withDbName(database).withTableName(table);
         try {
-            Map<String, String> map = new HashMap<String, String>();
-            map.put(MdqConstants.DB_NAME_KEY(), database);
-            map.put(MdqConstants.TABLE_NAME_KEY(), table);
-            map.put(MdqConstants.USERNAME_KEY(), userName);
             JsonNode columns =
-                    hiveMetaWithPermissionService.getColumnsByDbTableNameAndOptionalUserName(map);
+                    hiveMetaWithPermissionService.getColumnsByDbTableNameAndOptionalUserName(
+                            queryParam);
             return Message.ok("").data("columns", columns);
         } catch (Exception e) {
             logger.error("Failed to get data table structure(获取数据表结构失败)", e);
@@ -121,12 +118,17 @@ public class DataSourceRestfulApi implements DataSourceRestfulRemote {
             @RequestParam(value = "partition", required = false) String partition,
             HttpServletRequest req) {
         String userName = ModuleUserUtils.getOperationUser(req, "get size ");
+        MetadataQueryParam queryParam =
+                MetadataQueryParam.of(userName)
+                        .withDbName(database)
+                        .withTableName(table)
+                        .withPartitionName(partition);
         try {
             JsonNode sizeNode;
             if (StringUtils.isBlank(partition)) {
-                sizeNode = dataSourceService.getTableSize(database, table, userName);
+                sizeNode = dataSourceService.getTableSize(queryParam);
             } else {
-                sizeNode = dataSourceService.getPartitionSize(database, table, partition, userName);
+                sizeNode = dataSourceService.getPartitionSize(queryParam);
             }
             return Message.ok("").data("sizeInfo", sizeNode);
         } catch (Exception e) {
@@ -142,8 +144,10 @@ public class DataSourceRestfulApi implements DataSourceRestfulRemote {
             @RequestParam(value = "table", required = false) String table,
             HttpServletRequest req) {
         String userName = ModuleUserUtils.getOperationUser(req, "get partitions of " + table);
+        MetadataQueryParam queryParam =
+                MetadataQueryParam.of(userName).withDbName(database).withTableName(table);
         try {
-            JsonNode partitionNode = dataSourceService.getPartitions(database, table, userName);
+            JsonNode partitionNode = dataSourceService.getPartitions(queryParam);
             return Message.ok("").data("partitionInfo", partitionNode);
         } catch (Exception e) {
             logger.error("Failed to get table partition(获取表分区失败)", e);
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/MdqTableRestfulApi.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/MdqTableRestfulApi.java
index 74495d1b8..ed6845768 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/MdqTableRestfulApi.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/MdqTableRestfulApi.java
@@ -26,6 +26,7 @@ import org.apache.linkis.metadata.domain.mdq.vo.MdqTableFieldsInfoVO;
 import org.apache.linkis.metadata.domain.mdq.vo.MdqTablePartitionStatisticInfoVO;
 import org.apache.linkis.metadata.domain.mdq.vo.MdqTableStatisticInfoVO;
 import org.apache.linkis.metadata.exception.MdqIllegalParamException;
+import org.apache.linkis.metadata.hive.dto.MetadataQueryParam;
 import org.apache.linkis.metadata.service.MdqService;
 import org.apache.linkis.server.Message;
 import org.apache.linkis.server.utils.ModuleUserUtils;
@@ -67,11 +68,13 @@ public class MdqTableRestfulApi {
             @RequestParam(value = "tableName", required = false) String tableName,
             HttpServletRequest req) {
         String userName = ModuleUserUtils.getOperationUser(req, "getTableBaseInfo " + tableName);
+        MetadataQueryParam queryParam =
+                MetadataQueryParam.of(userName).withDbName(database).withTableName(tableName);
         MdqTableBaseInfoVO tableBaseInfo;
         if (mdqService.isExistInMdq(database, tableName, userName)) {
             tableBaseInfo = mdqService.getTableBaseInfoFromMdq(database, tableName, userName);
         } else {
-            tableBaseInfo = mdqService.getTableBaseInfoFromHive(database, tableName, userName);
+            tableBaseInfo = mdqService.getTableBaseInfoFromHive(queryParam);
         }
         return Message.ok().data("tableBaseInfo", tableBaseInfo);
     }
@@ -82,11 +85,13 @@ public class MdqTableRestfulApi {
             @RequestParam(value = "tableName", required = false) String tableName,
             HttpServletRequest req) {
         String userName = ModuleUserUtils.getOperationUser(req, "getTableFieldsInfo " + tableName);
+        MetadataQueryParam queryParam =
+                MetadataQueryParam.of(userName).withDbName(database).withTableName(tableName);
         List<MdqTableFieldsInfoVO> tableFieldsInfo;
         if (mdqService.isExistInMdq(database, tableName, userName)) {
             tableFieldsInfo = mdqService.getTableFieldsInfoFromMdq(database, tableName, userName);
         } else {
-            tableFieldsInfo = mdqService.getTableFieldsInfoFromHive(database, tableName, userName);
+            tableFieldsInfo = mdqService.getTableFieldsInfoFromHive(queryParam);
         }
         return Message.ok().data("tableFieldsInfo", tableFieldsInfo);
     }
@@ -102,8 +107,10 @@ public class MdqTableRestfulApi {
             throws IOException {
         String userName =
                 ModuleUserUtils.getOperationUser(req, "getTableStatisticInfo " + tableName);
+        MetadataQueryParam queryParam =
+                MetadataQueryParam.of(userName).withDbName(database).withTableName(tableName);
         MdqTableStatisticInfoVO tableStatisticInfo =
-                mdqService.getTableStatisticInfo(database, tableName, userName, partitionSort);
+                mdqService.getTableStatisticInfo(queryParam, partitionSort);
         int totalSize = 0;
         List<MdqTablePartitionStatisticInfoVO> partitionPage;
         List<MdqTablePartitionStatisticInfoVO> partitions = tableStatisticInfo.getPartitions();
@@ -159,8 +166,10 @@ public class MdqTableRestfulApi {
             throws IOException, MdqIllegalParamException {
         String userName =
                 ModuleUserUtils.getOperationUser(req, "getPartitionStatisticInfo " + tableName);
+        MetadataQueryParam queryParam =
+                MetadataQueryParam.of(userName).withDbName(database).withTableName(tableName);
         MdqTablePartitionStatisticInfoVO partition =
-                mdqService.getPartitionStatisticInfo(database, tableName, userName, partitionName);
+                mdqService.getPartitionStatisticInfo(queryParam, partitionName);
         return Message.ok().data("partitionStatisticInfo", partition);
     }
 
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/DataSourceService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/DataSourceService.java
index 0795b4b2a..00bedfd93 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/DataSourceService.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/DataSourceService.java
@@ -17,6 +17,8 @@
 
 package org.apache.linkis.metadata.service;
 
+import org.apache.linkis.metadata.hive.dto.MetadataQueryParam;
+
 import com.fasterxml.jackson.databind.JsonNode;
 
 public interface DataSourceService {
@@ -25,16 +27,15 @@ public interface DataSourceService {
 
     JsonNode getDbsWithTables(String userName) throws Exception;
 
-    JsonNode queryTables(String database, String userName);
+    JsonNode queryTables(MetadataQueryParam queryParam);
 
-    JsonNode queryTableMeta(String dbName, String tableName, String userName);
+    JsonNode queryTableMeta(MetadataQueryParam queryParam);
 
-    JsonNode queryTableMetaBySDID(String dbName, String tableName, String sdid);
+    JsonNode queryTableMetaBySDID(MetadataQueryParam queryParam);
 
-    JsonNode getTableSize(String dbName, String tableName, String userName);
+    JsonNode getTableSize(MetadataQueryParam queryParam);
 
-    JsonNode getPartitionSize(
-            String dbName, String tableName, String partitionName, String userName);
+    JsonNode getPartitionSize(MetadataQueryParam queryParam);
 
-    JsonNode getPartitions(String dbName, String tableName, String userName);
+    JsonNode getPartitions(MetadataQueryParam queryParam);
 }
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/HiveMetaWithPermissionService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/HiveMetaWithPermissionService.java
index c27de7e18..8e67a1692 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/HiveMetaWithPermissionService.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/HiveMetaWithPermissionService.java
@@ -16,6 +16,8 @@
  */
 package org.apache.linkis.metadata.service;
 
+import org.apache.linkis.metadata.hive.dto.MetadataQueryParam;
+
 import com.fasterxml.jackson.databind.JsonNode;
 
 import java.util.List;
@@ -25,7 +27,7 @@ public interface HiveMetaWithPermissionService {
 
     List<String> getDbsOptionalUserName(String userName);
 
-    List<Map<String, Object>> getTablesByDbNameAndOptionalUserName(Map<String, String> map);
+    List<Map<String, Object>> getTablesByDbNameAndOptionalUserName(MetadataQueryParam queryParam);
 
-    JsonNode getColumnsByDbTableNameAndOptionalUserName(Map<String, String> map);
+    JsonNode getColumnsByDbTableNameAndOptionalUserName(MetadataQueryParam queryParam);
 }
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/MdqService.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/MdqService.java
index b41f3f95a..106caeb47 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/MdqService.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/MdqService.java
@@ -23,6 +23,7 @@ import org.apache.linkis.metadata.domain.mdq.vo.MdqTableFieldsInfoVO;
 import org.apache.linkis.metadata.domain.mdq.vo.MdqTablePartitionStatisticInfoVO;
 import org.apache.linkis.metadata.domain.mdq.vo.MdqTableStatisticInfoVO;
 import org.apache.linkis.metadata.exception.MdqIllegalParamException;
+import org.apache.linkis.metadata.hive.dto.MetadataQueryParam;
 
 import java.io.IOException;
 import java.util.List;
@@ -46,8 +47,7 @@ public interface MdqService {
     Long persistTable(MdqTableBO mdqTableBO, String userName);
 
     MdqTableStatisticInfoVO getTableStatisticInfo(
-            String database, String tableName, String user, String partitionSort)
-            throws IOException;
+            MetadataQueryParam queryParam, String partitionSort) throws IOException;
 
     /**
      * Generate the sql for the frontend and the sparkEngine
@@ -61,19 +61,17 @@ public interface MdqService {
 
     MdqTableBaseInfoVO getTableBaseInfoFromMdq(String database, String tableName, String user);
 
-    MdqTableBaseInfoVO getTableBaseInfoFromHive(String database, String tableName, String user);
+    MdqTableBaseInfoVO getTableBaseInfoFromHive(MetadataQueryParam queryParam);
 
     List<MdqTableFieldsInfoVO> getTableFieldsInfoFromMdq(
             String database, String tableName, String user);
 
-    List<MdqTableFieldsInfoVO> getTableFieldsInfoFromHive(
-            String database, String tableName, String user);
+    List<MdqTableFieldsInfoVO> getTableFieldsInfoFromHive(MetadataQueryParam queryParam);
 
     MdqTableStatisticInfoVO getTableStatisticInfoFromHive(
-            String database, String tableName, String user, String partitionSort)
-            throws IOException;
+            MetadataQueryParam queryParam, String partitionSort) throws IOException;
 
     MdqTablePartitionStatisticInfoVO getPartitionStatisticInfo(
-            String database, String tableName, String userName, String partitionName)
+            MetadataQueryParam queryParam, String partitionName)
             throws IOException, MdqIllegalParamException;
 }
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/impl/DataSourceServiceImpl.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/impl/DataSourceServiceImpl.java
index ce88d574f..957fdc709 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/impl/DataSourceServiceImpl.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/impl/DataSourceServiceImpl.java
@@ -23,10 +23,10 @@ import org.apache.linkis.hadoop.common.utils.HDFSUtils;
 import org.apache.linkis.metadata.hive.config.DSEnum;
 import org.apache.linkis.metadata.hive.config.DataSource;
 import org.apache.linkis.metadata.hive.dao.HiveMetaDao;
+import org.apache.linkis.metadata.hive.dto.MetadataQueryParam;
 import org.apache.linkis.metadata.service.DataSourceService;
 import org.apache.linkis.metadata.service.HiveMetaWithPermissionService;
 import org.apache.linkis.metadata.util.DWSConfig;
-import org.apache.linkis.metadata.utils.MdqConstants;
 
 import org.apache.commons.collections.CollectionUtils;
 import org.apache.commons.io.IOUtils;
@@ -43,7 +43,6 @@ import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.node.ArrayNode;
 import com.fasterxml.jackson.databind.node.ObjectNode;
-import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -84,17 +83,19 @@ public class DataSourceServiceImpl implements DataSourceService {
 
     @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
     @Override
-    public JsonNode getDbsWithTables(String userName) throws Exception {
+    public JsonNode getDbsWithTables(String userName) {
         ArrayNode dbNodes = jsonMapper.createArrayNode();
         List<String> dbs = hiveMetaWithPermissionService.getDbsOptionalUserName(userName);
+        MetadataQueryParam queryParam = MetadataQueryParam.of(userName);
         for (String db : dbs) {
             if (StringUtils.isBlank(db) || db.contains(dbKeyword)) {
                 logger.info("db  will be filter: " + db);
                 continue;
             }
+            queryParam.setDbName(db);
             ObjectNode dbNode = jsonMapper.createObjectNode();
             dbNode.put("databaseName", db);
-            dbNode.put("tables", queryTables(db, userName));
+            dbNode.put("tables", queryTables(queryParam));
             dbNodes.add(dbNode);
         }
         return dbNodes;
@@ -102,13 +103,11 @@ public class DataSourceServiceImpl implements DataSourceService {
 
     @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
     @Override
-    public JsonNode queryTables(String database, String userName) {
-        List<Map<String, Object>> listTables = Lists.newArrayList();
+    public JsonNode queryTables(MetadataQueryParam queryParam) {
+        List<Map<String, Object>> listTables;
         try {
-            Map<String, String> map = Maps.newHashMap();
-            map.put("dbName", database);
-            map.put("userName", userName);
-            listTables = hiveMetaWithPermissionService.getTablesByDbNameAndOptionalUserName(map);
+            listTables =
+                    hiveMetaWithPermissionService.getTablesByDbNameAndOptionalUserName(queryParam);
         } catch (Throwable e) {
             logger.error("Failed to list Tables:", e);
             throw new RuntimeException(e);
@@ -119,7 +118,7 @@ public class DataSourceServiceImpl implements DataSourceService {
             ObjectNode tableNode = jsonMapper.createObjectNode();
             tableNode.put("tableName", (String) table.get("NAME"));
             tableNode.put("isView", table.get("TYPE").equals("VIRTUAL_VIEW"));
-            tableNode.put("databaseName", database);
+            tableNode.put("databaseName", queryParam.getDbName());
             tableNode.put("createdBy", (String) table.get("OWNER"));
             tableNode.put("createdAt", (Integer) table.get("CREATE_TIME"));
             tableNode.put("lastAccessAt", (Integer) table.get("LAST_ACCESS_TIME"));
@@ -130,13 +129,10 @@ public class DataSourceServiceImpl implements DataSourceService {
 
     @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
     @Override
-    public JsonNode queryTableMeta(String dbName, String tableName, String userName) {
-        logger.info("getTable:" + userName);
-        Map<String, String> param = Maps.newHashMap();
-        param.put("dbName", dbName);
-        param.put("tableName", tableName);
-        List<Map<String, Object>> columns = hiveMetaDao.getColumns(param);
-        List<Map<String, Object>> partitionKeys = hiveMetaDao.getPartitionKeys(param);
+    public JsonNode queryTableMeta(MetadataQueryParam queryParam) {
+        logger.info("getTable:" + queryParam.getTableName());
+        List<Map<String, Object>> columns = hiveMetaDao.getColumns(queryParam);
+        List<Map<String, Object>> partitionKeys = hiveMetaDao.getPartitionKeys(queryParam);
         return getJsonNodesFromColumnMap(columns, partitionKeys);
     }
 
@@ -164,34 +160,28 @@ public class DataSourceServiceImpl implements DataSourceService {
 
     @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
     @Override
-    public JsonNode queryTableMetaBySDID(String dbName, String tableName, String sdid) {
-        logger.info("getTableMetabysdid : sdid = {}", sdid);
-        Map<String, String> param = Maps.newHashMap();
-        param.put(MdqConstants.DB_NAME_KEY(), dbName);
-        param.put(MdqConstants.TABLE_NAME_KEY(), tableName);
-        param.put(MdqConstants.SDID_KEY(), sdid);
-        List<Map<String, Object>> columns = hiveMetaDao.getColumnsByStorageDescriptionID(param);
-        List<Map<String, Object>> partitionKeys = hiveMetaDao.getPartitionKeys(param);
+    public JsonNode queryTableMetaBySDID(MetadataQueryParam queryParam) {
+        logger.info("getTableMetabysdid : sdid = {}", queryParam.getSdId());
+        List<Map<String, Object>> columns =
+                hiveMetaDao.getColumnsByStorageDescriptionID(queryParam);
+        List<Map<String, Object>> partitionKeys = hiveMetaDao.getPartitionKeys(queryParam);
         return getJsonNodesFromColumnMap(columns, partitionKeys);
     }
 
     @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
-    public String getTableLocation(String database, String tableName) {
-        Map<String, String> param = Maps.newHashMap();
-        param.put("dbName", database);
-        param.put("tableName", tableName);
-        String tableLocation = hiveMetaDao.getLocationByDbAndTable(param);
+    public String getTableLocation(MetadataQueryParam queryParam) {
+        String tableLocation = hiveMetaDao.getLocationByDbAndTable(queryParam);
         logger.info("tableLocation:" + tableLocation);
         return tableLocation;
     }
 
     @Override
-    public JsonNode getTableSize(String dbName, String tableName, String userName) {
-        logger.info("getTable:" + userName);
+    public JsonNode getTableSize(MetadataQueryParam queryParam) {
+        logger.info("getTable:" + queryParam.getTableName());
 
         String tableSize = "";
         try {
-            FileStatus tableFile = getFileStatus(this.getTableLocation(dbName, tableName));
+            FileStatus tableFile = getFileStatus(this.getTableLocation(queryParam));
             if (tableFile.isDirectory()) {
                 tableSize =
                         ByteTimeUtils.bytesToString(
@@ -205,42 +195,34 @@ public class DataSourceServiceImpl implements DataSourceService {
 
         ObjectNode sizeJson = jsonMapper.createObjectNode();
         sizeJson.put("size", tableSize);
-        sizeJson.put("tableName", dbName + "." + tableName);
+        sizeJson.put("tableName", queryParam.getDbName() + "." + queryParam.getTableName());
         return sizeJson;
     }
 
     @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
     @Override
-    public JsonNode getPartitionSize(
-            String dbName, String tableName, String partitionName, String userName) {
-        Map<String, String> map = Maps.newHashMap();
-        map.put("dbName", dbName);
-        map.put("tableName", tableName);
-        map.put("partitionName", partitionName);
-        map.put("userName", userName);
-        Long partitionSize = hiveMetaDao.getPartitionSize(map);
+    public JsonNode getPartitionSize(MetadataQueryParam queryParam) {
+
+        Long partitionSize = hiveMetaDao.getPartitionSize(queryParam);
         if (partitionSize == null) {
             partitionSize = 0L;
         }
         ObjectNode sizeJson = jsonMapper.createObjectNode();
         sizeJson.put("size", ByteTimeUtils.bytesToString(partitionSize));
-        sizeJson.put("tableName", dbName + "." + tableName);
-        sizeJson.put("partitionName", partitionName);
+        sizeJson.put("tableName", queryParam.getDbName() + "." + queryParam.getTableName());
+        sizeJson.put("partitionName", queryParam.getPartitionName());
         return sizeJson;
     }
 
     @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
     @Override
-    public JsonNode getPartitions(String dbName, String tableName, String userName) {
-        Map<String, String> map = Maps.newHashMap();
-        map.put("dbName", dbName);
-        map.put("tableName", tableName);
-        List<String> partitions = hiveMetaDao.getPartitions(map);
+    public JsonNode getPartitions(MetadataQueryParam queryParam) {
+        List<String> partitions = hiveMetaDao.getPartitions(queryParam);
         Collections.sort(partitions);
         Collections.reverse(partitions);
 
         ObjectNode partitionJson = jsonMapper.createObjectNode();
-        partitionJson.put("tableName", dbName + "." + tableName);
+        partitionJson.put("tableName", queryParam.getDbName() + "." + queryParam.getTableName());
         if (CollectionUtils.isEmpty(partitions)) {
             partitionJson.put("isPartition", false);
         } else {
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/impl/HiveMetaWithPermissionServiceImpl.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/impl/HiveMetaWithPermissionServiceImpl.java
index 18d4e2cdf..b9138f2e9 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/impl/HiveMetaWithPermissionServiceImpl.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/impl/HiveMetaWithPermissionServiceImpl.java
@@ -19,6 +19,7 @@ package org.apache.linkis.metadata.service.impl;
 import org.apache.linkis.metadata.hive.config.DSEnum;
 import org.apache.linkis.metadata.hive.config.DataSource;
 import org.apache.linkis.metadata.hive.dao.HiveMetaDao;
+import org.apache.linkis.metadata.hive.dto.MetadataQueryParam;
 import org.apache.linkis.metadata.service.DataSourceService;
 import org.apache.linkis.metadata.service.HiveMetaWithPermissionService;
 import org.apache.linkis.metadata.util.DWSConfig;
@@ -54,7 +55,8 @@ public class HiveMetaWithPermissionServiceImpl implements HiveMetaWithPermission
         }
         Boolean flag = DWSConfig.HIVE_PERMISSION_WITH_lOGIN_USER_ENABLED.getValue();
         if (flag) {
-            return hiveMetaDao.getDbsByUser(userName);
+            List<String> roles = hiveMetaDao.getRolesByUser(userName);
+            return hiveMetaDao.getDbsByUserAndRoles(userName, roles);
         } else {
             log.info("user {} to get all dbs no permission control", userName);
             return hiveMetaDao.getAllDbs();
@@ -62,47 +64,54 @@ public class HiveMetaWithPermissionServiceImpl implements HiveMetaWithPermission
     }
 
     @Override
-    public List<Map<String, Object>> getTablesByDbNameAndOptionalUserName(Map<String, String> map) {
+    public List<Map<String, Object>> getTablesByDbNameAndOptionalUserName(
+            MetadataQueryParam queryParam) {
         Boolean flag = DWSConfig.HIVE_PERMISSION_WITH_lOGIN_USER_ENABLED.getValue();
-        if (null == map) {
+        if (null == queryParam) {
             return null;
         }
-        String userName = map.get("userName");
+        String userName = queryParam.getUserName();
         if (adminUser.equals(userName)) {
             log.info("admin {} to get all tables ", userName);
-            return hiveMetaDao.getTablesByDbName(map);
+            return hiveMetaDao.getTablesByDbName(queryParam);
         }
         if (flag) {
-            return hiveMetaDao.getTablesByDbNameAndUser(map);
+            List<String> roles = hiveMetaDao.getRolesByUser(queryParam.getUserName());
+            queryParam.withRoles(roles);
+            return hiveMetaDao.getTablesByDbNameAndUserAndRoles(queryParam);
         } else {
-            log.info("user {} to getTablesByDbName no permission control", userName);
-            return hiveMetaDao.getTablesByDbName(map);
+            log.info(
+                    "user {} to getTablesByDbName no permission control", queryParam.getUserName());
+            return hiveMetaDao.getTablesByDbName(queryParam);
         }
     }
 
     @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
     @Override
-    public JsonNode getColumnsByDbTableNameAndOptionalUserName(Map<String, String> map) {
+    public JsonNode getColumnsByDbTableNameAndOptionalUserName(MetadataQueryParam queryParam) {
         Boolean flag = DWSConfig.HIVE_PERMISSION_WITH_lOGIN_USER_ENABLED.getValue();
-        if (null == map) {
+        if (null == queryParam) {
             return null;
         }
-        String userName = map.get(MdqConstants.USERNAME_KEY());
-        String dbName = map.get(MdqConstants.DB_NAME_KEY());
-        String tableName = map.get(MdqConstants.TABLE_NAME_KEY());
+        String userName = queryParam.getUserName();
+        String dbName = queryParam.getDbName();
+        String tableName = queryParam.getTableName();
         if (adminUser.equals(userName)) {
             log.info("admin {} to get all tables ", userName);
-            return dataSourceService.queryTableMeta(dbName, tableName, userName);
+            return dataSourceService.queryTableMeta(queryParam);
         }
         if (flag) {
+            List<String> roles = hiveMetaDao.getRolesByUser(userName);
+            queryParam.withRoles(roles);
             // with permission
             Map<String, Object> tableMap =
-                    hiveMetaDao.getStorageDescriptionIDByDbTableNameAndUser(map);
+                    hiveMetaDao.getStorageDescriptionIDByDbTableNameAndUser(queryParam);
             if (null != tableMap
                     && !tableMap.isEmpty()
                     && tableMap.containsKey(MdqConstants.SDID_KEY())) {
                 String sdid = tableMap.get(MdqConstants.SDID_KEY()).toString();
-                return dataSourceService.queryTableMetaBySDID(dbName, tableName, sdid);
+                queryParam.setSdId(sdid);
+                return dataSourceService.queryTableMetaBySDID(queryParam);
             } else {
                 log.error(
                         "User {} has no read permission for meta of db : {}, table : {}",
@@ -113,7 +122,7 @@ public class HiveMetaWithPermissionServiceImpl implements HiveMetaWithPermission
             }
         } else {
             log.info("user {} to getTablesByDbName no permission control", userName);
-            return dataSourceService.queryTableMeta(dbName, tableName, userName);
+            return dataSourceService.queryTableMeta(queryParam);
         }
     }
 }
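
Condensed, the column-permission flow implemented above reads as follows (names are from the diff; logging and error handling elided, so this is a reading aid rather than the literal method body):

    if (adminUser.equals(queryParam.getUserName())) {
        return dataSourceService.queryTableMeta(queryParam);      // admin bypass
    }
    if (DWSConfig.HIVE_PERMISSION_WITH_lOGIN_USER_ENABLED.getValue()) {
        queryParam.withRoles(hiveMetaDao.getRolesByUser(queryParam.getUserName()));
        Map<String, Object> tableMap =
                hiveMetaDao.getStorageDescriptionIDByDbTableNameAndUser(queryParam);
        if (tableMap == null || tableMap.isEmpty()
                || !tableMap.containsKey(MdqConstants.SDID_KEY())) {
            return null;                                          // no SELECT/ALL grant
        }
        queryParam.setSdId(tableMap.get(MdqConstants.SDID_KEY()).toString());
        return dataSourceService.queryTableMetaBySDID(queryParam);
    }
    return dataSourceService.queryTableMeta(queryParam);          // permission control off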
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/impl/MdqServiceImpl.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/impl/MdqServiceImpl.java
index 9da20c3ca..0ea69bbe2 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/impl/MdqServiceImpl.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/service/impl/MdqServiceImpl.java
@@ -38,6 +38,7 @@ import org.apache.linkis.metadata.domain.mdq.vo.MdqTableStatisticInfoVO;
 import org.apache.linkis.metadata.hive.config.DSEnum;
 import org.apache.linkis.metadata.hive.config.DataSource;
 import org.apache.linkis.metadata.hive.dao.HiveMetaDao;
+import org.apache.linkis.metadata.hive.dto.MetadataQueryParam;
 import org.apache.linkis.metadata.service.HiveMetaWithPermissionService;
 import org.apache.linkis.metadata.service.MdqService;
 import org.apache.linkis.metadata.type.MdqImportType;
@@ -54,13 +55,19 @@ import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Service;
 import org.springframework.transaction.annotation.Transactional;
 
-import com.google.common.collect.Maps;
 import com.google.gson.Gson;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
 import java.util.stream.Collectors;
 
 @Service
@@ -165,10 +172,9 @@ public class MdqServiceImpl implements MdqService {
     @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
     @Override
     public MdqTableStatisticInfoVO getTableStatisticInfo(
-            String database, String tableName, String user, String partitionSort)
-            throws IOException {
+            MetadataQueryParam queryParam, String partitionSort) throws IOException {
         MdqTableStatisticInfoVO mdqTableStatisticInfoVO =
-                getTableStatisticInfoFromHive(database, tableName, user, partitionSort);
+                getTableStatisticInfoFromHive(queryParam, partitionSort);
         return mdqTableStatisticInfoVO;
     }
 
@@ -198,22 +204,20 @@ public class MdqServiceImpl implements MdqService {
 
     @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
     @Override
-    public MdqTableBaseInfoVO getTableBaseInfoFromHive(
-            String database, String tableName, String user) {
-        Map<String, String> map = Maps.newHashMap();
-        map.put("dbName", database);
-        map.put("userName", user);
-        map.put("tableName", tableName);
+    public MdqTableBaseInfoVO getTableBaseInfoFromHive(MetadataQueryParam queryParam) {
         List<Map<String, Object>> tables =
-                hiveMetaWithPermissionService.getTablesByDbNameAndOptionalUserName(map);
-        List<Map<String, Object>> partitionKeys = hiveMetaDao.getPartitionKeys(map);
+                hiveMetaWithPermissionService.getTablesByDbNameAndOptionalUserName(queryParam);
+        List<Map<String, Object>> partitionKeys = hiveMetaDao.getPartitionKeys(queryParam);
         Optional<Map<String, Object>> tableOptional =
-                tables.parallelStream().filter(f -> tableName.equals(f.get("NAME"))).findFirst();
+                tables.parallelStream()
+                        .filter(f -> queryParam.getTableName().equals(f.get("NAME")))
+                        .findFirst();
         Map<String, Object> talbe =
                 tableOptional.orElseThrow(() -> new IllegalArgumentException("table does not exist"));
         MdqTableBaseInfoVO mdqTableBaseInfoVO =
-                DomainCoversionUtils.mapToMdqTableBaseInfoVO(talbe, database);
-        String tableComment = hiveMetaDao.getTableComment(database, tableName);
+                DomainCoversionUtils.mapToMdqTableBaseInfoVO(talbe, queryParam.getDbName());
+        String tableComment =
+                hiveMetaDao.getTableComment(queryParam.getDbName(), queryParam.getTableName());
         mdqTableBaseInfoVO.getBase().setComment(tableComment);
         mdqTableBaseInfoVO.getBase().setPartitionTable(!partitionKeys.isEmpty());
         return mdqTableBaseInfoVO;
@@ -230,13 +234,9 @@ public class MdqServiceImpl implements MdqService {
 
     @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
     @Override
-    public List<MdqTableFieldsInfoVO> getTableFieldsInfoFromHive(
-            String database, String tableName, String user) {
-        Map<String, String> param = Maps.newHashMap();
-        param.put("dbName", database);
-        param.put("tableName", tableName);
-        List<Map<String, Object>> columns = hiveMetaDao.getColumns(param);
-        List<Map<String, Object>> partitionKeys = hiveMetaDao.getPartitionKeys(param);
+    public List<MdqTableFieldsInfoVO> getTableFieldsInfoFromHive(MetadataQueryParam queryParam) {
+        List<Map<String, Object>> columns = hiveMetaDao.getColumns(queryParam);
+        List<Map<String, Object>> partitionKeys = hiveMetaDao.getPartitionKeys(queryParam);
         List<MdqTableFieldsInfoVO> normalColumns =
                 DomainCoversionUtils.normalColumnListToMdqTableFieldsInfoVOList(columns);
         List<MdqTableFieldsInfoVO> partitions =
@@ -248,19 +248,14 @@ public class MdqServiceImpl implements MdqService {
     @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
     @Override
     public MdqTableStatisticInfoVO getTableStatisticInfoFromHive(
-            String database, String tableName, String user, String partitionSort)
-            throws IOException {
-        Map<String, String> map = Maps.newHashMap();
-        map.put("dbName", database);
-        map.put("tableName", tableName);
-        List<String> partitions = hiveMetaDao.getPartitions(map);
+            MetadataQueryParam queryParam, String partitionSort) throws IOException {
+        List<String> partitions = hiveMetaDao.getPartitions(queryParam);
         MdqTableStatisticInfoVO mdqTableStatisticInfoVO = new MdqTableStatisticInfoVO();
         mdqTableStatisticInfoVO.setRowNum(0); // next version
         mdqTableStatisticInfoVO.setTableLastUpdateTime(null);
-        mdqTableStatisticInfoVO.setFieldsNum(
-                getTableFieldsInfoFromHive(database, tableName, user).size());
+        mdqTableStatisticInfoVO.setFieldsNum(getTableFieldsInfoFromHive(queryParam).size());
 
-        String tableLocation = getTableLocation(database, tableName);
+        String tableLocation = getTableLocation(queryParam);
         mdqTableStatisticInfoVO.setTableSize(getTableSize(tableLocation));
         mdqTableStatisticInfoVO.setFileNum(getTableFileNum(tableLocation));
         if (partitions.isEmpty()) {
@@ -278,9 +273,8 @@ public class MdqServiceImpl implements MdqService {
     @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
     @Override
     public MdqTablePartitionStatisticInfoVO getPartitionStatisticInfo(
-            String database, String tableName, String userName, String partitionPath)
-            throws IOException {
-        String tableLocation = getTableLocation(database, tableName);
+            MetadataQueryParam queryParam, String partitionPath) throws IOException {
+        String tableLocation = getTableLocation(queryParam);
         logger.info("start to get partitionStatisticInfo,path:{}", tableLocation + partitionPath);
         return create(tableLocation + partitionPath);
     }
@@ -307,14 +301,14 @@ public class MdqServiceImpl implements MdqService {
                             getMdqTablePartitionStatisticInfoVO(
                                     subPartitions, subPartitionPath, partitionSort);
                     // sort
-                    if ("asc".equals(partitionSort))
+                    if ("asc".equals(partitionSort)) {
                         childrens =
                                 childrens.stream()
                                         .sorted(
                                                 Comparator.comparing(
                                                         MdqTablePartitionStatisticInfoVO::getName))
                                         .collect(Collectors.toList());
-                    else
+                    } else {
                         childrens =
                                 childrens.stream()
                                         .sorted(
@@ -323,6 +317,7 @@ public class MdqServiceImpl implements MdqService {
                                                                         ::getName)
                                                         .reversed())
                                         .collect(Collectors.toList());
+                    }
                     mdqTablePartitionStatisticInfoVO.setChildrens(childrens);
                     statisticInfoVOS.add(mdqTablePartitionStatisticInfoVO);
                 });
@@ -336,7 +331,9 @@ public class MdqServiceImpl implements MdqService {
      * @return
      */
     private Tunple<String, String> splitStrByFirstSlanting(String str) {
-        if (StringUtils.isBlank(str)) return null;
+        if (StringUtils.isBlank(str)) {
+            return null;
+        }
         int index = str.indexOf("/");
         if (index == -1) {
             return new Tunple<>(str, null);
@@ -374,11 +371,8 @@ public class MdqServiceImpl implements MdqService {
     }
 
     @DataSource(name = DSEnum.FIRST_DATA_SOURCE)
-    public String getTableLocation(String database, String tableName) {
-        Map<String, String> param = Maps.newHashMap();
-        param.put("dbName", database);
-        param.put("tableName", tableName);
-        String tableLocation = hiveMetaDao.getLocationByDbAndTable(param);
+    public String getTableLocation(MetadataQueryParam queryParam) {
+        String tableLocation = hiveMetaDao.getLocationByDbAndTable(queryParam);
         logger.info("tableLocation:" + tableLocation);
         return tableLocation;
     }
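
The net effect of this file's changes is that the former (database, tableName,
user) argument triples collapse into the single carrier object. A hedged usage
sketch, assuming the reduced DTO above and an injected MdqService named
mdqService; the literal values are illustrative only:

    MetadataQueryParam queryParam = new MetadataQueryParam();
    queryParam.setUserName("hadoop");     // hypothetical user
    queryParam.setDbName("default");      // hypothetical database
    queryParam.setTableName("my_table");  // hypothetical table

    MdqTableBaseInfoVO baseInfo = mdqService.getTableBaseInfoFromHive(queryParam);
    // getTableStatisticInfoFromHive declares IOException, so callers must handle it
    MdqTableStatisticInfoVO statInfo =
            mdqService.getTableStatisticInfoFromHive(queryParam, "asc");
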
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java
index 447592400..cef527271 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java
@@ -36,7 +36,7 @@ public class DWSConfig {
 
     // wds.linkis.metadata.hive.encode.enable toggles BASE64 encoding/decoding of the Hive password
     public static final CommonVars<Boolean> HIVE_PASS_ENCODE_ENABLED =
-            CommonVars.apply("wds.linkis.metadata.hive.encode.enabled", new Boolean(false));
+            CommonVars.apply("wds.linkis.metadata.hive.encode.enabled", false);
 
     public static CommonVars<Boolean> HIVE_PERMISSION_WITH_lOGIN_USER_ENABLED =
             CommonVars$.MODULE$.apply(
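
The change above replaces the deprecated new Boolean(false) boxing constructor
with the primitive literal, which autoboxing converts for CommonVars.apply. A
sketch of how the flag would typically be consumed, assuming CommonVars exposes
the resolved value through getValue() as it does elsewhere in Linkis:

    // Assumption: getValue() returns the configured value, or the default (false).
    boolean encodeEnabled = DWSConfig.HIVE_PASS_ENCODE_ENABLED.getValue();
    if (encodeEnabled) {
        // decode the BASE64-encoded Hive metastore password before connecting
    }
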
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/utils/MdqConstants.scala b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/utils/MdqConstants.scala
index fd47854f0..8389a4676 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/utils/MdqConstants.scala
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/utils/MdqConstants.scala
@@ -20,12 +20,6 @@ package org.apache.linkis.metadata.utils
 
 object MdqConstants {
 
-  val DB_NAME_KEY = "dbName"
-
-  val TABLE_NAME_KEY = "tableName"
-
-  val USERNAME_KEY = "userName"
-
   val SDID_KEY = "SD_ID"
 
 }
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/utils/MdqUtils.scala b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/utils/MdqUtils.scala
index b58d98c97..63cdeb870 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/utils/MdqUtils.scala
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/utils/MdqUtils.scala
@@ -26,9 +26,9 @@ import com.google.gson._
 object MdqUtils {
 
 
-  val gson:Gson= {
+  val gson: Gson = {
     val gsonBuilder = new GsonBuilder()
-    gsonBuilder.registerTypeAdapter(classOf[Date], new JsonDeserializer[Date](){
+    gsonBuilder.registerTypeAdapter(classOf[Date], new JsonDeserializer[Date]() {
       override def deserialize(json: JsonElement, _type: Type, context: JsonDeserializationContext): Date = {
         new Date(json.getAsJsonPrimitive.getAsLong)
       }
@@ -38,11 +38,8 @@ object MdqUtils {
 
 
 
-  def ruleString(code:String):String = {
+  def ruleString(code: String): String = {
     if (code.length <= 1000) code else code.substring(0, 1000)
   }
 
-
-
-
 }
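
ruleString simply caps a code snippet at 1,000 characters. Since Scala objects
compile with static forwarders, a Java caller can invoke it directly; a hedged
one-liner, with longSql standing in for any SQL text:

    // Assumption: the static forwarder MdqUtils.ruleString(String) is visible to Java.
    String truncated = org.apache.linkis.metadata.utils.MdqUtils.ruleString(longSql);
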


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org