Posted to commits@linkis.apache.org by ca...@apache.org on 2023/02/16 04:56:16 UTC

[linkis] branch dev-1.3.2 updated: [Feature][linkis-engineplugin-spark] spark etl linkis datasource support (#4048)

This is an automated email from the ASF dual-hosted git repository.

casion pushed a commit to branch dev-1.3.2
in repository https://gitbox.apache.org/repos/asf/linkis.git


The following commit(s) were added to refs/heads/dev-1.3.2 by this push:
     new 3ffce6db8 [Feature][linkis-engineplugin-spark] spark etl linkis datasource support (#4048)
3ffce6db8 is described below

commit 3ffce6db843221c0a659a64337a42e8791a39231
Author: rarexixi <ra...@gmail.com>
AuthorDate: Thu Feb 16 12:56:07 2023 +0800

    [Feature][linkis-engineplugin-spark] spark etl linkis datasource support (#4048)
---
 docs/errorcode/spark-errorcode.md                  |  13 +-
 linkis-dist/package/db/linkis_dml.sql              | 207 +++++++++-------
 .../db/upgrade/1.3.2_schema/mysql/linkis_dml.sql   |  40 ++++
 ...tion.java => DataSourceNotConfigException.java} |   4 +-
 ...ption.java => DatabaseNotSupportException.java} |   4 +-
 .../spark/datacalc/model/DataCalcDataSource.java   |  36 +--
 .../spark/datacalc/model/SinkConfig.java           |  12 +
 .../spark/datacalc/model/SourceConfig.java         |  12 +
 .../datacalc/service/LinkisDataSourceContext.java  | 102 ++++++++
 .../datacalc/service/LinkisDataSourceService.java  |  24 +-
 .../strategy/BaseMySqlStrategy.java}               |  20 +-
 .../strategy/BasePostgreSqlStrategy.java}          |  19 +-
 .../strategy/ClickHouseStrategy.java}              |  20 +-
 .../strategy/DB2Strategy.java}                     |  29 ++-
 .../service/strategy/DataSourceStrategy.java       |  57 +++++
 .../strategy/DorisStrategy.java}                   |  15 +-
 .../strategy/MySqlStrategy.java}                   |  17 +-
 .../strategy/NormalStrategy.java}                  |  47 ++--
 .../datacalc/service/strategy/OracleStrategy.java  |  54 +++++
 .../strategy/PostgreSqlStrategy.java}              |  17 +-
 .../service/strategy/SqlServerStrategy.java        |  57 +++++
 .../strategy/TiDBStrategy.java}                    |  15 +-
 .../spark/datacalc/sink/FileSinkConfig.java        |  17 +-
 .../spark/datacalc/sink/HiveSinkConfig.java        |  13 -
 .../spark/datacalc/sink/JdbcSinkConfig.java        |  11 -
 .../spark/datacalc/sink/ManagedJdbcSinkConfig.java |  11 -
 .../spark/datacalc/source/FileSourceConfig.java    |  17 +-
 .../spark/datacalc/source/JdbcSourceConfig.java    |  12 -
 .../datacalc/source/ManagedJdbcSourceConfig.java   |  12 -
 .../spark/datacalc/util/PluginUtil.java            |  21 +-
 .../spark/errorcode/SparkErrorCodeSummary.java     |   4 +-
 .../spark/datacalc/sink/FileSink.scala             |   9 +-
 .../spark/datacalc/sink/HiveSink.scala             |  17 +-
 .../spark/datacalc/sink/JdbcSink.scala             |  19 +-
 .../spark/datacalc/sink/ManagedJdbcSink.scala      |  11 +-
 .../spark/datacalc/source/FileSource.scala         |   9 +-
 .../spark/datacalc/source/JdbcSource.scala         |   9 +-
 .../spark/datacalc/source/ManagedJdbcSource.scala  |  11 +-
 .../spark/datacalc/transform/SqlTransform.scala    |   9 +-
 .../spark/datacalc/TestDataCalcDataSource.scala    | 262 +++++++++++++++++++++
 .../GetInfoPublishedByDataSourceNameAction.scala   |   8 +-
 41 files changed, 912 insertions(+), 391 deletions(-)
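
The file list points at the heart of the change: a new service/strategy layer under datacalc that resolves a Linkis-managed datasource into concrete JDBC connection details, one strategy per database type. A minimal sketch of that dispatch, assuming hypothetical method names (the class names DataSourceStrategy, MySqlStrategy, NormalStrategy and error code 43012 appear in this commit; everything else below is illustrative, not the actual Linkis implementation):

    import java.util.HashMap;
    import java.util.Map;

    // Sketch of the per-database strategy dispatch implied by the files above.
    abstract class DataSourceStrategy {
        // Assumed responsibility: build a JDBC URL from stored connection params.
        abstract String getJdbcUrl(String host, int port, String database);

        // Assumed fallback when the datasource record sets no driver class.
        abstract String defaultDriver();
    }

    class MySqlStrategy extends DataSourceStrategy {
        @Override
        String getJdbcUrl(String host, int port, String database) {
            return String.format("jdbc:mysql://%s:%d/%s", host, port, database);
        }

        @Override
        String defaultDriver() {
            return "com.mysql.jdbc.Driver"; // default seeded in linkis_dml.sql below
        }
    }

    class DataSourceStrategies {
        private static final Map<String, DataSourceStrategy> REGISTRY = new HashMap<>();

        static {
            REGISTRY.put("mysql", new MySqlStrategy());
            // oracle, postgresql, clickhouse, db2, sqlserver, doris, tidb ... per the diff
        }

        static DataSourceStrategy of(String type) {
            DataSourceStrategy strategy = REGISTRY.get(type.toLowerCase());
            if (strategy == null) {
                // mirrors new error code 43012: "DataSource type is not supported"
                throw new IllegalArgumentException("DataSource type is not supported: " + type);
            }
            return strategy;
        }
    }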

diff --git a/docs/errorcode/spark-errorcode.md b/docs/errorcode/spark-errorcode.md
index a57374cdd..2dee6b484 100644
--- a/docs/errorcode/spark-errorcode.md
+++ b/docs/errorcode/spark-errorcode.md
@@ -15,5 +15,14 @@
 |spark |40010|The request to the MDQ service to parse into executable SQL failed(向MDQ服务请求解析为可以执行的sql时失败)|REQUEST_MDQ_FAILED|SparkErrorCodeSummary|
 |spark |420001|Invalid EngineConn engine session obj, failed to create sparkSql executor(EngineConn 引擎会话 obj 无效,无法创建 sparkSql 执行程序)|INVALID_CREATE_SPARKSQL|SparkErrorCodeSummary|
 |spark |420002|Invalid EngineConn engine session obj, failed to create sparkPython executor(EngineConn 引擎会话 obj 无效,无法创建 sparkPython 执行程序)|INVALID_CREATE_SPARKPYTHON|SparkErrorCodeSummary|
-
-
+|spark |43001|Config data validate failed (data_calc JSON验证失败)|DATA_CALC_CONFIG_VALID_FAILED|SparkErrorCodeSummary|
+|spark |43002|xxx is not a valid type (data_calc 配置类型xxx不支持)|DATA_CALC_CONFIG_TYPE_NOT_VALID|SparkErrorCodeSummary|
+|spark |43011|DataSource xxx is not configured! (data_calc 数据源xxx未配置)|DATA_CALC_DATASOURCE_NOT_CONFIG|SparkErrorCodeSummary|
+|spark |43012|DataSource type is not supported (data_calc 数据源类型不支持)|DATA_CALC_DATABASE_NOT_SUPPORT|SparkErrorCodeSummary|
+|spark |43021|The columns' name or data type in the select statement does not match target table column (查询语句中的字段和目标表字段不匹配)|DATA_CALC_COLUMN_NOT_MATCH|SparkErrorCodeSummary|
+|spark |43022|The data to be inserted need to have the same number of columns as the target table (插入表的字段数量需要和select语句中的字段数量相等)|DATA_CALC_COLUMN_NUM_NOT_MATCH|SparkErrorCodeSummary|
+|spark |43023|Target table's columns(xxx) are not exist in source columns (目标表中的字段xxx在select语句中不存在)|DATA_CALC_FIELD_NOT_EXIST|SparkErrorCodeSummary|
+|spark |43024|Please set xxx in variables (data_calc需要配置变量xxx)|DATA_CALC_VARIABLE_NOT_EXIST|SparkErrorCodeSummary|
+|spark |43031|Not support Adapter for spark application. (不支持 Spark 应用的 ClusterDescriptorAdapter)|NOT_SUPPORT_ADAPTER|SparkErrorCodeSummary|
+|spark |43032|The application start failed, since yarn applicationId is null. (提交到Yarn上的程序提交/启动失败)|YARN_APPLICATION_START_FAILED|SparkErrorCodeSummary|
+|spark |43040|Not support method for requestExpectedResource. (不支持 requestExpectedResource 的方法)|NOT_SUPPORT_METHOD|SparkErrorCodeSummary|
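
The new 43001/43011/43012 codes guard the data_calc ETL JSON that this feature executes. For orientation, a hypothetical config reading through a Linkis-managed datasource and writing to Hive could look as follows; the plugin and field names are guesses inferred from the config classes in this commit (ManagedJdbcSourceConfig, SqlTransform, HiveSinkConfig), not a documented schema:

    {
      "sources": [
        {
          "name": "managed_jdbc",
          "config": {
            "datasource": "my_mysql_ds",
            "table": "t_user",
            "resultTable": "tmp_user"
          }
        }
      ],
      "transformations": [
        {
          "name": "sql",
          "config": {
            "resultTable": "tmp_result",
            "sql": "SELECT id, name FROM tmp_user"
          }
        }
      ],
      "sinks": [
        {
          "name": "hive",
          "config": {
            "sourceTable": "tmp_result",
            "targetDatabase": "tmp",
            "targetTable": "t_user_copy"
          }
        }
      ]
    }

If "my_mysql_ds" were not registered in the Linkis datasource manager, execution would surface 43011 (DataSource xxx is not configured).
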
diff --git a/linkis-dist/package/db/linkis_dml.sql b/linkis-dist/package/db/linkis_dml.sql
index be48b00f3..a13eab63c 100644
--- a/linkis-dist/package/db/linkis_dml.sql
+++ b/linkis-dist/package/db/linkis_dml.sql
@@ -554,105 +554,148 @@ INSERT INTO `linkis_ps_dm_datasource_type` (`name`, `description`, `option`, `cl
 
 select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'hive';
 SET @data_source=CONCAT('/data-source-manager/env-list/all/type/',@data_source_type_id);
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'envId', '集群环境(Cluster env)', 'Cluster env', NULL, 'SELECT', NULL, 1, '集群环境(Cluster env)', 'Cluster env', NULL, NULL, NULL, @data_source, now(), now());
+INSERT INTO `linkis_ps_dm_datasource_type_key`
+    (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`)
+VALUES (@data_source_type_id, 'envId', '集群环境(Cluster env)', 'Cluster env', NULL, 'SELECT', NULL, 1, '集群环境(Cluster env)', 'Cluster env', NULL, NULL, NULL, @data_source, now(), now());
 
 
 select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'elasticsearch';
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'username', '用户名(Username)' , 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, '', NULL, now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, '', NULL, now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'elasticUrls', 'ES连接URL(Elastic Url)', 'Elastic Url', NULL, 'TEXT', NULL, 1, 'ES连接URL(Elastic Url)', 'Elastic Url', '', NULL, '', NULL, now(), now());
-
+INSERT INTO `linkis_ps_dm_datasource_type_key`
+    (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`)
+VALUES (@data_source_type_id, 'username', '用户名(Username)' , 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, '', NULL, now(), now()),
+       (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, '', NULL, now(), now()),
+       (@data_source_type_id, 'elasticUrls', 'ES连接URL(Elastic Url)', 'Elastic Url', NULL, 'TEXT', NULL, 1, 'ES连接URL(Elastic Url)', 'Elastic Url', '', NULL, '', NULL, now(), now());
 
+-- https://dev.mysql.com/doc/connector-j/8.0/en/connector-j-reference-jdbc-url-format.html
 select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'mysql';
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 0, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'port', '端口号(Port)','Port', NULL, 'TEXT', NULL, 0, '端口号(Port)','Port', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'com.mysql.jdbc.Driver', 'TEXT', NULL, 0, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 0, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 0, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'databaseName', '数据库名(Database name)', 'Database name', NULL, 'TEXT', NULL, 0, '数据库名(Database name)', 'Database name', NULL, NULL, NULL, NULL,  now(), now());
-
-
+INSERT INTO `linkis_ps_dm_datasource_type_key`
+    (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`)
+VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 0, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'port', '端口号(Port)','Port', NULL, 'TEXT', NULL, 0, '端口号(Port)','Port', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'com.mysql.jdbc.Driver', 'TEXT', NULL, 0, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 0, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 0, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'databaseName', '数据库名(Database name)', 'Database name', NULL, 'TEXT', NULL, 0, '数据库名(Database name)', 'Database name', NULL, NULL, NULL, NULL,  now(), now());
+
+-- https://docs.oracle.com/en/database/oracle/oracle-database/21/jajdb/oracle/jdbc/OracleDriver.html
 select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'oracle';
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'oracle.jdbc.driver.OracleDriver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'instance', '实例名(instance)', 'Instance', NULL, 'TEXT', NULL, 0, '实例名(instance)', 'Instance', NULL, NULL, NULL, NULL,  now(), now());
+INSERT INTO `linkis_ps_dm_datasource_type_key`
+    (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`)
+VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'oracle.jdbc.driver.OracleDriver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'sid', 'SID', 'SID', NULL, 'TEXT', NULL, 0, 'SID', 'SID', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'serviceName', 'service_name', 'service_name', NULL, 'TEXT', NULL, 0, 'service_name', 'service_name', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'server', 'server', 'server', NULL, 'TEXT', NULL, 0, 'server', 'server', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'instance', '实例名(instance)', 'Instance', NULL, 'TEXT', NULL, 0, '实例名(instance)', 'Instance', NULL, NULL, NULL, NULL,  now(), now());
 
 select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'dm';
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'dm.jdbc.driver.DmDriver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'instance', '实例名(instance)', 'Instance', NULL, 'TEXT', NULL, 1, '实例名(instance)', 'Instance', NULL, NULL, NULL, NULL,  now(), now());
+INSERT INTO `linkis_ps_dm_datasource_type_key`
+    (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`)
+VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'dm.jdbc.driver.DmDriver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'instance', '实例名(instance)', 'Instance', NULL, 'TEXT', NULL, 1, '实例名(instance)', 'Instance', NULL, NULL, NULL, NULL,  now(), now());
 
 select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'kingbase';
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'com.kingbase8.Driver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  no [...]
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'instance', '实例名(instance)', 'Instance', NULL, 'TEXT', NULL, 1, '实例名(instance)', 'instance', NULL, NULL, NULL, NULL,  now(), now());
-
+INSERT INTO `linkis_ps_dm_datasource_type_key`
+    (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`)
+VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'com.kingbase8.Driver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'instance', '实例名(instance)', 'Instance', NULL, 'TEXT', NULL, 1, '实例名(instance)', 'instance', NULL, NULL, NULL, NULL,  now(), now());
+
+-- https://jdbc.postgresql.org/documentation/use/
 select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'postgresql';
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'org.postgresql.Driver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'instance', '实例名(instance)', 'Instance', NULL, 'TEXT', NULL, 1, '实例名(instance)', 'Instance', NULL, NULL, NULL, NULL,  now(), now());
-
+INSERT INTO `linkis_ps_dm_datasource_type_key`
+    (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`)
+VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'org.postgresql.Driver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'databaseName', '数据库名(Database name)', 'Database name', NULL, 'TEXT', NULL, 0, '数据库名(Database name)', 'Database name', NULL, NULL, NULL, NULL,  now(), now());
+
+-- https://learn.microsoft.com/zh-cn/sql/connect/jdbc/building-the-connection-url?redirectedfrom=MSDN&view=sql-server-ver16
 select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'sqlserver';
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'com.microsoft.jdbc.sqlserver.SQLServerDriver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'instance', '实例名(instance)', 'Instance', NULL, 'TEXT', NULL, 1, '实例名(instance)', 'Instance', NULL, NULL, NULL, NULL,  now(), now());
-
+INSERT INTO `linkis_ps_dm_datasource_type_key`
+    (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`)
+VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'com.microsoft.sqlserver.jdbc.SQLServerDriver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'instance', '实例名(instance)', 'Instance', NULL, 'TEXT', NULL, 1, '实例名(instance)', 'Instance', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'databaseName', '数据库名(Database name)', 'Database name', NULL, 'TEXT', NULL, 0, '数据库名(Database name)', 'Database name', NULL, NULL, NULL, NULL,  now(), now());
+
+-- https://www.ibm.com/docs/en/db2/11.5?topic=cdsudidsdjs-url-format-data-server-driver-jdbc-sqlj-type-4-connectivity
 select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'db2';
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'com.ibm.db2.jcc.DB2Driver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'instance', '实例名(instance)', 'Instance', NULL, 'TEXT', NULL, 1, '实例名(instance)', 'Instance', NULL, NULL, NULL, NULL,  now(), now());
-
+INSERT INTO `linkis_ps_dm_datasource_type_key`
+    (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`)
+VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'com.ibm.db2.jcc.DB2Driver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'databaseName', '数据库名(Database name)', 'Database name', NULL, 'TEXT', NULL, 0, '数据库名(Database name)', 'Database name', NULL, NULL, NULL, NULL,  now(), now());
+
+-- https://greenplum.docs.pivotal.io/6-1/datadirect/datadirect_jdbc.html#topic_ylk_pbx_2bb
 select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'greenplum';
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'com.pivotal.jdbc.GreenplumDriver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'instance', '实例名(instance)', 'Instance', NULL, 'TEXT', NULL, 1, '实例名(instance)', 'Instance', NULL, NULL, NULL, NULL,  now(), now());
+INSERT INTO `linkis_ps_dm_datasource_type_key`
+    (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`)
+VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'com.pivotal.jdbc.GreenplumDriver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'databaseName', '数据库名(Database name)', 'Database name', NULL, 'TEXT', NULL, 0, '数据库名(Database name)', 'Database name', NULL, NULL, NULL, NULL,  now(), now());
 
 select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'doris';
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'com.mysql.jdbc.Driver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'instance', '实例名(instance)', 'Instance', NULL, 'TEXT', NULL, 1, '实例名(instance)', 'Instance', NULL, NULL, NULL, NULL,  now(), now());
-
+INSERT INTO `linkis_ps_dm_datasource_type_key`
+    (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`)
+VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'com.mysql.jdbc.Driver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'databaseName', '数据库名(Database name)', 'Database name', NULL, 'TEXT', NULL, 0, '数据库名(Database name)', 'Database name', NULL, NULL, NULL, NULL,  now(), now());
+
+-- https://github.com/ClickHouse/clickhouse-jdbc/tree/master/clickhouse-jdbc
 select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'clickhouse';
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'ru.yandex.clickhouse.ClickHouseDriver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now());
-INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'instance', '实例名(instance)', 'Instance', NULL, 'TEXT', NULL, 1, '实例名(instance)', 'Instance', NULL, NULL, NULL, NULL,  now(), now());
+INSERT INTO `linkis_ps_dm_datasource_type_key`
+    (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`)
+VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'port', '端口号(Port)', 'Port', NULL, 'TEXT', NULL, 1, '端口号(Port)', 'Port', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'ru.yandex.clickhouse.ClickHouseDriver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now()),
+       (@data_source_type_id, 'databaseName', '数据库名(Database name)', 'Database name', NULL, 'TEXT', NULL, 0, '数据库名(Database name)', 'Database name', NULL, NULL, NULL, NULL,  now(), now());
 
 
 select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'hive';
diff --git a/linkis-dist/package/db/upgrade/1.3.2_schema/mysql/linkis_dml.sql b/linkis-dist/package/db/upgrade/1.3.2_schema/mysql/linkis_dml.sql
index 7ef7ac458..15abd17e4 100644
--- a/linkis-dist/package/db/upgrade/1.3.2_schema/mysql/linkis_dml.sql
+++ b/linkis-dist/package/db/upgrade/1.3.2_schema/mysql/linkis_dml.sql
@@ -250,3 +250,43 @@ update linkis_ps_dm_datasource_type_key set description_en="Mongodb Host" where
 update linkis_ps_dm_datasource_type_key set description_en="Port" where description ="端口";
 update linkis_ps_dm_datasource_type_key set description_en="Input JSON Format: {\"param\":\"value\"}" where description ="输入JSON格式: {\"param\":\"value\"}";
 
+select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'mysql';
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now());
+
+select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'oracle';
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now());
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'sid', 'SID', 'SID', NULL, 'TEXT', NULL, 0, 'SID', 'SID', NULL, NULL, NULL, NULL,  now(), now());
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'serviceName', 'service_name', 'service_name', NULL, 'TEXT', NULL, 0, 'service_name', 'service_name', NULL, NULL, NULL, NULL,  now(), now());
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'server', 'server', 'server', NULL, 'TEXT', NULL, 0, 'server', 'server', NULL, NULL, NULL, NULL,  now(), now());
+
+select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'dm';
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now());
+
+select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'kingbase';
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now());
+
+select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'postgresql';
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now());
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'databaseName', '数据库名(Database name)', 'Database name', NULL, 'TEXT', NULL, 0, '数据库名(Database name)', 'Database name', NULL, NULL, NULL, NULL,  now(), now());
+
+select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'sqlserver';
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now());
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'databaseName', '数据库名(Database name)', 'Database name', NULL, 'TEXT', NULL, 0, '数据库名(Database name)', 'Database name', NULL, NULL, NULL, NULL,  now(), now());
+
+select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'db2';
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now());
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'databaseName', '数据库名(Database name)', 'Database name', NULL, 'TEXT', NULL, 0, '数据库名(Database name)', 'Database name', NULL, NULL, NULL, NULL,  now(), now());
+
+select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'greenplum';
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now());
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'databaseName', '数据库名(Database name)', 'Database name', NULL, 'TEXT', NULL, 0, '数据库名(Database name)', 'Database name', NULL, NULL, NULL, NULL,  now(), now());
+
+select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'doris';
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now());
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'databaseName', '数据库名(Database name)', 'Database name', NULL, 'TEXT', NULL, 0, '数据库名(Database name)', 'Database name', NULL, NULL, NULL, NULL,  now(), now());
+
+select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'clickhouse';
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'address', '地址', 'Address', NULL, 'TEXT', NULL, 0, '地址(host1:port1,host2:port2...)', 'Address(host1:port1,host2:port2...)', NULL, NULL, NULL, NULL,  now(), now());
+INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'databaseName', '数据库名(Database name)', 'Database name', NULL, 'TEXT', NULL, 0, '数据库名(Database name)', 'Database name', NULL, NULL, NULL, NULL,  now(), now());
+
+
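
Note: the new optional 'address' key above (require = 0) takes precedence over 'host'/'port'; when it is blank the engine falls back to host:port. A minimal, self-contained sketch of that fallback, mirroring the logic added in LinkisDataSourceContext further down this patch (all sample values are hypothetical):

import java.util.HashMap;
import java.util.Map;

public class AddressFallbackSketch {
  public static void main(String[] args) {
    Map<String, String> params = new HashMap<>();
    params.put("host", "db-node-1"); // hypothetical host
    params.put("port", "3306");      // hypothetical port; the strategy's defaultPort() otherwise

    // Prefer the multi-node 'address' key; fall back to host:port when it is absent.
    String address = params.getOrDefault("address", "");
    if (address.isEmpty()) {
      address = params.getOrDefault("host", "") + ":" + params.getOrDefault("port", "3306");
    }
    System.out.println(address); // db-node-1:3306
  }
}
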
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DataSourceNotConfigException.java
similarity index 88%
copy from linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
copy to linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DataSourceNotConfigException.java
index 442b32d84..76e964db0 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DataSourceNotConfigException.java
@@ -20,9 +20,9 @@ package org.apache.linkis.engineplugin.spark.datacalc.exception;
 import org.apache.linkis.common.exception.ExceptionLevel;
 import org.apache.linkis.common.exception.LinkisRuntimeException;
 
-public class DatabaseNotConfigException extends LinkisRuntimeException {
+public class DataSourceNotConfigException extends LinkisRuntimeException {
 
-  public DatabaseNotConfigException(int errCode, String desc) {
+  public DataSourceNotConfigException(int errCode, String desc) {
     super(errCode, desc);
   }
 
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotSupportException.java
similarity index 88%
copy from linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
copy to linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotSupportException.java
index 442b32d84..391486d54 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotSupportException.java
@@ -20,9 +20,9 @@ package org.apache.linkis.engineplugin.spark.datacalc.exception;
 import org.apache.linkis.common.exception.ExceptionLevel;
 import org.apache.linkis.common.exception.LinkisRuntimeException;
 
-public class DatabaseNotConfigException extends LinkisRuntimeException {
+public class DatabaseNotSupportException extends LinkisRuntimeException {
 
-  public DatabaseNotConfigException(int errCode, String desc) {
+  public DatabaseNotSupportException(int errCode, String desc) {
     super(errCode, desc);
   }
 
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/model/DataCalcDataSource.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/model/DataCalcDataSource.java
index 7e1ae1462..159459194 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/model/DataCalcDataSource.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/model/DataCalcDataSource.java
@@ -20,21 +20,11 @@ package org.apache.linkis.engineplugin.spark.datacalc.model;
 import java.io.Serializable;
 
 public class DataCalcDataSource implements Serializable {
-  private String typeName;
   private String driver;
   private String url;
-  private String databaseName;
   private String user;
   private String password;
 
-  public String getTypeName() {
-    return typeName;
-  }
-
-  public void setTypeName(String typeName) {
-    this.typeName = typeName;
-  }
-
   public String getDriver() {
     return driver;
   }
@@ -51,14 +41,6 @@ public class DataCalcDataSource implements Serializable {
     this.url = url;
   }
 
-  public String getDatabaseName() {
-    return databaseName;
-  }
-
-  public void setDatabaseName(String databaseName) {
-    this.databaseName = databaseName;
-  }
-
   public String getUser() {
     return user;
   }
@@ -74,4 +56,22 @@ public class DataCalcDataSource implements Serializable {
   public void setPassword(String password) {
     this.password = password;
   }
+
+  @Override
+  public String toString() {
+    return "DataCalcDataSource{"
+        + "driver='"
+        + driver
+        + '\''
+        + ", url='"
+        + url
+        + '\''
+        + ", user='"
+        + user
+        + '\''
+        + ", password='"
+        + password
+        + '\''
+        + '}';
+  }
 }
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/model/SinkConfig.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/model/SinkConfig.java
index 735d4deb4..a3f72086a 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/model/SinkConfig.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/model/SinkConfig.java
@@ -22,6 +22,8 @@ import org.apache.commons.lang3.StringUtils;
 import javax.validation.constraints.AssertTrue;
 
 import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Map;
 
 public abstract class SinkConfig extends DataCalcPluginConfig implements Serializable {
 
@@ -29,6 +31,8 @@ public abstract class SinkConfig extends DataCalcPluginConfig implements Seriali
 
   protected String sourceQuery;
 
+  protected Map<String, String> options = new HashMap<>();
+
   public String getSourceTable() {
     return sourceTable;
   }
@@ -45,6 +49,14 @@ public abstract class SinkConfig extends DataCalcPluginConfig implements Seriali
     this.sourceQuery = sourceQuery;
   }
 
+  public Map<String, String> getOptions() {
+    return options;
+  }
+
+  public void setOptions(Map<String, String> options) {
+    this.options = options;
+  }
+
   @AssertTrue(message = "[sourceTable, sourceQuery] cannot be blank at the same time.")
   public boolean isSourceOK() {
     return StringUtils.isNotBlank(sourceTable) || StringUtils.isNotBlank(sourceQuery);
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/model/SourceConfig.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/model/SourceConfig.java
index 31cad254d..5263778e2 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/model/SourceConfig.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/model/SourceConfig.java
@@ -22,6 +22,8 @@ import org.apache.commons.lang3.StringUtils;
 import javax.validation.constraints.NotBlank;
 
 import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Map;
 
 public abstract class SourceConfig extends DataCalcPluginConfig
     implements ResultTableConfig, Serializable {
@@ -32,6 +34,8 @@ public abstract class SourceConfig extends DataCalcPluginConfig
 
   private String storageLevel = "MEMORY_AND_DISK";
 
+  protected Map<String, String> options = new HashMap<>();
+
   public String getResultTable() {
     return resultTable;
   }
@@ -55,4 +59,12 @@ public abstract class SourceConfig extends DataCalcPluginConfig
   public void setStorageLevel(String storageLevel) {
     if (StringUtils.isNotBlank(storageLevel)) this.storageLevel = storageLevel;
   }
+
+  public Map<String, String> getOptions() {
+    return options;
+  }
+
+  public void setOptions(Map<String, String> options) {
+    this.options = options;
+  }
 }
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/LinkisDataSourceContext.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/LinkisDataSourceContext.java
new file mode 100644
index 000000000..3fa423748
--- /dev/null
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/LinkisDataSourceContext.java
@@ -0,0 +1,102 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineplugin.spark.datacalc.service;
+
+import org.apache.linkis.datasourcemanager.common.domain.DataSource;
+import org.apache.linkis.engineplugin.spark.datacalc.exception.DatabaseNotSupportException;
+import org.apache.linkis.engineplugin.spark.datacalc.model.DataCalcDataSource;
+import org.apache.linkis.engineplugin.spark.datacalc.service.strategy.*;
+import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary;
+
+import org.apache.commons.lang3.StringUtils;
+
+import java.text.MessageFormat;
+import java.util.*;
+import java.util.stream.Collectors;
+
+public class LinkisDataSourceContext {
+
+  private static final Map<String, DataSourceStrategy> dsStrategyMap = new HashMap<>();
+
+  static {
+    // https://dev.mysql.com/doc/connector-j/8.0/en/connector-j-reference-jdbc-url-format.html
+    dsStrategyMap.put("mysql", new MySqlStrategy());
+    // https://docs.pingcap.com/tidb/dev/dev-guide-connect-to-tidb
+    dsStrategyMap.put("tidb", new TiDBStrategy());
+    dsStrategyMap.put("doris", new DorisStrategy());
+    // https://jdbc.postgresql.org/documentation/use/
+    dsStrategyMap.put("postgresql", new PostgreSqlStrategy());
+    // https://github.com/ClickHouse/clickhouse-jdbc/tree/master/clickhouse-jdbc
+    dsStrategyMap.put("clickhouse", new ClickHouseStrategy());
+    // https://docs.oracle.com/en/database/oracle/oracle-database/21/jajdb/oracle/jdbc/OracleDriver.html
+    dsStrategyMap.put("oracle", new OracleStrategy());
+    // https://learn.microsoft.com/zh-cn/sql/connect/jdbc/building-the-connection-url?redirectedfrom=MSDN&view=sql-server-ver16
+    dsStrategyMap.put("sqlserver", new SqlServerStrategy());
+    // https://www.ibm.com/docs/en/db2/11.5?topic=cdsudidsdjs-url-format-data-server-driver-jdbc-sqlj-type-4-connectivity
+    dsStrategyMap.put("db2", new DB2Strategy());
+  }
+
+  private final DataSourceStrategy dataSourceStrategy;
+
+  private final DataSource datasource;
+
+  public LinkisDataSourceContext(DataSource ds) {
+    this.datasource = ds;
+    String databaseType = ds.getDataSourceType() == null ? "" : ds.getDataSourceType().getName();
+    this.dataSourceStrategy = getDataSourceStrategy(databaseType);
+  }
+
+  private DataSourceStrategy getDataSourceStrategy(String databaseType) {
+    if (dsStrategyMap.containsKey(databaseType)) {
+      return dsStrategyMap.get(databaseType);
+    } else {
+      int code = SparkErrorCodeSummary.DATA_CALC_DATABASE_NOT_SUPPORT.getErrorCode();
+      String errDesc = SparkErrorCodeSummary.DATA_CALC_DATABASE_NOT_SUPPORT.getErrorDesc();
+      String msg = MessageFormat.format(errDesc, datasource.getDataSourceName(), databaseType);
+      throw new DatabaseNotSupportException(code, msg);
+    }
+  }
+
+  public DataCalcDataSource getDataCalcDataSource() {
+    Map<String, Object> connectParams =
+        datasource.getConnectParams(); // this should return Map<String, String>
+    Map<String, String> params = new HashMap<>(0);
+    if (connectParams != null) {
+      params =
+          connectParams.entrySet().stream()
+              .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().toString()));
+    }
+    String defaultDriver = dataSourceStrategy.defaultDriver();
+    String paramsJson = params.getOrDefault("params", "{}");
+    String defaultPort = dataSourceStrategy.defaultPort();
+
+    String address = params.getOrDefault("address", "");
+    if (StringUtils.isBlank(address)) {
+      String host = params.getOrDefault("host", "");
+      String port = params.getOrDefault("port", defaultPort);
+      address = host + ":" + port;
+    }
+
+    DataCalcDataSource ds = new DataCalcDataSource();
+    ds.setDriver(params.getOrDefault("driverClassName", defaultDriver));
+    ds.setUser(params.getOrDefault("username", ""));
+    ds.setPassword(params.getOrDefault("password", ""));
+    ds.setUrl(dataSourceStrategy.getJdbcUrl(address, params, paramsJson));
+    return ds;
+  }
+}
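
Note: getDataCalcDataSource first flattens the Map<String, Object> connect params into strings before applying the strategy defaults shown above. A standalone sketch of that flattening step (sample entries are hypothetical):

import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;

public class ConnectParamsFlattenSketch {
  public static void main(String[] args) {
    Map<String, Object> connectParams = new HashMap<>(); // as returned by DataSource.getConnectParams()
    connectParams.put("host", "127.0.0.1");
    connectParams.put("port", 3306); // non-string values are converted via toString()

    Map<String, String> params =
        connectParams.entrySet().stream()
            .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().toString()));

    System.out.println(params); // e.g. {port=3306, host=127.0.0.1}
  }
}
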
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/LinkisDataSourceService.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/LinkisDataSourceService.java
index f66d15d0a..8d67f11f5 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/LinkisDataSourceService.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/LinkisDataSourceService.java
@@ -20,9 +20,13 @@ package org.apache.linkis.engineplugin.spark.datacalc.service;
 import org.apache.linkis.datasource.client.impl.LinkisDataSourceRemoteClient;
 import org.apache.linkis.datasource.client.request.GetInfoPublishedByDataSourceNameAction;
 import org.apache.linkis.datasourcemanager.common.domain.DataSource;
+import org.apache.linkis.engineplugin.spark.datacalc.exception.DataSourceNotConfigException;
 import org.apache.linkis.engineplugin.spark.datacalc.model.DataCalcDataSource;
+import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary;
 import org.apache.linkis.storage.utils.StorageUtils;
 
+import java.text.MessageFormat;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -30,10 +34,8 @@ public class LinkisDataSourceService {
 
   private static final Logger logger = LoggerFactory.getLogger(LinkisDataSourceService.class);
 
-  private static final LinkisDataSourceRemoteClient dataSourceClient =
-      new LinkisDataSourceRemoteClient();
-
   public static DataCalcDataSource getDatasource(String datasourceName) {
+    LinkisDataSourceRemoteClient dataSourceClient = new LinkisDataSourceRemoteClient();
     GetInfoPublishedByDataSourceNameAction action =
         GetInfoPublishedByDataSourceNameAction.builder()
             .setDataSourceName(datasourceName)
@@ -41,12 +43,14 @@ public class LinkisDataSourceService {
             .build(); // ignore parameter 'system'
     DataSource datasource =
         dataSourceClient.getInfoPublishedByDataSourceName(action).getDataSource();
-    datasource.getConnectParams();
-    return transform(datasource);
-  }
-
-  private static DataCalcDataSource transform(DataSource datasource) {
-    DataCalcDataSource ds = new DataCalcDataSource();
-    return ds;
+    if (datasource == null) {
+      int code = SparkErrorCodeSummary.DATA_CALC_DATASOURCE_NOT_CONFIG.getErrorCode();
+      String errDesc = SparkErrorCodeSummary.DATA_CALC_DATASOURCE_NOT_CONFIG.getErrorDesc();
+      String msg = MessageFormat.format(errDesc, datasourceName);
+      logger.error(msg);
+      throw new DataSourceNotConfigException(code, msg);
+    }
+    LinkisDataSourceContext dataSourceContext = new LinkisDataSourceContext(datasource);
+    return dataSourceContext.getDataCalcDataSource();
   }
 }
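
Note: with the stub transform() replaced, callers resolve a published datasource by name in one call, and an unknown name now fails fast instead of yielding an empty object. A hedged usage sketch (the datasource name is hypothetical, and the Linkis client modules are assumed to be on the classpath):

import org.apache.linkis.engineplugin.spark.datacalc.model.DataCalcDataSource;
import org.apache.linkis.engineplugin.spark.datacalc.service.LinkisDataSourceService;

public class GetDatasourceSketch {
  public static void main(String[] args) {
    // Throws DataSourceNotConfigException when no published datasource matches the name.
    DataCalcDataSource ds = LinkisDataSourceService.getDatasource("my_mysql_ds"); // hypothetical name
    System.out.println(ds); // toString() includes the password, so avoid logging this in production
  }
}
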
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/BaseMySqlStrategy.java
similarity index 66%
copy from linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
copy to linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/BaseMySqlStrategy.java
index 442b32d84..0a37c5a2f 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/BaseMySqlStrategy.java
@@ -15,19 +15,23 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineplugin.spark.datacalc.exception;
+package org.apache.linkis.engineplugin.spark.datacalc.service.strategy;
 
-import org.apache.linkis.common.exception.ExceptionLevel;
-import org.apache.linkis.common.exception.LinkisRuntimeException;
+public abstract class BaseMySqlStrategy extends NormalStrategy {
 
-public class DatabaseNotConfigException extends LinkisRuntimeException {
+  @Override
+  public String defaultDriver() {
+    return "com.mysql.jdbc.Driver";
+    // return "com.mysql.cj.jdbc.Driver";
+  }
 
-  public DatabaseNotConfigException(int errCode, String desc) {
-    super(errCode, desc);
+  @Override
+  public String defaultPort() {
+    return "3306";
   }
 
   @Override
-  public ExceptionLevel getLevel() {
-    return ExceptionLevel.ERROR;
+  public String getDatabaseType() {
+    return "mysql";
   }
 }
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/BasePostgreSqlStrategy.java
similarity index 66%
copy from linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
copy to linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/BasePostgreSqlStrategy.java
index 442b32d84..ed19cf213 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/BasePostgreSqlStrategy.java
@@ -15,19 +15,22 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineplugin.spark.datacalc.exception;
+package org.apache.linkis.engineplugin.spark.datacalc.service.strategy;
 
-import org.apache.linkis.common.exception.ExceptionLevel;
-import org.apache.linkis.common.exception.LinkisRuntimeException;
+public abstract class BasePostgreSqlStrategy extends NormalStrategy {
 
-public class DatabaseNotConfigException extends LinkisRuntimeException {
+  @Override
+  public String defaultDriver() {
+    return "org.postgresql.Driver";
+  }
 
-  public DatabaseNotConfigException(int errCode, String desc) {
-    super(errCode, desc);
+  @Override
+  public String defaultPort() {
+    return "5432";
   }
 
   @Override
-  public ExceptionLevel getLevel() {
-    return ExceptionLevel.ERROR;
+  public String getDatabaseType() {
+    return "postgresql";
   }
 }
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/ClickHouseStrategy.java
similarity index 66%
copy from linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
copy to linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/ClickHouseStrategy.java
index 442b32d84..14e878cef 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/ClickHouseStrategy.java
@@ -15,19 +15,23 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineplugin.spark.datacalc.exception;
+package org.apache.linkis.engineplugin.spark.datacalc.service.strategy;
 
-import org.apache.linkis.common.exception.ExceptionLevel;
-import org.apache.linkis.common.exception.LinkisRuntimeException;
+public class ClickHouseStrategy extends NormalStrategy {
 
-public class DatabaseNotConfigException extends LinkisRuntimeException {
+  @Override
+  public String defaultDriver() {
+    return "ru.yandex.clickhouse.ClickHouseDriver";
+    // return "com.clickhouse.jdbc.ClickHouseDriver";  // >= 0.3.2
+  }
 
-  public DatabaseNotConfigException(int errCode, String desc) {
-    super(errCode, desc);
+  @Override
+  public String defaultPort() {
+    return "8123";
   }
 
   @Override
-  public ExceptionLevel getLevel() {
-    return ExceptionLevel.ERROR;
+  public String getDatabaseType() {
+    return "clickhouse";
   }
 }
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/DB2Strategy.java
similarity index 62%
copy from linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
copy to linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/DB2Strategy.java
index 442b32d84..da00c7071 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/DB2Strategy.java
@@ -15,19 +15,32 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineplugin.spark.datacalc.exception;
+package org.apache.linkis.engineplugin.spark.datacalc.service.strategy;
 
-import org.apache.linkis.common.exception.ExceptionLevel;
-import org.apache.linkis.common.exception.LinkisRuntimeException;
+public class DB2Strategy extends NormalStrategy {
 
-public class DatabaseNotConfigException extends LinkisRuntimeException {
+  @Override
+  public String defaultDriver() {
+    return "com.ibm.db2.jcc.DB2Driver";
+  }
+
+  @Override
+  public String defaultPort() {
+    return "5021";
+  }
 
-  public DatabaseNotConfigException(int errCode, String desc) {
-    super(errCode, desc);
+  @Override
+  public String getDatabaseType() {
+    return "db2";
+  }
+
+  @Override
+  protected String getParamsStartCharacter() {
+    return ":";
   }
 
   @Override
-  public ExceptionLevel getLevel() {
-    return ExceptionLevel.ERROR;
+  protected String getParamsSplitCharacter() {
+    return ";";
   }
 }
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/DataSourceStrategy.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/DataSourceStrategy.java
new file mode 100644
index 000000000..4c59c489e
--- /dev/null
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/DataSourceStrategy.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineplugin.spark.datacalc.service.strategy;
+
+import org.apache.linkis.server.BDPJettyServerHelper;
+
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.Map;
+import java.util.stream.Collectors;
+
+public abstract class DataSourceStrategy {
+
+  public abstract String getJdbcUrl(
+      String address, Map<String, String> paramsJson, String paramsStr);
+
+  public abstract String defaultDriver();
+
+  public abstract String defaultPort();
+
+  protected String getConnectParams(String paramsStr) {
+    if (StringUtils.isBlank(paramsStr)) return "";
+
+    Map<String, ?> paramsMap = BDPJettyServerHelper.gson().fromJson(paramsStr, Map.class);
+    if (paramsMap.isEmpty()) return "";
+
+    String paramsSplitCharacter = getParamsSplitCharacter();
+    String params =
+        paramsMap.entrySet().stream()
+            .map(entry -> entry.getKey() + "=" + entry.getValue())
+            .collect(Collectors.joining(paramsSplitCharacter));
+    return getParamsStartCharacter() + params;
+  }
+
+  protected String getParamsStartCharacter() {
+    return "?";
+  }
+
+  protected String getParamsSplitCharacter() {
+    return "&";
+  }
+}
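
Note: getConnectParams turns the 'params' JSON blob into a driver-specific query string, using the start/split characters that each strategy may override. A standalone sketch of the same transformation, substituting plain Gson for BDPJettyServerHelper:

import com.google.gson.Gson;

import java.util.Map;
import java.util.stream.Collectors;

public class ConnectParamsSketch {
  public static void main(String[] args) {
    String paramsStr = "{\"useSSL\":\"false\",\"characterEncoding\":\"UTF-8\"}"; // hypothetical 'params' value

    Map<String, ?> paramsMap = new Gson().fromJson(paramsStr, Map.class);
    String query =
        paramsMap.entrySet().stream()
            .map(entry -> entry.getKey() + "=" + entry.getValue())
            .collect(Collectors.joining("&")); // '?' and '&' are the defaults; DB2/SQL Server override them
    System.out.println("?" + query); // ?useSSL=false&characterEncoding=UTF-8
  }
}
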
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/DorisStrategy.java
similarity index 65%
copy from linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
copy to linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/DorisStrategy.java
index 442b32d84..f72a3a0b6 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/DorisStrategy.java
@@ -15,19 +15,12 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineplugin.spark.datacalc.exception;
+package org.apache.linkis.engineplugin.spark.datacalc.service.strategy;
 
-import org.apache.linkis.common.exception.ExceptionLevel;
-import org.apache.linkis.common.exception.LinkisRuntimeException;
-
-public class DatabaseNotConfigException extends LinkisRuntimeException {
-
-  public DatabaseNotConfigException(int errCode, String desc) {
-    super(errCode, desc);
-  }
+public class DorisStrategy extends BaseMySqlStrategy {
 
   @Override
-  public ExceptionLevel getLevel() {
-    return ExceptionLevel.ERROR;
+  public String defaultPort() {
+    return "9030";
   }
 }
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/MySqlStrategy.java
similarity index 64%
copy from linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
copy to linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/MySqlStrategy.java
index 442b32d84..41d797ec1 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/MySqlStrategy.java
@@ -15,19 +15,6 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineplugin.spark.datacalc.exception;
+package org.apache.linkis.engineplugin.spark.datacalc.service.strategy;
 
-import org.apache.linkis.common.exception.ExceptionLevel;
-import org.apache.linkis.common.exception.LinkisRuntimeException;
-
-public class DatabaseNotConfigException extends LinkisRuntimeException {
-
-  public DatabaseNotConfigException(int errCode, String desc) {
-    super(errCode, desc);
-  }
-
-  @Override
-  public ExceptionLevel getLevel() {
-    return ExceptionLevel.ERROR;
-  }
-}
+public class MySqlStrategy extends BaseMySqlStrategy {}
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/ManagedJdbcSourceConfig.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/NormalStrategy.java
similarity index 51%
copy from linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/ManagedJdbcSourceConfig.java
copy to linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/NormalStrategy.java
index 57cab73e3..c122c8c7f 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/ManagedJdbcSourceConfig.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/NormalStrategy.java
@@ -15,43 +15,24 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineplugin.spark.datacalc.source;
+package org.apache.linkis.engineplugin.spark.datacalc.service.strategy;
 
-import org.apache.linkis.engineplugin.spark.datacalc.model.SourceConfig;
-
-import javax.validation.constraints.NotBlank;
+import org.apache.commons.lang3.StringUtils;
 
 import java.util.Map;
 
-public class ManagedJdbcSourceConfig extends SourceConfig {
-
-  @NotBlank private String datasource;
-
-  @NotBlank private String query;
-
-  private Map<String, String> options;
-
-  public String getDatasource() {
-    return datasource;
-  }
-
-  public void setDatasource(String datasource) {
-    this.datasource = datasource;
+public abstract class NormalStrategy extends DataSourceStrategy {
+
+  @Override
+  public String getJdbcUrl(String address, Map<String, String> paramsJson, String paramsStr) {
+    String databaseName = paramsJson.getOrDefault("databaseName", "");
+    StringBuilder builder = new StringBuilder();
+    builder.append("jdbc:").append(this.getDatabaseType()).append("://");
+    if (StringUtils.isNotBlank(address)) builder.append(address);
+    if (StringUtils.isNotBlank(databaseName)) builder.append("/").append(databaseName);
+    if (!paramsStr.isEmpty()) builder.append(getConnectParams(paramsStr));
+    return builder.toString();
   }
 
-  public String getQuery() {
-    return query;
-  }
-
-  public void setQuery(String query) {
-    this.query = query;
-  }
-
-  public Map<String, String> getOptions() {
-    return options;
-  }
-
-  public void setOptions(Map<String, String> options) {
-    this.options = options;
-  }
+  public abstract String getDatabaseType();
 }
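
Note: for the common jdbc:<type>://host:port/database?k=v family, NormalStrategy only needs the database type from its subclass. A sketch of the URL shapes it yields (all values hypothetical):

public class NormalUrlSketch {
  // Mirrors NormalStrategy.getJdbcUrl for illustration only.
  static String url(String type, String address, String databaseName, String query) {
    StringBuilder builder = new StringBuilder("jdbc:").append(type).append("://");
    if (!address.isEmpty()) builder.append(address);
    if (!databaseName.isEmpty()) builder.append("/").append(databaseName);
    builder.append(query);
    return builder.toString();
  }

  public static void main(String[] args) {
    System.out.println(url("mysql", "127.0.0.1:3306", "testdb", "?useSSL=false"));
    // jdbc:mysql://127.0.0.1:3306/testdb?useSSL=false
    System.out.println(url("clickhouse", "ck1:8123,ck2:8123", "", ""));
    // jdbc:clickhouse://ck1:8123,ck2:8123
  }
}
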
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/OracleStrategy.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/OracleStrategy.java
new file mode 100644
index 000000000..2bafb4ffd
--- /dev/null
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/OracleStrategy.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineplugin.spark.datacalc.service.strategy;
+
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.Map;
+
+public class OracleStrategy extends DataSourceStrategy {
+
+  @Override
+  public String getJdbcUrl(String address, Map<String, String> paramsJson, String paramsStr) {
+    String serviceName = paramsJson.getOrDefault("serviceName", "");
+    String server = paramsJson.getOrDefault("server", "");
+    String instanceName = paramsJson.getOrDefault("instance", "");
+    String sid = paramsJson.getOrDefault("sid", "");
+    StringBuilder builder = new StringBuilder("jdbc:oracle:thin:@");
+    if (StringUtils.isNotBlank(sid)) {
+      builder.append(address);
+      builder.append(":").append(sid);
+    } else {
+      builder.append("//").append(address).append("/").append(serviceName);
+      if (StringUtils.isNotBlank(server)) builder.append(":").append(server);
+    }
+    if (StringUtils.isNotBlank(instanceName)) builder.append("/").append(instanceName);
+    if (!paramsStr.isEmpty()) builder.append(getConnectParams(paramsStr));
+    return builder.toString();
+  }
+
+  @Override
+  public String defaultDriver() {
+    return "oracle.jdbc.driver.OracleDriver";
+  }
+
+  @Override
+  public String defaultPort() {
+    return "1521";
+  }
+}
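
Note: OracleStrategy branches on whether a SID is configured; otherwise it uses the service-name form, optionally qualified by server mode and instance. A sketch of the two thin-driver URL shapes (all values hypothetical):

public class OracleUrlSketch {
  // Mirrors the two branches of OracleStrategy.getJdbcUrl for illustration only.
  static String url(String address, String sid, String serviceName) {
    StringBuilder builder = new StringBuilder("jdbc:oracle:thin:@");
    if (!sid.isEmpty()) {
      builder.append(address).append(":").append(sid); // SID form
    } else {
      builder.append("//").append(address).append("/").append(serviceName); // service-name form
    }
    return builder.toString();
  }

  public static void main(String[] args) {
    System.out.println(url("127.0.0.1:1521", "ORCL", ""));     // jdbc:oracle:thin:@127.0.0.1:1521:ORCL
    System.out.println(url("127.0.0.1:1521", "", "orclpdb1")); // jdbc:oracle:thin:@//127.0.0.1:1521/orclpdb1
  }
}
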
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/PostgreSqlStrategy.java
similarity index 64%
copy from linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
copy to linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/PostgreSqlStrategy.java
index 442b32d84..00a20de78 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/PostgreSqlStrategy.java
@@ -15,19 +15,6 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineplugin.spark.datacalc.exception;
+package org.apache.linkis.engineplugin.spark.datacalc.service.strategy;
 
-import org.apache.linkis.common.exception.ExceptionLevel;
-import org.apache.linkis.common.exception.LinkisRuntimeException;
-
-public class DatabaseNotConfigException extends LinkisRuntimeException {
-
-  public DatabaseNotConfigException(int errCode, String desc) {
-    super(errCode, desc);
-  }
-
-  @Override
-  public ExceptionLevel getLevel() {
-    return ExceptionLevel.ERROR;
-  }
-}
+public class PostgreSqlStrategy extends BasePostgreSqlStrategy {}
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/SqlServerStrategy.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/SqlServerStrategy.java
new file mode 100644
index 000000000..0ae0b8db7
--- /dev/null
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/SqlServerStrategy.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineplugin.spark.datacalc.service.strategy;
+
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.Map;
+
+public class SqlServerStrategy extends DataSourceStrategy {
+
+  @Override
+  public String getJdbcUrl(String address, Map<String, String> paramsJson, String paramsStr) {
+    String databaseName = paramsJson.getOrDefault("databaseName", "");
+    String instanceName = paramsJson.getOrDefault("instance", "");
+    StringBuilder builder = new StringBuilder("jdbc:sqlserver://");
+    if (StringUtils.isNotBlank(address)) builder.append(address);
+    if (StringUtils.isNotBlank(instanceName)) builder.append(";instanceName=").append(instanceName);
+    if (StringUtils.isNotBlank(databaseName)) builder.append(";databaseName=").append(databaseName);
+    if (!paramsStr.isEmpty()) builder.append(getConnectParams(paramsStr));
+    return builder.toString();
+  }
+
+  @Override
+  public String defaultDriver() {
+    return "com.microsoft.sqlserver.jdbc.SQLServerDriver";
+  }
+
+  @Override
+  public String defaultPort() {
+    return "1433";
+  }
+
+  @Override
+  protected String getParamsStartCharacter() {
+    return ";";
+  }
+
+  @Override
+  protected String getParamsSplitCharacter() {
+    return ";";
+  }
+}
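
Note: SQL Server departs from the ?key=value&key=value convention; the instance, database, and any 'params' entries are all chained with semicolons. The resulting shape (all values hypothetical):

public class SqlServerUrlSketch {
  public static void main(String[] args) {
    // Mirrors SqlServerStrategy.getJdbcUrl for illustration only.
    String url =
        "jdbc:sqlserver://127.0.0.1:1433"
            + ";instanceName=SQLEXPRESS"
            + ";databaseName=testdb"
            + ";encrypt=false"; // 'params' entries are appended with ';' as well
    System.out.println(url);
  }
}
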
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/TiDBStrategy.java
similarity index 65%
rename from linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
rename to linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/TiDBStrategy.java
index 442b32d84..af0af5972 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/exception/DatabaseNotConfigException.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/service/strategy/TiDBStrategy.java
@@ -15,19 +15,12 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineplugin.spark.datacalc.exception;
+package org.apache.linkis.engineplugin.spark.datacalc.service.strategy;
 
-import org.apache.linkis.common.exception.ExceptionLevel;
-import org.apache.linkis.common.exception.LinkisRuntimeException;
-
-public class DatabaseNotConfigException extends LinkisRuntimeException {
-
-  public DatabaseNotConfigException(int errCode, String desc) {
-    super(errCode, desc);
-  }
+public class TiDBStrategy extends BaseMySqlStrategy {
 
   @Override
-  public ExceptionLevel getLevel() {
-    return ExceptionLevel.ERROR;
+  public String defaultPort() {
+    return "4000";
   }
 }
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/sink/FileSinkConfig.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/sink/FileSinkConfig.java
index 22ac537d8..765ec19fa 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/sink/FileSinkConfig.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/sink/FileSinkConfig.java
@@ -23,17 +23,15 @@ import javax.validation.constraints.NotBlank;
 import javax.validation.constraints.Pattern;
 
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 
 public class FileSinkConfig extends SinkConfig {
 
   @NotBlank
   @Pattern(
-      regexp = "^(file|hdfs)://.*",
+      regexp = "^(((file|hdfs)://)|/).*",
       message =
-          "Invalid path URI, please set the following allowed schemas: 'file://' or 'hdfs://'.")
+          "Invalid path URI, please set the following allowed schemas: 'file://' or 'hdfs://'(default).")
   private String path;
 
   @NotBlank private String serializer = "parquet";
@@ -47,9 +45,8 @@ public class FileSinkConfig extends SinkConfig {
           "Unknown save mode: {saveMode}. Accepted save modes are 'overwrite', 'append', 'ignore', 'error', 'errorifexists'.")
   private String saveMode = "overwrite";
 
-  private Map<String, String> options = new HashMap<>();
-
   public String getPath() {
+    if (path.startsWith("/")) return "hdfs://" + path;
     return path;
   }
 
@@ -80,12 +77,4 @@ public class FileSinkConfig extends SinkConfig {
   public void setSaveMode(String saveMode) {
     this.saveMode = saveMode;
   }
-
-  public Map<String, String> getOptions() {
-    return options;
-  }
-
-  public void setOptions(Map<String, String> options) {
-    this.options = options;
-  }
 }
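
Note: the relaxed path pattern above accepts bare absolute paths, and getPath() prefixes them with hdfs://, so a scheme-less path is treated as HDFS by default. A sketch of the resolution (hypothetical paths):

public class PathDefaultSketch {
  // Mirrors FileSinkConfig.getPath(): scheme-less absolute paths default to HDFS.
  static String resolve(String path) {
    return path.startsWith("/") ? "hdfs://" + path : path;
  }

  public static void main(String[] args) {
    System.out.println(resolve("/user/linkis/output")); // hdfs:///user/linkis/output
    System.out.println(resolve("file:///tmp/output"));  // file:///tmp/output
  }
}
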
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/sink/HiveSinkConfig.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/sink/HiveSinkConfig.java
index 56589de24..ff4301644 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/sink/HiveSinkConfig.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/sink/HiveSinkConfig.java
@@ -22,9 +22,6 @@ import org.apache.linkis.engineplugin.spark.datacalc.model.SinkConfig;
 import javax.validation.constraints.NotBlank;
 import javax.validation.constraints.Pattern;
 
-import java.util.HashMap;
-import java.util.Map;
-
 public class HiveSinkConfig extends SinkConfig {
 
   private String targetDatabase;
@@ -44,8 +41,6 @@ public class HiveSinkConfig extends SinkConfig {
 
   private Integer numPartitions = 10;
 
-  private Map<String, String> options = new HashMap<>();
-
   public String getTargetDatabase() {
     return targetDatabase;
   }
@@ -94,12 +89,4 @@ public class HiveSinkConfig extends SinkConfig {
     if (numPartitions == null) return;
     this.numPartitions = numPartitions > 20 ? 20 : numPartitions;
   }
-
-  public Map<String, String> getOptions() {
-    return options;
-  }
-
-  public void setOptions(Map<String, String> options) {
-    this.options = options;
-  }
 }
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/sink/JdbcSinkConfig.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/sink/JdbcSinkConfig.java
index 21f9aba59..8a04221fd 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/sink/JdbcSinkConfig.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/sink/JdbcSinkConfig.java
@@ -24,7 +24,6 @@ import javax.validation.constraints.Pattern;
 
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;
 
 public class JdbcSinkConfig extends SinkConfig {
 
@@ -51,8 +50,6 @@ public class JdbcSinkConfig extends SinkConfig {
 
   private Integer numPartitions = 10;
 
-  private Map<String, String> options;
-
   public String getUrl() {
     return url;
   }
@@ -125,12 +122,4 @@ public class JdbcSinkConfig extends SinkConfig {
     if (numPartitions == null) return;
     this.numPartitions = numPartitions > 20 ? 20 : numPartitions;
   }
-
-  public Map<String, String> getOptions() {
-    return options;
-  }
-
-  public void setOptions(Map<String, String> options) {
-    this.options = options;
-  }
 }
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/sink/ManagedJdbcSinkConfig.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/sink/ManagedJdbcSinkConfig.java
index 3ccec8287..e94d61f6c 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/sink/ManagedJdbcSinkConfig.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/sink/ManagedJdbcSinkConfig.java
@@ -24,7 +24,6 @@ import javax.validation.constraints.Pattern;
 
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;
 
 public class ManagedJdbcSinkConfig extends SinkConfig {
 
@@ -45,8 +44,6 @@ public class ManagedJdbcSinkConfig extends SinkConfig {
 
   private Integer numPartitions = 10;
 
-  private Map<String, String> options;
-
   public String getTargetDatasource() {
     return targetDatasource;
   }
@@ -95,12 +92,4 @@ public class ManagedJdbcSinkConfig extends SinkConfig {
     if (numPartitions == null) return;
     this.numPartitions = numPartitions > 20 ? 20 : numPartitions;
   }
-
-  public Map<String, String> getOptions() {
-    return options;
-  }
-
-  public void setOptions(Map<String, String> options) {
-    this.options = options;
-  }
 }
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/FileSourceConfig.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/FileSourceConfig.java
index 7e83bdd82..539a97466 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/FileSourceConfig.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/FileSourceConfig.java
@@ -22,24 +22,21 @@ import org.apache.linkis.engineplugin.spark.datacalc.model.SourceConfig;
 import javax.validation.constraints.NotBlank;
 import javax.validation.constraints.Pattern;
 
-import java.util.Map;
-
 public class FileSourceConfig extends SourceConfig {
 
   @NotBlank
   @Pattern(
-      regexp = "^(file|hdfs)://.*",
+      regexp = "^(((file|hdfs)://)|/).*",
       message =
-          "Invalid path URI, please set the following allowed schemas: 'file://' or 'hdfs://'.")
+          "Invalid path URI, please set the following allowed schemas: 'file://' or 'hdfs://'(default).")
   private String path;
 
   @NotBlank private String serializer = "parquet";
 
   private String[] columnNames;
 
-  private Map<String, String> options;
-
   public String getPath() {
+    if (path.startsWith("/")) return "hdfs://" + path;
     return path;
   }
 
@@ -62,12 +59,4 @@ public class FileSourceConfig extends SourceConfig {
   public void setColumnNames(String[] columnNames) {
     this.columnNames = columnNames;
   }
-
-  public Map<String, String> getOptions() {
-    return options;
-  }
-
-  public void setOptions(Map<String, String> options) {
-    this.options = options;
-  }
 }
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/JdbcSourceConfig.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/JdbcSourceConfig.java
index fc13dfcb3..5166a2ae0 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/JdbcSourceConfig.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/JdbcSourceConfig.java
@@ -21,8 +21,6 @@ import org.apache.linkis.engineplugin.spark.datacalc.model.SourceConfig;
 
 import javax.validation.constraints.NotBlank;
 
-import java.util.Map;
-
 public class JdbcSourceConfig extends SourceConfig {
 
   @NotBlank private String url;
@@ -35,8 +33,6 @@ public class JdbcSourceConfig extends SourceConfig {
 
   @NotBlank private String query;
 
-  private Map<String, String> options;
-
   public String getUrl() {
     return url;
   }
@@ -76,12 +72,4 @@ public class JdbcSourceConfig extends SourceConfig {
   public void setQuery(String query) {
     this.query = query;
   }
-
-  public Map<String, String> getOptions() {
-    return options;
-  }
-
-  public void setOptions(Map<String, String> options) {
-    this.options = options;
-  }
 }
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/ManagedJdbcSourceConfig.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/ManagedJdbcSourceConfig.java
index 57cab73e3..4e0c21b5b 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/ManagedJdbcSourceConfig.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/source/ManagedJdbcSourceConfig.java
@@ -21,16 +21,12 @@ import org.apache.linkis.engineplugin.spark.datacalc.model.SourceConfig;
 
 import javax.validation.constraints.NotBlank;
 
-import java.util.Map;
-
 public class ManagedJdbcSourceConfig extends SourceConfig {
 
   @NotBlank private String datasource;
 
   @NotBlank private String query;
 
-  private Map<String, String> options;
-
   public String getDatasource() {
     return datasource;
   }
@@ -46,12 +42,4 @@ public class ManagedJdbcSourceConfig extends SourceConfig {
   public void setQuery(String query) {
     this.query = query;
   }
-
-  public Map<String, String> getOptions() {
-    return options;
-  }
-
-  public void setOptions(Map<String, String> options) {
-    this.options = options;
-  }
 }
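
The options map deleted here and from the sibling config classes is not gone: the companion changes to SinkConfig.java and SourceConfig.java in this commit indicate it is hoisted into the shared base classes. A hedged Scala rendering of that presumed base-class member (the real classes are Java):

    // Assumption: approximates what SourceConfig/SinkConfig now carry.
    import java.util.{HashMap => JHashMap, Map => JMap}

    abstract class SourceConfigSketch {
      private var options: JMap[String, String] = new JHashMap[String, String]()
      def getOptions: JMap[String, String] = options
      def setOptions(opts: JMap[String, String]): Unit = this.options = opts
    }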
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/util/PluginUtil.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/util/PluginUtil.java
index 5dfcd4e4a..8cb7beb4f 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/util/PluginUtil.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/datacalc/util/PluginUtil.java
@@ -21,6 +21,9 @@ import org.apache.linkis.engineplugin.spark.datacalc.api.*;
 import org.apache.linkis.engineplugin.spark.datacalc.model.SinkConfig;
 import org.apache.linkis.engineplugin.spark.datacalc.model.SourceConfig;
 import org.apache.linkis.engineplugin.spark.datacalc.model.TransformConfig;
+import org.apache.linkis.engineplugin.spark.datacalc.sink.*;
+import org.apache.linkis.engineplugin.spark.datacalc.source.*;
+import org.apache.linkis.engineplugin.spark.datacalc.transform.*;
 import org.apache.linkis.server.BDPJettyServerHelper;
 
 import java.lang.reflect.InvocationTargetException;
@@ -38,26 +41,24 @@ public class PluginUtil {
 
   private static Map<String, Class<?>> getSourcePlugins() {
     Map<String, Class<?>> classMap = new HashMap<>();
-    // classMap.put("managed_jdbc",
-    // org.apache.linkis.engineplugin.spark.datacalc.source.ManagedJdbcSource.class);
-    classMap.put("jdbc", org.apache.linkis.engineplugin.spark.datacalc.source.JdbcSource.class);
-    classMap.put("file", org.apache.linkis.engineplugin.spark.datacalc.source.FileSource.class);
+    classMap.put("managed_jdbc", ManagedJdbcSource.class);
+    classMap.put("jdbc", JdbcSource.class);
+    classMap.put("file", FileSource.class);
     return classMap;
   }
 
   private static Map<String, Class<?>> getTransformPlugins() {
     Map<String, Class<?>> classMap = new HashMap<>();
-    classMap.put("sql", org.apache.linkis.engineplugin.spark.datacalc.transform.SqlTransform.class);
+    classMap.put("sql", SqlTransform.class);
     return classMap;
   }
 
   private static Map<String, Class<?>> getSinkPlugins() {
     Map<String, Class<?>> classMap = new HashMap<>();
-    // classMap.put("managed_jdbc",
-    // org.apache.linkis.engineplugin.spark.datacalc.sink.ManagedJdbcSink.class);
-    classMap.put("jdbc", org.apache.linkis.engineplugin.spark.datacalc.sink.JdbcSink.class);
-    classMap.put("hive", org.apache.linkis.engineplugin.spark.datacalc.sink.HiveSink.class);
-    classMap.put("file", org.apache.linkis.engineplugin.spark.datacalc.sink.FileSink.class);
+    classMap.put("managed_jdbc", ManagedJdbcSink.class);
+    classMap.put("jdbc", JdbcSink.class);
+    classMap.put("hive", HiveSink.class);
+    classMap.put("file", FileSink.class);
     return classMap;
   }
 
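With the managed_jdbc registrations restored above, a data calc job can reference the managed plugins by key. A hypothetical job fragment follows; the datasource/query and targetDatasource fields match the config classes in this diff, while the JSON envelope and the sink's targetTable field are assumptions:

    // Hypothetical job fragment; envelope keys and "targetTable" are assumed.
    val managedJdbcJobSketch =
      """
        |{
        |    "sources": [
        |        { "name": "managed_jdbc",
        |          "config": { "datasource": "my_linkis_ds", "query": "select * from test_db.t1" } }
        |    ],
        |    "sinks": [
        |        { "name": "managed_jdbc",
        |          "config": { "targetDatasource": "my_linkis_ds", "targetTable": "t1_copy" } }
        |    ]
        |}
        |""".stripMargin
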
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/errorcode/SparkErrorCodeSummary.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/errorcode/SparkErrorCodeSummary.java
index c2ba489a9..936e773e4 100644
--- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/errorcode/SparkErrorCodeSummary.java
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/errorcode/SparkErrorCodeSummary.java
@@ -47,7 +47,9 @@ public enum SparkErrorCodeSummary implements LinkisErrorCode {
   DATA_CALC_CONFIG_VALID_FAILED(43001, "Config data validate failed"),
   DATA_CALC_CONFIG_TYPE_NOT_VALID(43002, "[{0}] is not a valid type"),
 
-  DATA_CALC_DATASOURCE_NOT_CONFIG(43011, "Datasource {0} is not configured!"),
+  DATA_CALC_DATASOURCE_NOT_CONFIG(43011, "DataSource {0} is not configured!"),
+
+  DATA_CALC_DATABASE_NOT_SUPPORT(43012, "DataSource [{0}] type [{1}] is not supported"),
 
   DATA_CALC_COLUMN_NOT_MATCH(
       43021,
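
A minimal sketch of how the new 43012 description renders, mirroring the MessageFormat.format(...) pattern this commit already uses for 43011; the datasource name and type below are placeholders:

    import java.text.MessageFormat
    // Placeholder arguments; real values come from the unsupported datasource.
    val rendered = MessageFormat.format(
      "DataSource [{0}] type [{1}] is not supported", "my_ds", "greenplum")
    // rendered == "DataSource [my_ds] type [greenplum] is not supported"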
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/FileSink.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/FileSink.scala
index 1c9d9e034..1464375e3 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/FileSink.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/FileSink.scala
@@ -17,6 +17,7 @@
 
 package org.apache.linkis.engineplugin.spark.datacalc.sink
 
+import org.apache.linkis.common.utils.Logging
 import org.apache.linkis.engineplugin.spark.datacalc.api.DataCalcSink
 
 import org.apache.commons.text.StringSubstitutor
@@ -24,11 +25,7 @@ import org.apache.spark.sql.{Dataset, Row, SparkSession}
 
 import scala.collection.JavaConverters._
 
-import org.slf4j.{Logger, LoggerFactory}
-
-class FileSink extends DataCalcSink[FileSinkConfig] {
-
-  private val log: Logger = LoggerFactory.getLogger(classOf[FileSink])
+class FileSink extends DataCalcSink[FileSinkConfig] with Logging {
 
   def output(spark: SparkSession, ds: Dataset[Row]): Unit = {
     val writer = ds.write.mode(config.getSaveMode)
@@ -43,7 +40,7 @@ class FileSink extends DataCalcSink[FileSinkConfig] {
     }
     val substitutor = new StringSubstitutor(config.getVariables)
     val path = substitutor.replace(config.getPath)
-    log.info(s"Save data to file, path: $path")
+    logger.info(s"Save data to file, path: $path")
 
     config.getSerializer match {
       case "csv" => writer.csv(path)
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/HiveSink.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/HiveSink.scala
index 348f819c5..8ba618776 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/HiveSink.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/HiveSink.scala
@@ -17,6 +17,7 @@
 
 package org.apache.linkis.engineplugin.spark.datacalc.sink
 
+import org.apache.linkis.common.utils.Logging
 import org.apache.linkis.engineplugin.spark.datacalc.api.DataCalcSink
 import org.apache.linkis.engineplugin.spark.datacalc.exception.HiveSinkException
 import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary
@@ -31,9 +32,7 @@ import org.apache.spark.sql.types.StructField
 
 import org.slf4j.{Logger, LoggerFactory}
 
-class HiveSink extends DataCalcSink[HiveSinkConfig] {
-
-  private val log: Logger = LoggerFactory.getLogger(classOf[HiveSink])
+class HiveSink extends DataCalcSink[HiveSinkConfig] with Logging {
 
   def output(spark: SparkSession, ds: Dataset[Row]): Unit = {
     val targetTable =
@@ -50,7 +49,7 @@ class HiveSink extends DataCalcSink[HiveSinkConfig] {
       val location = getLocation(spark, targetTable, partitionsColumns)
       val fileFormat = getTableFileFormat(spark, targetTable)
 
-      log.info(
+      logger.info(
         s"Write $fileFormat into target table: $targetTable, location: $location, file format: $fileFormat"
       )
       val writer = getSaveWriter(
@@ -68,12 +67,12 @@ class HiveSink extends DataCalcSink[HiveSinkConfig] {
         .map(colName => s"$colName='${config.getVariables.get(colName)}'")
         .mkString(",")
       if (StringUtils.isNotBlank(partition)) {
-        log.info(s"Refresh table partition: $partition")
+        logger.info(s"Refresh table partition: $partition")
         refreshPartition(spark, targetTable, partition)
       }
     } else {
       val writer = getSaveWriter(ds, targetFields, targetTable)
-      log.info(s"InsertInto data to hive table: $targetTable")
+      logger.info(s"InsertInto data to hive table: $targetTable")
       writer.format("hive").insertInto(targetTable)
     }
   }
@@ -109,8 +108,8 @@ class HiveSink extends DataCalcSink[HiveSinkConfig] {
   }
 
   def logFields(sourceFields: Array[StructField], targetFields: Array[StructField]): Unit = {
-    log.info(s"sourceFields: ${sourceFields.mkString("Array(", ", ", ")")}")
-    log.info(s"targetFields: ${targetFields.mkString("Array(", ", ", ")")}")
+    logger.info(s"sourceFields: ${sourceFields.mkString("Array(", ", ", ")")}")
+    logger.info(s"targetFields: ${targetFields.mkString("Array(", ", ", ")")}")
   }
 
   def sequenceFields(
@@ -136,7 +135,7 @@ class HiveSink extends DataCalcSink[HiveSinkConfig] {
       // sort column
       dsSource.select(targetFields.map(field => col(field.name)): _*)
     } else if (subSet.size == targetFieldMap.size) {
-      log.info("None target table fields match with source fields, write in order")
+      logger.info("None target table fields match with source fields, write in order")
       dsSource.toDF(targetFields.map(field => field.name): _*)
     } else {
       throw new HiveSinkException(
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/JdbcSink.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/JdbcSink.scala
index 1d97ba22a..ab8a21c3f 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/JdbcSink.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/JdbcSink.scala
@@ -17,6 +17,7 @@
 
 package org.apache.linkis.engineplugin.spark.datacalc.sink
 
+import org.apache.linkis.common.utils.Logging
 import org.apache.linkis.engineplugin.spark.datacalc.api.DataCalcSink
 
 import org.apache.commons.lang3.StringUtils
@@ -27,11 +28,7 @@ import java.sql.Connection
 
 import scala.collection.JavaConverters._
 
-import org.slf4j.{Logger, LoggerFactory}
-
-class JdbcSink extends DataCalcSink[JdbcSinkConfig] {
-
-  private val log: Logger = LoggerFactory.getLogger(classOf[JdbcSink])
+class JdbcSink extends DataCalcSink[JdbcSinkConfig] with Logging {
 
   def output(spark: SparkSession, ds: Dataset[Row]): Unit = {
     val targetTable =
@@ -64,11 +61,11 @@ class JdbcSink extends DataCalcSink[JdbcSinkConfig] {
           val conn: Connection = JdbcUtils.createConnectionFactory(jdbcOptions)()
           try {
             config.getPreQueries.asScala.foreach(query => {
-              log.info(s"Execute pre query: $query")
+              logger.info(s"Execute pre query: $query")
               execute(conn, jdbcOptions, query)
             })
           } catch {
-            case e: Exception => log.error("Execute preQueries failed. ", e)
+            case e: Exception => logger.error("Execute preQueries failed. ", e)
           } finally {
             conn.close()
           }
@@ -79,21 +76,21 @@ class JdbcSink extends DataCalcSink[JdbcSinkConfig] {
     if (StringUtils.isNotBlank(config.getSaveMode)) {
       writer.mode(config.getSaveMode)
     }
-    log.info(
+    logger.info(
       s"Save data to jdbc url: ${config.getUrl}, driver: ${config.getDriver}, username: ${config.getUser}, table: $targetTable"
     )
     writer.options(options).save()
   }
 
   private def execute(conn: Connection, jdbcOptions: JDBCOptions, query: String): Unit = {
-    log.info("Execute query: {}", query)
+    logger.info("Execute query: {}", query)
     val statement = conn.prepareStatement(query)
     try {
       statement.setQueryTimeout(jdbcOptions.queryTimeout)
       val rows = statement.executeUpdate()
-      log.info("{} rows affected", rows)
+      logger.info("{} rows affected", rows)
     } catch {
-      case e: Exception => log.error("Execute query failed. ", e)
+      case e: Exception => logger.error("Execute query failed. ", e)
     } finally {
       statement.close()
     }
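
To make the preQueries flow above concrete, a hypothetical JdbcSinkConfig payload; url, driver, user, targetTable, saveMode and preQueries all appear in this diff, while "password" is an assumption:

    // Hypothetical sink payload; "password" is assumed, the rest is visible above.
    val jdbcSinkSketch =
      """
        |{
        |    "url": "jdbc:mysql://testdb-mysql.linkis.com:3306/test_db",
        |    "driver": "com.mysql.cj.jdbc.Driver",
        |    "user": "test_db_rw",
        |    "password": "p@ssw0rd",
        |    "targetTable": "result_table",
        |    "saveMode": "append",
        |    "preQueries": ["truncate table result_table"]
        |}
        |""".stripMargin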
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/ManagedJdbcSink.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/ManagedJdbcSink.scala
index d64e0b6a5..510a332bd 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/ManagedJdbcSink.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/sink/ManagedJdbcSink.scala
@@ -17,8 +17,9 @@
 
 package org.apache.linkis.engineplugin.spark.datacalc.sink
 
+import org.apache.linkis.common.utils.Logging
 import org.apache.linkis.engineplugin.spark.datacalc.api.DataCalcSink
-import org.apache.linkis.engineplugin.spark.datacalc.exception.DatabaseNotConfigException
+import org.apache.linkis.engineplugin.spark.datacalc.exception.DataSourceNotConfigException
 import org.apache.linkis.engineplugin.spark.datacalc.model.DataCalcDataSource
 import org.apache.linkis.engineplugin.spark.datacalc.service.LinkisDataSourceService
 import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary
@@ -27,16 +28,12 @@ import org.apache.spark.sql.{Dataset, Row, SparkSession}
 
 import java.text.MessageFormat
 
-import org.slf4j.{Logger, LoggerFactory}
-
-class ManagedJdbcSink extends DataCalcSink[ManagedJdbcSinkConfig] {
-
-  private val log: Logger = LoggerFactory.getLogger(classOf[ManagedJdbcSink])
+class ManagedJdbcSink extends DataCalcSink[ManagedJdbcSinkConfig] with Logging {
 
   def output(spark: SparkSession, ds: Dataset[Row]): Unit = {
     val db: DataCalcDataSource = LinkisDataSourceService.getDatasource(config.getTargetDatasource)
     if (db == null) {
-      throw new DatabaseNotConfigException(
+      throw new DataSourceNotConfigException(
         SparkErrorCodeSummary.DATA_CALC_DATASOURCE_NOT_CONFIG.getErrorCode,
         MessageFormat.format(
           SparkErrorCodeSummary.DATA_CALC_DATASOURCE_NOT_CONFIG.getErrorDesc,
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/source/FileSource.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/source/FileSource.scala
index 95f2e0535..8b579a0b6 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/source/FileSource.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/source/FileSource.scala
@@ -17,16 +17,13 @@
 
 package org.apache.linkis.engineplugin.spark.datacalc.source
 
+import org.apache.linkis.common.utils.Logging
 import org.apache.linkis.engineplugin.spark.datacalc.api.DataCalcSource
 
 import org.apache.commons.text.StringSubstitutor
 import org.apache.spark.sql.{Dataset, Row, SparkSession}
 
-import org.slf4j.{Logger, LoggerFactory}
-
-class FileSource extends DataCalcSource[FileSourceConfig] {
-
-  private val log: Logger = LoggerFactory.getLogger(classOf[FileSource])
+class FileSource extends DataCalcSource[FileSourceConfig] with Logging {
 
   override def getData(spark: SparkSession): Dataset[Row] = {
     val reader = spark.read
@@ -36,7 +33,7 @@ class FileSource extends DataCalcSource[FileSourceConfig] {
     }
     val substitutor = new StringSubstitutor(config.getVariables)
     val path = substitutor.replace(config.getPath)
-    log.info(s"Load data from file <$path>")
+    logger.info(s"Load data from file <$path>")
 
     var df = config.getSerializer match {
       case "csv" => reader.csv(path)
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/source/JdbcSource.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/source/JdbcSource.scala
index 7bdc43b38..7a9aa1279 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/source/JdbcSource.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/source/JdbcSource.scala
@@ -17,15 +17,12 @@
 
 package org.apache.linkis.engineplugin.spark.datacalc.source
 
+import org.apache.linkis.common.utils.Logging
 import org.apache.linkis.engineplugin.spark.datacalc.api.DataCalcSource
 
 import org.apache.spark.sql.{Dataset, Row, SparkSession}
 
-import org.slf4j.{Logger, LoggerFactory}
-
-class JdbcSource extends DataCalcSource[JdbcSourceConfig] {
-
-  private val log: Logger = LoggerFactory.getLogger(classOf[JdbcSource])
+class JdbcSource extends DataCalcSource[JdbcSourceConfig] with Logging {
 
   override def getData(spark: SparkSession): Dataset[Row] = {
     val reader = spark.read.format("jdbc")
@@ -33,7 +30,7 @@ class JdbcSource extends DataCalcSource[JdbcSourceConfig] {
       reader.options(config.getOptions)
     }
 
-    log.info(
+    logger.info(
       s"Load data from jdbc url: ${config.getUrl}, driver: ${config.getDriver}, username: ${config.getUser}, query: ${config.getQuery}"
     )
 
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/source/ManagedJdbcSource.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/source/ManagedJdbcSource.scala
index 47a0f55ba..9cd3a5461 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/source/ManagedJdbcSource.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/source/ManagedJdbcSource.scala
@@ -17,8 +17,9 @@
 
 package org.apache.linkis.engineplugin.spark.datacalc.source
 
+import org.apache.linkis.common.utils.Logging
 import org.apache.linkis.engineplugin.spark.datacalc.api.DataCalcSource
-import org.apache.linkis.engineplugin.spark.datacalc.exception.DatabaseNotConfigException
+import org.apache.linkis.engineplugin.spark.datacalc.exception.DataSourceNotConfigException
 import org.apache.linkis.engineplugin.spark.datacalc.service.LinkisDataSourceService
 import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary
 
@@ -26,16 +27,12 @@ import org.apache.spark.sql.{Dataset, Row, SparkSession}
 
 import java.text.MessageFormat
 
-import org.slf4j.{Logger, LoggerFactory}
-
-class ManagedJdbcSource extends DataCalcSource[ManagedJdbcSourceConfig] {
-
-  private val log: Logger = LoggerFactory.getLogger(classOf[ManagedJdbcSource])
+class ManagedJdbcSource extends DataCalcSource[ManagedJdbcSourceConfig] with Logging {
 
   override def getData(spark: SparkSession): Dataset[Row] = {
     val db = LinkisDataSourceService.getDatasource(config.getDatasource)
     if (db == null) {
-      throw new DatabaseNotConfigException(
+      throw new DataSourceNotConfigException(
         SparkErrorCodeSummary.DATA_CALC_DATASOURCE_NOT_CONFIG.getErrorCode,
         MessageFormat.format(
           SparkErrorCodeSummary.DATA_CALC_DATASOURCE_NOT_CONFIG.getErrorDesc,
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/transform/SqlTransform.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/transform/SqlTransform.scala
index bff241546..4f9cf0520 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/transform/SqlTransform.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/datacalc/transform/SqlTransform.scala
@@ -17,18 +17,15 @@
 
 package org.apache.linkis.engineplugin.spark.datacalc.transform
 
+import org.apache.linkis.common.utils.Logging
 import org.apache.linkis.engineplugin.spark.datacalc.api.DataCalcTransform
 
 import org.apache.spark.sql.{Dataset, Row, SparkSession}
 
-import org.slf4j.{Logger, LoggerFactory}
-
-class SqlTransform extends DataCalcTransform[SqlTransformConfig] {
-
-  private val log: Logger = LoggerFactory.getLogger(classOf[SqlTransform])
+class SqlTransform extends DataCalcTransform[SqlTransformConfig] with Logging {
 
   override def process(spark: SparkSession, ds: Dataset[Row]): Dataset[Row] = {
-    log.info(s"Load data from query: ${config.getSql}")
+    logger.info(s"Load data from query: ${config.getSql}")
     spark.sql(config.getSql)
   }
 
diff --git a/linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/datacalc/TestDataCalcDataSource.scala b/linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/datacalc/TestDataCalcDataSource.scala
new file mode 100644
index 000000000..09c14e4fc
--- /dev/null
+++ b/linkis-engineconn-plugins/spark/src/test/scala/org/apache/linkis/engineplugin/spark/datacalc/TestDataCalcDataSource.scala
@@ -0,0 +1,262 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineplugin.spark.datacalc
+
+import org.apache.linkis.datasourcemanager.common.domain.DataSource
+import org.apache.linkis.engineplugin.spark.datacalc.service.LinkisDataSourceContext
+import org.apache.linkis.server.BDPJettyServerHelper
+
+import org.junit.jupiter.api.{Assertions, Test}
+
+class TestDataCalcDataSource {
+
+  @Test
+  def testGetMySqlDataSource(): Unit = {
+    val datasource: DataSource =
+      BDPJettyServerHelper.jacksonJson.readValue(mysqlJson, classOf[DataSource])
+    val context = new LinkisDataSourceContext(datasource)
+    val source = context.getDataCalcDataSource()
+    println(source)
+    Assertions.assertNotNull(source)
+  }
+
+  @Test
+  def testGetPostgreSqlDataSource(): Unit = {
+    val datasource: DataSource =
+      BDPJettyServerHelper.jacksonJson.readValue(postgresqlJson, classOf[DataSource])
+    val context = new LinkisDataSourceContext(datasource)
+    val source = context.getDataCalcDataSource()
+    println(source)
+    Assertions.assertNotNull(source)
+  }
+
+  @Test
+  def testGetClickHouseDataSource(): Unit = {
+    val datasource: DataSource =
+      BDPJettyServerHelper.jacksonJson.readValue(clickhouseJson, classOf[DataSource])
+    val context = new LinkisDataSourceContext(datasource)
+    val source = context.getDataCalcDataSource()
+    println(source)
+    Assertions.assertNotNull(source)
+  }
+
+  @Test
+  def testGetOracleDataSource(): Unit = {
+    val datasource: DataSource =
+      BDPJettyServerHelper.jacksonJson.readValue(oracleJson, classOf[DataSource])
+    val context = new LinkisDataSourceContext(datasource)
+    val source = context.getDataCalcDataSource()
+    println(source)
+    Assertions.assertNotNull(source)
+  }
+
+  @Test
+  def testGetSqlServerDataSource(): Unit = {
+    val datasource: DataSource =
+      BDPJettyServerHelper.jacksonJson.readValue(sqlserverJson, classOf[DataSource])
+    val context = new LinkisDataSourceContext(datasource)
+    val source = context.getDataCalcDataSource()
+    println(source)
+    Assertions.assertNotNull(source)
+  }
+
+  @Test
+  def testGetDB2DataSource(): Unit = {
+    val datasource: DataSource =
+      BDPJettyServerHelper.jacksonJson.readValue(db2Json, classOf[DataSource])
+    val context = new LinkisDataSourceContext(datasource)
+    val source = context.getDataCalcDataSource()
+    println(source)
+    Assertions.assertNotNull(source)
+  }
+
+  val mysqlJson =
+    """
+      |{
+      |    "id": 1,
+      |    "dataSourceName": "test",
+      |    "dataSourceDesc": "测试数据源",
+      |    "dataSourceTypeId": 1,
+      |    "createSystem": "Linkis",
+      |    "connectParams": {
+      |        "username": "test_db_rw",
+      |        "password": "p@ssw0rd",
+      |        "databaseName": "test_db",
+      |        "port": "37001",
+      |        "host": "testdb-mysql.linkis.com",
+      |        "driverClassName": "com.mysql.cj.jdbc.Driver",
+      |        "params": "{\"params1\":\"value1\", \"params2\":\"value2\"}"
+      |    },
+      |    "createTime": 1663568239000,
+      |    "modifyTime": 1670853368000,
+      |    "modifyUser": "linkis",
+      |    "createUser": "linkis",
+      |    "versionId": 3,
+      |    "expire": false,
+      |    "dataSourceType": {
+      |        "name": "mysql",
+      |        "layers": 0
+      |    }
+      |}
+      |""".stripMargin
+
+  val postgresqlJson =
+    """
+      |{
+      |    "id": 1,
+      |    "dataSourceName": "test",
+      |    "dataSourceDesc": "测试数据源",
+      |    "dataSourceTypeId": 1,
+      |    "createSystem": "Linkis",
+      |    "connectParams": {
+      |        "username": "test_db_rw",
+      |        "password": "p@ssw0rd",
+      |        "databaseName": "test_db",
+      |        "port": "37001",
+      |        "host": "testdb-postgresql.linkis.com",
+      |        "params": "{\"params1\":\"value1\", \"params2\":\"value2\"}"
+      |    },
+      |    "createTime": 1663568239000,
+      |    "modifyTime": 1670853368000,
+      |    "modifyUser": "linkis",
+      |    "createUser": "linkis",
+      |    "versionId": 3,
+      |    "expire": false,
+      |    "dataSourceType": {
+      |        "name": "postgresql",
+      |        "layers": 0
+      |    }
+      |}
+      |""".stripMargin
+
+  val clickhouseJson =
+    """
+      |{
+      |    "id": 1,
+      |    "dataSourceName": "test",
+      |    "dataSourceDesc": "测试数据源",
+      |    "dataSourceTypeId": 1,
+      |    "createSystem": "Linkis",
+      |    "connectParams": {
+      |        "username": "test_db_rw",
+      |        "password": "p@ssw0rd",
+      |        "databaseName": "test_db",
+      |        "address": "server1,server2,server3",
+      |        "params": "{\"params1\":\"value1\", \"params2\":\"value2\"}"
+      |    },
+      |    "createTime": 1663568239000,
+      |    "modifyTime": 1670853368000,
+      |    "modifyUser": "linkis",
+      |    "createUser": "linkis",
+      |    "versionId": 3,
+      |    "expire": false,
+      |    "dataSourceType": {
+      |        "name": "clickhouse",
+      |        "layers": 0
+      |    }
+      |}
+      |""".stripMargin
+
+  val oracleJson =
+    """
+      |{
+      |    "id": 1,
+      |    "dataSourceName": "test",
+      |    "dataSourceDesc": "测试数据源",
+      |    "dataSourceTypeId": 1,
+      |    "createSystem": "Linkis",
+      |    "connectParams": {
+      |        "username": "test_db_rw",
+      |        "password": "p@ssw0rd",
+      |        "address": "testdb-oracle.linkis.com:5021",
+      |        "serviceName":"test.linkis.com",
+      |        "server":"server_test",
+      |        "params": "{\"params1\":\"value1\", \"params2\":\"value2\"}"
+      |    },
+      |    "createTime": 1663568239000,
+      |    "modifyTime": 1670853368000,
+      |    "modifyUser": "linkis",
+      |    "createUser": "linkis",
+      |    "versionId": 3,
+      |    "expire": false,
+      |    "dataSourceType": {
+      |        "name": "oracle",
+      |        "layers": 0
+      |    }
+      |}
+      |""".stripMargin
+
+  val sqlserverJson =
+    """
+      |{
+      |    "id": 1,
+      |    "dataSourceName": "test",
+      |    "dataSourceDesc": "测试数据源",
+      |    "dataSourceTypeId": 1,
+      |    "createSystem": "Linkis",
+      |    "connectParams": {
+      |        "username": "test_db_rw",
+      |        "password": "p@ssw0rd",
+      |        "databaseName": "test_db",
+      |        "port": "37001",
+      |        "host": "testdb-sqlserver.linkis.com",
+      |        "params": "{\"params1\":\"value1\", \"params2\":\"value2\"}"
+      |    },
+      |    "createTime": 1663568239000,
+      |    "modifyTime": 1670853368000,
+      |    "modifyUser": "linkis",
+      |    "createUser": "linkis",
+      |    "versionId": 3,
+      |    "expire": false,
+      |    "dataSourceType": {
+      |        "name": "sqlserver",
+      |        "layers": 0
+      |    }
+      |}
+      |""".stripMargin
+
+  val db2Json =
+    """
+      |{
+      |    "id": 1,
+      |    "dataSourceName": "test",
+      |    "dataSourceDesc": "测试数据源",
+      |    "dataSourceTypeId": 1,
+      |    "createSystem": "Linkis",
+      |    "connectParams": {
+      |        "username": "test_db_rw",
+      |        "password": "p@ssw0rd",
+      |        "databaseName": "test_db",
+      |        "port": "37001",
+      |        "host": "testdb-db2.linkis.com",
+      |        "params": "{\"params1\":\"value1\", \"params2\":\"value2\"}"
+      |    },
+      |    "createTime": 1663568239000,
+      |    "modifyTime": 1670853368000,
+      |    "modifyUser": "linkis",
+      |    "createUser": "linkis",
+      |    "versionId": 3,
+      |    "expire": false,
+      |    "dataSourceType": {
+      |        "name": "db2",
+      |        "layers": 0
+      |    }
+      |}
+      |""".stripMargin
+
+}
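
The tests above only print the result and assert non-null; a hedged sketch of tighter checks (the getter names on DataCalcDataSource are assumptions based on the model class this commit touches):

    // Assumed getters on DataCalcDataSource; adjust to the real model class.
    def assertMySqlDataSource(source: DataCalcDataSource): Unit = {
      Assertions.assertEquals("com.mysql.cj.jdbc.Driver", source.getDriver)
      Assertions.assertTrue(source.getUrl.startsWith("jdbc:mysql://"))
    }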
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-client/src/main/scala/org/apache/linkis/datasource/client/request/GetInfoPublishedByDataSourceNameAction.scala b/linkis-public-enhancements/linkis-datasource/linkis-datasource-client/src/main/scala/org/apache/linkis/datasource/client/request/GetInfoPublishedByDataSourceNameAction.scala
index b450d763f..0b75e178f 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-client/src/main/scala/org/apache/linkis/datasource/client/request/GetInfoPublishedByDataSourceNameAction.scala
+++ b/linkis-public-enhancements/linkis-datasource/linkis-datasource-client/src/main/scala/org/apache/linkis/datasource/client/request/GetInfoPublishedByDataSourceNameAction.scala
@@ -22,6 +22,8 @@ import org.apache.linkis.datasource.client.errorcode.DatasourceClientErrorCodeSu
 import org.apache.linkis.datasource.client.exception.DataSourceClientBuilderException
 import org.apache.linkis.httpclient.request.GetAction
 
+import org.apache.commons.lang3.StringUtils
+
 class GetInfoPublishedByDataSourceNameAction extends GetAction with DataSourceAction {
   private var dataSourceName: String = _
 
@@ -62,12 +64,14 @@ object GetInfoPublishedByDataSourceNameAction {
       if (dataSourceName == null) {
         throw new DataSourceClientBuilderException(DATASOURCENAME_NEEDED.getErrorDesc)
       }
-      if (system == null) throw new DataSourceClientBuilderException(SYSTEM_NEEDED.getErrorDesc)
+//      if (system == null) throw new DataSourceClientBuilderException(SYSTEM_NEEDED.getErrorDesc)
       if (user == null) throw new DataSourceClientBuilderException(USER_NEEDED.getErrorDesc)
 
       val getPublishedInfoByDataSourceNameAction = new GetInfoPublishedByDataSourceNameAction
       getPublishedInfoByDataSourceNameAction.dataSourceName = this.dataSourceName
-      getPublishedInfoByDataSourceNameAction.setParameter("system", system)
+      if (StringUtils.isNotBlank(system)) {
+        getPublishedInfoByDataSourceNameAction.setParameter("system", system)
+      }
       getPublishedInfoByDataSourceNameAction.setUser(user)
       getPublishedInfoByDataSourceNameAction
     }
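
After this change, callers may omit "system" entirely; it is sent only when non-blank. A usage sketch, assuming the builder setter names (they are not shown in this hunk):

    // Setter names are assumptions; only dataSourceName and user are mandatory now.
    val action = GetInfoPublishedByDataSourceNameAction
      .builder()
      .setDataSourceName("test")
      .setUser("linkis")
      .build()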


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org