Posted to commits@seatunnel.apache.org by ga...@apache.org on 2023/05/16 09:21:00 UTC

[incubator-seatunnel-web] branch add_canvas_job_define updated: [Improve][code style] Add spotless plugin to seatunnel web (#56)

This is an automated email from the ASF dual-hosted git repository.

gaojun2048 pushed a commit to branch add_canvas_job_define
in repository https://gitbox.apache.org/repos/asf/incubator-seatunnel-web.git


The following commit(s) were added to refs/heads/add_canvas_job_define by this push:
     new 21a195c8 [Improve][code style] Add spotless plugin to seatunnel web (#56)
21a195c8 is described below

commit 21a195c824da75ce1049143036661da43ad36ba9
Author: Eric <ga...@gmail.com>
AuthorDate: Tue May 16 17:20:54 2023 +0800

    [Improve][code style] Add spotless plugin to seatunnel web (#56)
---
 .github/workflows/backend.yml                      |   8 +-
 pom.xml                                            | 241 ++++++---
 .../datasource/AbstractDataSourceClient.java       |  68 +--
 .../datasource/service/DataSourceService.java      |  36 +-
 .../datasource/s3/S3DatasourceChannelTest.java     |  29 +-
 .../seatunnel/datasource/DataSourceClientTest.java |  31 +-
 .../ElasticSearchDataSourceChannel.java            |  45 +-
 .../plugin/elasticsearch/client/EsRestClient.java  | 246 ++++-----
 .../plugin/elasticsearch/client/SSLUtils.java      |  40 +-
 .../ElasticSearchDataSourceChannelTest.java        |  66 +--
 .../ElasticSearchDataSourceFactoryTest.java        |   4 +-
 .../jdbc/ClickhouseDataSourceConfig.java           |  42 +-
 .../jdbc/ClickhouseJdbcDataSourceChannel.java      |  55 +-
 .../clickhouse/jdbc/ClickhouseOptionRule.java      |  28 +-
 .../hive/jdbc/HiveJdbcDataSourceChannel.java       |  45 +-
 .../plugin/mysql/jdbc/MysqlDataSourceConfig.java   |  24 +-
 .../mysql/jdbc/MysqlJdbcDataSourceChannel.java     |  65 +--
 .../plugin/mysql/jdbc/MysqlOptionRule.java         |  28 +-
 .../oracle/jdbc/OracleDataSourceChannel.java       |  53 +-
 .../plugin/oracle/jdbc/OracleDataSourceConfig.java |  28 +-
 .../plugin/oracle/jdbc/OracleOptionRule.java       |  24 +-
 .../jdbc/PostgresqlDataSourceChannel.java          |  59 ++-
 .../jdbc/PostgresqlDataSourceConfig.java           |  46 +-
 .../postgresql/jdbc/PostgresqlOptionRule.java      |  28 +-
 .../redshift/jdbc/RedshiftDataSourceChannel.java   |  57 +-
 .../redshift/jdbc/RedshiftDataSourceConfig.java    |  46 +-
 .../plugin/redshift/jdbc/RedshiftOptionRule.java   |  28 +-
 .../sqlserver/jdbc/SqlServerDataSourceChannel.java |  53 +-
 .../sqlserver/jdbc/SqlServerDataSourceConfig.java  |  42 +-
 .../plugin/sqlserver/jdbc/SqlServerOptionRule.java |  26 +-
 .../starrocks/jdbc/StarRocksDataSourceConfig.java  |  28 +-
 .../jdbc/StarRocksJdbcDataSourceChannel.java       |  57 +-
 .../plugin/starrocks/jdbc/StarRocksOptionRule.java |  28 +-
 .../plugin/tidb/jdbc/TidbDataSourceConfig.java     |  24 +-
 .../tidb/jdbc/TidbJdbcDataSourceChannel.java       |  65 +--
 .../plugin/tidb/jdbc/TidbOptionRule.java           |  28 +-
 .../plugin/kafka/KafkaDataSourceChannel.java       |  41 +-
 .../plugin/kafka/KafkaDataSourceFactory.java       |  14 +-
 .../plugin/kafka/KafkaRequestParamsUtils.java      |  30 +-
 .../plugin/kafka/KafkaDataSourceChannelTest.java   |  37 +-
 .../plugin/kafka/KafkaRequestParamsUtilsTest.java  |  49 +-
 .../cdc/mysql/MysqlCDCDataSourceChannel.java       |  53 +-
 .../plugin/api/DataSourcePluginInfo.java           |  14 +-
 .../plugin/api/common/ParamtersUtils.java          |   8 +-
 .../plugin/redshift/s3/HadoopS3AConfiguration.java |  55 +-
 .../redshift/s3/S3RedshiftDataSourceChannel.java   |  83 +--
 .../redshift/s3/S3RedshiftDataSourceFactory.java   |  16 +-
 .../plugin/s3/HadoopS3AConfiguration.java          |  41 +-
 .../datasource/plugin/s3/S3DataSourceFactory.java  |  14 +-
 .../datasource/plugin/s3/S3DatasourceChannel.java  |  31 +-
 .../sqlserver/SqlServerCDCDataSourceChannel.java   |  65 +--
 .../sqlserver/SqlServerCDCDataSourceFactory.java   |   2 +-
 .../test/TestSqlServerCDCDataSourceChannel.java    |  13 +-
 .../plugin/starrocks/StarRocksCatalog.java         |  67 +--
 .../starrocks/StarRocksDataSourceChannel.java      |  35 +-
 .../seatunnel-datasource-plugins/pom.xml           |   1 -
 seatunnel-server/pom.xml                           |   9 +-
 seatunnel-server/seatunnel-app/pom.xml             |  23 +-
 .../apache/seatunnel/app/SeatunnelApplication.java |   3 +-
 .../seatunnel/app/adapter/SeatunnelWebAdapter.java |  23 +-
 .../apache/seatunnel/app/aspect/LogoutAspect.java  |  17 +-
 .../org/apache/seatunnel/app/aspect/UserId.java    |   3 +-
 .../apache/seatunnel/app/common/RoleTypeEnum.java  |   2 +-
 .../app/common/ScriptParamStatusEnum.java          |   2 +-
 .../seatunnel/app/common/ScriptStatusEnum.java     |   2 +-
 .../seatunnel/app/common/ScriptTypeEnum.java       |   2 +-
 .../app/common/SeaTunnelConnectorI18n.java         |  17 +-
 .../seatunnel/app/common/UserStatusEnum.java       |   2 +-
 .../apache/seatunnel/app/common/UserTypeEnum.java  |   2 +-
 .../org/apache/seatunnel/app/config/Swagger2.java  |   4 +-
 .../seatunnel/app/controller/AuthController.java   |  14 +-
 .../seatunnel/app/controller/ScriptController.java |  58 +-
 .../seatunnel/app/controller/TaskController.java   |  46 +-
 .../seatunnel/app/controller/UserController.java   |  33 +-
 .../seatunnel/app/dal/dao/impl/RoleDaoImpl.java    |   6 +-
 .../app/dal/dao/impl/RoleUserRelationDaoImpl.java  |   9 +-
 .../app/dal/dao/impl/SchedulerConfigDaoImpl.java   |   3 +-
 .../seatunnel/app/dal/dao/impl/ScriptDaoImpl.java  |  25 +-
 .../app/dal/dao/impl/ScriptJobApplyDaoImpl.java    |   3 +-
 .../app/dal/dao/impl/ScriptParamDaoImpl.java       |  23 +-
 .../seatunnel/app/dal/dao/impl/UserDaoImpl.java    |  20 +-
 .../app/dal/mapper/RoleUserRelationMapper.java     |   6 +-
 .../seatunnel/app/dal/mapper/ScriptMapper.java     |  14 +-
 .../app/dal/mapper/UserLoginLogMapper.java         |   4 -
 .../seatunnel/app/dal/mapper/UserMapper.java       |   6 +-
 .../seatunnel/app/domain/request/BasePageReq.java  |   1 +
 .../app/domain/request/script/CreateScriptReq.java |  15 +-
 .../domain/request/script/PublishScriptReq.java    |   7 +-
 .../request/script/UpdateScriptContentReq.java     |  11 +-
 .../request/script/UpdateScriptParamReq.java       |   1 +
 .../app/domain/request/task/ExecuteReq.java        |  20 +-
 .../app/domain/request/task/RecycleScriptReq.java  |   7 +-
 .../app/domain/request/user/AddUserReq.java        |   3 +-
 .../app/domain/request/user/BaseUserReq.java       |   9 +-
 .../app/domain/request/user/UpdateUserReq.java     |   5 +-
 .../seatunnel/app/domain/response/PageInfo.java    |   3 +-
 .../domain/response/script/BaseScriptInfoRes.java  |   7 +
 .../domain/response/script/ScriptFullInfoRes.java  |   2 +-
 .../app/domain/response/script/ScriptParamRes.java |   2 +
 .../response/script/ScriptSimpleInfoRes.java       |   3 +-
 .../response/task/InstanceSimpleInfoRes.java       |  10 +
 .../app/domain/response/task/JobSimpleInfoRes.java |   9 +
 .../app/domain/response/user/BaseUserInfoRes.java  |   6 +
 .../app/interceptor/AuthenticationInterceptor.java |  35 +-
 .../app/resolver/UserIdMethodArgumentResolver.java |  12 +-
 .../apache/seatunnel/app/security/JwtUtils.java    |  18 +-
 .../app/service/impl/RoleServiceImpl.java          |  38 +-
 .../app/service/impl/ScriptServiceImpl.java        | 110 ++--
 .../app/service/impl/TaskServiceImpl.java          | 333 +++++++-----
 .../app/service/impl/UserServiceImpl.java          |  69 +--
 .../AbstractDataSourceConfigSwitcher.java          | 198 +++----
 .../datasource/DataSourceConfigSwitcher.java       |  39 +-
 .../datasource/DataSourceConfigSwitcherUtils.java  |  74 +--
 .../app/thridparty/datasource/SchemaGenerator.java |  50 +-
 .../impl/BaseJdbcDataSourceConfigSwitcher.java     | 102 ++--
 .../impl/ClickhouseDataSourceConfigSwitcher.java   | 127 +++--
 .../ElasticSearchDataSourceConfigSwitcher.java     |  89 ++--
 .../impl/KafkaDataSourceConfigSwitcher.java        |  97 ++--
 .../impl/MysqlCDCDataSourceConfigSwitcher.java     | 133 +++--
 .../impl/MysqlDatasourceConfigSwitcher.java        |   3 +-
 .../impl/PostgresCDCDataSourceConfigSwitcher.java  | 143 +++--
 .../impl/PostgresqlDataSourceConfigSwitcher.java   |   9 +-
 .../impl/RedshiftDataSourceConfigSwitcher.java     |   9 +-
 .../impl/S3DataSourceConfigSwitcher.java           | 238 ++++-----
 .../impl/S3RedshiftDataSourceConfigSwitcher.java   |  65 ++-
 .../impl/SqlServerCDCDataSourceConfigSwitcher.java | 143 +++--
 .../impl/SqlServerDataSourceConfigSwitcher.java    |   5 +-
 .../impl/StarRocksDataSourceConfigSwitcher.java    |  79 ++-
 .../impl/TidbDataSourceConfigSwitcher.java         |  59 +--
 .../app/thridparty/framework/FormOptionSort.java   |  47 +-
 .../thridparty/framework/PluginDiscoveryUtil.java  |  80 ++-
 .../framework/SeaTunnelOptionRuleWrapper.java      | 492 ++++++++---------
 .../app/utils/GlobalExceptionHandler.java          |  10 +-
 .../org/apache/seatunnel/app/utils/JdbcUtils.java  |   4 +-
 .../org/apache/seatunnel/app/utils/Md5Utils.java   |   3 +-
 .../apache/seatunnel/app/utils/PasswordUtils.java  |   2 +-
 .../seatunnel/app/WebMvcApplicationTest.java       |   7 +-
 .../app/controller/UserControllerTest.java         |  41 +-
 seatunnel-server/seatunnel-dynamicform/pom.xml     |   2 +-
 .../app/dynamicforms/AbstractFormOption.java       |   7 +-
 .../app/dynamicforms/AbstractFormSelectOption.java |   8 +-
 .../app/dynamicforms/DynamicSelectOption.java      |   4 +-
 .../app/dynamicforms/FormInputOption.java          |   8 +-
 .../seatunnel/app/dynamicforms/FormLocale.java     |   7 +-
 .../app/dynamicforms/FormOptionBuilder.java        |  29 +-
 .../app/dynamicforms/FormStructureValidate.java    | 317 ++++++-----
 .../app/dynamicforms/StaticSelectOption.java       |   6 +-
 .../dynamicforms/validate/AbstractValidate.java    |   7 +-
 .../validate/MutuallyExclusiveValidate.java        |   4 +-
 .../validate/UnionNonEmptyValidate.java            |  10 +-
 .../app/dynamicforms/FormStructureBuilderTest.java | 471 ++++++++---------
 seatunnel-server/seatunnel-scheduler/pom.xml       |   9 +-
 .../seatunnel-scheduler-dolphinscheduler/pom.xml   |   9 +-
 .../dolphinscheduler/IDolphinschedulerService.java |   3 +-
 .../constants/DolphinschedulerConstants.java       |  21 +-
 .../dto/ListProcessDefinitionDto.java              |   3 +-
 .../dto/ListProcessInstanceDto.java                |   3 +-
 .../dolphinscheduler/dto/ListTaskInstanceDto.java  |   3 +-
 .../dolphinscheduler/dto/ProcessDefinitionDto.java |   7 +-
 .../dolphinscheduler/dto/ProcessInstanceDto.java   |  11 +-
 .../dolphinscheduler/dto/ResourceDto.java          |   4 +-
 .../dolphinscheduler/dto/SchedulerDto.java         |  10 +-
 .../dolphinscheduler/dto/TaskInstanceDto.java      |  10 +-
 .../dolphinscheduler/enums/ReleaseStateEnum.java   |   2 +-
 .../impl/DolphinschedulerServiceImpl.java          | 587 +++++++++++++--------
 .../dolphinscheduler/impl/InstanceServiceImpl.java |  43 +-
 .../dolphinscheduler/impl/JobServiceImpl.java      | 203 ++++---
 .../dolphinscheduler/utils/HttpUtils.java          |  11 +-
 .../dolphinscheduler/utils/StatusUtils.java        |   1 -
 seatunnel-server/seatunnel-server-common/pom.xml   |   9 +-
 .../apache/seatunnel/server/common/DateUtils.java  |  11 +-
 .../server/common/SeatunnelErrorEnum.java          |  43 +-
 .../server/common/SeatunnelException.java          |   4 +-
 seatunnel-server/seatunnel-spi/pom.xml             |   9 +-
 .../seatunnel/spi/scheduler/dto/ExecuteDto.java    |   1 -
 .../spi/scheduler/enums/ExecuteTypeEnum.java       |   4 +-
 seatunnel-web-dist/pom.xml                         |  11 +-
 177 files changed, 4213 insertions(+), 3654 deletions(-)
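
Note on running the new checks locally: the backend.yml diff below switches CI from checkstyle:check to spotless:check, so the same verification can be reproduced with the Maven wrapper, and most violations can normally be fixed automatically with the plugin's apply goal (standard spotless-maven-plugin goals; the flags simply mirror the CI invocation):

    ./mvnw --batch-mode --quiet --no-snapshot-updates clean spotless:check   # verify formatting, as CI does
    ./mvnw spotless:apply                                                    # reformat sources in place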

diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml
index eea3507e..201cee7e 100644
--- a/.github/workflows/backend.yml
+++ b/.github/workflows/backend.yml
@@ -49,8 +49,14 @@ jobs:
       - uses: actions/checkout@v3
         with:
           submodules: true
+      - name: Set up JDK ${{ matrix.java }}
+        uses: actions/setup-java@v3
+        with:
+          java-version: 8
+          distribution: 'temurin'
+          cache: 'maven'
       - name: Check code style
-        run: ./mvnw --batch-mode --quiet --no-snapshot-updates clean checkstyle:check
+        run: ./mvnw --batch-mode --quiet --no-snapshot-updates clean spotless:check
 
   dead-link:
     if: github.repository == 'apache/incubator-seatunnel-web'
diff --git a/pom.xml b/pom.xml
index c015a38d..ef0fc19a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -13,8 +13,7 @@
     See the License for the specific language governing permissions and
     limitations under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
@@ -26,55 +25,17 @@
 
     <groupId>org.apache.seatunnel</groupId>
     <artifactId>seatunnel-web</artifactId>
-    <packaging>pom</packaging>
     <version>${revision}</version>
+    <packaging>pom</packaging>
 
     <name>SeaTunnel</name>
 
-    <description>
-        Production ready big data processing product based on Apache Spark and Apache Flink.
-    </description>
-
-    <url>https://github.com/apache/incubator-seatunnel</url>
-
-    <licenses>
-        <license>
-            <name>The Apache License, Version 2.0</name>
-            <url>https://www.apache.org/licenses/LICENSE-2.0.txt</url>
-        </license>
-    </licenses>
-
-    <scm>
-        <connection>scm:git:https://github.com/apache/incubator-seatunnel.git</connection>
-        <developerConnection>scm:git:https://github.com/apache/incubator-seatunnel.git</developerConnection>
-        <url>https://github.com/apache/incubator-seatunnel</url>
-        <tag>HEAD</tag>
-    </scm>
-
-    <issueManagement>
-        <system>GitHub</system>
-        <url>https://github.com/apache/incubator-seatunnel/issues</url>
-    </issueManagement>
-
-    <mailingLists>
-        <mailingList>
-            <name>SeaTunnel Developer List</name>
-            <post>dev@seatunnel.apache.org</post>
-            <subscribe>dev-subscribe@seatunnel.apache.org</subscribe>
-            <unsubscribe>dev-unsubscribe@seatunnel.apache.org</unsubscribe>
-        </mailingList>
-        <mailingList>
-            <name>SeaTunnel Commits List</name>
-            <post>commits@seatunnel.apache.org</post>
-            <subscribe>commits-subscribe@seatunnel.apache.org</subscribe>
-            <unsubscribe>commits-unsubscribe@seatunnel.apache.org</unsubscribe>
-        </mailingList>
-    </mailingLists>
+    <description>Production ready big data processing product based on Apache Spark and Apache Flink.</description>
 
     <modules>
-	    <module>seatunnel-server</module>
-	    <module>seatunnel-datasource</module>
-	    <module>seatunnel-web-dist</module>
+        <module>seatunnel-server</module>
+        <module>seatunnel-datasource</module>
+        <module>seatunnel-web-dist</module>
     </modules>
 
     <properties>
@@ -143,6 +104,7 @@
         <hadoop-uber.version>2.3.1</hadoop-uber.version>
         <hadoop-aws.version>3.1.4</hadoop-aws.version>
         <aws-java-sdk-bundle.version>1.11.271</aws-java-sdk-bundle.version>
+        <spotless.version>2.29.0</spotless.version>
     </properties>
 
     <dependencyManagement>
@@ -154,24 +116,24 @@
                 <version>${seatunnel-framework.version}</version>
                 <exclusions>
                     <exclusion>
-                        <artifactId>jcl-over-slf4j</artifactId>
                         <groupId>org.slf4j</groupId>
+                        <artifactId>jcl-over-slf4j</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>log4j-1.2-api</artifactId>
                         <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-1.2-api</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>log4j-api</artifactId>
                         <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-api</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>log4j-core</artifactId>
                         <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-core</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>log4j-slf4j-impl</artifactId>
                         <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-slf4j-impl</artifactId>
                     </exclusion>
                 </exclusions>
             </dependency>
@@ -187,28 +149,28 @@
                         <artifactId>jackson-dataformat-properties</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>slf4j-log4j12</artifactId>
                         <groupId>org.slf4j</groupId>
+                        <artifactId>slf4j-log4j12</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>log4j-1.2-api</artifactId>
                         <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-1.2-api</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>jcl-over-slf4j</artifactId>
                         <groupId>org.slf4j</groupId>
+                        <artifactId>jcl-over-slf4j</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>log4j-api</artifactId>
                         <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-api</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>log4j-core</artifactId>
                         <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-core</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>log4j-slf4j-impl</artifactId>
                         <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-slf4j-impl</artifactId>
                     </exclusion>
                 </exclusions>
             </dependency>
@@ -219,24 +181,24 @@
                 <version>${seatunnel-framework.version}</version>
                 <exclusions>
                     <exclusion>
-                        <artifactId>jcl-over-slf4j</artifactId>
                         <groupId>org.slf4j</groupId>
+                        <artifactId>jcl-over-slf4j</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>log4j-1.2-api</artifactId>
                         <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-1.2-api</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>log4j-api</artifactId>
                         <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-api</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>log4j-core</artifactId>
                         <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-core</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>log4j-slf4j-impl</artifactId>
                         <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-slf4j-impl</artifactId>
                     </exclusion>
                 </exclusions>
             </dependency>
@@ -260,24 +222,24 @@
                 <version>${seatunnel-framework.version}</version>
                 <exclusions>
                     <exclusion>
-                        <artifactId>jcl-over-slf4j</artifactId>
                         <groupId>org.slf4j</groupId>
+                        <artifactId>jcl-over-slf4j</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>log4j-1.2-api</artifactId>
                         <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-1.2-api</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>log4j-api</artifactId>
                         <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-api</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>log4j-core</artifactId>
                         <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-core</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>log4j-slf4j-impl</artifactId>
                         <groupId>org.apache.logging.log4j</groupId>
+                        <artifactId>log4j-slf4j-impl</artifactId>
                     </exclusion>
                 </exclusions>
             </dependency>
@@ -344,8 +306,8 @@
                         <artifactId>spring-boot-starter-jdbc</artifactId>
                     </exclusion>
                     <exclusion>
-                        <artifactId>spring-boot-autoconfigure</artifactId>
                         <groupId>org.springframework.boot</groupId>
+                        <artifactId>spring-boot-autoconfigure</artifactId>
                     </exclusion>
                 </exclusions>
             </dependency>
@@ -502,8 +464,8 @@
             <dependency>
                 <groupId>org.apache.seatunnel</groupId>
                 <artifactId>seatunnel-hadoop3-3.1.4-uber</artifactId>
-                <scope>provided</scope>
                 <version>${hadoop-uber.version}</version>
+                <scope>provided</scope>
                 <exclusions>
                     <exclusion>
                         <groupId>org.apache.avro</groupId>
@@ -535,8 +497,8 @@
                 <version>${cron-utils.version}</version>
                 <exclusions>
                     <exclusion>
-                        <artifactId>javassist</artifactId>
                         <groupId>org.javassist</groupId>
+                        <artifactId>javassist</artifactId>
                     </exclusion>
                 </exclusions>
             </dependency>
@@ -593,8 +555,7 @@
                     <configuration>
                         <skip>${skipUT}</skip>
                         <systemPropertyVariables>
-                            <jacoco-agent.destfile>${project.build.directory}/jacoco.exec
-                            </jacoco-agent.destfile>
+                            <jacoco-agent.destfile>${project.build.directory}/jacoco.exec</jacoco-agent.destfile>
                         </systemPropertyVariables>
                         <excludes>
                             <exclude>**/*IT.java</exclude>
@@ -602,7 +563,6 @@
                     </configuration>
                 </plugin>
 
-
                 <!-- assembly -->
                 <plugin>
                     <groupId>org.apache.maven.plugins</groupId>
@@ -664,8 +624,7 @@
                     <version>${maven-checkstyle-plugin.version}</version>
                     <configuration>
                         <!--suppress UnresolvedMavenProperty -->
-                        <configLocation>${maven.multiModuleProjectDirectory}/tools/checkstyle/checkStyle.xml
-                        </configLocation>
+                        <configLocation>${maven.multiModuleProjectDirectory}/tools/checkstyle/checkStyle.xml</configLocation>
                         <encoding>UTF-8</encoding>
                         <consoleOutput>true</consoleOutput>
                         <includeTestSourceDirectory>true</includeTestSourceDirectory>
@@ -674,28 +633,23 @@
                             <sourceDirectory>${project.build.sourceDirectory}</sourceDirectory>
                             <sourceDirectory>${project.build.testSourceDirectory}</sourceDirectory>
                         </sourceDirectories>
-                        <resourceIncludes>
-                            **/*.properties,
+                        <resourceIncludes>**/*.properties,
                             **/*.sh,
                             **/*.bat,
                             **/*.yml,
                             **/*.yaml,
-                            **/*.xml
-                        </resourceIncludes>
-                        <resourceExcludes>
-                            **/.asf.yaml,
-                            **/.github/**
-                        </resourceExcludes>
-                        <excludes>
-                        </excludes>
+                            **/*.xml</resourceIncludes>
+                        <resourceExcludes>**/.asf.yaml,
+                            **/.github/**</resourceExcludes>
+                        <excludes />
                     </configuration>
                     <executions>
                         <execution>
                             <id>validate</id>
-                            <phase>process-sources</phase>
                             <goals>
                                 <goal>check</goal>
                             </goals>
+                            <phase>process-sources</phase>
                         </execution>
                     </executions>
                 </plugin>
@@ -737,7 +691,6 @@
                     </executions>
                 </plugin>
 
-
                 <plugin>
                     <groupId>org.codehaus.mojo</groupId>
                     <artifactId>build-helper-maven-plugin</artifactId>
@@ -791,6 +744,81 @@
                     <artifactId>maven-dependency-plugin</artifactId>
                     <version>${maven-dependency-plugin.version}</version>
                 </plugin>
+
+                <plugin>
+                    <groupId>com.diffplug.spotless</groupId>
+                    <artifactId>spotless-maven-plugin</artifactId>
+                    <version>${spotless.version}</version>
+                    <configuration>
+                        <java>
+                            <googleJavaFormat>
+                                <version>1.7</version>
+                                <style>AOSP</style>
+                            </googleJavaFormat>
+                            <removeUnusedImports />
+                            <formatAnnotations />
+                            <importOrder>
+                                <order>org.apache.seatunnel.shade,org.apache.seatunnel,org.apache,org,,javax,java,\#</order>
+                            </importOrder>
+                            <replaceRegex>
+                                <name>Remove wildcard imports</name>
+                                <searchRegex>import\s+(static)*\s*[^\*\s]+\*;(\r\n|\r|\n)</searchRegex>
+                                <replacement>$1</replacement>
+                            </replaceRegex>
+                            <replaceRegex>
+                                <name>Block powermock</name>
+                                <searchRegex>import\s+org\.powermock\.[^\*\s]*(|\*);(\r\n|\r|\n)</searchRegex>
+                                <replacement>$1</replacement>
+                            </replaceRegex>
+                            <replaceRegex>
+                                <name>Block jUnit4 imports</name>
+                                <searchRegex>import\s+org\.junit\.[^jupiter][^\*\s]*(|\*);(\r\n|\r|\n)</searchRegex>
+                                <replacement>$1</replacement>
+                            </replaceRegex>
+                        </java>
+                        <pom>
+                            <sortPom>
+                                <encoding>UTF-8</encoding>
+                                <nrOfIndentSpace>4</nrOfIndentSpace>
+                                <keepBlankLines>true</keepBlankLines>
+                                <indentBlankLines>false</indentBlankLines>
+                                <indentSchemaLocation>true</indentSchemaLocation>
+                                <spaceBeforeCloseEmptyElement>true</spaceBeforeCloseEmptyElement>
+                                <sortModules>false</sortModules>
+                                <sortExecutions>false</sortExecutions>
+                                <predefinedSortOrder>custom_1</predefinedSortOrder>
+                                <expandEmptyElements>false</expandEmptyElements>
+                                <sortProperties>false</sortProperties>
+                            </sortPom>
+                            <replace>
+                                <name>Leading blank line</name>
+                                <search>project</search>
+                                <replacement>project</replacement>
+                            </replace>
+                        </pom>
+                        <markdown>
+                            <includes>
+                                <include>docs/**/*.md</include>
+                            </includes>
+                            <excludes>
+                                <exclude>**/.github/**/*.md</exclude>
+                            </excludes>
+                            <flexmark />
+                        </markdown>
+                        <upToDateChecking>
+                            <enabled>true</enabled>
+                        </upToDateChecking>
+                    </configuration>
+                    <executions>
+                        <execution>
+                            <goals>
+                                <goal>check</goal>
+                            </goals>
+                            <phase>compile</phase>
+                        </execution>
+                    </executions>
+                </plugin>
+
             </plugins>
         </pluginManagement>
 
@@ -837,7 +865,48 @@
                 <groupId>org.codehaus.mojo</groupId>
                 <artifactId>license-maven-plugin</artifactId>
             </plugin>
+
+            <plugin>
+                <groupId>com.diffplug.spotless</groupId>
+                <artifactId>spotless-maven-plugin</artifactId>
+            </plugin>
         </plugins>
     </build>
 
+    <url>https://github.com/apache/incubator-seatunnel</url>
+
+    <licenses>
+        <license>
+            <name>The Apache License, Version 2.0</name>
+            <url>https://www.apache.org/licenses/LICENSE-2.0.txt</url>
+        </license>
+    </licenses>
+
+    <mailingLists>
+        <mailingList>
+            <name>SeaTunnel Developer List</name>
+            <subscribe>dev-subscribe@seatunnel.apache.org</subscribe>
+            <unsubscribe>dev-unsubscribe@seatunnel.apache.org</unsubscribe>
+            <post>dev@seatunnel.apache.org</post>
+        </mailingList>
+        <mailingList>
+            <name>SeaTunnel Commits List</name>
+            <subscribe>commits-subscribe@seatunnel.apache.org</subscribe>
+            <unsubscribe>commits-unsubscribe@seatunnel.apache.org</unsubscribe>
+            <post>commits@seatunnel.apache.org</post>
+        </mailingList>
+    </mailingLists>
+
+    <scm>
+        <connection>scm:git:https://github.com/apache/incubator-seatunnel.git</connection>
+        <developerConnection>scm:git:https://github.com/apache/incubator-seatunnel.git</developerConnection>
+        <url>https://github.com/apache/incubator-seatunnel</url>
+        <tag>HEAD</tag>
+    </scm>
+
+    <issueManagement>
+        <system>GitHub</system>
+        <url>https://github.com/apache/incubator-seatunnel/issues</url>
+    </issueManagement>
+
 </project>
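
For reference, the importOrder value added above groups imports in this order: org.apache.seatunnel.shade, org.apache.seatunnel, org.apache, org, all other packages, javax, java, and static imports last, with a blank line between groups. A minimal illustrative sketch follows (class names are taken from the diffs below; the empty wrapper class is hypothetical and only there so the unit compiles):

    import org.apache.seatunnel.shade.com.typesafe.config.Config; // group 1: org.apache.seatunnel.shade

    import org.apache.seatunnel.api.configuration.util.OptionRule; // group 2: org.apache.seatunnel

    import org.apache.commons.lang3.StringUtils; // group 3: org.apache

    import org.elasticsearch.client.RestClient; // group 4: org

    import lombok.NonNull; // group 5: catch-all for everything else

    import javax.net.ssl.SSLContext; // group 6: javax

    import java.util.Map; // group 7: java

    import static com.google.common.base.Preconditions.checkNotNull; // group 8: static imports

    public class ImportOrderExample {} // hypothetical wrapper class, not part of the commit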
diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/AbstractDataSourceClient.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/AbstractDataSourceClient.java
index 7cc2783e..b9e3b4f3 100644
--- a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/AbstractDataSourceClient.java
+++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/AbstractDataSourceClient.java
@@ -17,8 +17,6 @@
 
 package org.apache.seatunnel.datasource;
 
-import static com.google.common.base.Preconditions.checkNotNull;
-
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.datasource.exception.DataSourceSDKException;
 import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
@@ -34,6 +32,8 @@ import java.util.Map;
 import java.util.ServiceLoader;
 import java.util.concurrent.atomic.AtomicInteger;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
 public abstract class AbstractDataSourceClient implements DataSourceService {
 
     private Map<String, DataSourcePluginInfo> supportedDataSourceInfo = new HashMap<>();
@@ -47,23 +47,23 @@ public abstract class AbstractDataSourceClient implements DataSourceService {
     protected AbstractDataSourceClient() {
         AtomicInteger dataSourceIndex = new AtomicInteger();
         ServiceLoader.load(DataSourceFactory.class)
-            .forEach(
-                seaTunnelDataSourceFactory -> {
-                    seaTunnelDataSourceFactory
-                        .supportedDataSources()
-                        .forEach(
-                            dataSourceInfo -> {
-                                supportedDataSourceInfo.put(
-                                    dataSourceInfo.getName().toUpperCase(),
-                                    dataSourceInfo);
-                                supportedDataSourceIndex.put(
-                                    dataSourceInfo.getName().toUpperCase(),
-                                    dataSourceIndex.get());
-                                supportedDataSources.add(dataSourceInfo);
-                            });
-                    dataSourceChannels.add(seaTunnelDataSourceFactory.createChannel());
-                    dataSourceIndex.getAndIncrement();
-                });
+                .forEach(
+                        seaTunnelDataSourceFactory -> {
+                            seaTunnelDataSourceFactory
+                                    .supportedDataSources()
+                                    .forEach(
+                                            dataSourceInfo -> {
+                                                supportedDataSourceInfo.put(
+                                                        dataSourceInfo.getName().toUpperCase(),
+                                                        dataSourceInfo);
+                                                supportedDataSourceIndex.put(
+                                                        dataSourceInfo.getName().toUpperCase(),
+                                                        dataSourceIndex.get());
+                                                supportedDataSources.add(dataSourceInfo);
+                                            });
+                            dataSourceChannels.add(seaTunnelDataSourceFactory.createChannel());
+                            dataSourceIndex.getAndIncrement();
+                        });
         if (supportedDataSourceInfo.isEmpty()) {
             throw new DataSourceSDKException("No supported data source found");
         }
@@ -71,9 +71,9 @@ public abstract class AbstractDataSourceClient implements DataSourceService {
 
     @Override
     public Boolean checkDataSourceConnectivity(
-        String pluginName, Map<String, String> dataSourceParams) {
+            String pluginName, Map<String, String> dataSourceParams) {
         return getDataSourceChannel(pluginName)
-            .checkDataSourceConnectivity(pluginName, dataSourceParams);
+                .checkDataSourceConnectivity(pluginName, dataSourceParams);
     }
 
     @Override
@@ -86,7 +86,7 @@ public abstract class AbstractDataSourceClient implements DataSourceService {
         Integer index = supportedDataSourceIndex.get(pluginName.toUpperCase());
         if (index == null) {
             throw new DataSourceSDKException(
-                "The %s plugin is not supported or plugin not exist.", pluginName);
+                    "The %s plugin is not supported or plugin not exist.", pluginName);
         }
         return dataSourceChannels.get(index);
     }
@@ -99,12 +99,12 @@ public abstract class AbstractDataSourceClient implements DataSourceService {
     @Override
     public OptionRule queryMetadataFieldByName(String pluginName) {
         return getDataSourceChannel(pluginName)
-            .getDatasourceMetadataFieldsByDataSourceName(pluginName);
+                .getDatasourceMetadataFieldsByDataSourceName(pluginName);
     }
 
     @Override
     public List<String> getTables(
-        String pluginName, String databaseName, Map<String, String> requestParams) {
+            String pluginName, String databaseName, Map<String, String> requestParams) {
         return getDataSourceChannel(pluginName).getTables(pluginName, requestParams, databaseName);
     }
 
@@ -115,21 +115,21 @@ public abstract class AbstractDataSourceClient implements DataSourceService {
 
     @Override
     public List<TableField> getTableFields(
-        String pluginName,
-        Map<String, String> requestParams,
-        String databaseName,
-        String tableName) {
+            String pluginName,
+            Map<String, String> requestParams,
+            String databaseName,
+            String tableName) {
         return getDataSourceChannel(pluginName)
-            .getTableFields(pluginName, requestParams, databaseName, tableName);
+                .getTableFields(pluginName, requestParams, databaseName, tableName);
     }
 
     @Override
     public Map<String, List<TableField>> getTableFields(
-        String pluginName,
-        Map<String, String> requestParams,
-        String databaseName,
-        List<String> tableNames) {
+            String pluginName,
+            Map<String, String> requestParams,
+            String databaseName,
+            List<String> tableNames) {
         return getDataSourceChannel(pluginName)
-            .getTableFields(pluginName, requestParams, databaseName, tableNames);
+                .getTableFields(pluginName, requestParams, databaseName, tableNames);
     }
 }
diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/service/DataSourceService.java b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/service/DataSourceService.java
index 9ab18325..5e52c077 100644
--- a/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/service/DataSourceService.java
+++ b/seatunnel-datasource/seatunnel-datasource-client/src/main/java/org/apache/seatunnel/datasource/service/DataSourceService.java
@@ -67,18 +67,18 @@ public interface DataSourceService {
     /**
      * get data source table names by database name
      *
-     * @param pluginName    plugin name
-     * @param databaseName  database name
+     * @param pluginName plugin name
+     * @param databaseName database name
      * @param requestParams connection params
      * @return table names
      */
     List<String> getTables(
-        String pluginName, String databaseName, Map<String, String> requestParams);
+            String pluginName, String databaseName, Map<String, String> requestParams);
 
     /**
      * get data source database names
      *
-     * @param pluginName    plugin name
+     * @param pluginName plugin name
      * @param requestParams connection params
      * @return database names
      */
@@ -87,30 +87,30 @@ public interface DataSourceService {
     /**
      * get data source table fields
      *
-     * @param pluginName    plugin name
+     * @param pluginName plugin name
      * @param requestParams connection params
-     * @param databaseName  database name
-     * @param tableName     table name
+     * @param databaseName database name
+     * @param tableName table name
      * @return table fields
      */
     List<TableField> getTableFields(
-        String pluginName,
-        Map<String, String> requestParams,
-        String databaseName,
-        String tableName);
+            String pluginName,
+            Map<String, String> requestParams,
+            String databaseName,
+            String tableName);
 
     /**
      * get data source table fields
      *
-     * @param pluginName    plugin name
+     * @param pluginName plugin name
      * @param requestParams connection params
-     * @param databaseName  database name
-     * @param tableNames    table names
+     * @param databaseName database name
+     * @param tableNames table names
      * @return table fields
      */
     Map<String, List<TableField>> getTableFields(
-        String pluginName,
-        Map<String, String> requestParams,
-        String databaseName,
-        List<String> tableNames);
+            String pluginName,
+            Map<String, String> requestParams,
+            String databaseName,
+            List<String> tableNames);
 }
diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/test/java/com/whaleops/datasource/s3/S3DatasourceChannelTest.java b/seatunnel-datasource/seatunnel-datasource-client/src/test/java/com/whaleops/datasource/s3/S3DatasourceChannelTest.java
index ece18206..50c3d2d5 100644
--- a/seatunnel-datasource/seatunnel-datasource-client/src/test/java/com/whaleops/datasource/s3/S3DatasourceChannelTest.java
+++ b/seatunnel-datasource/seatunnel-datasource-client/src/test/java/com/whaleops/datasource/s3/S3DatasourceChannelTest.java
@@ -19,11 +19,12 @@ package com.whaleops.datasource.s3;
 
 import org.apache.seatunnel.datasource.plugin.s3.S3DatasourceChannel;
 
-import com.google.common.collect.ImmutableMap;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.api.Test;
 
+import com.google.common.collect.ImmutableMap;
+
 import java.util.Map;
 
 @Disabled
@@ -33,23 +34,23 @@ class S3DatasourceChannelTest {
     @Test
     void checkDataSourceConnectivity() {
         Assertions.assertDoesNotThrow(
-            () -> {
-                S3_DATASOURCE_CHANNEL.checkDataSourceConnectivity("S3", createRequestParams());
-            });
+                () -> {
+                    S3_DATASOURCE_CHANNEL.checkDataSourceConnectivity("S3", createRequestParams());
+                });
     }
 
     private Map<String, String> createRequestParams() {
         Map<String, String> requestParams =
-            new ImmutableMap.Builder<String, String>()
-                .put("bucket", "s3a://poc-kuke")
-                .put("fs.s3a.endpoint", "s3.cn-north-1.amazonaws.com.cn")
-                .put(
-                    "fs.s3a.aws.credentials.provider",
-                    "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider")
-                .put("access_key", "AKIAYYUV5DMXADXRBGTA")
-                .put("secret_key", "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")
-                .put("hadoop_s3_properties", "")
-                .build();
+                new ImmutableMap.Builder<String, String>()
+                        .put("bucket", "s3a://poc-kuke")
+                        .put("fs.s3a.endpoint", "s3.cn-north-1.amazonaws.com.cn")
+                        .put(
+                                "fs.s3a.aws.credentials.provider",
+                                "org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider")
+                        .put("access_key", "AKIAYYUV5DMXADXRBGTA")
+                        .put("secret_key", "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")
+                        .put("hadoop_s3_properties", "")
+                        .build();
         return requestParams;
     }
 }
diff --git a/seatunnel-datasource/seatunnel-datasource-client/src/test/java/org/apache/seatunnel/datasource/DataSourceClientTest.java b/seatunnel-datasource/seatunnel-datasource-client/src/test/java/org/apache/seatunnel/datasource/DataSourceClientTest.java
index e2877e88..1c45fe7e 100644
--- a/seatunnel-datasource/seatunnel-datasource-client/src/test/java/org/apache/seatunnel/datasource/DataSourceClientTest.java
+++ b/seatunnel-datasource/seatunnel-datasource-client/src/test/java/org/apache/seatunnel/datasource/DataSourceClientTest.java
@@ -18,6 +18,7 @@
 package org.apache.seatunnel.datasource;
 
 import org.apache.commons.lang3.StringUtils;
+
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 
@@ -27,22 +28,22 @@ class DataSourceClientTest {
     @Test
     public void listAllDataSources() {
         Assertions.assertTrue(
-            DATA_SOURCE_CLIENT.listAllDataSources().stream()
-                .anyMatch(
-                    dataSourcePluginInfo ->
-                        StringUtils.equalsAnyIgnoreCase(
-                            dataSourcePluginInfo.getName(), "jdbc-mysql")));
+                DATA_SOURCE_CLIENT.listAllDataSources().stream()
+                        .anyMatch(
+                                dataSourcePluginInfo ->
+                                        StringUtils.equalsAnyIgnoreCase(
+                                                dataSourcePluginInfo.getName(), "jdbc-mysql")));
         Assertions.assertTrue(
-            DATA_SOURCE_CLIENT.listAllDataSources().stream()
-                .anyMatch(
-                    dataSourcePluginInfo ->
-                        StringUtils.equalsAnyIgnoreCase(
-                            dataSourcePluginInfo.getName(), "kafka")));
+                DATA_SOURCE_CLIENT.listAllDataSources().stream()
+                        .anyMatch(
+                                dataSourcePluginInfo ->
+                                        StringUtils.equalsAnyIgnoreCase(
+                                                dataSourcePluginInfo.getName(), "kafka")));
         Assertions.assertTrue(
-            DATA_SOURCE_CLIENT.listAllDataSources().stream()
-                .anyMatch(
-                    dataSourcePluginInfo ->
-                        StringUtils.equalsAnyIgnoreCase(
-                            dataSourcePluginInfo.getName(), "elasticsearch")));
+                DATA_SOURCE_CLIENT.listAllDataSources().stream()
+                        .anyMatch(
+                                dataSourcePluginInfo ->
+                                        StringUtils.equalsAnyIgnoreCase(
+                                                dataSourcePluginInfo.getName(), "elasticsearch")));
     }
 }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceChannel.java
index 17495487..6a4d9243 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceChannel.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceChannel.java
@@ -17,16 +17,17 @@
 
 package org.apache.seatunnel.datasource.plugin.elasticsearch;
 
+import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory;
+
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
 import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 import org.apache.seatunnel.datasource.plugin.elasticsearch.client.EsRestClient;
 
-import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory;
+import org.apache.commons.lang3.StringUtils;
 
 import lombok.NonNull;
-import org.apache.commons.lang3.StringUtils;
 
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -54,47 +55,47 @@ public class ElasticSearchDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getTables(
-        @NonNull String pluginName, Map<String, String> requestParams, String database) {
+            @NonNull String pluginName, Map<String, String> requestParams, String database) {
         databaseCheck(database);
         try (EsRestClient client =
-                 EsRestClient.createInstance(ConfigFactory.parseMap(requestParams))) {
+                EsRestClient.createInstance(ConfigFactory.parseMap(requestParams))) {
             return client.listIndex();
         }
     }
 
     @Override
     public List<String> getDatabases(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         return DEFAULT_DATABASES;
     }
 
     @Override
     public boolean checkDataSourceConnectivity(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         try (EsRestClient client =
-                 EsRestClient.createInstance(ConfigFactory.parseMap(requestParams))) {
+                EsRestClient.createInstance(ConfigFactory.parseMap(requestParams))) {
             client.getClusterInfo();
             return true;
         } catch (Throwable e) {
             throw new DataSourcePluginException(
-                "check ElasticSearch connectivity failed, " + e.getMessage(), e);
+                    "check ElasticSearch connectivity failed, " + e.getMessage(), e);
         }
     }
 
     @Override
     public List<TableField> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull String table) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull String table) {
         databaseCheck(database);
         try (EsRestClient client =
-                 EsRestClient.createInstance(ConfigFactory.parseMap(requestParams))) {
+                EsRestClient.createInstance(ConfigFactory.parseMap(requestParams))) {
             Map<String, String> fieldTypeMapping = client.getFieldTypeMapping(table);
             List<TableField> fields = new ArrayList<>();
             fieldTypeMapping.forEach(
-                (fieldName, fieldType) ->
-                    fields.add(convertToTableField(fieldName, fieldType)));
+                    (fieldName, fieldType) ->
+                            fields.add(convertToTableField(fieldName, fieldType)));
             return fields;
         } catch (Exception ex) {
             throw new DataSourcePluginException("Get table fields failed", ex);
@@ -103,16 +104,16 @@ public class ElasticSearchDataSourceChannel implements DataSourceChannel {
 
     @Override
     public Map<String, List<TableField>> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull List<String> tables) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull List<String> tables) {
         databaseCheck(database);
         Map<String, List<TableField>> tableFields = new HashMap<>();
         tables.forEach(
-            table ->
-                tableFields.put(
-                    table, getTableFields(pluginName, requestParams, database, table)));
+                table ->
+                        tableFields.put(
+                                table, getTableFields(pluginName, requestParams, database, table)));
         return tableFields;
     }
 
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/client/EsRestClient.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/client/EsRestClient.java
index 1ef2e705..4de06bf4 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/client/EsRestClient.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/client/EsRestClient.java
@@ -17,15 +17,14 @@
 
 package org.apache.seatunnel.datasource.plugin.elasticsearch.client;
 
-import org.apache.seatunnel.common.utils.JsonUtils;
-import org.apache.seatunnel.datasource.plugin.elasticsearch.ElasticSearchOptionRule;
-
 import org.apache.seatunnel.shade.com.fasterxml.jackson.databind.JsonNode;
 import org.apache.seatunnel.shade.com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.seatunnel.shade.com.fasterxml.jackson.databind.node.ObjectNode;
 import org.apache.seatunnel.shade.com.typesafe.config.Config;
 
-import lombok.extern.slf4j.Slf4j;
+import org.apache.seatunnel.common.utils.JsonUtils;
+import org.apache.seatunnel.datasource.plugin.elasticsearch.ElasticSearchOptionRule;
+
 import org.apache.http.HttpHost;
 import org.apache.http.HttpStatus;
 import org.apache.http.auth.AuthScope;
@@ -36,11 +35,14 @@ import org.apache.http.conn.ssl.TrustAllStrategy;
 import org.apache.http.impl.client.BasicCredentialsProvider;
 import org.apache.http.ssl.SSLContexts;
 import org.apache.http.util.EntityUtils;
+
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.RestClient;
 import org.elasticsearch.client.RestClientBuilder;
 
+import lombok.extern.slf4j.Slf4j;
+
 import javax.net.ssl.SSLContext;
 
 import java.io.IOException;
@@ -69,18 +71,18 @@ public class EsRestClient implements AutoCloseable {
     public static EsRestClient createInstance(Config pluginConfig) {
         try {
             List<String> hosts =
-                OBJECT_MAPPER.readValue(
-                    pluginConfig.getString(ElasticSearchOptionRule.HOSTS.key()),
-                    List.class);
+                    OBJECT_MAPPER.readValue(
+                            pluginConfig.getString(ElasticSearchOptionRule.HOSTS.key()),
+                            List.class);
             Optional<String> username = Optional.empty();
             Optional<String> password = Optional.empty();
             if (pluginConfig.hasPath(ElasticSearchOptionRule.USERNAME.key())) {
                 username =
-                    Optional.of(pluginConfig.getString(ElasticSearchOptionRule.USERNAME.key()));
+                        Optional.of(pluginConfig.getString(ElasticSearchOptionRule.USERNAME.key()));
                 if (pluginConfig.hasPath(ElasticSearchOptionRule.PASSWORD.key())) {
                     password =
-                        Optional.of(
-                            pluginConfig.getString(ElasticSearchOptionRule.PASSWORD.key()));
+                            Optional.of(
+                                    pluginConfig.getString(ElasticSearchOptionRule.PASSWORD.key()));
                 }
             }
             Optional<String> keystorePath = Optional.empty();
@@ -88,141 +90,141 @@ public class EsRestClient implements AutoCloseable {
             Optional<String> truststorePath = Optional.empty();
             Optional<String> truststorePassword = Optional.empty();
             boolean tlsVerifyCertificate =
-                ElasticSearchOptionRule.TLS_VERIFY_CERTIFICATE.defaultValue();
+                    ElasticSearchOptionRule.TLS_VERIFY_CERTIFICATE.defaultValue();
             if (pluginConfig.hasPath(ElasticSearchOptionRule.TLS_VERIFY_CERTIFICATE.key())) {
                 tlsVerifyCertificate =
-                    pluginConfig.getBoolean(
-                        ElasticSearchOptionRule.TLS_VERIFY_CERTIFICATE.key());
+                        pluginConfig.getBoolean(
+                                ElasticSearchOptionRule.TLS_VERIFY_CERTIFICATE.key());
             }
             if (tlsVerifyCertificate) {
                 if (pluginConfig.hasPath(ElasticSearchOptionRule.TLS_KEY_STORE_PATH.key())) {
                     keystorePath =
-                        Optional.of(
-                            pluginConfig.getString(
-                                ElasticSearchOptionRule.TLS_KEY_STORE_PATH.key()));
+                            Optional.of(
+                                    pluginConfig.getString(
+                                            ElasticSearchOptionRule.TLS_KEY_STORE_PATH.key()));
                 }
                 if (pluginConfig.hasPath(ElasticSearchOptionRule.TLS_KEY_STORE_PASSWORD.key())) {
                     keystorePassword =
-                        Optional.of(
-                            pluginConfig.getString(
-                                ElasticSearchOptionRule.TLS_KEY_STORE_PASSWORD.key()));
+                            Optional.of(
+                                    pluginConfig.getString(
+                                            ElasticSearchOptionRule.TLS_KEY_STORE_PASSWORD.key()));
                 }
                 if (pluginConfig.hasPath(ElasticSearchOptionRule.TLS_TRUST_STORE_PATH.key())) {
                     truststorePath =
-                        Optional.of(
-                            pluginConfig.getString(
-                                ElasticSearchOptionRule.TLS_TRUST_STORE_PATH.key()));
+                            Optional.of(
+                                    pluginConfig.getString(
+                                            ElasticSearchOptionRule.TLS_TRUST_STORE_PATH.key()));
                 }
                 if (pluginConfig.hasPath(ElasticSearchOptionRule.TLS_TRUST_STORE_PASSWORD.key())) {
                     truststorePassword =
-                        Optional.of(
-                            pluginConfig.getString(
-                                ElasticSearchOptionRule.TLS_TRUST_STORE_PASSWORD
-                                    .key()));
+                            Optional.of(
+                                    pluginConfig.getString(
+                                            ElasticSearchOptionRule.TLS_TRUST_STORE_PASSWORD
+                                                    .key()));
                 }
             }
             boolean tlsVerifyHostnames = ElasticSearchOptionRule.TLS_VERIFY_HOSTNAME.defaultValue();
             if (pluginConfig.hasPath(ElasticSearchOptionRule.TLS_VERIFY_HOSTNAME.key())) {
                 tlsVerifyHostnames =
-                    pluginConfig.getBoolean(ElasticSearchOptionRule.TLS_VERIFY_HOSTNAME.key());
+                        pluginConfig.getBoolean(ElasticSearchOptionRule.TLS_VERIFY_HOSTNAME.key());
             }
             return createInstance(
-                hosts,
-                username,
-                password,
-                tlsVerifyCertificate,
-                tlsVerifyHostnames,
-                keystorePath,
-                keystorePassword,
-                truststorePath,
-                truststorePassword);
+                    hosts,
+                    username,
+                    password,
+                    tlsVerifyCertificate,
+                    tlsVerifyHostnames,
+                    keystorePath,
+                    keystorePassword,
+                    truststorePath,
+                    truststorePassword);
         } catch (Exception e) {
             throw new RuntimeException("Create EsRestClient failed", e);
         }
     }
 
     public static EsRestClient createInstance(
-        List<String> hosts,
-        Optional<String> username,
-        Optional<String> password,
-        boolean tlsVerifyCertificate,
-        boolean tlsVerifyHostnames,
-        Optional<String> keystorePath,
-        Optional<String> keystorePassword,
-        Optional<String> truststorePath,
-        Optional<String> truststorePassword) {
+            List<String> hosts,
+            Optional<String> username,
+            Optional<String> password,
+            boolean tlsVerifyCertificate,
+            boolean tlsVerifyHostnames,
+            Optional<String> keystorePath,
+            Optional<String> keystorePassword,
+            Optional<String> truststorePath,
+            Optional<String> truststorePassword) {
         RestClientBuilder restClientBuilder =
-            getRestClientBuilder(
-                hosts,
-                username,
-                password,
-                tlsVerifyCertificate,
-                tlsVerifyHostnames,
-                keystorePath,
-                keystorePassword,
-                truststorePath,
-                truststorePassword);
+                getRestClientBuilder(
+                        hosts,
+                        username,
+                        password,
+                        tlsVerifyCertificate,
+                        tlsVerifyHostnames,
+                        keystorePath,
+                        keystorePassword,
+                        truststorePath,
+                        truststorePassword);
         return new EsRestClient(restClientBuilder.build());
     }
 
     private static RestClientBuilder getRestClientBuilder(
-        List<String> hosts,
-        Optional<String> username,
-        Optional<String> password,
-        boolean tlsVerifyCertificate,
-        boolean tlsVerifyHostnames,
-        Optional<String> keystorePath,
-        Optional<String> keystorePassword,
-        Optional<String> truststorePath,
-        Optional<String> truststorePassword) {
+            List<String> hosts,
+            Optional<String> username,
+            Optional<String> password,
+            boolean tlsVerifyCertificate,
+            boolean tlsVerifyHostnames,
+            Optional<String> keystorePath,
+            Optional<String> keystorePassword,
+            Optional<String> truststorePath,
+            Optional<String> truststorePassword) {
         HttpHost[] httpHosts = new HttpHost[hosts.size()];
         for (int i = 0; i < hosts.size(); i++) {
             httpHosts[i] = HttpHost.create(hosts.get(i));
         }
 
         RestClientBuilder restClientBuilder =
-            RestClient.builder(httpHosts)
-                .setRequestConfigCallback(
-                    requestConfigBuilder ->
-                        requestConfigBuilder
-                            .setConnectionRequestTimeout(
-                                CONNECTION_REQUEST_TIMEOUT)
-                            .setSocketTimeout(SOCKET_TIMEOUT));
+                RestClient.builder(httpHosts)
+                        .setRequestConfigCallback(
+                                requestConfigBuilder ->
+                                        requestConfigBuilder
+                                                .setConnectionRequestTimeout(
+                                                        CONNECTION_REQUEST_TIMEOUT)
+                                                .setSocketTimeout(SOCKET_TIMEOUT));
 
         restClientBuilder.setHttpClientConfigCallback(
-            httpClientBuilder -> {
-                if (username.isPresent()) {
-                    CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
-                    credentialsProvider.setCredentials(
-                        AuthScope.ANY,
-                        new UsernamePasswordCredentials(username.get(), password.get()));
-                    httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
-                }
-
-                try {
-                    if (tlsVerifyCertificate) {
-                        Optional<SSLContext> sslContext =
-                            SSLUtils.buildSSLContext(
-                                keystorePath,
-                                keystorePassword,
-                                truststorePath,
-                                truststorePassword);
-                        sslContext.ifPresent(e -> httpClientBuilder.setSSLContext(e));
-                    } else {
-                        SSLContext sslContext =
-                            SSLContexts.custom()
-                                .loadTrustMaterial(new TrustAllStrategy())
-                                .build();
-                        httpClientBuilder.setSSLContext(sslContext);
+                httpClientBuilder -> {
+                    if (username.isPresent()) {
+                        CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
+                        credentialsProvider.setCredentials(
+                                AuthScope.ANY,
+                                new UsernamePasswordCredentials(username.get(), password.get()));
+                        httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
                     }
-                    if (!tlsVerifyHostnames) {
-                        httpClientBuilder.setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE);
+
+                    try {
+                        if (tlsVerifyCertificate) {
+                            Optional<SSLContext> sslContext =
+                                    SSLUtils.buildSSLContext(
+                                            keystorePath,
+                                            keystorePassword,
+                                            truststorePath,
+                                            truststorePassword);
+                            sslContext.ifPresent(e -> httpClientBuilder.setSSLContext(e));
+                        } else {
+                            SSLContext sslContext =
+                                    SSLContexts.custom()
+                                            .loadTrustMaterial(new TrustAllStrategy())
+                                            .build();
+                            httpClientBuilder.setSSLContext(sslContext);
+                        }
+                        if (!tlsVerifyHostnames) {
+                            httpClientBuilder.setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE);
+                        }
+                    } catch (Exception e) {
+                        throw new RuntimeException(e);
                     }
-                } catch (Exception e) {
-                    throw new RuntimeException(e);
-                }
-                return httpClientBuilder;
-            });
+                    return httpClientBuilder;
+                });
         return restClientBuilder;
     }
 
@@ -235,12 +237,12 @@ public class EsRestClient implements AutoCloseable {
             JsonNode jsonNode = objectMapper.readTree(result);
             JsonNode versionNode = jsonNode.get("version");
             return ElasticsearchClusterInfo.builder()
-                .clusterVersion(versionNode.get("number").asText())
-                .distribution(
-                    Optional.ofNullable(versionNode.get("distribution"))
-                        .map(JsonNode::asText)
-                        .orElse(null))
-                .build();
+                    .clusterVersion(versionNode.get("number").asText())
+                    .distribution(
+                            Optional.ofNullable(versionNode.get("distribution"))
+                                    .map(JsonNode::asText)
+                                    .orElse(null))
+                    .build();
         } catch (IOException e) {
             throw new ResponseException("fail to get elasticsearch version.", e);
         }
@@ -265,13 +267,13 @@ public class EsRestClient implements AutoCloseable {
             if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
                 String entity = EntityUtils.toString(response.getEntity());
                 return JsonUtils.toList(entity, Map.class).stream()
-                    .map(map -> map.get("index").toString())
-                    .collect(Collectors.toList());
+                        .map(map -> map.get("index").toString())
+                        .collect(Collectors.toList());
             } else {
                 throw new ResponseException(
-                    String.format(
-                        "GET %s response status code=%d",
-                        endpoint, response.getStatusLine().getStatusCode()));
+                        String.format(
+                                "GET %s response status code=%d",
+                                endpoint, response.getStatusLine().getStatusCode()));
             }
         } catch (IOException ex) {
             throw new ResponseException(ex);
@@ -289,9 +291,9 @@ public class EsRestClient implements AutoCloseable {
             // todo: if the index doesn't exist, the response status code is 200?
             if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK) {
                 throw new ResponseException(
-                    String.format(
-                        "DELETE %s response status code=%d",
-                        endpoint, response.getStatusLine().getStatusCode()));
+                        String.format(
+                                "DELETE %s response status code=%d",
+                                endpoint, response.getStatusLine().getStatusCode()));
             }
         } catch (IOException ex) {
             throw new ResponseException(ex);
@@ -315,9 +317,9 @@ public class EsRestClient implements AutoCloseable {
             }
             if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK) {
                 throw new ResponseException(
-                    String.format(
-                        "GET %s response status code=%d",
-                        endpoint, response.getStatusLine().getStatusCode()));
+                        String.format(
+                                "GET %s response status code=%d",
+                                endpoint, response.getStatusLine().getStatusCode()));
             }
             String entity = EntityUtils.toString(response.getEntity());
             log.info(String.format("GET %s response=%s", endpoint, entity));
@@ -359,9 +361,9 @@ public class EsRestClient implements AutoCloseable {
                     mapping.put(field, type);
                 } else {
                     log.warn(
-                        String.format(
-                            "fail to get elasticsearch field %s mapping type,so give a default type text",
-                            field));
+                            String.format(
+                                    "fail to get elasticsearch field %s mapping type,so give a default type text",
+                                    field));
                     mapping.put(field, "text");
                 }
             }
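
(A minimal usage sketch, not part of the patch above: it shows how the EsRestClient API touched by these hunks can be driven from a typesafe Config. The ConfigFactory entry point under the shaded package and the localhost endpoint are assumptions; the option keys, the createInstance(Config) signature, and the AutoCloseable contract come from the code shown above.)

import org.apache.seatunnel.shade.com.typesafe.config.Config;
import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory;

import org.apache.seatunnel.datasource.plugin.elasticsearch.ElasticSearchOptionRule;
import org.apache.seatunnel.datasource.plugin.elasticsearch.client.EsRestClient;

import java.util.HashMap;
import java.util.Map;

public class EsRestClientUsageSketch {
    public static void main(String[] args) throws Exception {
        Map<String, String> options = new HashMap<>();
        // HOSTS is parsed back from a JSON array string, mirroring the test fixture further below
        options.put(ElasticSearchOptionRule.HOSTS.key(), "[\"http://localhost:9200\"]");
        // ConfigFactory under the shaded package is assumed to be available on the classpath
        Config pluginConfig = ConfigFactory.parseMap(options);
        // createInstance wraps any failure in a RuntimeException; the client is AutoCloseable
        try (EsRestClient client = EsRestClient.createInstance(pluginConfig)) {
            // query cluster info / indices here
        }
    }
}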
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/client/SSLUtils.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/client/SSLUtils.java
index 9c10cd3b..8f3caa20 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/client/SSLUtils.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/main/java/org/apache/seatunnel/datasource/plugin/elasticsearch/client/SSLUtils.java
@@ -17,8 +17,6 @@
 
 package org.apache.seatunnel.datasource.plugin.elasticsearch.client;
 
-import static java.util.Collections.list;
-
 import io.airlift.security.pem.PemReader;
 
 import javax.net.ssl.KeyManager;
@@ -43,29 +41,31 @@ import java.util.Arrays;
 import java.util.List;
 import java.util.Optional;
 
+import static java.util.Collections.list;
+
 @SuppressWarnings("MagicNumber")
 public final class SSLUtils {
 
     public static Optional<SSLContext> buildSSLContext(
-        Optional<String> keyStorePath,
-        Optional<String> keyStorePassword,
-        Optional<String> trustStorePath,
-        Optional<String> trustStorePassword)
-        throws GeneralSecurityException, IOException {
+            Optional<String> keyStorePath,
+            Optional<String> keyStorePassword,
+            Optional<String> trustStorePath,
+            Optional<String> trustStorePassword)
+            throws GeneralSecurityException, IOException {
         if (!keyStorePath.isPresent() && !trustStorePath.isPresent()) {
             return Optional.empty();
         }
         return Optional.of(
-            createSSLContext(
-                keyStorePath, keyStorePassword, trustStorePath, trustStorePassword));
+                createSSLContext(
+                        keyStorePath, keyStorePassword, trustStorePath, trustStorePassword));
     }
 
     private static SSLContext createSSLContext(
-        Optional<String> keyStorePath,
-        Optional<String> keyStorePassword,
-        Optional<String> trustStorePath,
-        Optional<String> trustStorePassword)
-        throws GeneralSecurityException, IOException {
+            Optional<String> keyStorePath,
+            Optional<String> keyStorePassword,
+            Optional<String> trustStorePath,
+            Optional<String> trustStorePassword)
+            throws GeneralSecurityException, IOException {
         // load KeyStore if configured and get KeyManagers
         KeyStore keyStore = null;
         KeyManager[] keyManagers = null;
@@ -88,7 +88,7 @@ public final class SSLUtils {
             }
             validateCertificates(keyStore);
             KeyManagerFactory keyManagerFactory =
-                KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
+                    KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
             keyManagerFactory.init(keyStore, keyManagerPassword);
             keyManagers = keyManagerFactory.getKeyManagers();
         }
@@ -102,14 +102,14 @@ public final class SSLUtils {
 
         // create TrustManagerFactory
         TrustManagerFactory trustManagerFactory =
-            TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
+                TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
         trustManagerFactory.init(trustStore);
 
         // get X509TrustManager
         TrustManager[] trustManagers = trustManagerFactory.getTrustManagers();
         if (trustManagers.length != 1 || !(trustManagers[0] instanceof X509TrustManager)) {
             throw new RuntimeException(
-                "Unexpected default trust managers:" + Arrays.toString(trustManagers));
+                    "Unexpected default trust managers:" + Arrays.toString(trustManagers));
         }
         // create SSLContext
         SSLContext result = SSLContext.getInstance("SSL");
@@ -118,7 +118,7 @@ public final class SSLUtils {
     }
 
     private static KeyStore loadTrustStore(File trustStorePath, Optional<String> trustStorePassword)
-        throws IOException, GeneralSecurityException {
+            throws IOException, GeneralSecurityException {
         KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
         try {
             // attempt to read the trust store as a PEM file
@@ -155,10 +155,10 @@ public final class SSLUtils {
                 ((X509Certificate) certificate).checkValidity();
             } catch (CertificateExpiredException e) {
                 throw new CertificateExpiredException(
-                    "KeyStore certificate is expired: " + e.getMessage());
+                        "KeyStore certificate is expired: " + e.getMessage());
             } catch (CertificateNotYetValidException e) {
                 throw new CertificateNotYetValidException(
-                    "KeyStore certificate is not yet valid: " + e.getMessage());
+                        "KeyStore certificate is not yet valid: " + e.getMessage());
             }
         }
     }
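
(A small sketch, not part of the patch, of how SSLUtils.buildSSLContext behaves per the code above: with neither store path set it returns Optional.empty(), otherwise it builds an SSLContext from the configured stores. The truststore path and password below are placeholders.)

import org.apache.seatunnel.datasource.plugin.elasticsearch.client.SSLUtils;

import javax.net.ssl.SSLContext;

import java.util.Optional;

public class SslUtilsUsageSketch {
    public static void main(String[] args) throws Exception {
        // No keystore and no truststore configured -> empty Optional
        Optional<SSLContext> none =
                SSLUtils.buildSSLContext(
                        Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty());
        System.out.println(none.isPresent()); // false

        // Truststore only: an SSLContext is created from it (placeholder path/password)
        Optional<SSLContext> fromTrustStore =
                SSLUtils.buildSSLContext(
                        Optional.empty(),
                        Optional.empty(),
                        Optional.of("/path/to/truststore.jks"),
                        Optional.of("changeit"));
        System.out.println(fromTrustStore.isPresent()); // true if the store loads
    }
}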
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/test/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceChannelTest.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/test/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceChannelTest.java
index b8b25d84..b888a1ff 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/test/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceChannelTest.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/test/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceChannelTest.java
@@ -19,14 +19,15 @@ package org.apache.seatunnel.datasource.plugin.elasticsearch;
 
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.api.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+
 import java.util.List;
 import java.util.Map;
 
@@ -34,19 +35,19 @@ import java.util.Map;
 @Disabled
 class ElasticSearchDataSourceChannelTest {
     private static final Logger LOGGER =
-        LoggerFactory.getLogger(ElasticSearchDataSourceChannelTest.class);
+            LoggerFactory.getLogger(ElasticSearchDataSourceChannelTest.class);
 
     private static final ElasticSearchDataSourceChannel ELASTIC_SEARCH_DATA_SOURCE_CHANNEL =
-        new ElasticSearchDataSourceChannel();
+            new ElasticSearchDataSourceChannel();
 
     private static final String PLUGIN_NAME = "ElasticSearch";
 
     private static final String DATABASE = "Default";
 
     private static final Map<String, String> REQUEST_MAP =
-        new ImmutableMap.Builder<String, String>()
-            .put(ElasticSearchOptionRule.HOSTS.key(), "[\"http://localhost:9200\"]")
-            .build();
+            new ImmutableMap.Builder<String, String>()
+                    .put(ElasticSearchOptionRule.HOSTS.key(), "[\"http://localhost:9200\"]")
+                    .build();
 
     @Test
     void canAbleGetSchema() {
@@ -55,60 +56,61 @@ class ElasticSearchDataSourceChannelTest {
 
     @Test
     void getDataSourceOptions() {
-        Assertions.assertNotNull(ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getDataSourceOptions(PLUGIN_NAME));
+        Assertions.assertNotNull(
+                ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getDataSourceOptions(PLUGIN_NAME));
     }
 
     @Test
     void getDatasourceMetadataFieldsByDataSourceName() {
         Assertions.assertNotNull(
-            ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getDatasourceMetadataFieldsByDataSourceName(
-                PLUGIN_NAME));
+                ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getDatasourceMetadataFieldsByDataSourceName(
+                        PLUGIN_NAME));
     }
 
     @Test
     void getTables() {
         Assertions.assertDoesNotThrow(
-            () -> {
-                List<String> tables =
-                    ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getTables(
-                        PLUGIN_NAME, REQUEST_MAP, DATABASE);
-                LOGGER.info("{}", tables);
-            });
+                () -> {
+                    List<String> tables =
+                            ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getTables(
+                                    PLUGIN_NAME, REQUEST_MAP, DATABASE);
+                    LOGGER.info("{}", tables);
+                });
     }
 
     @Test
     void getDatabases() {
         Assertions.assertLinesMatch(
-            Lists.newArrayList("default"),
-            ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getDatabases(PLUGIN_NAME, REQUEST_MAP));
+                Lists.newArrayList("default"),
+                ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getDatabases(PLUGIN_NAME, REQUEST_MAP));
     }
 
     @Test
     void checkDataSourceConnectivity() {
         Assertions.assertTrue(
-            ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.checkDataSourceConnectivity(
-                PLUGIN_NAME, REQUEST_MAP));
+                ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.checkDataSourceConnectivity(
+                        PLUGIN_NAME, REQUEST_MAP));
     }
 
     @Test
     void getTableFields() {
         Assertions.assertDoesNotThrow(
-            () -> {
-                List<TableField> tableFields =
-                    ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getTableFields(
-                        PLUGIN_NAME, REQUEST_MAP, DATABASE, "");
-                LOGGER.info("{}", tableFields);
-            });
+                () -> {
+                    List<TableField> tableFields =
+                            ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getTableFields(
+                                    PLUGIN_NAME, REQUEST_MAP, DATABASE, "");
+                    LOGGER.info("{}", tableFields);
+                });
     }
 
     @Test
     void testGetTableFields() {
         Assertions.assertDoesNotThrow(
-            () -> {
-                Map<String, List<TableField>> tableFields =
-                    ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getTableFields(
-                        PLUGIN_NAME, REQUEST_MAP, DATABASE, Lists.newArrayList(""));
-                LOGGER.info("{}", tableFields);
-            });
+                () -> {
+                    Map<String, List<TableField>> tableFields =
+                            ELASTIC_SEARCH_DATA_SOURCE_CHANNEL.getTableFields(
+                                    PLUGIN_NAME, REQUEST_MAP, DATABASE, Lists.newArrayList(""));
+                    LOGGER.info("{}", tableFields);
+                });
     }
 }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/test/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceFactoryTest.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/test/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceFactoryTest.java
index 85a49e77..0441d7a1 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/test/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceFactoryTest.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-elasticsearch/src/test/java/org/apache/seatunnel/datasource/plugin/elasticsearch/ElasticSearchDataSourceFactoryTest.java
@@ -23,12 +23,12 @@ import org.junit.jupiter.api.Test;
 class ElasticSearchDataSourceFactoryTest {
 
     private static final ElasticSearchDataSourceFactory ELASTIC_SEARCH_DATA_SOURCE_FACTORY =
-        new ElasticSearchDataSourceFactory();
+            new ElasticSearchDataSourceFactory();
 
     @Test
     void factoryIdentifier() {
         Assertions.assertEquals(
-            "ElasticSearch", ELASTIC_SEARCH_DATA_SOURCE_FACTORY.factoryIdentifier());
+                "ElasticSearch", ELASTIC_SEARCH_DATA_SOURCE_FACTORY.factoryIdentifier());
     }
 
     @Test
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseDataSourceConfig.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseDataSourceConfig.java
index 4862d41d..4c5b3b2e 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseDataSourceConfig.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseDataSourceConfig.java
@@ -29,31 +29,31 @@ public class ClickhouseDataSourceConfig {
     public static final String PLUGIN_NAME = "JDBC-ClickHouse";
 
     public static final DataSourcePluginInfo CLICKHOUSE_DATASOURCE_PLUGIN_INFO =
-        DataSourcePluginInfo.builder()
-            .name(PLUGIN_NAME)
-            .icon(PLUGIN_NAME)
-            .version("1.0.0")
-            .type(DatasourcePluginTypeEnum.DATABASE.getCode())
-            .supportVirtualTables(false)
-            .build();
+            DataSourcePluginInfo.builder()
+                    .name(PLUGIN_NAME)
+                    .icon(PLUGIN_NAME)
+                    .version("1.0.0")
+                    .type(DatasourcePluginTypeEnum.DATABASE.getCode())
+                    .supportVirtualTables(false)
+                    .build();
 
     public static final Set<String> CLICKHOUSE_SYSTEM_DATABASES =
-        Sets.newHashSet(
-            "system",
-            "default",
-            "information_schema",
-            "mysql",
-            "performance_schema",
-            "sys");
+            Sets.newHashSet(
+                    "system",
+                    "default",
+                    "information_schema",
+                    "mysql",
+                    "performance_schema",
+                    "sys");
 
     public static final OptionRule OPTION_RULE =
-        OptionRule.builder()
-            .required(ClickhouseOptionRule.URL, ClickhouseOptionRule.DRIVER)
-            .optional(ClickhouseOptionRule.USER, ClickhouseOptionRule.PASSWORD)
-            .build();
+            OptionRule.builder()
+                    .required(ClickhouseOptionRule.URL, ClickhouseOptionRule.DRIVER)
+                    .optional(ClickhouseOptionRule.USER, ClickhouseOptionRule.PASSWORD)
+                    .build();
 
     public static final OptionRule METADATA_RULE =
-        OptionRule.builder()
-            .required(ClickhouseOptionRule.DATABASE, ClickhouseOptionRule.TABLE)
-            .build();
+            OptionRule.builder()
+                    .required(ClickhouseOptionRule.DATABASE, ClickhouseOptionRule.TABLE)
+                    .build();
 }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseJdbcDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseJdbcDataSourceChannel.java
index 37141174..ef160afb 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseJdbcDataSourceChannel.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseJdbcDataSourceChannel.java
@@ -17,17 +17,16 @@
 
 package org.apache.seatunnel.datasource.plugin.clickhouse.jdbc;
 
-import static com.google.common.base.Preconditions.checkNotNull;
-
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
 import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils;
 
+import org.apache.commons.lang3.StringUtils;
+
 import lombok.NonNull;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang3.StringUtils;
 
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
@@ -39,6 +38,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
 @Slf4j
 public class ClickhouseJdbcDataSourceChannel implements DataSourceChannel {
 
@@ -54,13 +55,13 @@ public class ClickhouseJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getTables(
-        @NonNull String pluginName, Map<String, String> requestParams, String database) {
+            @NonNull String pluginName, Map<String, String> requestParams, String database) {
         List<String> tableNames = new ArrayList<>();
-        try (Connection connection = getConnection(requestParams);) {
+        try (Connection connection = getConnection(requestParams); ) {
             ResultSet resultSet =
-                connection
-                    .getMetaData()
-                    .getTables(database, null, null, new String[]{"TABLE"});
+                    connection
+                            .getMetaData()
+                            .getTables(database, null, null, new String[] {"TABLE"});
             while (resultSet.next()) {
                 String tableName = resultSet.getString("TABLE_NAME");
                 if (StringUtils.isNotBlank(tableName)) {
@@ -75,17 +76,17 @@ public class ClickhouseJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getDatabases(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         List<String> dbNames = new ArrayList<>();
         try (Connection connection = getConnection(requestParams);
-             Statement statement = connection.createStatement();
-             ResultSet re = statement.executeQuery("SHOW DATABASES;")) {
+                Statement statement = connection.createStatement();
+                ResultSet re = statement.executeQuery("SHOW DATABASES;")) {
             // filter system databases
             while (re.next()) {
                 String dbName = re.getString("name");
                 if (StringUtils.isNotBlank(dbName)
-                    && !ClickhouseDataSourceConfig.CLICKHOUSE_SYSTEM_DATABASES.contains(
-                    dbName)) {
+                        && !ClickhouseDataSourceConfig.CLICKHOUSE_SYSTEM_DATABASES.contains(
+                                dbName)) {
                     dbNames.add(dbName);
                 }
             }
@@ -97,7 +98,7 @@ public class ClickhouseJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public boolean checkDataSourceConnectivity(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         try (Connection ignored = getConnection(requestParams)) {
             return true;
         } catch (Exception e) {
@@ -107,10 +108,10 @@ public class ClickhouseJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<TableField> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull String table) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull String table) {
         List<TableField> tableFields = new ArrayList<>();
         try (Connection connection = getConnection(requestParams, database)) {
             DatabaseMetaData metaData = connection.getMetaData();
@@ -139,15 +140,15 @@ public class ClickhouseJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public Map<String, List<TableField>> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull List<String> tables) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull List<String> tables) {
         return null;
     }
 
     private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName)
-        throws SQLException {
+            throws SQLException {
         ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName);
         while (primaryKeysInfo.next()) {
             return primaryKeysInfo.getString("COLUMN_NAME");
@@ -156,17 +157,17 @@ public class ClickhouseJdbcDataSourceChannel implements DataSourceChannel {
     }
 
     private Connection getConnection(Map<String, String> requestParams)
-        throws SQLException, ClassNotFoundException {
+            throws SQLException, ClassNotFoundException {
         return getConnection(requestParams, null);
     }
 
     private Connection getConnection(Map<String, String> requestParams, String databaseName)
-        throws SQLException, ClassNotFoundException {
+            throws SQLException, ClassNotFoundException {
         checkNotNull(requestParams.get(ClickhouseOptionRule.DRIVER.key()));
         checkNotNull(requestParams.get(ClickhouseOptionRule.URL.key()), "Jdbc url cannot be null");
         String url =
-            JdbcUtils.replaceDatabase(
-                requestParams.get(ClickhouseOptionRule.URL.key()), databaseName);
+                JdbcUtils.replaceDatabase(
+                        requestParams.get(ClickhouseOptionRule.URL.key()), databaseName);
         if (requestParams.containsKey(ClickhouseOptionRule.USER.key())) {
             String username = requestParams.get(ClickhouseOptionRule.USER.key());
             String password = requestParams.get(ClickhouseOptionRule.PASSWORD.key());
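
(A usage sketch, not part of the patch, for the ClickhouseJdbcDataSourceChannel reformatted above. The JDBC URL is a placeholder, and the exact string expected for the driver option (enum name vs. fully qualified driver class) is an assumption; URL and DRIVER are the required options per ClickhouseDataSourceConfig.OPTION_RULE.)

import org.apache.seatunnel.datasource.plugin.clickhouse.jdbc.ClickhouseDataSourceConfig;
import org.apache.seatunnel.datasource.plugin.clickhouse.jdbc.ClickhouseJdbcDataSourceChannel;
import org.apache.seatunnel.datasource.plugin.clickhouse.jdbc.ClickhouseOptionRule;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ClickhouseChannelUsageSketch {
    public static void main(String[] args) {
        Map<String, String> requestParams = new HashMap<>();
        requestParams.put(
                ClickhouseOptionRule.URL.key(), "jdbc:clickhouse://localhost:8123/default");
        // assumed form of the driver option value; the DriverType enum above carries this class name
        requestParams.put(
                ClickhouseOptionRule.DRIVER.key(), "ru.yandex.clickhouse.ClickHouseDriver");
        ClickhouseJdbcDataSourceChannel channel = new ClickhouseJdbcDataSourceChannel();
        String pluginName = ClickhouseDataSourceConfig.PLUGIN_NAME; // "JDBC-ClickHouse"
        if (channel.checkDataSourceConnectivity(pluginName, requestParams)) {
            List<String> databases = channel.getDatabases(pluginName, requestParams);
            databases.forEach(System.out::println);
        }
    }
}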
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseOptionRule.java
index 3beff605..f02e6030 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseOptionRule.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-clickhouse/src/main/java/org/apache/seatunnel/datasource/plugin/clickhouse/jdbc/ClickhouseOptionRule.java
@@ -23,30 +23,30 @@ import org.apache.seatunnel.api.configuration.Options;
 public class ClickhouseOptionRule {
 
     public static final Option<String> URL =
-        Options.key("url")
-            .stringType()
-            .noDefaultValue()
-            .withDescription(
-                "jdbc url, eg:"
-                    + "jdbc:clickhouse://localhost:8123/test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8");
+            Options.key("url")
+                    .stringType()
+                    .noDefaultValue()
+                    .withDescription(
+                            "jdbc url, eg:"
+                                    + "jdbc:clickhouse://localhost:8123/test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8");
 
     public static final Option<String> USER =
-        Options.key("user").stringType().noDefaultValue().withDescription("jdbc user");
+            Options.key("user").stringType().noDefaultValue().withDescription("jdbc user");
 
     public static final Option<String> PASSWORD =
-        Options.key("password").stringType().noDefaultValue().withDescription("jdbc password");
+            Options.key("password").stringType().noDefaultValue().withDescription("jdbc password");
 
     public static final Option<String> DATABASE =
-        Options.key("database").stringType().noDefaultValue().withDescription("jdbc database");
+            Options.key("database").stringType().noDefaultValue().withDescription("jdbc database");
 
     public static final Option<String> TABLE =
-        Options.key("table").stringType().noDefaultValue().withDescription("jdbc table");
+            Options.key("table").stringType().noDefaultValue().withDescription("jdbc table");
 
     public static final Option<DriverType> DRIVER =
-        Options.key("driver")
-            .enumType(DriverType.class)
-            .noDefaultValue()
-            .withDescription("driver");
+            Options.key("driver")
+                    .enumType(DriverType.class)
+                    .noDefaultValue()
+                    .withDescription("driver");
 
     public enum DriverType {
         ClickHouse("ru.yandex.clickhouse.ClickHouseDriver");
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceChannel.java
index 32543c42..62559037 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceChannel.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-hive/src/main/java/org/apache/seatunnel/datasource/plugin/hive/jdbc/HiveJdbcDataSourceChannel.java
@@ -22,11 +22,12 @@ import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
 import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 
-import lombok.NonNull;
-import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.collections4.MapUtils;
 import org.apache.commons.lang3.StringUtils;
 
+import lombok.NonNull;
+import lombok.extern.slf4j.Slf4j;
+
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.Socket;
@@ -56,13 +57,13 @@ public class HiveJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getTables(
-        @NonNull String pluginName, Map<String, String> requestParams, String database) {
+            @NonNull String pluginName, Map<String, String> requestParams, String database) {
         return getTables(pluginName, requestParams, database);
     }
 
     @Override
     public List<String> getDatabases(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         try {
             return getDataBaseNames(pluginName, requestParams);
         } catch (SQLException e) {
@@ -73,25 +74,25 @@ public class HiveJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public boolean checkDataSourceConnectivity(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         return checkJdbcConnectivity(requestParams);
     }
 
     @Override
     public List<TableField> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull String table) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull String table) {
         return getTableFields(requestParams, database, table);
     }
 
     @Override
     public Map<String, List<TableField>> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull List<String> tables) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull List<String> tables) {
         Map<String, List<TableField>> tableFields = new HashMap<>(tables.size());
         for (String table : tables) {
             tableFields.put(table, getTableFields(requestParams, database, table));
@@ -104,24 +105,24 @@ public class HiveJdbcDataSourceChannel implements DataSourceChannel {
             return true;
         } catch (Exception e) {
             throw new DataSourcePluginException(
-                "check jdbc connectivity failed, " + e.getMessage(), e);
+                    "check jdbc connectivity failed, " + e.getMessage(), e);
         }
     }
 
     protected Connection init(Map<String, String> requestParams) throws SQLException {
         if (MapUtils.isEmpty(requestParams)) {
             throw new DataSourcePluginException(
-                "Hive jdbc request params is null, please check your config");
+                    "Hive jdbc request params is null, please check your config");
         }
         String url = requestParams.get(HiveJdbcOptionRule.URL.key());
         return DriverManager.getConnection(url);
     }
 
     protected List<String> getDataBaseNames(String pluginName, Map<String, String> requestParams)
-        throws SQLException {
+            throws SQLException {
         List<String> dbNames = new ArrayList<>();
         try (Connection connection = init(requestParams);
-             Statement statement = connection.createStatement();) {
+                Statement statement = connection.createStatement(); ) {
             ResultSet re = statement.executeQuery("SHOW DATABASES;");
             // filter system databases
             while (re.next()) {
@@ -136,9 +137,9 @@ public class HiveJdbcDataSourceChannel implements DataSourceChannel {
 
     protected List<String> getTableNames(Map<String, String> requestParams, String dbName) {
         List<String> tableNames = new ArrayList<>();
-        try (Connection connection = init(requestParams);) {
+        try (Connection connection = init(requestParams); ) {
             ResultSet resultSet =
-                connection.getMetaData().getTables(dbName, null, null, new String[]{"TABLE"});
+                    connection.getMetaData().getTables(dbName, null, null, new String[] {"TABLE"});
             while (resultSet.next()) {
                 String tableName = resultSet.getString("TABLE_NAME");
                 if (StringUtils.isNotBlank(tableName)) {
@@ -152,9 +153,9 @@ public class HiveJdbcDataSourceChannel implements DataSourceChannel {
     }
 
     protected List<TableField> getTableFields(
-        Map<String, String> requestParams, String dbName, String tableName) {
+            Map<String, String> requestParams, String dbName, String tableName) {
         List<TableField> tableFields = new ArrayList<>();
-        try (Connection connection = init(requestParams);) {
+        try (Connection connection = init(requestParams); ) {
             DatabaseMetaData metaData = connection.getMetaData();
             String primaryKey = getPrimaryKey(metaData, dbName, tableName);
             ResultSet resultSet = metaData.getColumns(dbName, null, tableName, null);
@@ -180,7 +181,7 @@ public class HiveJdbcDataSourceChannel implements DataSourceChannel {
     }
 
     private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName)
-        throws SQLException {
+            throws SQLException {
         ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName);
         while (primaryKeysInfo.next()) {
             return primaryKeysInfo.getString("COLUMN_NAME");
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlDataSourceConfig.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlDataSourceConfig.java
index 2e2de3e4..03d6d0cf 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlDataSourceConfig.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlDataSourceConfig.java
@@ -30,22 +30,22 @@ public class MysqlDataSourceConfig {
     public static final String PLUGIN_NAME = "JDBC-Mysql";
 
     public static final DataSourcePluginInfo MYSQL_DATASOURCE_PLUGIN_INFO =
-        DataSourcePluginInfo.builder()
-            .name(PLUGIN_NAME)
-            .icon(PLUGIN_NAME)
-            .version("1.0.0")
-            .type(DatasourcePluginTypeEnum.DATABASE.getCode())
-            .build();
+            DataSourcePluginInfo.builder()
+                    .name(PLUGIN_NAME)
+                    .icon(PLUGIN_NAME)
+                    .version("1.0.0")
+                    .type(DatasourcePluginTypeEnum.DATABASE.getCode())
+                    .build();
 
     public static final Set<String> MYSQL_SYSTEM_DATABASES =
-        Sets.newHashSet("information_schema", "mysql", "performance_schema", "sys");
+            Sets.newHashSet("information_schema", "mysql", "performance_schema", "sys");
 
     public static final OptionRule OPTION_RULE =
-        OptionRule.builder()
-            .required(MysqlOptionRule.URL, MysqlOptionRule.DRIVER)
-            .optional(MysqlOptionRule.USER, MysqlOptionRule.PASSWORD)
-            .build();
+            OptionRule.builder()
+                    .required(MysqlOptionRule.URL, MysqlOptionRule.DRIVER)
+                    .optional(MysqlOptionRule.USER, MysqlOptionRule.PASSWORD)
+                    .build();
 
     public static final OptionRule METADATA_RULE =
-        OptionRule.builder().required(MysqlOptionRule.DATABASE, MysqlOptionRule.TABLE).build();
+            OptionRule.builder().required(MysqlOptionRule.DATABASE, MysqlOptionRule.TABLE).build();
 }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlJdbcDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlJdbcDataSourceChannel.java
index 1fe420d5..99ab042b 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlJdbcDataSourceChannel.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlJdbcDataSourceChannel.java
@@ -17,17 +17,16 @@
 
 package org.apache.seatunnel.datasource.plugin.mysql.jdbc;
 
-import static com.google.common.base.Preconditions.checkNotNull;
-
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
 import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils;
 
-import lombok.NonNull;
 import org.apache.commons.lang3.StringUtils;
 
+import lombok.NonNull;
+
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.DriverManager;
@@ -40,6 +39,8 @@ import java.util.Map;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
 public class MysqlJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
@@ -54,13 +55,13 @@ public class MysqlJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getTables(
-        @NonNull String pluginName, Map<String, String> requestParams, String database) {
+            @NonNull String pluginName, Map<String, String> requestParams, String database) {
         List<String> tableNames = new ArrayList<>();
         try (Connection connection = getConnection(requestParams);
-             ResultSet resultSet =
-                 connection
-                     .getMetaData()
-                     .getTables(database, null, null, new String[]{"TABLE"})) {
+                ResultSet resultSet =
+                        connection
+                                .getMetaData()
+                                .getTables(database, null, null, new String[] {"TABLE"})) {
             while (resultSet.next()) {
                 String tableName = resultSet.getString("TABLE_NAME");
                 if (StringUtils.isNotBlank(tableName)) {
@@ -75,16 +76,16 @@ public class MysqlJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getDatabases(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         List<String> dbNames = new ArrayList<>();
         try (Connection connection = getConnection(requestParams);
-             PreparedStatement statement = connection.prepareStatement("SHOW DATABASES;");
-             ResultSet re = statement.executeQuery()) {
+                PreparedStatement statement = connection.prepareStatement("SHOW DATABASES;");
+                ResultSet re = statement.executeQuery()) {
             // filter system databases
             while (re.next()) {
                 String dbName = re.getString("database");
                 if (StringUtils.isNotBlank(dbName)
-                    && !MysqlDataSourceConfig.MYSQL_SYSTEM_DATABASES.contains(dbName)) {
+                        && !MysqlDataSourceConfig.MYSQL_SYSTEM_DATABASES.contains(dbName)) {
                     dbNames.add(dbName);
                 }
             }
@@ -96,7 +97,7 @@ public class MysqlJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public boolean checkDataSourceConnectivity(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         try (Connection ignored = getConnection(requestParams)) {
             return true;
         } catch (Exception e) {
@@ -106,10 +107,10 @@ public class MysqlJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<TableField> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull String table) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull String table) {
         List<TableField> tableFields = new ArrayList<>();
         try (Connection connection = getConnection(requestParams, database)) {
             DatabaseMetaData metaData = connection.getMetaData();
@@ -138,21 +139,21 @@ public class MysqlJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public Map<String, List<TableField>> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull List<String> tables) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull List<String> tables) {
         return tables.parallelStream()
-            .collect(
-                Collectors.toMap(
-                    Function.identity(),
-                    table ->
-                        getTableFields(
-                            pluginName, requestParams, database, table)));
+                .collect(
+                        Collectors.toMap(
+                                Function.identity(),
+                                table ->
+                                        getTableFields(
+                                                pluginName, requestParams, database, table)));
     }
 
     private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName)
-        throws SQLException {
+            throws SQLException {
         ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName);
         while (primaryKeysInfo.next()) {
             return primaryKeysInfo.getString("COLUMN_NAME");
@@ -161,17 +162,17 @@ public class MysqlJdbcDataSourceChannel implements DataSourceChannel {
     }
 
     private Connection getConnection(Map<String, String> requestParams)
-        throws SQLException, ClassNotFoundException {
+            throws SQLException, ClassNotFoundException {
         return getConnection(requestParams, null);
     }
 
     private Connection getConnection(Map<String, String> requestParams, String databaseName)
-        throws SQLException, ClassNotFoundException {
+            throws SQLException, ClassNotFoundException {
         checkNotNull(requestParams.get(MysqlOptionRule.DRIVER.key()));
         checkNotNull(requestParams.get(MysqlOptionRule.URL.key()), "Jdbc url cannot be null");
         String url =
-            JdbcUtils.replaceDatabase(
-                requestParams.get(MysqlOptionRule.URL.key()), databaseName);
+                JdbcUtils.replaceDatabase(
+                        requestParams.get(MysqlOptionRule.URL.key()), databaseName);
         if (requestParams.containsKey(MysqlOptionRule.USER.key())) {
             String username = requestParams.get(MysqlOptionRule.USER.key());
             String password = requestParams.get(MysqlOptionRule.PASSWORD.key());
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlOptionRule.java
index f666251f..e3469641 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlOptionRule.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-mysql/src/main/java/org/apache/seatunnel/datasource/plugin/mysql/jdbc/MysqlOptionRule.java
@@ -23,30 +23,30 @@ import org.apache.seatunnel.api.configuration.Options;
 public class MysqlOptionRule {
 
     public static final Option<String> URL =
-        Options.key("url")
-            .stringType()
-            .noDefaultValue()
-            .withDescription(
-                "jdbc url, eg:"
-                    + " jdbc:mysql://localhost:3306/test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8");
+            Options.key("url")
+                    .stringType()
+                    .noDefaultValue()
+                    .withDescription(
+                            "jdbc url, eg:"
+                                    + " jdbc:mysql://localhost:3306/test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8");
 
     public static final Option<String> USER =
-        Options.key("user").stringType().noDefaultValue().withDescription("jdbc user");
+            Options.key("user").stringType().noDefaultValue().withDescription("jdbc user");
 
     public static final Option<String> PASSWORD =
-        Options.key("password").stringType().noDefaultValue().withDescription("jdbc password");
+            Options.key("password").stringType().noDefaultValue().withDescription("jdbc password");
 
     public static final Option<String> DATABASE =
-        Options.key("database").stringType().noDefaultValue().withDescription("jdbc database");
+            Options.key("database").stringType().noDefaultValue().withDescription("jdbc database");
 
     public static final Option<String> TABLE =
-        Options.key("table").stringType().noDefaultValue().withDescription("jdbc table");
+            Options.key("table").stringType().noDefaultValue().withDescription("jdbc table");
 
     public static final Option<DriverType> DRIVER =
-        Options.key("driver")
-            .enumType(DriverType.class)
-            .defaultValue(DriverType.MYSQL)
-            .withDescription("driver");
+            Options.key("driver")
+                    .enumType(DriverType.class)
+                    .defaultValue(DriverType.MYSQL)
+                    .withDescription("driver");
 
     public enum DriverType {
         MYSQL("com.mysql.cj.jdbc.Driver"),
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleDataSourceChannel.java
index 91ccc661..87a04fcf 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleDataSourceChannel.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleDataSourceChannel.java
@@ -17,17 +17,16 @@
 
 package org.apache.seatunnel.datasource.plugin.oracle.jdbc;
 
-import static com.google.common.base.Preconditions.checkNotNull;
-
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
 import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils;
 
-import lombok.NonNull;
 import org.apache.commons.lang3.StringUtils;
 
+import lombok.NonNull;
+
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.DriverManager;
@@ -38,6 +37,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
 public class OracleDataSourceChannel implements DataSourceChannel {
 
     @Override
@@ -52,13 +53,13 @@ public class OracleDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getTables(
-        @NonNull String pluginName, Map<String, String> requestParams, String database) {
+            @NonNull String pluginName, Map<String, String> requestParams, String database) {
         List<String> tableNames = new ArrayList<>();
         try (Connection connection = getConnection(requestParams);
-             ResultSet resultSet =
-                 connection
-                     .getMetaData()
-                     .getTables(database, null, null, new String[]{"TABLE"});) {
+                ResultSet resultSet =
+                        connection
+                                .getMetaData()
+                                .getTables(database, null, null, new String[] {"TABLE"}); ) {
             while (resultSet.next()) {
                 String tableName = resultSet.getString("TABLE_NAME");
                 if (StringUtils.isNotBlank(tableName)) {
@@ -73,16 +74,16 @@ public class OracleDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getDatabases(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         List<String> dbNames = new ArrayList<>();
         try (Connection connection = getConnection(requestParams);
-             PreparedStatement statement = connection.prepareStatement("SHOW DATABASES;");
-             ResultSet re = statement.executeQuery()) {
+                PreparedStatement statement = connection.prepareStatement("SHOW DATABASES;");
+                ResultSet re = statement.executeQuery()) {
             // filter system databases
             while (re.next()) {
                 String dbName = re.getString("database");
                 if (StringUtils.isNotBlank(dbName)
-                    && !OracleDataSourceConfig.ORACLE_SYSTEM_DATABASES.contains(dbName)) {
+                        && !OracleDataSourceConfig.ORACLE_SYSTEM_DATABASES.contains(dbName)) {
                     dbNames.add(dbName);
                 }
             }
@@ -94,7 +95,7 @@ public class OracleDataSourceChannel implements DataSourceChannel {
 
     @Override
     public boolean checkDataSourceConnectivity(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         try (Connection ignored = getConnection(requestParams)) {
             return true;
         } catch (Exception e) {
@@ -104,10 +105,10 @@ public class OracleDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<TableField> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull String table) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull String table) {
         List<TableField> tableFields = new ArrayList<>();
         try (Connection connection = getConnection(requestParams, database)) {
             DatabaseMetaData metaData = connection.getMetaData();
@@ -136,15 +137,15 @@ public class OracleDataSourceChannel implements DataSourceChannel {
 
     @Override
     public Map<String, List<TableField>> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull List<String> tables) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull List<String> tables) {
         return null;
     }
 
     private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName)
-        throws SQLException {
+            throws SQLException {
         ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName);
         while (primaryKeysInfo.next()) {
             return primaryKeysInfo.getString("COLUMN_NAME");
@@ -153,17 +154,17 @@ public class OracleDataSourceChannel implements DataSourceChannel {
     }
 
     private Connection getConnection(Map<String, String> requestParams)
-        throws SQLException, ClassNotFoundException {
+            throws SQLException, ClassNotFoundException {
         return getConnection(requestParams, null);
     }
 
     private Connection getConnection(Map<String, String> requestParams, String databaseName)
-        throws SQLException, ClassNotFoundException {
+            throws SQLException, ClassNotFoundException {
         checkNotNull(requestParams.get(OracleOptionRule.DRIVER.key()));
         checkNotNull(requestParams.get(OracleOptionRule.URL.key()), "Jdbc url cannot be null");
         String url =
-            JdbcUtils.replaceDatabase(
-                requestParams.get(OracleOptionRule.URL.key()), databaseName);
+                JdbcUtils.replaceDatabase(
+                        requestParams.get(OracleOptionRule.URL.key()), databaseName);
         if (requestParams.containsKey(OracleOptionRule.USER.key())) {
             String username = requestParams.get(OracleOptionRule.USER.key());
             String password = requestParams.get(OracleOptionRule.PASSWORD.key());
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleDataSourceConfig.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleDataSourceConfig.java
index 96f09fbe..83455b6d 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleDataSourceConfig.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleDataSourceConfig.java
@@ -30,24 +30,24 @@ public class OracleDataSourceConfig {
     public static final String PLUGIN_NAME = "JDBC-Oracle";
 
     public static final DataSourcePluginInfo ORACLE_DATASOURCE_PLUGIN_INFO =
-        DataSourcePluginInfo.builder()
-            .name(PLUGIN_NAME)
-            .icon(PLUGIN_NAME)
-            .version("1.0.0")
-            .type(DatasourcePluginTypeEnum.DATABASE.getCode())
-            .build();
+            DataSourcePluginInfo.builder()
+                    .name(PLUGIN_NAME)
+                    .icon(PLUGIN_NAME)
+                    .version("1.0.0")
+                    .type(DatasourcePluginTypeEnum.DATABASE.getCode())
+                    .build();
 
     public static final Set<String> ORACLE_SYSTEM_DATABASES =
-        Sets.newHashSet("SYS", "SYSTEM", "SYSDBA", "SYSOPER", "HR", "SCOTT");
+            Sets.newHashSet("SYS", "SYSTEM", "SYSDBA", "SYSOPER", "HR", "SCOTT");
 
     public static final OptionRule OPTION_RULE =
-        OptionRule.builder()
-            .required(OracleOptionRule.URL, OracleOptionRule.DRIVER)
-            .optional(OracleOptionRule.USER, OracleOptionRule.PASSWORD)
-            .build();
+            OptionRule.builder()
+                    .required(OracleOptionRule.URL, OracleOptionRule.DRIVER)
+                    .optional(OracleOptionRule.USER, OracleOptionRule.PASSWORD)
+                    .build();
 
     public static final OptionRule METADATA_RULE =
-        OptionRule.builder()
-            .required(OracleOptionRule.DATABASE, OracleOptionRule.TABLE)
-            .build();
+            OptionRule.builder()
+                    .required(OracleOptionRule.DATABASE, OracleOptionRule.TABLE)
+                    .build();
 }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleOptionRule.java
index 517a8c32..f3ec40e3 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleOptionRule.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-oracle/src/main/java/org/apache/seatunnel/datasource/plugin/oracle/jdbc/OracleOptionRule.java
@@ -23,28 +23,28 @@ import org.apache.seatunnel.api.configuration.Options;
 public class OracleOptionRule {
 
     public static final Option<String> URL =
-        Options.key("url")
-            .stringType()
-            .noDefaultValue()
-            .withDescription("jdbc url, eg:" + "jdbc:oracle:thin:@localhost:1521:XE");
+            Options.key("url")
+                    .stringType()
+                    .noDefaultValue()
+                    .withDescription("jdbc url, eg:" + "jdbc:oracle:thin:@localhost:1521:XE");
 
     public static final Option<String> USER =
-        Options.key("user").stringType().noDefaultValue().withDescription("jdbc user");
+            Options.key("user").stringType().noDefaultValue().withDescription("jdbc user");
 
     public static final Option<String> PASSWORD =
-        Options.key("password").stringType().noDefaultValue().withDescription("jdbc password");
+            Options.key("password").stringType().noDefaultValue().withDescription("jdbc password");
 
     public static final Option<String> DATABASE =
-        Options.key("database").stringType().noDefaultValue().withDescription("jdbc database");
+            Options.key("database").stringType().noDefaultValue().withDescription("jdbc database");
 
     public static final Option<String> TABLE =
-        Options.key("table").stringType().noDefaultValue().withDescription("jdbc table");
+            Options.key("table").stringType().noDefaultValue().withDescription("jdbc table");
 
     public static final Option<DriverType> DRIVER =
-        Options.key("driver")
-            .enumType(DriverType.class)
-            .defaultValue(DriverType.ORACLE)
-            .withDescription("driver");
+            Options.key("driver")
+                    .enumType(DriverType.class)
+                    .defaultValue(DriverType.ORACLE)
+                    .withDescription("driver");
 
     public enum DriverType {
         ORACLE("oracle.jdbc.driver.OracleDriver"),
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlDataSourceChannel.java
index b331d61d..e4393953 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlDataSourceChannel.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlDataSourceChannel.java
@@ -17,17 +17,16 @@
 
 package org.apache.seatunnel.datasource.plugin.postgresql.jdbc;
 
-import static com.google.common.base.Preconditions.checkNotNull;
-
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
 import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils;
 
-import lombok.NonNull;
 import org.apache.commons.lang3.StringUtils;
 
+import lombok.NonNull;
+
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.DriverManager;
@@ -39,6 +38,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
 public class PostgresqlDataSourceChannel implements DataSourceChannel {
 
     @Override
@@ -53,18 +54,18 @@ public class PostgresqlDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getTables(
-        @NonNull String pluginName, Map<String, String> requestParams, String database) {
+            @NonNull String pluginName, Map<String, String> requestParams, String database) {
         List<String> tableNames = new ArrayList<>();
         String query = "SELECT table_schema, table_name FROM information_schema.tables";
         try (Connection connection = getConnection(requestParams, database)) {
             try (Statement statement = connection.createStatement();
-                 ResultSet resultSet = statement.executeQuery(query)) {
+                    ResultSet resultSet = statement.executeQuery(query)) {
                 while (resultSet.next()) {
                     String schemaName = resultSet.getString("table_schema");
                     String tableName = resultSet.getString("table_name");
                     if (StringUtils.isNotBlank(schemaName)
-                        && !PostgresqlDataSourceConfig.POSTGRESQL_SYSTEM_DATABASES.contains(
-                        schemaName)) {
+                            && !PostgresqlDataSourceConfig.POSTGRESQL_SYSTEM_DATABASES.contains(
+                                    schemaName)) {
                         tableNames.add(schemaName + "." + tableName);
                     }
                 }
@@ -77,17 +78,17 @@ public class PostgresqlDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getDatabases(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         List<String> dbNames = new ArrayList<>();
         try (Connection connection = getConnection(requestParams);
-             PreparedStatement statement =
-                 connection.prepareStatement("select datname from pg_database;");
-             ResultSet re = statement.executeQuery()) {
+                PreparedStatement statement =
+                        connection.prepareStatement("select datname from pg_database;");
+                ResultSet re = statement.executeQuery()) {
             while (re.next()) {
                 String dbName = re.getString("datname");
                 if (StringUtils.isNotBlank(dbName)
-                    && !PostgresqlDataSourceConfig.POSTGRESQL_SYSTEM_DATABASES.contains(
-                    dbName)) {
+                        && !PostgresqlDataSourceConfig.POSTGRESQL_SYSTEM_DATABASES.contains(
+                                dbName)) {
                     dbNames.add(dbName);
                 }
             }
@@ -99,7 +100,7 @@ public class PostgresqlDataSourceChannel implements DataSourceChannel {
 
     @Override
     public boolean checkDataSourceConnectivity(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         try (Connection ignored = getConnection(requestParams)) {
             return true;
         } catch (Exception e) {
@@ -109,18 +110,18 @@ public class PostgresqlDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<TableField> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull String table) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull String table) {
         List<TableField> tableFields = new ArrayList<>();
-        try (Connection connection = getConnection(requestParams, database);) {
+        try (Connection connection = getConnection(requestParams, database); ) {
             DatabaseMetaData metaData = connection.getMetaData();
             String primaryKey = getPrimaryKey(metaData, database, table);
             String[] split = table.split("\\.");
             if (split.length != 2) {
                 throw new DataSourcePluginException(
-                    "Postgresql tableName should composed by schemaName.tableName");
+                        "Postgresql tableName should composed by schemaName.tableName");
             }
             try (ResultSet resultSet = metaData.getColumns(database, split[0], split[1], null)) {
                 while (resultSet.next()) {
@@ -146,15 +147,15 @@ public class PostgresqlDataSourceChannel implements DataSourceChannel {
 
     @Override
     public Map<String, List<TableField>> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull List<String> tables) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull List<String> tables) {
         return null;
     }
 
     private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName)
-        throws SQLException {
+            throws SQLException {
         ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName);
         while (primaryKeysInfo.next()) {
             return primaryKeysInfo.getString("COLUMN_NAME");
@@ -163,17 +164,17 @@ public class PostgresqlDataSourceChannel implements DataSourceChannel {
     }
 
     private Connection getConnection(Map<String, String> requestParams)
-        throws SQLException, ClassNotFoundException {
+            throws SQLException, ClassNotFoundException {
         return getConnection(requestParams, null);
     }
 
     private Connection getConnection(Map<String, String> requestParams, String databaseName)
-        throws SQLException, ClassNotFoundException {
+            throws SQLException, ClassNotFoundException {
         checkNotNull(requestParams.get(PostgresqlOptionRule.DRIVER.key()));
         checkNotNull(requestParams.get(PostgresqlOptionRule.URL.key()), "Jdbc url cannot be null");
         String url =
-            JdbcUtils.replaceDatabase(
-                requestParams.get(PostgresqlOptionRule.URL.key()), databaseName);
+                JdbcUtils.replaceDatabase(
+                        requestParams.get(PostgresqlOptionRule.URL.key()), databaseName);
         if (requestParams.containsKey(PostgresqlOptionRule.USER.key())) {
             String username = requestParams.get(PostgresqlOptionRule.USER.key());
             String password = requestParams.get(PostgresqlOptionRule.PASSWORD.key());
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlDataSourceConfig.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlDataSourceConfig.java
index 2aa0538b..f150ab5d 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlDataSourceConfig.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlDataSourceConfig.java
@@ -30,33 +30,33 @@ public class PostgresqlDataSourceConfig {
     public static final String PLUGIN_NAME = "JDBC-Postgres";
 
     public static final DataSourcePluginInfo POSTGRESQL_DATASOURCE_PLUGIN_INFO =
-        DataSourcePluginInfo.builder()
-            .name(PLUGIN_NAME)
-            .icon(PLUGIN_NAME)
-            .version("1.0.0")
-            .type(DatasourcePluginTypeEnum.DATABASE.getCode())
-            .build();
+            DataSourcePluginInfo.builder()
+                    .name(PLUGIN_NAME)
+                    .icon(PLUGIN_NAME)
+                    .version("1.0.0")
+                    .type(DatasourcePluginTypeEnum.DATABASE.getCode())
+                    .build();
 
     public static final Set<String> POSTGRESQL_SYSTEM_DATABASES =
-        Sets.newHashSet(
-            "information_schema",
-            "pg_catalog",
-            "root",
-            "pg_toast",
-            "pg_temp_1",
-            "pg_toast_temp_1",
-            "postgres",
-            "template0",
-            "template1");
+            Sets.newHashSet(
+                    "information_schema",
+                    "pg_catalog",
+                    "root",
+                    "pg_toast",
+                    "pg_temp_1",
+                    "pg_toast_temp_1",
+                    "postgres",
+                    "template0",
+                    "template1");
 
     public static final OptionRule OPTION_RULE =
-        OptionRule.builder()
-            .required(PostgresqlOptionRule.URL, PostgresqlOptionRule.DRIVER)
-            .optional(PostgresqlOptionRule.USER, PostgresqlOptionRule.PASSWORD)
-            .build();
+            OptionRule.builder()
+                    .required(PostgresqlOptionRule.URL, PostgresqlOptionRule.DRIVER)
+                    .optional(PostgresqlOptionRule.USER, PostgresqlOptionRule.PASSWORD)
+                    .build();
 
     public static final OptionRule METADATA_RULE =
-        OptionRule.builder()
-            .required(PostgresqlOptionRule.DATABASE, PostgresqlOptionRule.TABLE)
-            .build();
+            OptionRule.builder()
+                    .required(PostgresqlOptionRule.DATABASE, PostgresqlOptionRule.TABLE)
+                    .build();
 }
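
    [Illustrative sketch, not part of this commit.] The Postgres hunks above show that getTables returns
    schema-qualified names ("schema.table") and that getTableFields requires the same "schemaName.tableName"
    form, otherwise the channel throws DataSourcePluginException. The sketch below wires those two calls
    together using the option keys from PostgresqlOptionRule (next file in this diff) and the PLUGIN_NAME
    constant from PostgresqlDataSourceConfig; the URL, credentials, and database name are placeholders, and
    the driver value is assumed to be the fully qualified driver class name.

        import java.util.HashMap;
        import java.util.List;
        import java.util.Map;

        import org.apache.seatunnel.datasource.plugin.api.model.TableField;
        import org.apache.seatunnel.datasource.plugin.postgresql.jdbc.PostgresqlDataSourceChannel;
        import org.apache.seatunnel.datasource.plugin.postgresql.jdbc.PostgresqlDataSourceConfig;
        import org.apache.seatunnel.datasource.plugin.postgresql.jdbc.PostgresqlOptionRule;

        public class PostgresqlChannelUsageSketch {
            public static void main(String[] args) {
                // Keys come from PostgresqlOptionRule; all values here are hypothetical placeholders.
                Map<String, String> requestParams = new HashMap<>();
                requestParams.put(PostgresqlOptionRule.URL.key(),
                        "jdbc:postgresql://localhost:5432/test");  // hypothetical URL
                requestParams.put(PostgresqlOptionRule.DRIVER.key(),
                        "org.postgresql.Driver");                  // assumed: fully qualified driver class
                requestParams.put(PostgresqlOptionRule.USER.key(), "postgres");   // hypothetical credentials
                requestParams.put(PostgresqlOptionRule.PASSWORD.key(), "secret"); // hypothetical credentials

                PostgresqlDataSourceChannel channel = new PostgresqlDataSourceChannel();
                String pluginName = PostgresqlDataSourceConfig.PLUGIN_NAME; // "JDBC-Postgres" per this diff
                String database = "test"; // hypothetical database name

                // Each returned name is already "schema.table", which is the form getTableFields expects.
                List<String> tables = channel.getTables(pluginName, requestParams, database);
                for (String table : tables) {
                    List<TableField> fields =
                            channel.getTableFields(pluginName, requestParams, database, table);
                    System.out.println(table + " -> " + fields.size() + " columns");
                }
            }
        }
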
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlOptionRule.java
index 16e8b85b..748c4ea7 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlOptionRule.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-postgresql/src/main/java/org/apache/seatunnel/datasource/plugin/postgresql/jdbc/PostgresqlOptionRule.java
@@ -23,30 +23,30 @@ import org.apache.seatunnel.api.configuration.Options;
 public class PostgresqlOptionRule {
 
     public static final Option<String> URL =
-        Options.key("url")
-            .stringType()
-            .noDefaultValue()
-            .withDescription(
-                "jdbc url, eg:"
-                    + "jdbc:postgresql://localhost:5432//test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8");
+            Options.key("url")
+                    .stringType()
+                    .noDefaultValue()
+                    .withDescription(
+                            "jdbc url, eg:"
+                                    + "jdbc:postgresql://localhost:5432//test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8");
 
     public static final Option<String> USER =
-        Options.key("user").stringType().noDefaultValue().withDescription("jdbc user");
+            Options.key("user").stringType().noDefaultValue().withDescription("jdbc user");
 
     public static final Option<String> PASSWORD =
-        Options.key("password").stringType().noDefaultValue().withDescription("jdbc password");
+            Options.key("password").stringType().noDefaultValue().withDescription("jdbc password");
 
     public static final Option<String> DATABASE =
-        Options.key("database").stringType().noDefaultValue().withDescription("jdbc database");
+            Options.key("database").stringType().noDefaultValue().withDescription("jdbc database");
 
     public static final Option<String> TABLE =
-        Options.key("table").stringType().noDefaultValue().withDescription("jdbc table");
+            Options.key("table").stringType().noDefaultValue().withDescription("jdbc table");
 
     public static final Option<DriverType> DRIVER =
-        Options.key("driver")
-            .enumType(DriverType.class)
-            .defaultValue(DriverType.POSTGRESQL)
-            .withDescription("driver");
+            Options.key("driver")
+                    .enumType(DriverType.class)
+                    .defaultValue(DriverType.POSTGRESQL)
+                    .withDescription("driver");
 
     public enum DriverType {
         POSTGRESQL("org.postgresql.Driver"),
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/jdbc/RedshiftDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/jdbc/RedshiftDataSourceChannel.java
index 32f77e98..0e3dfa70 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/jdbc/RedshiftDataSourceChannel.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/jdbc/RedshiftDataSourceChannel.java
@@ -17,17 +17,16 @@
 
 package org.apache.seatunnel.datasource.plugin.redshift.jdbc;
 
-import static com.google.common.base.Preconditions.checkNotNull;
-
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
 import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils;
 
-import lombok.NonNull;
 import org.apache.commons.lang3.StringUtils;
 
+import lombok.NonNull;
+
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.DriverManager;
@@ -38,6 +37,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
 public class RedshiftDataSourceChannel implements DataSourceChannel {
 
     @Override
@@ -52,17 +53,17 @@ public class RedshiftDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getTables(
-        @NonNull String pluginName, Map<String, String> requestParams, String database) {
+            @NonNull String pluginName, Map<String, String> requestParams, String database) {
         List<String> tableNames = new ArrayList<>();
         try (Connection connection = getConnection(requestParams, database);
-             ResultSet resultSet =
-                 connection.getMetaData().getTables(database, null, null, null);) {
+                ResultSet resultSet =
+                        connection.getMetaData().getTables(database, null, null, null); ) {
             while (resultSet.next()) {
                 String schemaName = resultSet.getString("TABLE_SCHEM");
                 String tableName = resultSet.getString("TABLE_NAME");
                 // todo: use isNotSystemSchemaName
                 if (StringUtils.isNotBlank(schemaName)
-                    && !RedshiftDataSourceConfig.REDSHIFT_SYSTEM_TABLES.contains(schemaName)) {
+                        && !RedshiftDataSourceConfig.REDSHIFT_SYSTEM_TABLES.contains(schemaName)) {
                     tableNames.add(schemaName + "." + tableName);
                 }
             }
@@ -74,16 +75,16 @@ public class RedshiftDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getDatabases(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         List<String> dbNames = new ArrayList<>();
         try (Connection connection = getConnection(requestParams);
-             PreparedStatement statement =
-                 connection.prepareStatement("select datname from pg_database;");
-             ResultSet re = statement.executeQuery()) {
+                PreparedStatement statement =
+                        connection.prepareStatement("select datname from pg_database;");
+                ResultSet re = statement.executeQuery()) {
             while (re.next()) {
                 String dbName = re.getString("datname");
                 if (StringUtils.isNotBlank(dbName)
-                    && !RedshiftDataSourceConfig.REDSHIFT_SYSTEM_TABLES.contains(dbName)) {
+                        && !RedshiftDataSourceConfig.REDSHIFT_SYSTEM_TABLES.contains(dbName)) {
                     dbNames.add(dbName);
                 }
             }
@@ -95,7 +96,7 @@ public class RedshiftDataSourceChannel implements DataSourceChannel {
 
     @Override
     public boolean checkDataSourceConnectivity(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         try (Connection ignored = getConnection(requestParams)) {
             return true;
         } catch (Exception e) {
@@ -105,18 +106,18 @@ public class RedshiftDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<TableField> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull String table) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull String table) {
         List<TableField> tableFields = new ArrayList<>();
-        try (Connection connection = getConnection(requestParams, database);) {
+        try (Connection connection = getConnection(requestParams, database); ) {
             DatabaseMetaData metaData = connection.getMetaData();
             String primaryKey = getPrimaryKey(metaData, database, table);
             String[] split = table.split("\\.");
             if (split.length != 2) {
                 throw new DataSourcePluginException(
-                    "Postgresql tableName should composed by schemaName.tableName");
+                        "Postgresql tableName should composed by schemaName.tableName");
             }
             try (ResultSet resultSet = metaData.getColumns(database, split[0], split[1], null)) {
                 while (resultSet.next()) {
@@ -142,15 +143,15 @@ public class RedshiftDataSourceChannel implements DataSourceChannel {
 
     @Override
     public Map<String, List<TableField>> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull List<String> tables) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull List<String> tables) {
         return null;
     }
 
     private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName)
-        throws SQLException {
+            throws SQLException {
         ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName);
         while (primaryKeysInfo.next()) {
             return primaryKeysInfo.getString("COLUMN_NAME");
@@ -159,17 +160,17 @@ public class RedshiftDataSourceChannel implements DataSourceChannel {
     }
 
     private Connection getConnection(Map<String, String> requestParams)
-        throws SQLException, ClassNotFoundException {
+            throws SQLException, ClassNotFoundException {
         return getConnection(requestParams, null);
     }
 
     private Connection getConnection(Map<String, String> requestParams, String databaseName)
-        throws SQLException, ClassNotFoundException {
+            throws SQLException, ClassNotFoundException {
         checkNotNull(requestParams.get(RedshiftOptionRule.DRIVER.key()));
         checkNotNull(requestParams.get(RedshiftOptionRule.URL.key()), "Jdbc url cannot be null");
         String url =
-            JdbcUtils.replaceDatabase(
-                requestParams.get(RedshiftOptionRule.URL.key()), databaseName);
+                JdbcUtils.replaceDatabase(
+                        requestParams.get(RedshiftOptionRule.URL.key()), databaseName);
         if (requestParams.containsKey(RedshiftOptionRule.USER.key())) {
             String username = requestParams.get(RedshiftOptionRule.USER.key());
             String password = requestParams.get(RedshiftOptionRule.PASSWORD.key());
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/jdbc/RedshiftDataSourceConfig.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/jdbc/RedshiftDataSourceConfig.java
index 450c8d95..b5561f09 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/jdbc/RedshiftDataSourceConfig.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/jdbc/RedshiftDataSourceConfig.java
@@ -30,33 +30,33 @@ public class RedshiftDataSourceConfig {
     public static final String PLUGIN_NAME = "JDBC-Redshift";
 
     public static final DataSourcePluginInfo REDSHIFT_DATASOURCE_PLUGIN_INFO =
-        DataSourcePluginInfo.builder()
-            .name(PLUGIN_NAME)
-            .icon("redshift")
-            .version("1.0.0")
-            .type(DatasourcePluginTypeEnum.DATABASE.getCode())
-            .build();
+            DataSourcePluginInfo.builder()
+                    .name(PLUGIN_NAME)
+                    .icon("redshift")
+                    .version("1.0.0")
+                    .type(DatasourcePluginTypeEnum.DATABASE.getCode())
+                    .build();
 
     public static final Set<String> REDSHIFT_SYSTEM_TABLES =
-        Sets.newHashSet(
-            "information_schema",
-            "pg_catalog",
-            "root",
-            "pg_toast",
-            "pg_temp_1",
-            "pg_toast_temp_1",
-            "postgres",
-            "template0",
-            "template1");
+            Sets.newHashSet(
+                    "information_schema",
+                    "pg_catalog",
+                    "root",
+                    "pg_toast",
+                    "pg_temp_1",
+                    "pg_toast_temp_1",
+                    "postgres",
+                    "template0",
+                    "template1");
 
     public static final OptionRule OPTION_RULE =
-        OptionRule.builder()
-            .required(RedshiftOptionRule.URL, RedshiftOptionRule.DRIVER)
-            .optional(RedshiftOptionRule.USER, RedshiftOptionRule.PASSWORD)
-            .build();
+            OptionRule.builder()
+                    .required(RedshiftOptionRule.URL, RedshiftOptionRule.DRIVER)
+                    .optional(RedshiftOptionRule.USER, RedshiftOptionRule.PASSWORD)
+                    .build();
 
     public static final OptionRule METADATA_RULE =
-        OptionRule.builder()
-            .required(RedshiftOptionRule.DATABASE, RedshiftOptionRule.TABLE)
-            .build();
+            OptionRule.builder()
+                    .required(RedshiftOptionRule.DATABASE, RedshiftOptionRule.TABLE)
+                    .build();
 }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/jdbc/RedshiftOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/jdbc/RedshiftOptionRule.java
index ba4c11c0..c4f38954 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/jdbc/RedshiftOptionRule.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/jdbc/RedshiftOptionRule.java
@@ -23,30 +23,30 @@ import org.apache.seatunnel.api.configuration.Options;
 public class RedshiftOptionRule {
 
     public static final Option<String> URL =
-        Options.key("url")
-            .stringType()
-            .noDefaultValue()
-            .withDescription(
-                "jdbc url, eg:"
-                    + "jdbc:redshift://server.redshift.amazonaws.com:5439/test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8");
+            Options.key("url")
+                    .stringType()
+                    .noDefaultValue()
+                    .withDescription(
+                            "jdbc url, eg:"
+                                    + "jdbc:redshift://server.redshift.amazonaws.com:5439/test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8");
 
     public static final Option<String> USER =
-        Options.key("user").stringType().noDefaultValue().withDescription("jdbc user");
+            Options.key("user").stringType().noDefaultValue().withDescription("jdbc user");
 
     public static final Option<String> PASSWORD =
-        Options.key("password").stringType().noDefaultValue().withDescription("jdbc password");
+            Options.key("password").stringType().noDefaultValue().withDescription("jdbc password");
 
     public static final Option<String> DATABASE =
-        Options.key("database").stringType().noDefaultValue().withDescription("jdbc database");
+            Options.key("database").stringType().noDefaultValue().withDescription("jdbc database");
 
     public static final Option<String> TABLE =
-        Options.key("table").stringType().noDefaultValue().withDescription("jdbc table");
+            Options.key("table").stringType().noDefaultValue().withDescription("jdbc table");
 
     public static final Option<DriverType> DRIVER =
-        Options.key("driver")
-            .enumType(DriverType.class)
-            .defaultValue(DriverType.JDBC42_REDSHIFT)
-            .withDescription("driver");
+            Options.key("driver")
+                    .enumType(DriverType.class)
+                    .defaultValue(DriverType.JDBC42_REDSHIFT)
+                    .withDescription("driver");
 
     public enum DriverType {
         JDBC42_REDSHIFT("com.amazon.redshift.jdbc42.Driver"),
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/src/main/java/org/apache/seatunnel/datasource/plugin/sqlserver/jdbc/SqlServerDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/src/main/java/org/apache/seatunnel/datasource/plugin/sqlserver/jdbc/SqlServerDataSourceChannel.java
index bde7550c..7a32f86a 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/src/main/java/org/apache/seatunnel/datasource/plugin/sqlserver/jdbc/SqlServerDataSourceChannel.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/src/main/java/org/apache/seatunnel/datasource/plugin/sqlserver/jdbc/SqlServerDataSourceChannel.java
@@ -17,17 +17,16 @@
 
 package org.apache.seatunnel.datasource.plugin.sqlserver.jdbc;
 
-import static com.google.common.base.Preconditions.checkNotNull;
-
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
 import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils;
 
+import org.apache.commons.lang3.StringUtils;
+
 import lombok.NonNull;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang3.StringUtils;
 
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
@@ -39,6 +38,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
 @Slf4j
 public class SqlServerDataSourceChannel implements DataSourceChannel {
     @Override
@@ -53,13 +54,13 @@ public class SqlServerDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getTables(
-        @NonNull String pluginName, Map<String, String> requestParams, String database) {
+            @NonNull String pluginName, Map<String, String> requestParams, String database) {
         List<String> tableNames = new ArrayList<>();
         try (Connection connection = getConnection(requestParams);
-             ResultSet resultSet =
-                 connection
-                     .getMetaData()
-                     .getTables(database, null, null, new String[]{"TABLE"})) {
+                ResultSet resultSet =
+                        connection
+                                .getMetaData()
+                                .getTables(database, null, null, new String[] {"TABLE"})) {
             while (resultSet.next()) {
                 String tableName = resultSet.getString("TABLE_NAME");
                 if (StringUtils.isNotBlank(tableName)) {
@@ -74,16 +75,16 @@ public class SqlServerDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getDatabases(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         List<String> dbNames = new ArrayList<>();
         try (Connection connection = getConnection(requestParams);
-             PreparedStatement statement = connection.prepareStatement("SHOW DATABASES;");
-             ResultSet re = statement.executeQuery()) {
+                PreparedStatement statement = connection.prepareStatement("SHOW DATABASES;");
+                ResultSet re = statement.executeQuery()) {
             // filter system databases
             while (re.next()) {
                 String dbName = re.getString("database");
                 if (StringUtils.isNotBlank(dbName)
-                    && !SqlServerDataSourceConfig.SQLSERVER_SYSTEM_DATABASES.contains(dbName)) {
+                        && !SqlServerDataSourceConfig.SQLSERVER_SYSTEM_DATABASES.contains(dbName)) {
                     dbNames.add(dbName);
                 }
             }
@@ -95,7 +96,7 @@ public class SqlServerDataSourceChannel implements DataSourceChannel {
 
     @Override
     public boolean checkDataSourceConnectivity(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         try (Connection ignored = getConnection(requestParams)) {
             return true;
         } catch (Exception e) {
@@ -105,10 +106,10 @@ public class SqlServerDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<TableField> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull String table) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull String table) {
         List<TableField> tableFields = new ArrayList<>();
         try (Connection connection = getConnection(requestParams, database)) {
             DatabaseMetaData metaData = connection.getMetaData();
@@ -137,15 +138,15 @@ public class SqlServerDataSourceChannel implements DataSourceChannel {
 
     @Override
     public Map<String, List<TableField>> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull List<String> tables) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull List<String> tables) {
         return null;
     }
 
     private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName)
-        throws SQLException {
+            throws SQLException {
         ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName);
         while (primaryKeysInfo.next()) {
             return primaryKeysInfo.getString("COLUMN_NAME");
@@ -154,17 +155,17 @@ public class SqlServerDataSourceChannel implements DataSourceChannel {
     }
 
     private Connection getConnection(Map<String, String> requestParams)
-        throws SQLException, ClassNotFoundException {
+            throws SQLException, ClassNotFoundException {
         return getConnection(requestParams, null);
     }
 
     private Connection getConnection(Map<String, String> requestParams, String databaseName)
-        throws SQLException, ClassNotFoundException {
+            throws SQLException, ClassNotFoundException {
         checkNotNull(requestParams.get(SqlServerOptionRule.DRIVER.key()));
         checkNotNull(requestParams.get(SqlServerOptionRule.URL.key()), "Jdbc url cannot be null");
         String url =
-            JdbcUtils.replaceDatabase(
-                requestParams.get(SqlServerOptionRule.URL.key()), databaseName);
+                JdbcUtils.replaceDatabase(
+                        requestParams.get(SqlServerOptionRule.URL.key()), databaseName);
         if (requestParams.containsKey(SqlServerOptionRule.USER.key())) {
             String username = requestParams.get(SqlServerOptionRule.USER.key());
             String password = requestParams.get(SqlServerOptionRule.PASSWORD.key());
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/src/main/java/org/apache/seatunnel/datasource/plugin/sqlserver/jdbc/SqlServerDataSourceConfig.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/src/main/java/org/apache/seatunnel/datasource/plugin/sqlserver/jdbc/SqlServerDataSourceConfig.java
index 3bfc056c..218464ab 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/src/main/java/org/apache/seatunnel/datasource/plugin/sqlserver/jdbc/SqlServerDataSourceConfig.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/src/main/java/org/apache/seatunnel/datasource/plugin/sqlserver/jdbc/SqlServerDataSourceConfig.java
@@ -30,31 +30,31 @@ public class SqlServerDataSourceConfig {
     public static final String PLUGIN_NAME = "JDBC-SQLServer";
 
     public static final DataSourcePluginInfo SQLSERVER_DATASOURCE_PLUGIN_INFO =
-        DataSourcePluginInfo.builder()
-            .name(PLUGIN_NAME)
-            .icon(PLUGIN_NAME)
-            .version("1.0.0")
-            .type(DatasourcePluginTypeEnum.DATABASE.getCode())
-            .build();
+            DataSourcePluginInfo.builder()
+                    .name(PLUGIN_NAME)
+                    .icon(PLUGIN_NAME)
+                    .version("1.0.0")
+                    .type(DatasourcePluginTypeEnum.DATABASE.getCode())
+                    .build();
 
     public static final Set<String> SQLSERVER_SYSTEM_DATABASES =
-        Sets.newHashSet(
-            "master",
-            "tempdb",
-            "model",
-            "msdb",
-            "ReportServer",
-            "ReportServerTempDB",
-            "SSISDB");
+            Sets.newHashSet(
+                    "master",
+                    "tempdb",
+                    "model",
+                    "msdb",
+                    "ReportServer",
+                    "ReportServerTempDB",
+                    "SSISDB");
 
     public static final OptionRule OPTION_RULE =
-        OptionRule.builder()
-            .required(SqlServerOptionRule.URL, SqlServerOptionRule.DRIVER)
-            .optional(SqlServerOptionRule.USER, SqlServerOptionRule.PASSWORD)
-            .build();
+            OptionRule.builder()
+                    .required(SqlServerOptionRule.URL, SqlServerOptionRule.DRIVER)
+                    .optional(SqlServerOptionRule.USER, SqlServerOptionRule.PASSWORD)
+                    .build();
 
     public static final OptionRule METADATA_RULE =
-        OptionRule.builder()
-            .required(SqlServerOptionRule.DATABASE, SqlServerOptionRule.TABLE)
-            .build();
+            OptionRule.builder()
+                    .required(SqlServerOptionRule.DATABASE, SqlServerOptionRule.TABLE)
+                    .build();
 }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/src/main/java/org/apache/seatunnel/datasource/plugin/sqlserver/jdbc/SqlServerOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/src/main/java/org/apache/seatunnel/datasource/plugin/sqlserver/jdbc/SqlServerOptionRule.java
index ec157d39..6052a7ab 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/src/main/java/org/apache/seatunnel/datasource/plugin/sqlserver/jdbc/SqlServerOptionRule.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-sqlserver/src/main/java/org/apache/seatunnel/datasource/plugin/sqlserver/jdbc/SqlServerOptionRule.java
@@ -23,29 +23,29 @@ import org.apache.seatunnel.api.configuration.Options;
 public class SqlServerOptionRule {
 
     public static final Option<String> URL =
-        Options.key("url")
-            .stringType()
-            .noDefaultValue()
-            .withDescription(
-                "jdbc url, eg:" + "jdbc:sqlserver://localhost:1433;database=xx");
+            Options.key("url")
+                    .stringType()
+                    .noDefaultValue()
+                    .withDescription(
+                            "jdbc url, eg:" + "jdbc:sqlserver://localhost:1433;database=xx");
 
     public static final Option<String> USER =
-        Options.key("user").stringType().noDefaultValue().withDescription("jdbc user");
+            Options.key("user").stringType().noDefaultValue().withDescription("jdbc user");
 
     public static final Option<String> PASSWORD =
-        Options.key("password").stringType().noDefaultValue().withDescription("jdbc password");
+            Options.key("password").stringType().noDefaultValue().withDescription("jdbc password");
 
     public static final Option<String> DATABASE =
-        Options.key("database").stringType().noDefaultValue().withDescription("jdbc database");
+            Options.key("database").stringType().noDefaultValue().withDescription("jdbc database");
 
     public static final Option<String> TABLE =
-        Options.key("table").stringType().noDefaultValue().withDescription("jdbc table");
+            Options.key("table").stringType().noDefaultValue().withDescription("jdbc table");
 
     public static final Option<DriverType> DRIVER =
-        Options.key("driver")
-            .enumType(DriverType.class)
-            .defaultValue(DriverType.SQL_SERVER)
-            .withDescription("driver");
+            Options.key("driver")
+                    .enumType(DriverType.class)
+                    .defaultValue(DriverType.SQL_SERVER)
+                    .withDescription("driver");
 
     public enum DriverType {
         SQL_SERVER("com.microsoft.sqlserver.jdbc.SQLServerDriver"),
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksDataSourceConfig.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksDataSourceConfig.java
index 427657cc..913bc5ce 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksDataSourceConfig.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksDataSourceConfig.java
@@ -30,24 +30,24 @@ public class StarRocksDataSourceConfig {
     public static final String PLUGIN_NAME = "JDBC-StarRocks";
 
     public static final DataSourcePluginInfo STAR_ROCKS_DATA_SOURCE_PLUGIN_INFO =
-        DataSourcePluginInfo.builder()
-            .name(PLUGIN_NAME)
-            .icon(PLUGIN_NAME)
-            .version("1.0.0")
-            .type(DatasourcePluginTypeEnum.DATABASE.getCode())
-            .build();
+            DataSourcePluginInfo.builder()
+                    .name(PLUGIN_NAME)
+                    .icon(PLUGIN_NAME)
+                    .version("1.0.0")
+                    .type(DatasourcePluginTypeEnum.DATABASE.getCode())
+                    .build();
 
     public static final Set<String> STAR_ROCKS_SYSTEM_DATABASES =
-        Sets.newHashSet("_statistics_", "information_schema");
+            Sets.newHashSet("_statistics_", "information_schema");
 
     public static final OptionRule OPTION_RULE =
-        OptionRule.builder()
-            .required(StarRocksOptionRule.URL, StarRocksOptionRule.DRIVER)
-            .optional(StarRocksOptionRule.USER, StarRocksOptionRule.PASSWORD)
-            .build();
+            OptionRule.builder()
+                    .required(StarRocksOptionRule.URL, StarRocksOptionRule.DRIVER)
+                    .optional(StarRocksOptionRule.USER, StarRocksOptionRule.PASSWORD)
+                    .build();
 
     public static final OptionRule METADATA_RULE =
-        OptionRule.builder()
-            .required(StarRocksOptionRule.DATABASE, StarRocksOptionRule.TABLE)
-            .build();
+            OptionRule.builder()
+                    .required(StarRocksOptionRule.DATABASE, StarRocksOptionRule.TABLE)
+                    .build();
 }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksJdbcDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksJdbcDataSourceChannel.java
index 3f429a7c..65f0f613 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksJdbcDataSourceChannel.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksJdbcDataSourceChannel.java
@@ -17,17 +17,16 @@
 
 package org.apache.seatunnel.datasource.plugin.starrocks.jdbc;
 
-import static com.google.common.base.Preconditions.checkNotNull;
-
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
 import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils;
 
-import lombok.NonNull;
 import org.apache.commons.lang3.StringUtils;
 
+import lombok.NonNull;
+
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.DriverManager;
@@ -38,6 +37,8 @@ import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
 public class StarRocksJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
@@ -52,13 +53,13 @@ public class StarRocksJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getTables(
-        @NonNull String pluginName, Map<String, String> requestParams, String database) {
+            @NonNull String pluginName, Map<String, String> requestParams, String database) {
         List<String> tableNames = new ArrayList<>();
         try (Connection connection = getConnection(requestParams);
-             ResultSet resultSet =
-                 connection
-                     .getMetaData()
-                     .getTables(database, null, null, new String[]{"TABLE"})) {
+                ResultSet resultSet =
+                        connection
+                                .getMetaData()
+                                .getTables(database, null, null, new String[] {"TABLE"})) {
             while (resultSet.next()) {
                 String tableName = resultSet.getString("TABLE_NAME");
                 if (StringUtils.isNotBlank(tableName)) {
@@ -73,17 +74,17 @@ public class StarRocksJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getDatabases(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         List<String> dbNames = new ArrayList<>();
         try (Connection connection = getConnection(requestParams);
-             PreparedStatement statement = connection.prepareStatement("SHOW DATABASES;");
-             ResultSet re = statement.executeQuery()) {
+                PreparedStatement statement = connection.prepareStatement("SHOW DATABASES;");
+                ResultSet re = statement.executeQuery()) {
             // filter system databases
             while (re.next()) {
                 String dbName = re.getString("database");
                 if (StringUtils.isNotBlank(dbName)
-                    && !StarRocksDataSourceConfig.STAR_ROCKS_SYSTEM_DATABASES.contains(
-                    dbName)) {
+                        && !StarRocksDataSourceConfig.STAR_ROCKS_SYSTEM_DATABASES.contains(
+                                dbName)) {
                     dbNames.add(dbName);
                 }
             }
@@ -95,7 +96,7 @@ public class StarRocksJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public boolean checkDataSourceConnectivity(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         try (Connection ignored = getConnection(requestParams)) {
             return true;
         } catch (Exception e) {
@@ -105,15 +106,15 @@ public class StarRocksJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<TableField> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull String table) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull String table) {
         List<TableField> tableFields = new ArrayList<>();
         try (Connection connection = getConnection(requestParams, database)) {
             DatabaseMetaData metaData = connection.getMetaData();
             String primaryKey = getPrimaryKey(metaData, database, table);
-            try (ResultSet resultSet = metaData.getColumns(database, null, table, null);) {
+            try (ResultSet resultSet = metaData.getColumns(database, null, table, null); ) {
                 while (resultSet.next()) {
                     TableField tableField = new TableField();
                     String columnName = resultSet.getString("COLUMN_NAME");
@@ -137,15 +138,15 @@ public class StarRocksJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public Map<String, List<TableField>> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull List<String> tables) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull List<String> tables) {
         return null;
     }
 
     private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName)
-        throws SQLException {
+            throws SQLException {
         ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName);
         while (primaryKeysInfo.next()) {
             return primaryKeysInfo.getString("COLUMN_NAME");
@@ -154,17 +155,17 @@ public class StarRocksJdbcDataSourceChannel implements DataSourceChannel {
     }
 
     private Connection getConnection(Map<String, String> requestParams)
-        throws SQLException, ClassNotFoundException {
+            throws SQLException, ClassNotFoundException {
         return getConnection(requestParams, null);
     }
 
     private Connection getConnection(Map<String, String> requestParams, String databaseName)
-        throws SQLException, ClassNotFoundException {
+            throws SQLException, ClassNotFoundException {
         checkNotNull(requestParams.get(StarRocksOptionRule.DRIVER.key()));
         checkNotNull(requestParams.get(StarRocksOptionRule.URL.key()), "Jdbc url cannot be null");
         String url =
-            JdbcUtils.replaceDatabase(
-                requestParams.get(StarRocksOptionRule.URL.key()), databaseName);
+                JdbcUtils.replaceDatabase(
+                        requestParams.get(StarRocksOptionRule.URL.key()), databaseName);
         if (requestParams.containsKey(StarRocksOptionRule.USER.key())) {
             String username = requestParams.get(StarRocksOptionRule.USER.key());
             String password = requestParams.get(StarRocksOptionRule.PASSWORD.key());
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksOptionRule.java
index d67c2a30..08f2dc21 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksOptionRule.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/jdbc/StarRocksOptionRule.java
@@ -23,30 +23,30 @@ import org.apache.seatunnel.api.configuration.Options;
 public class StarRocksOptionRule {
 
     public static final Option<String> URL =
-        Options.key("url")
-            .stringType()
-            .noDefaultValue()
-            .withDescription(
-                "jdbc url, eg:"
-                    + "jdbc:mysql://localhost:9030/test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8");
+            Options.key("url")
+                    .stringType()
+                    .noDefaultValue()
+                    .withDescription(
+                            "jdbc url, eg:"
+                                    + "jdbc:mysql://localhost:9030/test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8");
 
     public static final Option<String> USER =
-        Options.key("user").stringType().noDefaultValue().withDescription("jdbc user");
+            Options.key("user").stringType().noDefaultValue().withDescription("jdbc user");
 
     public static final Option<String> PASSWORD =
-        Options.key("password").stringType().noDefaultValue().withDescription("jdbc password");
+            Options.key("password").stringType().noDefaultValue().withDescription("jdbc password");
 
     public static final Option<String> DATABASE =
-        Options.key("database").stringType().noDefaultValue().withDescription("jdbc database");
+            Options.key("database").stringType().noDefaultValue().withDescription("jdbc database");
 
     public static final Option<String> TABLE =
-        Options.key("table").stringType().noDefaultValue().withDescription("jdbc table");
+            Options.key("table").stringType().noDefaultValue().withDescription("jdbc table");
 
     public static final Option<DriverType> DRIVER =
-        Options.key("driver")
-            .enumType(DriverType.class)
-            .defaultValue(DriverType.MYSQL)
-            .withDescription("driver");
+            Options.key("driver")
+                    .enumType(DriverType.class)
+                    .defaultValue(DriverType.MYSQL)
+                    .withDescription("driver");
 
     public enum DriverType {
         MYSQL("com.mysql.cj.jdbc.Driver"),
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbDataSourceConfig.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbDataSourceConfig.java
index f02caff2..c9acb5b6 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbDataSourceConfig.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbDataSourceConfig.java
@@ -30,22 +30,22 @@ public class TidbDataSourceConfig {
     public static final String PLUGIN_NAME = "JDBC-TiDB";
 
     public static final DataSourcePluginInfo TIDB_DATASOURCE_PLUGIN_INFO =
-        DataSourcePluginInfo.builder()
-            .name(PLUGIN_NAME)
-            .icon(PLUGIN_NAME)
-            .version("1.0.0")
-            .type(DatasourcePluginTypeEnum.DATABASE.getCode())
-            .build();
+            DataSourcePluginInfo.builder()
+                    .name(PLUGIN_NAME)
+                    .icon(PLUGIN_NAME)
+                    .version("1.0.0")
+                    .type(DatasourcePluginTypeEnum.DATABASE.getCode())
+                    .build();
 
     public static final Set<String> TIDB_SYSTEM_DATABASES =
-        Sets.newHashSet("information_schema", "mysql", "performance_schema", "metrics_schema");
+            Sets.newHashSet("information_schema", "mysql", "performance_schema", "metrics_schema");
 
     public static final OptionRule OPTION_RULE =
-        OptionRule.builder()
-            .required(TidbOptionRule.URL, TidbOptionRule.DRIVER)
-            .optional(TidbOptionRule.USER, TidbOptionRule.PASSWORD)
-            .build();
+            OptionRule.builder()
+                    .required(TidbOptionRule.URL, TidbOptionRule.DRIVER)
+                    .optional(TidbOptionRule.USER, TidbOptionRule.PASSWORD)
+                    .build();
 
     public static final OptionRule METADATA_RULE =
-        OptionRule.builder().required(TidbOptionRule.DATABASE, TidbOptionRule.TABLE).build();
+            OptionRule.builder().required(TidbOptionRule.DATABASE, TidbOptionRule.TABLE).build();
 }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbJdbcDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbJdbcDataSourceChannel.java
index 8558f13c..90688bbb 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbJdbcDataSourceChannel.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbJdbcDataSourceChannel.java
@@ -17,17 +17,16 @@
 
 package org.apache.seatunnel.datasource.plugin.tidb.jdbc;
 
-import static com.google.common.base.Preconditions.checkNotNull;
-
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
 import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils;
 
-import lombok.NonNull;
 import org.apache.commons.lang3.StringUtils;
 
+import lombok.NonNull;
+
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
 import java.sql.DriverManager;
@@ -40,6 +39,8 @@ import java.util.Map;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
 public class TidbJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
@@ -54,13 +55,13 @@ public class TidbJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getTables(
-        @NonNull String pluginName, Map<String, String> requestParams, String database) {
+            @NonNull String pluginName, Map<String, String> requestParams, String database) {
         List<String> tableNames = new ArrayList<>();
         try (Connection connection = getConnection(requestParams);
-             ResultSet resultSet =
-                 connection
-                     .getMetaData()
-                     .getTables(database, null, null, new String[]{"TABLE"})) {
+                ResultSet resultSet =
+                        connection
+                                .getMetaData()
+                                .getTables(database, null, null, new String[] {"TABLE"})) {
             while (resultSet.next()) {
                 String tableName = resultSet.getString("TABLE_NAME");
                 if (StringUtils.isNotBlank(tableName)) {
@@ -75,16 +76,16 @@ public class TidbJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getDatabases(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         List<String> dbNames = new ArrayList<>();
         try (Connection connection = getConnection(requestParams);
-             PreparedStatement statement = connection.prepareStatement("SHOW DATABASES;");
-             ResultSet re = statement.executeQuery()) {
+                PreparedStatement statement = connection.prepareStatement("SHOW DATABASES;");
+                ResultSet re = statement.executeQuery()) {
             // filter system databases
             while (re.next()) {
                 String dbName = re.getString("database");
                 if (StringUtils.isNotBlank(dbName)
-                    && !TidbDataSourceConfig.TIDB_SYSTEM_DATABASES.contains(dbName)) {
+                        && !TidbDataSourceConfig.TIDB_SYSTEM_DATABASES.contains(dbName)) {
                     dbNames.add(dbName);
                 }
             }
@@ -96,7 +97,7 @@ public class TidbJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public boolean checkDataSourceConnectivity(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         try (Connection ignored = getConnection(requestParams)) {
             return true;
         } catch (Exception e) {
@@ -106,10 +107,10 @@ public class TidbJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<TableField> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull String table) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull String table) {
         List<TableField> tableFields = new ArrayList<>();
         try (Connection connection = getConnection(requestParams, database)) {
             DatabaseMetaData metaData = connection.getMetaData();
@@ -138,21 +139,21 @@ public class TidbJdbcDataSourceChannel implements DataSourceChannel {
 
     @Override
     public Map<String, List<TableField>> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull List<String> tables) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull List<String> tables) {
         return tables.parallelStream()
-            .collect(
-                Collectors.toMap(
-                    Function.identity(),
-                    table ->
-                        getTableFields(
-                            pluginName, requestParams, database, table)));
+                .collect(
+                        Collectors.toMap(
+                                Function.identity(),
+                                table ->
+                                        getTableFields(
+                                                pluginName, requestParams, database, table)));
     }
 
     private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName)
-        throws SQLException {
+            throws SQLException {
         ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName);
         while (primaryKeysInfo.next()) {
             return primaryKeysInfo.getString("COLUMN_NAME");
@@ -161,17 +162,17 @@ public class TidbJdbcDataSourceChannel implements DataSourceChannel {
     }
 
     private Connection getConnection(Map<String, String> requestParams)
-        throws SQLException, ClassNotFoundException {
+            throws SQLException, ClassNotFoundException {
         return getConnection(requestParams, null);
     }
 
     private Connection getConnection(Map<String, String> requestParams, String databaseName)
-        throws SQLException, ClassNotFoundException {
+            throws SQLException, ClassNotFoundException {
         checkNotNull(requestParams.get(TidbOptionRule.DRIVER.key()));
         checkNotNull(requestParams.get(TidbOptionRule.URL.key()), "Jdbc url cannot be null");
         String url =
-            JdbcUtils.replaceDatabase(
-                requestParams.get(TidbOptionRule.URL.key()), databaseName);
+                JdbcUtils.replaceDatabase(
+                        requestParams.get(TidbOptionRule.URL.key()), databaseName);
         if (requestParams.containsKey(TidbOptionRule.USER.key())) {
             String username = requestParams.get(TidbOptionRule.USER.key());
             String password = requestParams.get(TidbOptionRule.PASSWORD.key());
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbOptionRule.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbOptionRule.java
index 90af0f95..7ae00917 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbOptionRule.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-jdbc-tidb/src/main/java/org/apache/seatunnel/datasource/plugin/tidb/jdbc/TidbOptionRule.java
@@ -23,30 +23,30 @@ import org.apache.seatunnel.api.configuration.Options;
 public class TidbOptionRule {
 
     public static final Option<String> URL =
-        Options.key("url")
-            .stringType()
-            .noDefaultValue()
-            .withDescription(
-                "jdbc url, eg:"
-                    + " jdbc:mysql://localhost:3306/test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8");
+            Options.key("url")
+                    .stringType()
+                    .noDefaultValue()
+                    .withDescription(
+                            "jdbc url, eg:"
+                                    + " jdbc:mysql://localhost:3306/test?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=utf-8");
 
     public static final Option<String> USER =
-        Options.key("user").stringType().noDefaultValue().withDescription("jdbc user");
+            Options.key("user").stringType().noDefaultValue().withDescription("jdbc user");
 
     public static final Option<String> PASSWORD =
-        Options.key("password").stringType().noDefaultValue().withDescription("jdbc password");
+            Options.key("password").stringType().noDefaultValue().withDescription("jdbc password");
 
     public static final Option<String> DATABASE =
-        Options.key("database").stringType().noDefaultValue().withDescription("jdbc database");
+            Options.key("database").stringType().noDefaultValue().withDescription("jdbc database");
 
     public static final Option<String> TABLE =
-        Options.key("table").stringType().noDefaultValue().withDescription("jdbc table");
+            Options.key("table").stringType().noDefaultValue().withDescription("jdbc table");
 
     public static final Option<DriverType> DRIVER =
-        Options.key("driver")
-            .enumType(DriverType.class)
-            .defaultValue(DriverType.MYSQL)
-            .withDescription("driver");
+            Options.key("driver")
+                    .enumType(DriverType.class)
+                    .defaultValue(DriverType.MYSQL)
+                    .withDescription("driver");
 
     public enum DriverType {
         MYSQL("com.mysql.cj.jdbc.Driver"),
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaDataSourceChannel.java
index 72e67995..9413bcae 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaDataSourceChannel.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaDataSourceChannel.java
@@ -17,33 +17,34 @@
 
 package org.apache.seatunnel.datasource.plugin.kafka;
 
-import static com.google.common.base.Preconditions.checkArgument;
-
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
 import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 
-import lombok.NonNull;
-import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.kafka.clients.admin.AdminClient;
 import org.apache.kafka.clients.admin.DescribeClusterOptions;
 import org.apache.kafka.clients.admin.DescribeClusterResult;
 
+import lombok.NonNull;
+import lombok.extern.slf4j.Slf4j;
+
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
+import static com.google.common.base.Preconditions.checkArgument;
+
 @Slf4j
 public class KafkaDataSourceChannel implements DataSourceChannel {
 
     private static final String DATABASE = "default";
     private static final DescribeClusterOptions DEFAULT_TIMEOUT_OPTIONS =
-        new DescribeClusterOptions().timeoutMs(60 * 1000);
+            new DescribeClusterOptions().timeoutMs(60 * 1000);
 
     @Override
     public OptionRule getDataSourceOptions(@NonNull String pluginName) {
@@ -57,59 +58,59 @@ public class KafkaDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getTables(
-        @NonNull String pluginName, Map<String, String> requestParams, String database) {
+            @NonNull String pluginName, Map<String, String> requestParams, String database) {
         checkArgument(StringUtils.equalsIgnoreCase(database, DATABASE), "database must be default");
         try (AdminClient adminClient = createAdminClient(requestParams)) {
             Set<String> strings = adminClient.listTopics().names().get();
             return new ArrayList<>(strings);
         } catch (Exception ex) {
             throw new DataSourcePluginException(
-                "check kafka connectivity failed, " + ex.getMessage(), ex);
+                    "check kafka connectivity failed, " + ex.getMessage(), ex);
         }
     }
 
     @Override
     public List<String> getDatabases(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         return DEFAULT_DATABASES;
     }
 
     @Override
     public boolean checkDataSourceConnectivity(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         try (AdminClient adminClient = createAdminClient(requestParams)) {
             // just test the connection
             DescribeClusterResult describeClusterResult =
-                adminClient.describeCluster(DEFAULT_TIMEOUT_OPTIONS);
+                    adminClient.describeCluster(DEFAULT_TIMEOUT_OPTIONS);
             return CollectionUtils.isNotEmpty(describeClusterResult.nodes().get());
         } catch (Exception ex) {
             throw new DataSourcePluginException(
-                "check kafka connectivity failed, " + ex.getMessage(), ex);
+                    "check kafka connectivity failed, " + ex.getMessage(), ex);
         }
     }
 
     @Override
     public List<TableField> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull String table) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull String table) {
         checkArgument(StringUtils.equalsIgnoreCase(database, DATABASE), "database must be default");
         return Collections.emptyList();
     }
 
     @Override
     public Map<String, List<TableField>> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull List<String> tables) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull List<String> tables) {
         checkArgument(StringUtils.equalsIgnoreCase(database, DATABASE), "database must be default");
         return Collections.emptyMap();
     }
 
     private AdminClient createAdminClient(Map<String, String> requestParams) {
         return AdminClient.create(
-            KafkaRequestParamsUtils.parsePropertiesFromRequestParams(requestParams));
+                KafkaRequestParamsUtils.parsePropertiesFromRequestParams(requestParams));
     }
 }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaDataSourceFactory.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaDataSourceFactory.java
index bcba81c0..16b258cd 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaDataSourceFactory.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaDataSourceFactory.java
@@ -42,13 +42,13 @@ public class KafkaDataSourceFactory implements DataSourceFactory {
     @Override
     public Set<DataSourcePluginInfo> supportedDataSources() {
         return Sets.newHashSet(
-            DataSourcePluginInfo.builder()
-                .name(KAFKA_PLUGIN_NAME)
-                .icon(KAFKA_PLUGIN_ICON)
-                .version(KAFKA_PLUGIN_VERSION)
-                .supportVirtualTables(true)
-                .type(DatasourcePluginTypeEnum.NO_STRUCTURED.getCode())
-                .build());
+                DataSourcePluginInfo.builder()
+                        .name(KAFKA_PLUGIN_NAME)
+                        .icon(KAFKA_PLUGIN_ICON)
+                        .version(KAFKA_PLUGIN_VERSION)
+                        .supportVirtualTables(true)
+                        .type(DatasourcePluginTypeEnum.NO_STRUCTURED.getCode())
+                        .build());
     }
 
     @Override
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaRequestParamsUtils.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaRequestParamsUtils.java
index 95988e76..25e70c99 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaRequestParamsUtils.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/main/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaRequestParamsUtils.java
@@ -17,8 +17,6 @@
 
 package org.apache.seatunnel.datasource.plugin.kafka;
 
-import static com.google.common.base.Preconditions.checkArgument;
-
 import org.apache.seatunnel.shade.com.typesafe.config.Config;
 import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory;
 
@@ -27,28 +25,30 @@ import org.apache.kafka.clients.admin.AdminClientConfig;
 import java.util.Map;
 import java.util.Properties;
 
+import static com.google.common.base.Preconditions.checkArgument;
+
 public class KafkaRequestParamsUtils {
 
     public static Properties parsePropertiesFromRequestParams(Map<String, String> requestParams) {
         checkArgument(
-            requestParams.containsKey(KafkaOptionRule.BOOTSTRAP_SERVERS.key()),
-            String.format(
-                "Missing %s in requestParams", KafkaOptionRule.BOOTSTRAP_SERVERS.key()));
+                requestParams.containsKey(KafkaOptionRule.BOOTSTRAP_SERVERS.key()),
+                String.format(
+                        "Missing %s in requestParams", KafkaOptionRule.BOOTSTRAP_SERVERS.key()));
         final Properties properties = new Properties();
         properties.put(
-            AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
-            requestParams.get(KafkaOptionRule.BOOTSTRAP_SERVERS.key()));
+                AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
+                requestParams.get(KafkaOptionRule.BOOTSTRAP_SERVERS.key()));
         if (requestParams.containsKey(KafkaOptionRule.KAFKA_CONFIG.key())) {
             Config configObject =
-                ConfigFactory.parseString(
-                    requestParams.get(KafkaOptionRule.KAFKA_CONFIG.key()));
+                    ConfigFactory.parseString(
+                            requestParams.get(KafkaOptionRule.KAFKA_CONFIG.key()));
             configObject
-                .entrySet()
-                .forEach(
-                    entry -> {
-                        properties.put(
-                            entry.getKey(), entry.getValue().unwrapped().toString());
-                    });
+                    .entrySet()
+                    .forEach(
+                            entry -> {
+                                properties.put(
+                                        entry.getKey(), entry.getValue().unwrapped().toString());
+                            });
         }
         return properties;
     }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/test/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaDataSourceChannelTest.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/test/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaDataSourceChannelTest.java
index e694de45..cdcb4ffa 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/test/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaDataSourceChannelTest.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/test/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaDataSourceChannelTest.java
@@ -20,12 +20,13 @@ package org.apache.seatunnel.datasource.plugin.kafka;
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 
-import com.google.common.collect.ImmutableMap;
-import lombok.extern.slf4j.Slf4j;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.api.Test;
 
+import com.google.common.collect.ImmutableMap;
+import lombok.extern.slf4j.Slf4j;
+
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
@@ -36,37 +37,37 @@ import java.util.Map;
 public class KafkaDataSourceChannelTest {
 
     private static final KafkaDataSourceChannel KAFKA_DATA_SOURCE_CHANNEL =
-        new KafkaDataSourceChannel();
+            new KafkaDataSourceChannel();
 
     private static final String KAFKA_PLUGIN_NAME = "kafka";
     private static final String BOOTSTRAP_SERVER = "localhost:9092";
 
     private static final Map<String, String> REQUEST_PARAMS =
-        new ImmutableMap.Builder<String, String>()
-            .put(KafkaOptionRule.BOOTSTRAP_SERVERS.key(), BOOTSTRAP_SERVER)
-            .build();
+            new ImmutableMap.Builder<String, String>()
+                    .put(KafkaOptionRule.BOOTSTRAP_SERVERS.key(), BOOTSTRAP_SERVER)
+                    .build();
 
     @Test
     public void getDataSourceOptions() {
         OptionRule dataSourceMetadataFieldsByDataSourceName =
-            KAFKA_DATA_SOURCE_CHANNEL.getDataSourceOptions(KAFKA_PLUGIN_NAME);
+                KAFKA_DATA_SOURCE_CHANNEL.getDataSourceOptions(KAFKA_PLUGIN_NAME);
         Assertions.assertEquals(
-            1, dataSourceMetadataFieldsByDataSourceName.getRequiredOptions().size());
+                1, dataSourceMetadataFieldsByDataSourceName.getRequiredOptions().size());
     }
 
     @Test
     public void getDatasourceMetadataFieldsByDataSourceName() {
         OptionRule datasourceMetadataFieldsByDataSourceName =
-            KAFKA_DATA_SOURCE_CHANNEL.getDatasourceMetadataFieldsByDataSourceName(
-                KAFKA_PLUGIN_NAME);
+                KAFKA_DATA_SOURCE_CHANNEL.getDatasourceMetadataFieldsByDataSourceName(
+                        KAFKA_PLUGIN_NAME);
         Assertions.assertEquals(
-            2, datasourceMetadataFieldsByDataSourceName.getOptionalOptions().size());
+                2, datasourceMetadataFieldsByDataSourceName.getOptionalOptions().size());
     }
 
     @Test
     public void getTables() {
         List<String> tables =
-            KAFKA_DATA_SOURCE_CHANNEL.getTables(KAFKA_PLUGIN_NAME, REQUEST_PARAMS, null);
+                KAFKA_DATA_SOURCE_CHANNEL.getTables(KAFKA_PLUGIN_NAME, REQUEST_PARAMS, null);
         log.info("{}", tables);
         Assertions.assertNotNull(tables);
     }
@@ -74,7 +75,7 @@ public class KafkaDataSourceChannelTest {
     @Test
     public void getDatabases() {
         List<String> databases =
-            KAFKA_DATA_SOURCE_CHANNEL.getDatabases(KAFKA_PLUGIN_NAME, REQUEST_PARAMS);
+                KAFKA_DATA_SOURCE_CHANNEL.getDatabases(KAFKA_PLUGIN_NAME, REQUEST_PARAMS);
         log.info("{}", databases);
         Assertions.assertNotNull(databases);
     }
@@ -82,15 +83,15 @@ public class KafkaDataSourceChannelTest {
     @Test
     public void checkDataSourceConnectivity() {
         boolean dataSourceConnectivity =
-            KAFKA_DATA_SOURCE_CHANNEL.checkDataSourceConnectivity(
-                KAFKA_PLUGIN_NAME, REQUEST_PARAMS);
+                KAFKA_DATA_SOURCE_CHANNEL.checkDataSourceConnectivity(
+                        KAFKA_PLUGIN_NAME, REQUEST_PARAMS);
         Assertions.assertTrue(dataSourceConnectivity);
     }
 
     @Test
     public void getTableFields() {
         List<TableField> tableFields =
-            KAFKA_DATA_SOURCE_CHANNEL.getTableFields(KAFKA_PLUGIN_NAME, REQUEST_PARAMS, "", "");
+                KAFKA_DATA_SOURCE_CHANNEL.getTableFields(KAFKA_PLUGIN_NAME, REQUEST_PARAMS, "", "");
         log.info("{}", tableFields);
         Assertions.assertTrue(tableFields.isEmpty());
     }
@@ -98,8 +99,8 @@ public class KafkaDataSourceChannelTest {
     @Test
     public void testGetTableFields() {
         Map<String, List<TableField>> tableFields =
-            KAFKA_DATA_SOURCE_CHANNEL.getTableFields(
-                KAFKA_PLUGIN_NAME, REQUEST_PARAMS, "", Collections.emptyList());
+                KAFKA_DATA_SOURCE_CHANNEL.getTableFields(
+                        KAFKA_PLUGIN_NAME, REQUEST_PARAMS, "", Collections.emptyList());
         log.info("{}", tableFields);
         Assertions.assertTrue(tableFields.isEmpty());
     }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/test/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaRequestParamsUtilsTest.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/test/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaRequestParamsUtilsTest.java
index 67d8d33f..2ce0842f 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/test/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaRequestParamsUtilsTest.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-kafka/src/test/java/org/apache/seatunnel/datasource/plugin/kafka/KafkaRequestParamsUtilsTest.java
@@ -17,10 +17,11 @@
 
 package org.apache.seatunnel.datasource.plugin.kafka;
 
-import com.google.common.collect.ImmutableMap;
 import org.junit.jupiter.api.Assertions;
 import org.junit.jupiter.api.Test;
 
+import com.google.common.collect.ImmutableMap;
+
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Properties;
@@ -30,37 +31,41 @@ class KafkaRequestParamsUtilsTest {
     @Test
     void parsePropertiesFromRequestParams() {
         Map<String, String> requestParams =
-            new ImmutableMap.Builder<String, String>()
-                .put(KafkaOptionRule.BOOTSTRAP_SERVERS.key(), "localhost:9092")
-                .put(
-                    KafkaOptionRule.KAFKA_CONFIG.key(),
-                    "{" + "security.protocol = SASL_PLAINTEXT" + "}")
-                .build();
+                new ImmutableMap.Builder<String, String>()
+                        .put(KafkaOptionRule.BOOTSTRAP_SERVERS.key(), "localhost:9092")
+                        .put(
+                                KafkaOptionRule.KAFKA_CONFIG.key(),
+                                "{" + "security.protocol = SASL_PLAINTEXT" + "}")
+                        .build();
         Properties properties =
-            KafkaRequestParamsUtils.parsePropertiesFromRequestParams(requestParams);
+                KafkaRequestParamsUtils.parsePropertiesFromRequestParams(requestParams);
         Assertions.assertEquals("SASL_PLAINTEXT", properties.getProperty("security.protocol"));
     }
 
     @Test
     void parsePropertiesFromRequestParamsBadCase() {
         Assertions.assertDoesNotThrow(
-            () ->
-                KafkaRequestParamsUtils.parsePropertiesFromRequestParams(
-                    new ImmutableMap.Builder<String, String>()
-                        .put(KafkaOptionRule.BOOTSTRAP_SERVERS.key(), "localhost:9092")
-                        .put(KafkaOptionRule.KAFKA_CONFIG.key(), "{}")
-                        .build()));
+                () ->
+                        KafkaRequestParamsUtils.parsePropertiesFromRequestParams(
+                                new ImmutableMap.Builder<String, String>()
+                                        .put(
+                                                KafkaOptionRule.BOOTSTRAP_SERVERS.key(),
+                                                "localhost:9092")
+                                        .put(KafkaOptionRule.KAFKA_CONFIG.key(), "{}")
+                                        .build()));
 
         Assertions.assertThrows(
-            IllegalArgumentException.class,
-            () -> KafkaRequestParamsUtils.parsePropertiesFromRequestParams(new HashMap<>()));
+                IllegalArgumentException.class,
+                () -> KafkaRequestParamsUtils.parsePropertiesFromRequestParams(new HashMap<>()));
 
         Assertions.assertDoesNotThrow(
-            () ->
-                KafkaRequestParamsUtils.parsePropertiesFromRequestParams(
-                    new ImmutableMap.Builder<String, String>()
-                        .put(KafkaOptionRule.BOOTSTRAP_SERVERS.key(), "localhost:9092")
-                        .put(KafkaOptionRule.KAFKA_CONFIG.key(), "")
-                        .build()));
+                () ->
+                        KafkaRequestParamsUtils.parsePropertiesFromRequestParams(
+                                new ImmutableMap.Builder<String, String>()
+                                        .put(
+                                                KafkaOptionRule.BOOTSTRAP_SERVERS.key(),
+                                                "localhost:9092")
+                                        .put(KafkaOptionRule.KAFKA_CONFIG.key(), "")
+                                        .build()));
     }
 }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-mysql-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/mysql/MysqlCDCDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-mysql-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/mysql/MysqlCDCDataSourceChannel.java
index 07a9f385..dd26ac14 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-mysql-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/mysql/MysqlCDCDataSourceChannel.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-mysql-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/mysql/MysqlCDCDataSourceChannel.java
@@ -22,9 +22,10 @@ import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
 import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 
+import org.apache.commons.lang3.StringUtils;
+
 import com.google.common.collect.Sets;
 import lombok.NonNull;
-import org.apache.commons.lang3.StringUtils;
 
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
@@ -42,7 +43,7 @@ import java.util.Set;
 public class MysqlCDCDataSourceChannel implements DataSourceChannel {
 
     public static final Set<String> MYSQL_SYSTEM_DATABASES =
-        Sets.newHashSet("information_schema", "mysql", "performance_schema", "sys");
+            Sets.newHashSet("information_schema", "mysql", "performance_schema", "sys");
 
     @Override
     public boolean canAbleGetSchema() {
@@ -61,7 +62,7 @@ public class MysqlCDCDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getTables(
-        String pluginName, Map<String, String> requestParams, String database) {
+            String pluginName, Map<String, String> requestParams, String database) {
         return this.getTableNames(requestParams, database);
     }
 
@@ -76,22 +77,22 @@ public class MysqlCDCDataSourceChannel implements DataSourceChannel {
 
     @Override
     public boolean checkDataSourceConnectivity(
-        String pluginName, Map<String, String> requestParams) {
+            String pluginName, Map<String, String> requestParams) {
         return this.checkJdbcConnectivity(requestParams);
     }
 
     @Override
     public List<TableField> getTableFields(
-        String pluginName, Map<String, String> requestParams, String database, String table) {
+            String pluginName, Map<String, String> requestParams, String database, String table) {
         return getTableFields(requestParams, database, table);
     }
 
     @Override
     public Map<String, List<TableField>> getTableFields(
-        String pluginName,
-        Map<String, String> requestParams,
-        String database,
-        List<String> tables) {
+            String pluginName,
+            Map<String, String> requestParams,
+            String database,
+            List<String> tables) {
         Map<String, List<TableField>> tableFields = new HashMap<>(tables.size());
         for (String table : tables) {
             tableFields.put(table, getTableFields(requestParams, database, table));
@@ -102,9 +103,9 @@ public class MysqlCDCDataSourceChannel implements DataSourceChannel {
     @SuppressWarnings("checkstyle:MagicNumber")
     protected boolean checkJdbcConnectivity(Map<String, String> requestParams) {
         try (Connection connection = init(requestParams);
-             Statement statement = connection.createStatement()) {
+                Statement statement = connection.createStatement()) {
 
-            try (ResultSet resultSet = statement.executeQuery("SHOW MASTER STATUS");) {
+            try (ResultSet resultSet = statement.executeQuery("SHOW MASTER STATUS"); ) {
                 if (resultSet.next()) {
                     String binlogFile = resultSet.getString("File");
                     if (StringUtils.isBlank(binlogFile)) {
@@ -116,7 +117,7 @@ public class MysqlCDCDataSourceChannel implements DataSourceChannel {
             }
 
             try (ResultSet resultSet =
-                     statement.executeQuery("SHOW VARIABLES LIKE 'binlog_format'")) {
+                    statement.executeQuery("SHOW VARIABLES LIKE 'binlog_format'")) {
                 if (resultSet.next()) {
                     String binlogFormat = resultSet.getString("Value");
                     if (!"ROW".equalsIgnoreCase(binlogFormat)) {
@@ -128,7 +129,7 @@ public class MysqlCDCDataSourceChannel implements DataSourceChannel {
             }
 
             try (ResultSet resultSet =
-                     statement.executeQuery("SHOW VARIABLES LIKE 'binlog_row_image'")) {
+                    statement.executeQuery("SHOW VARIABLES LIKE 'binlog_row_image'")) {
                 if (resultSet.next()) {
                     String binlogRowImage = resultSet.getString("Value");
                     if (!"FULL".equalsIgnoreCase(binlogRowImage)) {
@@ -141,7 +142,7 @@ public class MysqlCDCDataSourceChannel implements DataSourceChannel {
             return true;
         } catch (Exception e) {
             throw new DataSourcePluginException(
-                "check jdbc connectivity failed, " + e.getMessage(), e);
+                    "check jdbc connectivity failed, " + e.getMessage(), e);
         }
     }
 
@@ -151,7 +152,7 @@ public class MysqlCDCDataSourceChannel implements DataSourceChannel {
         }
         String url = requestParams.get(MysqlCDCOptionRule.BASE_URL.key());
         if (null != requestParams.get(MysqlCDCOptionRule.PASSWORD.key())
-            && null != requestParams.get(MysqlCDCOptionRule.USERNAME.key())) {
+                && null != requestParams.get(MysqlCDCOptionRule.USERNAME.key())) {
             String username = requestParams.get(MysqlCDCOptionRule.USERNAME.key());
             String password = requestParams.get(MysqlCDCOptionRule.PASSWORD.key());
             return DriverManager.getConnection(url, username, password);
@@ -162,8 +163,8 @@ public class MysqlCDCDataSourceChannel implements DataSourceChannel {
     protected List<String> getDataBaseNames(Map<String, String> requestParams) throws SQLException {
         List<String> dbNames = new ArrayList<>();
         try (Connection connection = init(requestParams);
-             PreparedStatement statement = connection.prepareStatement("SHOW DATABASES;");
-             ResultSet re = statement.executeQuery()) {
+                PreparedStatement statement = connection.prepareStatement("SHOW DATABASES;");
+                ResultSet re = statement.executeQuery()) {
             // filter system databases
             while (re.next()) {
                 String dbName = re.getString("database");
@@ -178,10 +179,10 @@ public class MysqlCDCDataSourceChannel implements DataSourceChannel {
     protected List<String> getTableNames(Map<String, String> requestParams, String dbName) {
         List<String> tableNames = new ArrayList<>();
         try (Connection connection = init(requestParams);
-             ResultSet resultSet =
-                 connection
-                     .getMetaData()
-                     .getTables(dbName, null, null, new String[]{"TABLE"})) {
+                ResultSet resultSet =
+                        connection
+                                .getMetaData()
+                                .getTables(dbName, null, null, new String[] {"TABLE"})) {
             while (resultSet.next()) {
                 String tableName = resultSet.getString("TABLE_NAME");
                 if (StringUtils.isNotBlank(tableName)) {
@@ -195,9 +196,9 @@ public class MysqlCDCDataSourceChannel implements DataSourceChannel {
     }
 
     protected List<TableField> getTableFields(
-        Map<String, String> requestParams, String dbName, String tableName) {
+            Map<String, String> requestParams, String dbName, String tableName) {
         List<TableField> tableFields = new ArrayList<>();
-        try (Connection connection = init(requestParams);) {
+        try (Connection connection = init(requestParams); ) {
             DatabaseMetaData metaData = connection.getMetaData();
             String primaryKey = getPrimaryKey(metaData, dbName, tableName);
             ResultSet resultSet = metaData.getColumns(dbName, null, tableName, null);
@@ -223,7 +224,7 @@ public class MysqlCDCDataSourceChannel implements DataSourceChannel {
     }
 
     private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName)
-        throws SQLException {
+            throws SQLException {
         ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName);
         while (primaryKeysInfo.next()) {
             return primaryKeysInfo.getString("COLUMN_NAME");
@@ -233,8 +234,8 @@ public class MysqlCDCDataSourceChannel implements DataSourceChannel {
 
     private boolean isNotSystemDatabase(String dbName) {
         return MYSQL_SYSTEM_DATABASES.stream()
-            .noneMatch(
-                systemDatabase -> StringUtils.equalsAnyIgnoreCase(systemDatabase, dbName));
+                .noneMatch(
+                        systemDatabase -> StringUtils.equalsAnyIgnoreCase(systemDatabase, dbName));
     }
 
     private boolean convertToBoolean(Object value) {
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/src/main/java/org/apache/seatunnel/datasource/plugin/api/DataSourcePluginInfo.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/src/main/java/org/apache/seatunnel/datasource/plugin/api/DataSourcePluginInfo.java
index d496945d..3c47d5a5 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/src/main/java/org/apache/seatunnel/datasource/plugin/api/DataSourcePluginInfo.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/src/main/java/org/apache/seatunnel/datasource/plugin/api/DataSourcePluginInfo.java
@@ -17,11 +17,11 @@
 
 package org.apache.seatunnel.datasource.plugin.api;
 
-import static com.google.common.base.Preconditions.checkNotNull;
-
 import lombok.Builder;
 import lombok.Data;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
 @Data
 @Builder
 public class DataSourcePluginInfo {
@@ -32,18 +32,14 @@ public class DataSourcePluginInfo {
 
     public String version;
 
-    /**
-     * @see DatasourcePluginTypeEnum
-     */
+    /** @see DatasourcePluginTypeEnum */
     private Integer type;
 
-    /**
-     * whether support virtual tables, default false
-     */
+    /** whether support virtual tables, default false */
     private Boolean supportVirtualTables;
 
     public DataSourcePluginInfo(
-        String name, String icon, String version, Integer type, Boolean supportVirtualTables) {
+            String name, String icon, String version, Integer type, Boolean supportVirtualTables) {
         this.name = checkNotNull(name, "name can not be null");
         this.icon = checkNotNull(icon, "icon can not be null");
         this.version = checkNotNull(version, "version can not be null");
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/src/main/java/org/apache/seatunnel/datasource/plugin/api/common/ParamtersUtils.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/src/main/java/org/apache/seatunnel/datasource/plugin/api/common/ParamtersUtils.java
index 2a27a568..257d9f3d 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/src/main/java/org/apache/seatunnel/datasource/plugin/api/common/ParamtersUtils.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-plugins-api/src/main/java/org/apache/seatunnel/datasource/plugin/api/common/ParamtersUtils.java
@@ -23,10 +23,10 @@ import java.util.Map;
 
 public class ParamtersUtils {
     /**
-     * for some parameters, we need to convert them to {@link Map} eg: s3Options {
-     * "access.value": "org.apache.hadoop.fs.s3a.S3AFileSystem", "access.key":
-     * "AKIAIOSFODNN7EXAMPLE", "hadoop_s3_properties": " fs.s3a.impl =
-     * org.apache.hadoop.fs.s3a.S3AFileSystem fs.s3a.access.key = AKIAIOSFODNN7EXAMPLE "
+     * for some parameters, we need to convert them to {@link Map} eg: s3Options { "access.value":
+     * "org.apache.hadoop.fs.s3a.S3AFileSystem", "access.key": "AKIAIOSFODNN7EXAMPLE",
+     * "hadoop_s3_properties": " fs.s3a.impl = org.apache.hadoop.fs.s3a.S3AFileSystem
+     * fs.s3a.access.key = AKIAIOSFODNN7EXAMPLE "
      *
      * <p>Convert parameters to {@link Map}
      *
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/HadoopS3AConfiguration.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/HadoopS3AConfiguration.java
index f3256186..8da509c1 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/HadoopS3AConfiguration.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/HadoopS3AConfiguration.java
@@ -17,15 +17,16 @@
 
 package org.apache.seatunnel.datasource.plugin.redshift.s3;
 
-import static org.apache.hadoop.fs.FileSystem.FS_DEFAULT_NAME_KEY;
-
-import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 
+import lombok.extern.slf4j.Slf4j;
+
 import java.util.Arrays;
 import java.util.Map;
 
+import static org.apache.hadoop.fs.FileSystem.FS_DEFAULT_NAME_KEY;
+
 @Slf4j
 public class HadoopS3AConfiguration {
 
@@ -42,11 +43,11 @@ public class HadoopS3AConfiguration {
 
         if (!s3Options.containsKey(S3RedshiftOptionRule.BUCKET.key())) {
             throw new IllegalArgumentException(
-                "S3Redshift datasource bucket is null, please check your config");
+                    "S3Redshift datasource bucket is null, please check your config");
         }
         if (!s3Options.containsKey(S3RedshiftOptionRule.FS_S3A_ENDPOINT.key())) {
             throw new IllegalArgumentException(
-                "S3Redshift datasource endpoint is null, please check your config");
+                    "S3Redshift datasource endpoint is null, please check your config");
         }
         String bucket = s3Options.get(S3RedshiftOptionRule.BUCKET.key());
 
@@ -58,38 +59,38 @@ public class HadoopS3AConfiguration {
         Configuration hadoopConf = new Configuration();
         hadoopConf.set(FS_DEFAULT_NAME_KEY, bucket);
         hadoopConf.set(
-            S3RedshiftOptionRule.FS_S3A_ENDPOINT.key(),
-            s3Options.get(S3RedshiftOptionRule.FS_S3A_ENDPOINT.key()));
+                S3RedshiftOptionRule.FS_S3A_ENDPOINT.key(),
+                s3Options.get(S3RedshiftOptionRule.FS_S3A_ENDPOINT.key()));
         hadoopConf.set(formatKey(protocol, HDFS_IMPL_KEY), fsImpl);
         if (s3Options.containsKey(S3RedshiftOptionRule.HADOOP_S3_PROPERTIES.key())) {
             Arrays.stream(
-                    s3Options
-                        .get(S3RedshiftOptionRule.HADOOP_S3_PROPERTIES.key())
-                        .split("\n"))
-                .map(String::trim)
-                .filter(StringUtils::isNotBlank)
-                .forEach(
-                    line -> {
-                        String[] kv = line.split("=");
-                        if (kv.length == 2) {
-                            hadoopConf.set(kv[0].trim(), kv[1].trim());
-                        }
-                    });
+                            s3Options
+                                    .get(S3RedshiftOptionRule.HADOOP_S3_PROPERTIES.key())
+                                    .split("\n"))
+                    .map(String::trim)
+                    .filter(StringUtils::isNotBlank)
+                    .forEach(
+                            line -> {
+                                String[] kv = line.split("=");
+                                if (kv.length == 2) {
+                                    hadoopConf.set(kv[0].trim(), kv[1].trim());
+                                }
+                            });
         }
         if (S3RedshiftOptionRule.S3aAwsCredentialsProvider.SimpleAWSCredentialsProvider
-            .getProvider()
-            .equals(s3Options.get(S3RedshiftOptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key()))) {
+                .getProvider()
+                .equals(s3Options.get(S3RedshiftOptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key()))) {
             hadoopConf.set(
-                S3RedshiftOptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key(),
-                s3Options.get(S3RedshiftOptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key()));
+                    S3RedshiftOptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key(),
+                    s3Options.get(S3RedshiftOptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key()));
             hadoopConf.set(
-                "fs.s3a.access.key", s3Options.get(S3RedshiftOptionRule.ACCESS_KEY.key()));
+                    "fs.s3a.access.key", s3Options.get(S3RedshiftOptionRule.ACCESS_KEY.key()));
             hadoopConf.set(
-                "fs.s3a.secret.key", s3Options.get(S3RedshiftOptionRule.SECRET_KEY.key()));
+                    "fs.s3a.secret.key", s3Options.get(S3RedshiftOptionRule.SECRET_KEY.key()));
         } else {
             hadoopConf.set(
-                S3RedshiftOptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key(),
-                s3Options.get(S3RedshiftOptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key()));
+                    S3RedshiftOptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key(),
+                    s3Options.get(S3RedshiftOptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key()));
         }
         return hadoopConf;
     }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/S3RedshiftDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/S3RedshiftDataSourceChannel.java
index 07aceb32..de9e2b09 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/S3RedshiftDataSourceChannel.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/S3RedshiftDataSourceChannel.java
@@ -23,13 +23,14 @@ import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 import org.apache.seatunnel.datasource.plugin.api.utils.JdbcUtils;
 
-import com.google.common.collect.Sets;
-import lombok.NonNull;
-import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 
+import com.google.common.collect.Sets;
+import lombok.NonNull;
+import lombok.extern.slf4j.Slf4j;
+
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.Socket;
@@ -59,13 +60,13 @@ public class S3RedshiftDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getTables(
-        @NonNull String pluginName, Map<String, String> requestParams, String database) {
+            @NonNull String pluginName, Map<String, String> requestParams, String database) {
         return getTableNames(requestParams, database);
     }
 
     @Override
     public List<String> getDatabases(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         try {
             return getDataBaseNames(pluginName, requestParams);
         } catch (SQLException e) {
@@ -75,7 +76,7 @@ public class S3RedshiftDataSourceChannel implements DataSourceChannel {
 
     @Override
     public boolean checkDataSourceConnectivity(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         checkHdfsS3Connection(requestParams);
         checkJdbcConnection(requestParams);
         return true;
@@ -83,19 +84,19 @@ public class S3RedshiftDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<TableField> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull String table) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull String table) {
         return getTableFields(requestParams, database, table);
     }
 
     @Override
     public Map<String, List<TableField>> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull List<String> tables) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull List<String> tables) {
         // not need this method
         return null;
     }
@@ -113,14 +114,14 @@ public class S3RedshiftDataSourceChannel implements DataSourceChannel {
                 return;
             } catch (SQLException e) {
                 throw new DataSourcePluginException(
-                    "Check Redshift jdbc connection failed,please check your config", e);
+                        "Check Redshift jdbc connection failed,please check your config", e);
             }
         }
         try (Connection ignored = DriverManager.getConnection(jdbcUrl, username, password)) {
             log.info("Redshift jdbc connection is valid");
         } catch (SQLException e) {
             throw new DataSourcePluginException(
-                "Check Redshift jdbc connection failed,please check your config", e);
+                    "Check Redshift jdbc connection failed,please check your config", e);
         }
     }
 
@@ -130,20 +131,20 @@ public class S3RedshiftDataSourceChannel implements DataSourceChannel {
             fs.getFileStatus(new org.apache.hadoop.fs.Path("/"));
         } catch (IOException e) {
             throw new DataSourcePluginException(
-                "S3 configuration is invalid, please check your config", e);
+                    "S3 configuration is invalid, please check your config", e);
         }
     }
 
     protected Connection init(Map<String, String> requestParams, String databaseName)
-        throws SQLException {
+            throws SQLException {
         if (null == requestParams.get(S3RedshiftOptionRule.JDBC_URL.key())) {
             throw new DataSourcePluginException("Jdbc url is null");
         }
         String url =
-            JdbcUtils.replaceDatabase(
-                requestParams.get(S3RedshiftOptionRule.JDBC_URL.key()), databaseName);
+                JdbcUtils.replaceDatabase(
+                        requestParams.get(S3RedshiftOptionRule.JDBC_URL.key()), databaseName);
         if (null != requestParams.get(S3RedshiftOptionRule.JDBC_PASSWORD.key())
-            && null != requestParams.get(S3RedshiftOptionRule.JDBC_USER.key())) {
+                && null != requestParams.get(S3RedshiftOptionRule.JDBC_USER.key())) {
             String username = requestParams.get(S3RedshiftOptionRule.JDBC_USER.key());
             String password = requestParams.get(S3RedshiftOptionRule.JDBC_PASSWORD.key());
             return DriverManager.getConnection(url, username, password);
@@ -152,12 +153,12 @@ public class S3RedshiftDataSourceChannel implements DataSourceChannel {
     }
 
     protected List<String> getDataBaseNames(String pluginName, Map<String, String> requestParams)
-        throws SQLException {
+            throws SQLException {
         List<String> dbNames = new ArrayList<>();
         try (Connection connection = init(requestParams, null);
-             PreparedStatement statement =
-                 connection.prepareStatement("select datname from pg_database;");
-             ResultSet re = statement.executeQuery()) {
+                PreparedStatement statement =
+                        connection.prepareStatement("select datname from pg_database;");
+                ResultSet re = statement.executeQuery()) {
             while (re.next()) {
                 String dbName = re.getString("datname");
                 if (StringUtils.isNotBlank(dbName) && isNotSystemDatabase(dbName)) {
@@ -172,9 +173,9 @@ public class S3RedshiftDataSourceChannel implements DataSourceChannel {
 
     protected List<String> getTableNames(Map<String, String> requestParams, String dbName) {
         List<String> tableNames = new ArrayList<>();
-        try (Connection connection = init(requestParams, dbName);) {
+        try (Connection connection = init(requestParams, dbName); ) {
             ResultSet resultSet =
-                connection.getMetaData().getTables(dbName, null, null, new String[]{"TABLE"});
+                    connection.getMetaData().getTables(dbName, null, null, new String[] {"TABLE"});
             while (resultSet.next()) {
                 String tableName = resultSet.getString("TABLE_NAME");
                 if (StringUtils.isNotBlank(tableName)) {
@@ -188,15 +189,15 @@ public class S3RedshiftDataSourceChannel implements DataSourceChannel {
     }
 
     protected List<TableField> getTableFields(
-        Map<String, String> requestParams, String dbName, String tableName) {
+            Map<String, String> requestParams, String dbName, String tableName) {
         List<TableField> tableFields = new ArrayList<>();
-        try (Connection connection = init(requestParams, dbName);) {
+        try (Connection connection = init(requestParams, dbName); ) {
             DatabaseMetaData metaData = connection.getMetaData();
             String primaryKey = getPrimaryKey(metaData, dbName, tableName);
             String[] split = tableName.split("\\.");
             if (split.length != 2) {
                 throw new DataSourcePluginException(
-                    "Postgresql tableName should composed by schemaName.tableName");
+                        "Postgresql tableName should composed by schemaName.tableName");
             }
             ResultSet resultSet = metaData.getColumns(dbName, split[0], split[1], null);
             while (resultSet.next()) {
@@ -221,7 +222,7 @@ public class S3RedshiftDataSourceChannel implements DataSourceChannel {
     }
 
     private String getPrimaryKey(DatabaseMetaData metaData, String dbName, String tableName)
-        throws SQLException {
+            throws SQLException {
         ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, "%", tableName);
         while (primaryKeysInfo.next()) {
             return primaryKeysInfo.getString("COLUMN_NAME");
@@ -255,14 +256,14 @@ public class S3RedshiftDataSourceChannel implements DataSourceChannel {
     }
 
     public static final Set<String> POSTGRESQL_SYSTEM_DATABASES =
-        Sets.newHashSet(
-            "information_schema",
-            "pg_catalog",
-            "root",
-            "pg_toast",
-            "pg_temp_1",
-            "pg_toast_temp_1",
-            "postgres",
-            "template0",
-            "template1");
+            Sets.newHashSet(
+                    "information_schema",
+                    "pg_catalog",
+                    "root",
+                    "pg_toast",
+                    "pg_temp_1",
+                    "pg_toast_temp_1",
+                    "postgres",
+                    "template0",
+                    "template1");
 }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/S3RedshiftDataSourceFactory.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/S3RedshiftDataSourceFactory.java
index 3ef212ac..ee6aa4ba 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/S3RedshiftDataSourceFactory.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3-redshift/src/main/java/org/apache/seatunnel/datasource/plugin/redshift/s3/S3RedshiftDataSourceFactory.java
@@ -37,14 +37,14 @@ public class S3RedshiftDataSourceFactory implements DataSourceFactory {
     @Override
     public Set<DataSourcePluginInfo> supportedDataSources() {
         DataSourcePluginInfo s3DatasourcePluginInfo =
-            DataSourcePluginInfo.builder()
-                .name("S3-Redshift")
-                .type(DatasourcePluginTypeEnum.DATABASE.getCode())
-                .version("1.0.0")
-                .supportVirtualTables(false)
-                .icon("S3-Redshift")
-                .icon("S3-Redshift")
-                .build();
+                DataSourcePluginInfo.builder()
+                        .name("S3-Redshift")
+                        .type(DatasourcePluginTypeEnum.DATABASE.getCode())
+                        .version("1.0.0")
+                        .supportVirtualTables(false)
+                        .icon("S3-Redshift")
+                        .icon("S3-Redshift")
+                        .build();
 
         return Sets.newHashSet(s3DatasourcePluginInfo);
     }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/HadoopS3AConfiguration.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/HadoopS3AConfiguration.java
index ecce9793..3c18b296 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/HadoopS3AConfiguration.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/HadoopS3AConfiguration.java
@@ -17,16 +17,17 @@
 
 package org.apache.seatunnel.datasource.plugin.s3;
 
-import static org.apache.hadoop.fs.FileSystem.FS_DEFAULT_NAME_KEY;
-
 import org.apache.seatunnel.shade.com.typesafe.config.Config;
 import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory;
 
-import lombok.extern.slf4j.Slf4j;
 import org.apache.hadoop.conf.Configuration;
 
+import lombok.extern.slf4j.Slf4j;
+
 import java.util.Map;
 
+import static org.apache.hadoop.fs.FileSystem.FS_DEFAULT_NAME_KEY;
+
 @Slf4j
 public class HadoopS3AConfiguration {
 
@@ -43,11 +44,11 @@ public class HadoopS3AConfiguration {
 
         if (!s3Options.containsKey(S3OptionRule.BUCKET.key())) {
             throw new IllegalArgumentException(
-                "S3 datasource bucket is null, please check your config");
+                    "S3 datasource bucket is null, please check your config");
         }
         if (!s3Options.containsKey(S3OptionRule.FS_S3A_ENDPOINT.key())) {
             throw new IllegalArgumentException(
-                "S3 datasource endpoint is null, please check your config");
+                    "S3 datasource endpoint is null, please check your config");
         }
         String bucket = s3Options.get(S3OptionRule.BUCKET.key());
 
@@ -59,32 +60,32 @@ public class HadoopS3AConfiguration {
         Configuration hadoopConf = new Configuration();
         hadoopConf.set(FS_DEFAULT_NAME_KEY, bucket);
         hadoopConf.set(
-            S3OptionRule.FS_S3A_ENDPOINT.key(),
-            s3Options.get(S3OptionRule.FS_S3A_ENDPOINT.key()));
+                S3OptionRule.FS_S3A_ENDPOINT.key(),
+                s3Options.get(S3OptionRule.FS_S3A_ENDPOINT.key()));
         hadoopConf.set(formatKey(protocol, HDFS_IMPL_KEY), fsImpl);
         if (s3Options.containsKey(S3OptionRule.HADOOP_S3_PROPERTIES.key())) {
             Config configObject =
-                ConfigFactory.parseString(
-                    s3Options.get(S3OptionRule.HADOOP_S3_PROPERTIES.key()));
+                    ConfigFactory.parseString(
+                            s3Options.get(S3OptionRule.HADOOP_S3_PROPERTIES.key()));
             configObject
-                .entrySet()
-                .forEach(
-                    entry -> {
-                        hadoopConf.set(
-                            entry.getKey(), entry.getValue().unwrapped().toString());
-                    });
+                    .entrySet()
+                    .forEach(
+                            entry -> {
+                                hadoopConf.set(
+                                        entry.getKey(), entry.getValue().unwrapped().toString());
+                            });
         }
         if (S3OptionRule.S3aAwsCredentialsProvider.SimpleAWSCredentialsProvider.getProvider()
-            .equals(s3Options.get(S3OptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key()))) {
+                .equals(s3Options.get(S3OptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key()))) {
             hadoopConf.set(
-                S3OptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key(),
-                s3Options.get(S3OptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key()));
+                    S3OptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key(),
+                    s3Options.get(S3OptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key()));
             hadoopConf.set("fs.s3a.access.key", s3Options.get(S3OptionRule.ACCESS_KEY.key()));
             hadoopConf.set("fs.s3a.secret.key", s3Options.get(S3OptionRule.SECRET_KEY.key()));
         } else {
             hadoopConf.set(
-                S3OptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key(),
-                s3Options.get(S3OptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key()));
+                    S3OptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key(),
+                    s3Options.get(S3OptionRule.S3A_AWS_CREDENTIALS_PROVIDER.key()));
         }
         return hadoopConf;
     }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/S3DataSourceFactory.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/S3DataSourceFactory.java
index 640b6f16..5b223656 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/S3DataSourceFactory.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/S3DataSourceFactory.java
@@ -40,13 +40,13 @@ public class S3DataSourceFactory implements DataSourceFactory {
     @Override
     public Set<DataSourcePluginInfo> supportedDataSources() {
         DataSourcePluginInfo s3DatasourcePluginInfo =
-            DataSourcePluginInfo.builder()
-                .name(PLUGIN_NAME)
-                .type(DatasourcePluginTypeEnum.FILE.getCode())
-                .version("1.0.0")
-                .supportVirtualTables(false)
-                .icon("S3File")
-                .build();
+                DataSourcePluginInfo.builder()
+                        .name(PLUGIN_NAME)
+                        .type(DatasourcePluginTypeEnum.FILE.getCode())
+                        .version("1.0.0")
+                        .supportVirtualTables(false)
+                        .icon("S3File")
+                        .build();
 
         return Sets.newHashSet(s3DatasourcePluginInfo);
     }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/S3DatasourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/S3DatasourceChannel.java
index e1ba8de5..cf0ee45b 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/S3DatasourceChannel.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-s3/src/main/java/org/apache/seatunnel/datasource/plugin/s3/S3DatasourceChannel.java
@@ -22,11 +22,12 @@ import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
 import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 
-import lombok.NonNull;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
+import lombok.NonNull;
+
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
@@ -44,46 +45,46 @@ public class S3DatasourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getTables(
-        @NonNull String pluginName, Map<String, String> requestParams, String database) {
+            @NonNull String pluginName, Map<String, String> requestParams, String database) {
         throw new UnsupportedOperationException("getTables is not supported for S3 datasource");
     }
 
     @Override
     public List<String> getDatabases(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         throw new UnsupportedOperationException("getDatabases is not supported for S3 datasource");
     }
 
     @Override
     public boolean checkDataSourceConnectivity(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         Configuration conf = HadoopS3AConfiguration.getConfiguration(requestParams);
         try (FileSystem fs = FileSystem.get(conf)) {
             fs.listStatus(new Path("/"));
             return true;
         } catch (IOException e) {
             throw new DataSourcePluginException(
-                String.format("check s3 connectivity failed, config is: %s", requestParams), e);
+                    String.format("check s3 connectivity failed, config is: %s", requestParams), e);
         }
     }
 
     @Override
     public List<TableField> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull String table) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull String table) {
         throw new UnsupportedOperationException(
-            "getTableFields is not supported for S3 datasource");
+                "getTableFields is not supported for S3 datasource");
     }
 
     @Override
     public Map<String, List<TableField>> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull List<String> tables) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull List<String> tables) {
         throw new UnsupportedOperationException(
-            "getTableFields is not supported for S3 datasource");
+                "getTableFields is not supported for S3 datasource");
     }
 }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCDataSourceChannel.java
index 4fed9948..20a23fae 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCDataSourceChannel.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCDataSourceChannel.java
@@ -22,11 +22,12 @@ import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
 import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 
+import org.apache.commons.lang3.StringUtils;
+import org.apache.commons.lang3.tuple.Pair;
+
 import com.google.common.collect.Sets;
 import lombok.NonNull;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.commons.lang3.tuple.Pair;
 
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;
@@ -45,7 +46,7 @@ import java.util.Set;
 public class SqlServerCDCDataSourceChannel implements DataSourceChannel {
 
     public static final Set<String> MYSQL_SYSTEM_DATABASES =
-        Sets.newHashSet("master", "tempdb", "model", "msdb");
+            Sets.newHashSet("master", "tempdb", "model", "msdb");
 
     @Override
     public boolean canAbleGetSchema() {
@@ -64,7 +65,7 @@ public class SqlServerCDCDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getTables(
-        String pluginName, Map<String, String> requestParams, String database) {
+            String pluginName, Map<String, String> requestParams, String database) {
         return this.getTableNames(requestParams, database);
     }
 
@@ -79,10 +80,10 @@ public class SqlServerCDCDataSourceChannel implements DataSourceChannel {
 
     @Override
     public boolean checkDataSourceConnectivity(
-        String pluginName, Map<String, String> requestParams) {
+            String pluginName, Map<String, String> requestParams) {
         try (Connection connection = init(requestParams);
-             PreparedStatement statement = connection.prepareStatement("SELECT 1");
-             ResultSet rs = statement.executeQuery()) {
+                PreparedStatement statement = connection.prepareStatement("SELECT 1");
+                ResultSet rs = statement.executeQuery()) {
             return rs.next();
         } catch (SQLException e) {
             throw new DataSourcePluginException("connect datasource failed", e);
@@ -91,17 +92,17 @@ public class SqlServerCDCDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<TableField> getTableFields(
-        String pluginName, Map<String, String> requestParams, String database, String table) {
+            String pluginName, Map<String, String> requestParams, String database, String table) {
         Pair<String, String> pair = parseSchemaAndTable(table);
         return getTableFields(requestParams, database, pair.getLeft(), pair.getRight());
     }
 
     @Override
     public Map<String, List<TableField>> getTableFields(
-        String pluginName,
-        Map<String, String> requestParams,
-        String database,
-        List<String> tables) {
+            String pluginName,
+            Map<String, String> requestParams,
+            String database,
+            List<String> tables) {
         Map<String, List<TableField>> tableFields = new HashMap<>(tables.size());
         for (String table : tables) {
             tableFields.put(table, getTableFields(pluginName, requestParams, database, table));
@@ -115,7 +116,7 @@ public class SqlServerCDCDataSourceChannel implements DataSourceChannel {
         }
         String url = requestParams.get(SqlServerCDCOptionRule.BASE_URL.key());
         if (null != requestParams.get(SqlServerCDCOptionRule.PASSWORD.key())
-            && null != requestParams.get(SqlServerCDCOptionRule.USERNAME.key())) {
+                && null != requestParams.get(SqlServerCDCOptionRule.USERNAME.key())) {
             String username = requestParams.get(SqlServerCDCOptionRule.USERNAME.key());
             String password = requestParams.get(SqlServerCDCOptionRule.PASSWORD.key());
             return DriverManager.getConnection(url, username, password);
@@ -126,10 +127,10 @@ public class SqlServerCDCDataSourceChannel implements DataSourceChannel {
     private List<String> getDataBaseNames(Map<String, String> requestParams) throws SQLException {
         List<String> dbNames = new ArrayList<>();
         try (Connection connection = init(requestParams);
-             PreparedStatement statement =
-                 connection.prepareStatement(
-                     "SELECT NAME FROM SYS.DATABASES WHERE IS_CDC_ENABLED = 1;");
-             ResultSet re = statement.executeQuery()) {
+                PreparedStatement statement =
+                        connection.prepareStatement(
+                                "SELECT NAME FROM SYS.DATABASES WHERE IS_CDC_ENABLED = 1;");
+                ResultSet re = statement.executeQuery()) {
             // filter system databases
             while (re.next()) {
                 String dbName = re.getString("NAME");
@@ -144,18 +145,18 @@ public class SqlServerCDCDataSourceChannel implements DataSourceChannel {
 
     private List<String> getTableNames(Map<String, String> requestParams, String dbName) {
         final String sql =
-            String.format(
-                "SELECT SCHEMAS.NAME AS SCHEMA_NAME, TABLES.NAME AS TABLE_NAME"
-                    + "    FROM %s.SYS.SCHEMAS AS SCHEMAS"
-                    + "        JOIN %s.SYS.TABLES AS TABLES"
-                    + "            ON SCHEMAS.SCHEMA_ID = TABLES.SCHEMA_ID"
-                    + "                   AND TABLES.IS_TRACKED_BY_CDC = 1",
-                dbName, dbName);
+                String.format(
+                        "SELECT SCHEMAS.NAME AS SCHEMA_NAME, TABLES.NAME AS TABLE_NAME"
+                                + "    FROM %s.SYS.SCHEMAS AS SCHEMAS"
+                                + "        JOIN %s.SYS.TABLES AS TABLES"
+                                + "            ON SCHEMAS.SCHEMA_ID = TABLES.SCHEMA_ID"
+                                + "                   AND TABLES.IS_TRACKED_BY_CDC = 1",
+                        dbName, dbName);
 
         List<String> tableNames = new ArrayList<>();
         try (Connection connection = init(requestParams);
-             Statement statement = connection.createStatement();
-             ResultSet resultSet = statement.executeQuery(sql)) {
+                Statement statement = connection.createStatement();
+                ResultSet resultSet = statement.executeQuery(sql)) {
             while (resultSet.next()) {
                 String schemaName = resultSet.getString("SCHEMA_NAME");
                 String tableName = resultSet.getString("TABLE_NAME");
@@ -168,9 +169,9 @@ public class SqlServerCDCDataSourceChannel implements DataSourceChannel {
     }
 
     private List<TableField> getTableFields(
-        Map<String, String> requestParams, String dbName, String schemaName, String tableName) {
+            Map<String, String> requestParams, String dbName, String schemaName, String tableName) {
         List<TableField> tableFields = new ArrayList<>();
-        try (Connection connection = init(requestParams);) {
+        try (Connection connection = init(requestParams); ) {
             DatabaseMetaData metaData = connection.getMetaData();
             String primaryKey = getPrimaryKey(metaData, dbName, schemaName, tableName);
             ResultSet resultSet = metaData.getColumns(dbName, schemaName, tableName, null);
@@ -196,8 +197,8 @@ public class SqlServerCDCDataSourceChannel implements DataSourceChannel {
     }
 
     private String getPrimaryKey(
-        DatabaseMetaData metaData, String dbName, String schemaName, String tableName)
-        throws SQLException {
+            DatabaseMetaData metaData, String dbName, String schemaName, String tableName)
+            throws SQLException {
         ResultSet primaryKeysInfo = metaData.getPrimaryKeys(dbName, schemaName, tableName);
         while (primaryKeysInfo.next()) {
             return primaryKeysInfo.getString("COLUMN_NAME");
@@ -207,8 +208,8 @@ public class SqlServerCDCDataSourceChannel implements DataSourceChannel {
 
     private boolean isNotSystemDatabase(String dbName) {
         return MYSQL_SYSTEM_DATABASES.stream()
-            .noneMatch(
-                systemDatabase -> StringUtils.equalsAnyIgnoreCase(systemDatabase, dbName));
+                .noneMatch(
+                        systemDatabase -> StringUtils.equalsAnyIgnoreCase(systemDatabase, dbName));
     }
 
     private boolean convertToBoolean(Object value) {
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCDataSourceFactory.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCDataSourceFactory.java
index 321f9c8c..bc4f276f 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCDataSourceFactory.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/main/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/SqlServerCDCDataSourceFactory.java
@@ -37,7 +37,7 @@ public class SqlServerCDCDataSourceFactory implements DataSourceFactory {
     @Override
     public Set<DataSourcePluginInfo> supportedDataSources() {
         return Collections.singleton(
-            SqlServerCDCDataSourceConfig.SQLSERVER_CDC_DATASOURCE_PLUGIN_INFO);
+                SqlServerCDCDataSourceConfig.SQLSERVER_CDC_DATASOURCE_PLUGIN_INFO);
     }
 
     @Override
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/test/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/test/TestSqlServerCDCDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/test/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/test/TestSqlServerCDCDataSourceChannel.java
index b815a318..982c5072 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/test/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/test/TestSqlServerCDCDataSourceChannel.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-sqlserver-cdc/src/test/java/org/apache/seatunnel/datasource/plugin/cdc/sqlserver/test/TestSqlServerCDCDataSourceChannel.java
@@ -39,10 +39,17 @@ public class TestSqlServerCDCDataSourceChannel {
         requestParams.put("username", "sa");
         requestParams.put("password", "MyPass@word");
 
-        for (String database : channel.getDatabases(SqlServerCDCDataSourceConfig.PLUGIN_NAME, requestParams)) {
-            final List<String> tables = channel.getTables(SqlServerCDCDataSourceConfig.PLUGIN_NAME, requestParams, database);
+        for (String database :
+                channel.getDatabases(SqlServerCDCDataSourceConfig.PLUGIN_NAME, requestParams)) {
+            final List<String> tables =
+                    channel.getTables(
+                            SqlServerCDCDataSourceConfig.PLUGIN_NAME, requestParams, database);
             final Map<String, List<TableField>> tableFields =
-                channel.getTableFields(SqlServerCDCDataSourceConfig.PLUGIN_NAME, requestParams, database, tables);
+                    channel.getTableFields(
+                            SqlServerCDCDataSourceConfig.PLUGIN_NAME,
+                            requestParams,
+                            database,
+                            tables);
         }
     }
 }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksCatalog.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksCatalog.java
index c04558b0..4acc43e6 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksCatalog.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksCatalog.java
@@ -17,8 +17,6 @@
 
 package org.apache.seatunnel.datasource.plugin.starrocks;
 
-import static com.google.common.base.Preconditions.checkArgument;
-
 import org.apache.seatunnel.api.table.catalog.PrimaryKey;
 import org.apache.seatunnel.api.table.catalog.TablePath;
 import org.apache.seatunnel.api.table.catalog.exception.CatalogException;
@@ -27,6 +25,7 @@ import org.apache.seatunnel.api.table.catalog.exception.TableNotExistException;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 
 import org.apache.commons.lang3.StringUtils;
+
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -42,6 +41,8 @@ import java.util.List;
 import java.util.Optional;
 import java.util.Set;
 
+import static com.google.common.base.Preconditions.checkArgument;
+
 public class StarRocksCatalog {
 
     protected final String catalogName;
@@ -79,8 +80,8 @@ public class StarRocksCatalog {
     public List<String> listDatabases() throws CatalogException {
         List<String> databases = new ArrayList<>();
         try (Connection conn = DriverManager.getConnection(defaultUrl, username, pwd);
-             PreparedStatement ps = conn.prepareStatement("SHOW DATABASES;");
-             ResultSet rs = ps.executeQuery();) {
+                PreparedStatement ps = conn.prepareStatement("SHOW DATABASES;");
+                ResultSet rs = ps.executeQuery(); ) {
 
             while (rs.next()) {
                 String databaseName = rs.getString(1);
@@ -92,19 +93,19 @@ public class StarRocksCatalog {
             return databases;
         } catch (Exception e) {
             throw new CatalogException(
-                String.format("Failed listing database in catalog %s", this.catalogName), e);
+                    String.format("Failed listing database in catalog %s", this.catalogName), e);
         }
     }
 
     public List<String> listTables(String databaseName)
-        throws CatalogException, DatabaseNotExistException {
+            throws CatalogException, DatabaseNotExistException {
         if (!databaseExists(databaseName)) {
             throw new DatabaseNotExistException(this.catalogName, databaseName);
         }
 
         try (Connection conn = DriverManager.getConnection(baseUrl + databaseName, username, pwd);
-             PreparedStatement ps = conn.prepareStatement("SHOW TABLES;");
-             ResultSet rs = ps.executeQuery()) {
+                PreparedStatement ps = conn.prepareStatement("SHOW TABLES;");
+                ResultSet rs = ps.executeQuery()) {
 
             List<String> tables = new ArrayList<>();
 
@@ -115,29 +116,29 @@ public class StarRocksCatalog {
             return tables;
         } catch (Exception e) {
             throw new CatalogException(
-                String.format("Failed listing database in catalog %s", catalogName), e);
+                    String.format("Failed listing database in catalog %s", catalogName), e);
         }
     }
 
     public List<TableField> getTable(TablePath tablePath)
-        throws CatalogException, TableNotExistException {
+            throws CatalogException, TableNotExistException {
         if (!tableExists(tablePath)) {
             throw new TableNotExistException(catalogName, tablePath);
         }
 
         String dbUrl = baseUrl + tablePath.getDatabaseName();
         try (Connection conn = DriverManager.getConnection(dbUrl, username, pwd);
-             PreparedStatement statement =
-                 conn.prepareStatement(
-                     String.format(
-                         "SELECT * FROM %s WHERE 1 = 0;",
-                         String.format(
-                             "`%s`.`%s`",
-                             tablePath.getDatabaseName(),
-                             tablePath.getTableName())));) {
+                PreparedStatement statement =
+                        conn.prepareStatement(
+                                String.format(
+                                        "SELECT * FROM %s WHERE 1 = 0;",
+                                        String.format(
+                                                "`%s`.`%s`",
+                                                tablePath.getDatabaseName(),
+                                                tablePath.getTableName()))); ) {
 
             Optional<PrimaryKey> primaryKey =
-                getPrimaryKey(tablePath.getDatabaseName(), tablePath.getTableName());
+                    getPrimaryKey(tablePath.getDatabaseName(), tablePath.getTableName());
 
             ResultSetMetaData tableMetaData = statement.getMetaData();
 
@@ -148,13 +149,13 @@ public class StarRocksCatalog {
                 tableField.setType(tableMetaData.getColumnTypeName(i));
                 tableField.setComment(tableMetaData.getColumnLabel(i));
                 tableField.setNullable(
-                    tableMetaData.isNullable(i) == ResultSetMetaData.columnNullable);
+                        tableMetaData.isNullable(i) == ResultSetMetaData.columnNullable);
                 tableField.setPrimaryKey(
-                    primaryKey.isPresent()
-                        && primaryKey
-                        .get()
-                        .getColumnNames()
-                        .contains(tableField.getName()));
+                        primaryKey.isPresent()
+                                && primaryKey
+                                        .get()
+                                        .getColumnNames()
+                                        .contains(tableField.getName()));
                 // TODO add default value
                 tableField.setDefaultValue(null);
                 fields.add(tableField);
@@ -162,7 +163,7 @@ public class StarRocksCatalog {
             return fields;
         } catch (Exception e) {
             throw new CatalogException(
-                String.format("Failed getting table %s", tablePath.getFullName()), e);
+                    String.format("Failed getting table %s", tablePath.getFullName()), e);
         }
     }
 
@@ -178,12 +179,12 @@ public class StarRocksCatalog {
 
         List<String> pkFields = new ArrayList<>();
         try (Connection conn = DriverManager.getConnection(defaultUrl, username, pwd);
-             PreparedStatement statement =
-                 conn.prepareStatement(
-                     String.format(
-                         "SELECT COLUMN_NAME FROM information_schema.columns where TABLE_SCHEMA = '%s' AND TABLE_NAME = '%s' AND COLUMN_KEY = 'PRI' ORDER BY ORDINAL_POSITION",
-                         schema, table));
-             ResultSet rs = statement.executeQuery()) {
+                PreparedStatement statement =
+                        conn.prepareStatement(
+                                String.format(
+                                        "SELECT COLUMN_NAME FROM information_schema.columns where TABLE_SCHEMA = '%s' AND TABLE_NAME = '%s' AND COLUMN_KEY = 'PRI' ORDER BY ORDINAL_POSITION",
+                                        schema, table));
+                ResultSet rs = statement.executeQuery()) {
             while (rs.next()) {
                 String columnName = rs.getString("COLUMN_NAME");
                 pkFields.add(columnName);
@@ -216,7 +217,7 @@ public class StarRocksCatalog {
     public boolean tableExists(TablePath tablePath) throws CatalogException {
         try {
             return databaseExists(tablePath.getDatabaseName())
-                && listTables(tablePath.getDatabaseName()).contains(tablePath.getTableName());
+                    && listTables(tablePath.getDatabaseName()).contains(tablePath.getTableName());
         } catch (DatabaseNotExistException e) {
             return false;
         }
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksDataSourceChannel.java b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksDataSourceChannel.java
index a5ed265f..835864a4 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksDataSourceChannel.java
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/datasource-starrocks/src/main/java/org/apache/seatunnel/datasource/plugin/starrocks/StarRocksDataSourceChannel.java
@@ -17,18 +17,19 @@
 
 package org.apache.seatunnel.datasource.plugin.starrocks;
 
+import org.apache.seatunnel.shade.com.fasterxml.jackson.databind.ObjectMapper;
+
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.api.table.catalog.TablePath;
 import org.apache.seatunnel.datasource.plugin.api.DataSourceChannel;
 import org.apache.seatunnel.datasource.plugin.api.DataSourcePluginException;
 import org.apache.seatunnel.datasource.plugin.api.model.TableField;
 
-import org.apache.seatunnel.shade.com.fasterxml.jackson.databind.ObjectMapper;
-
-import lombok.NonNull;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import lombok.NonNull;
+
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.Socket;
@@ -59,21 +60,21 @@ public class StarRocksDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<String> getTables(
-        @NonNull String pluginName, Map<String, String> requestParams, String database) {
+            @NonNull String pluginName, Map<String, String> requestParams, String database) {
         StarRocksCatalog catalog = getCatalog(requestParams);
         return catalog.listTables(database);
     }
 
     @Override
     public List<String> getDatabases(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         StarRocksCatalog catalog = getCatalog(requestParams);
         return catalog.listDatabases();
     }
 
     @Override
     public boolean checkDataSourceConnectivity(
-        @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
+            @NonNull String pluginName, @NonNull Map<String, String> requestParams) {
         try {
             StarRocksCatalog catalog = getCatalog(requestParams);
             String nodeUrls = requestParams.get(StarRocksOptionRule.NODE_URLS.key());
@@ -85,7 +86,7 @@ public class StarRocksDataSourceChannel implements DataSourceChannel {
             return true;
         } catch (Exception e) {
             throw new DataSourcePluginException(
-                "check StarRocks connectivity failed, " + e.getMessage(), e);
+                    "check StarRocks connectivity failed, " + e.getMessage(), e);
         }
     }
 
@@ -96,7 +97,7 @@ public class StarRocksDataSourceChannel implements DataSourceChannel {
         try {
             String[] hostAndPort = nodeUrl.split(":");
             socket.connect(
-                new InetSocketAddress(hostAndPort[0], Integer.parseInt(hostAndPort[1])), 1000);
+                    new InetSocketAddress(hostAndPort[0], Integer.parseInt(hostAndPort[1])), 1000);
             isConnected = socket.isConnected();
         } catch (IOException e) {
             LOGGER.error("telnet error", e);
@@ -113,24 +114,24 @@ public class StarRocksDataSourceChannel implements DataSourceChannel {
 
     @Override
     public List<TableField> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull String table) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull String table) {
         StarRocksCatalog catalog = getCatalog(requestParams);
         return catalog.getTable(TablePath.of(database, table));
     }
 
     @Override
     public Map<String, List<TableField>> getTableFields(
-        @NonNull String pluginName,
-        @NonNull Map<String, String> requestParams,
-        @NonNull String database,
-        @NonNull List<String> tables) {
+            @NonNull String pluginName,
+            @NonNull Map<String, String> requestParams,
+            @NonNull String database,
+            @NonNull List<String> tables) {
         StarRocksCatalog catalog = getCatalog(requestParams);
         Map<String, List<TableField>> tableFields = new HashMap<>();
         tables.forEach(
-            table -> tableFields.put(table, catalog.getTable(TablePath.of(database, table))));
+                table -> tableFields.put(table, catalog.getTable(TablePath.of(database, table))));
         return tableFields;
     }
 
diff --git a/seatunnel-datasource/seatunnel-datasource-plugins/pom.xml b/seatunnel-datasource/seatunnel-datasource-plugins/pom.xml
index 91cdc740..2336b7b0 100644
--- a/seatunnel-datasource/seatunnel-datasource-plugins/pom.xml
+++ b/seatunnel-datasource/seatunnel-datasource-plugins/pom.xml
@@ -13,7 +13,6 @@
     See the License for the specific language governing permissions and
     limitations under the License.
 -->
-
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
diff --git a/seatunnel-server/pom.xml b/seatunnel-server/pom.xml
index 7bfbf9d6..0a623719 100644
--- a/seatunnel-server/pom.xml
+++ b/seatunnel-server/pom.xml
@@ -13,15 +13,14 @@
     See the License for the specific language governing permissions and
     limitations under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
     <parent>
-        <artifactId>seatunnel-web</artifactId>
         <groupId>org.apache.seatunnel</groupId>
+        <artifactId>seatunnel-web</artifactId>
         <version>${revision}</version>
     </parent>
-    <modelVersion>4.0.0</modelVersion>
 
     <artifactId>seatunnel-server</artifactId>
     <packaging>pom</packaging>
@@ -32,4 +31,4 @@
         <module>seatunnel-scheduler</module>
         <module>seatunnel-server-common</module>
     </modules>
-</project>
\ No newline at end of file
+</project>
diff --git a/seatunnel-server/seatunnel-app/pom.xml b/seatunnel-server/seatunnel-app/pom.xml
index ca908b06..6c5da47b 100644
--- a/seatunnel-server/seatunnel-app/pom.xml
+++ b/seatunnel-server/seatunnel-app/pom.xml
@@ -13,15 +13,14 @@
     See the License for the specific language governing permissions and
     limitations under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
     <parent>
-        <artifactId>seatunnel-server</artifactId>
         <groupId>org.apache.seatunnel</groupId>
+        <artifactId>seatunnel-server</artifactId>
         <version>${revision}</version>
     </parent>
-    <modelVersion>4.0.0</modelVersion>
 
     <artifactId>seatunnel-app</artifactId>
 
@@ -83,8 +82,8 @@
                     <artifactId>spring-boot-starter-tomcat</artifactId>
                 </exclusion>
                 <exclusion>
-                    <artifactId>log4j-to-slf4j</artifactId>
                     <groupId>org.apache.logging.log4j</groupId>
+                    <artifactId>log4j-to-slf4j</artifactId>
                 </exclusion>
             </exclusions>
         </dependency>
@@ -125,16 +124,16 @@
             <version>${springfox-swagger.version}</version>
             <exclusions>
                 <exclusion>
-                    <artifactId>spring-aop</artifactId>
                     <groupId>org.springframework</groupId>
+                    <artifactId>spring-aop</artifactId>
                 </exclusion>
                 <exclusion>
-                    <artifactId>spring-beans</artifactId>
                     <groupId>org.springframework</groupId>
+                    <artifactId>spring-beans</artifactId>
                 </exclusion>
                 <exclusion>
-                    <artifactId>spring-context</artifactId>
                     <groupId>org.springframework</groupId>
+                    <artifactId>spring-context</artifactId>
                 </exclusion>
                 <exclusion>
                     <groupId>com.fasterxml.jackson.core</groupId>
@@ -158,8 +157,8 @@
             <artifactId>hibernate-validator</artifactId>
             <exclusions>
                 <exclusion>
-                    <artifactId>classmate</artifactId>
                     <groupId>com.fasterxml</groupId>
+                    <artifactId>classmate</artifactId>
                 </exclusion>
             </exclusions>
         </dependency>
@@ -188,8 +187,8 @@
             <version>${project.version}</version>
             <exclusions>
                 <exclusion>
-                    <artifactId>slf4j-log4j12</artifactId>
                     <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
                 </exclusion>
             </exclusions>
         </dependency>
@@ -200,8 +199,8 @@
             <version>${project.version}</version>
             <exclusions>
                 <exclusion>
-                    <artifactId>slf4j-log4j12</artifactId>
                     <groupId>org.slf4j</groupId>
+                    <artifactId>slf4j-log4j12</artifactId>
                 </exclusion>
             </exclusions>
         </dependency>
@@ -240,4 +239,4 @@
             <artifactId>cron-utils</artifactId>
         </dependency>
     </dependencies>
-</project>
\ No newline at end of file
+</project>
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/SeatunnelApplication.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/SeatunnelApplication.java
index 4947195f..96716789 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/SeatunnelApplication.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/SeatunnelApplication.java
@@ -25,7 +25,8 @@ import org.springframework.scheduling.annotation.EnableAsync;
 import org.springframework.scheduling.annotation.EnableScheduling;
 import org.springframework.transaction.annotation.EnableTransactionManagement;
 
-@SpringBootApplication(scanBasePackages = {"org.apache.seatunnel.app", "org.apache.seatunnel.scheduler"})
+@SpringBootApplication(
+        scanBasePackages = {"org.apache.seatunnel.app", "org.apache.seatunnel.scheduler"})
 @EnableTransactionManagement
 @EnableConfigurationProperties
 @EnableScheduling
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/adapter/SeatunnelWebAdapter.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/adapter/SeatunnelWebAdapter.java
index 84117d8e..a20f5a8f 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/adapter/SeatunnelWebAdapter.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/adapter/SeatunnelWebAdapter.java
@@ -48,8 +48,7 @@ public class SeatunnelWebAdapter implements WebMvcConfigurer {
         return new AuthenticationInterceptor();
     }
 
-    @Resource
-    private UserIdMethodArgumentResolver currentUserMethodArgumentResolver;
+    @Resource private UserIdMethodArgumentResolver currentUserMethodArgumentResolver;
 
     /**
      * Cookie
@@ -70,10 +69,18 @@ public class SeatunnelWebAdapter implements WebMvcConfigurer {
     @Override
     public void addInterceptors(InterceptorRegistry registry) {
         registry.addInterceptor(authenticationInterceptor())
-            .order(1)
-            .addPathPatterns(LOGIN_INTERCEPTOR_PATH_PATTERN)
-            .excludePathPatterns(LOGIN_PATH_PATTERN, REGISTER_PATH_PATTERN,
-                "/swagger-resources/**", "/webjars/**", "/v2/**", "*.html", "/ui/**", "/error", "/swagger-ui.html**");
+                .order(1)
+                .addPathPatterns(LOGIN_INTERCEPTOR_PATH_PATTERN)
+                .excludePathPatterns(
+                        LOGIN_PATH_PATTERN,
+                        REGISTER_PATH_PATTERN,
+                        "/swagger-resources/**",
+                        "/webjars/**",
+                        "/v2/**",
+                        "*.html",
+                        "/ui/**",
+                        "/error",
+                        "/swagger-ui.html**");
     }
 
     @Override
@@ -84,7 +91,8 @@ public class SeatunnelWebAdapter implements WebMvcConfigurer {
     @Override
     public void addResourceHandlers(ResourceHandlerRegistry registry) {
         registry.addResourceHandler("/static/**").addResourceLocations("classpath:/static/");
-        registry.addResourceHandler("/webjars/**").addResourceLocations("classpath:/META-INF/resources/webjars/");
+        registry.addResourceHandler("/webjars/**")
+                .addResourceLocations("classpath:/META-INF/resources/webjars/");
         registry.addResourceHandler("/ui/**").addResourceLocations("file:ui/");
     }
 
@@ -93,5 +101,4 @@ public class SeatunnelWebAdapter implements WebMvcConfigurer {
         registry.addViewController("/").setViewName("redirect:/ui/");
         registry.addViewController("/ui/").setViewName("forward:/ui/index.html");
     }
-
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/LogoutAspect.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/LogoutAspect.java
index 3d481c2f..48c7b676 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/LogoutAspect.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/LogoutAspect.java
@@ -17,11 +17,8 @@
 
 package org.apache.seatunnel.app.aspect;
 
-import static org.apache.seatunnel.server.common.Constants.USER_ID;
-
 import org.apache.seatunnel.app.dal.dao.IUserDao;
 
-import lombok.extern.slf4j.Slf4j;
 import org.aspectj.lang.JoinPoint;
 import org.aspectj.lang.annotation.Aspect;
 import org.aspectj.lang.annotation.Before;
@@ -31,26 +28,28 @@ import org.springframework.stereotype.Component;
 import org.springframework.web.context.request.RequestContextHolder;
 import org.springframework.web.context.request.ServletRequestAttributes;
 
+import lombok.extern.slf4j.Slf4j;
+
 import javax.annotation.Resource;
 import javax.servlet.http.HttpServletRequest;
 
+import static org.apache.seatunnel.server.common.Constants.USER_ID;
+
 @Slf4j
 @Aspect
 @Component
 @Order(2)
 public class LogoutAspect {
 
-    @Resource
-    private IUserDao userDaoImpl;
+    @Resource private IUserDao userDaoImpl;
 
     @Pointcut("execution(public * org.apache.seatunnel.app.controller.UserController.logout(..))")
-    public void logoutPointCut() {
-
-    }
+    public void logoutPointCut() {}
 
     @Before("logoutPointCut()")
     public void check(JoinPoint pjp) {
-        ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
+        ServletRequestAttributes attributes =
+                (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
         HttpServletRequest request = attributes.getRequest();
         final Integer userId = (Integer) request.getAttribute(USER_ID);
         userDaoImpl.disableToken(userId);
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/UserId.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/UserId.java
index 628e103c..8d812be3 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/UserId.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/aspect/UserId.java
@@ -24,5 +24,4 @@ import java.lang.annotation.Target;
 
 @Target({ElementType.PARAMETER})
 @Retention(RetentionPolicy.RUNTIME)
-public @interface UserId {
-}
+public @interface UserId {}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/RoleTypeEnum.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/RoleTypeEnum.java
index cd2566ab..836bfe7e 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/RoleTypeEnum.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/RoleTypeEnum.java
@@ -23,7 +23,7 @@ public enum RoleTypeEnum {
     ;
 
     private final int code;
-    private final String  description;
+    private final String description;
 
     RoleTypeEnum(int code, String description) {
         this.code = code;
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptParamStatusEnum.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptParamStatusEnum.java
index 56195dd8..22f286be 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptParamStatusEnum.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptParamStatusEnum.java
@@ -23,7 +23,7 @@ public enum ScriptParamStatusEnum {
     ;
 
     private final int code;
-    private final String  description;
+    private final String description;
 
     ScriptParamStatusEnum(int code, String description) {
         this.code = code;
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptStatusEnum.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptStatusEnum.java
index 0a4144e0..154fd2a5 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptStatusEnum.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptStatusEnum.java
@@ -24,7 +24,7 @@ public enum ScriptStatusEnum {
     ;
 
     private final int code;
-    private final String  description;
+    private final String description;
 
     ScriptStatusEnum(int code, String description) {
         this.code = code;
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptTypeEnum.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptTypeEnum.java
index 89d627c6..353f2b2c 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptTypeEnum.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/ScriptTypeEnum.java
@@ -23,7 +23,7 @@ public enum ScriptTypeEnum {
     ;
 
     private final int code;
-    private final String  description;
+    private final String description;
 
     ScriptTypeEnum(int code, String description) {
         this.code = code;
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/SeaTunnelConnectorI18n.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/SeaTunnelConnectorI18n.java
index 96e4780b..aa3144be 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/SeaTunnelConnectorI18n.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/SeaTunnelConnectorI18n.java
@@ -30,13 +30,18 @@ public class SeaTunnelConnectorI18n {
 
     static {
         try {
-            CONNECTOR_I18N_CONFIG_EN = ConfigFactory.parseString(IOUtils.toString(SeaTunnelConnectorI18n.class.getResourceAsStream("/i18n_en.config"), StandardCharsets.UTF_8));
+            CONNECTOR_I18N_CONFIG_EN =
+                    ConfigFactory.parseString(
+                            IOUtils.toString(
+                                    SeaTunnelConnectorI18n.class.getResourceAsStream(
+                                            "/i18n_en.config"),
+                                    StandardCharsets.UTF_8));
             CONNECTOR_I18N_CONFIG_ZH =
-                ConfigFactory.parseString(
-                    IOUtils.toString(
-                        SeaTunnelConnectorI18n.class.getResourceAsStream(
-                            "/i18n_zh.config"),
-                        StandardCharsets.UTF_8));
+                    ConfigFactory.parseString(
+                            IOUtils.toString(
+                                    SeaTunnelConnectorI18n.class.getResourceAsStream(
+                                            "/i18n_zh.config"),
+                                    StandardCharsets.UTF_8));
         } catch (Exception e) {
             throw new RuntimeException(e);
         }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserStatusEnum.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserStatusEnum.java
index c62b7e1c..e8d533d1 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserStatusEnum.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserStatusEnum.java
@@ -22,7 +22,7 @@ public enum UserStatusEnum {
     DISABLE(1, "disable"),
     ;
     private final int code;
-    private final String  description;
+    private final String description;
 
     UserStatusEnum(int code, String description) {
         this.code = code;
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserTypeEnum.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserTypeEnum.java
index b583e6f4..6b8f3dfb 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserTypeEnum.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/common/UserTypeEnum.java
@@ -23,7 +23,7 @@ public enum UserTypeEnum {
     ;
 
     private final int code;
-    private final String  description;
+    private final String description;
 
     UserTypeEnum(int code, String description) {
         this.code = code;
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/Swagger2.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/Swagger2.java
index 3eb93a85..8e6df854 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/Swagger2.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/config/Swagger2.java
@@ -19,6 +19,7 @@ package org.apache.seatunnel.app.config;
 
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
+
 import springfox.documentation.builders.ApiInfoBuilder;
 import springfox.documentation.builders.PathSelectors;
 import springfox.documentation.builders.RequestHandlerSelectors;
@@ -29,7 +30,7 @@ import springfox.documentation.swagger2.annotations.EnableSwagger2;
 
 @Configuration
 @EnableSwagger2
-public class Swagger2{
+public class Swagger2 {
     @Bean
     public Docket createRestApi() {
 
@@ -49,5 +50,4 @@ public class Swagger2{
                 .termsOfServiceUrl("https://seatunnel.apache.org/")
                 .build();
     }
-
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/AuthController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/AuthController.java
index 39ab914c..091190b0 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/AuthController.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/AuthController.java
@@ -20,14 +20,15 @@ package org.apache.seatunnel.app.controller;
 import org.apache.seatunnel.app.common.Result;
 import org.apache.seatunnel.app.service.IRoleService;
 
-import io.swagger.annotations.ApiImplicitParam;
-import io.swagger.annotations.ApiImplicitParams;
-import io.swagger.annotations.ApiOperation;
 import org.springframework.web.bind.annotation.GetMapping;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestParam;
 import org.springframework.web.bind.annotation.RestController;
 
+import io.swagger.annotations.ApiImplicitParam;
+import io.swagger.annotations.ApiImplicitParams;
+import io.swagger.annotations.ApiOperation;
+
 import javax.annotation.Resource;
 import javax.validation.constraints.NotNull;
 
@@ -35,8 +36,7 @@ import javax.validation.constraints.NotNull;
 @RestController
 public class AuthController {
 
-    @Resource
-    private IRoleService roleServiceImpl;
+    @Resource private IRoleService roleServiceImpl;
 
     @GetMapping("/userRole")
     @ApiOperation(value = "check relation between user and role", httpMethod = "GET")
@@ -44,7 +44,9 @@ public class AuthController {
         @ApiImplicitParam(name = "username", value = "user name", dataType = "String"),
         @ApiImplicitParam(name = "roleName", value = "role name", dataType = "String"),
     })
-    public Result<Boolean> userRole(@RequestParam("username") @NotNull String username, @RequestParam("roleName") @NotNull String roleName) {
+    public Result<Boolean> userRole(
+            @RequestParam("username") @NotNull String username,
+            @RequestParam("roleName") @NotNull String roleName) {
         final boolean b = roleServiceImpl.checkUserRole(username, roleName);
         return Result.success(b);
     }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/ScriptController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/ScriptController.java
index e1885e29..bf22b5a5 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/ScriptController.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/ScriptController.java
@@ -31,8 +31,6 @@ import org.apache.seatunnel.app.domain.response.script.ScriptParamRes;
 import org.apache.seatunnel.app.domain.response.script.ScriptSimpleInfoRes;
 import org.apache.seatunnel.app.service.IScriptService;
 
-import io.swagger.annotations.ApiOperation;
-import io.swagger.annotations.ApiParam;
 import org.springframework.web.bind.annotation.DeleteMapping;
 import org.springframework.web.bind.annotation.GetMapping;
 import org.springframework.web.bind.annotation.PatchMapping;
@@ -43,6 +41,9 @@ import org.springframework.web.bind.annotation.RequestBody;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestParam;
 import org.springframework.web.bind.annotation.RestController;
+
+import io.swagger.annotations.ApiOperation;
+import io.swagger.annotations.ApiParam;
 import springfox.documentation.annotations.ApiIgnore;
 
 import javax.annotation.Resource;
@@ -53,22 +54,24 @@ import java.util.List;
 @RequestMapping("/seatunnel/api/v1/script")
 @RestController
 public class ScriptController {
-    @Resource
-    private IScriptService iScriptService;
+    @Resource private IScriptService iScriptService;
 
     @PostMapping
     @ApiOperation(value = "add an script with content", httpMethod = "POST")
-    public Result<CreateScriptRes> createScript(@RequestBody @NotNull CreateScriptReq createScriptReq,
-                                                  @ApiIgnore @UserId Integer operatorId) {
+    public Result<CreateScriptRes> createScript(
+            @RequestBody @NotNull CreateScriptReq createScriptReq,
+            @ApiIgnore @UserId Integer operatorId) {
         createScriptReq.setCreatorId(operatorId);
         return Result.success(iScriptService.createScript(createScriptReq));
     }
 
     @PutMapping("/{scriptId}/content")
     @ApiOperation(value = "update script", httpMethod = "PUT")
-    public Result<Void> updateScriptContent(@ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") Integer scriptId,
-                                            @RequestBody @NotNull UpdateScriptContentReq req,
-                                            @ApiIgnore @UserId Integer operatorId) {
+    public Result<Void> updateScriptContent(
+            @ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId")
+                    Integer scriptId,
+            @RequestBody @NotNull UpdateScriptContentReq req,
+            @ApiIgnore @UserId Integer operatorId) {
         req.setScriptId(scriptId);
         req.setMenderId(operatorId);
 
@@ -78,17 +81,20 @@ public class ScriptController {
 
     @DeleteMapping("/{scriptId}")
     @ApiOperation(value = "delete script", httpMethod = "DELETE")
-    public Result<Void> delete(@ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") Integer scriptId) {
+    public Result<Void> delete(
+            @ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId")
+                    Integer scriptId) {
         iScriptService.delete(scriptId);
         return Result.success();
     }
 
     @GetMapping
     @ApiOperation(value = "script list", httpMethod = "GET")
-    public Result<PageInfo<ScriptSimpleInfoRes>> list(@ApiParam(value = "script name") @RequestParam(required = false) String name,
-                                                      @ApiParam(value = "script status") @RequestParam(required = false) Byte status,
-                                                      @ApiParam(value = "page num", required = true) @RequestParam Integer pageNo,
-                                                      @ApiParam(value = "page size", required = true) @RequestParam Integer pageSize) {
+    public Result<PageInfo<ScriptSimpleInfoRes>> list(
+            @ApiParam(value = "script name") @RequestParam(required = false) String name,
+            @ApiParam(value = "script status") @RequestParam(required = false) Byte status,
+            @ApiParam(value = "page num", required = true) @RequestParam Integer pageNo,
+            @ApiParam(value = "page size", required = true) @RequestParam Integer pageSize) {
 
         final ScriptListReq req = new ScriptListReq();
         req.setName(name);
@@ -100,19 +106,25 @@ public class ScriptController {
 
     @GetMapping("/{scriptId}/content")
     @ApiOperation(value = "fetch script content", httpMethod = "GET")
-    public Result<String> fetchScriptContent(@ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") Integer scriptId) {
+    public Result<String> fetchScriptContent(
+            @ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId")
+                    Integer scriptId) {
         return Result.success(iScriptService.fetchScriptContent(scriptId));
     }
 
     @GetMapping("/{scriptId}")
-    public Result<ScriptFullInfoRes> detail(@ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") Integer scriptId) {
+    public Result<ScriptFullInfoRes> detail(
+            @ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId")
+                    Integer scriptId) {
         return Result.success(iScriptService.detail(scriptId));
     }
 
     @PutMapping("/{scriptId}/param")
     @ApiOperation(value = "update script param", httpMethod = "PUT")
-    public Result<Void> updateScriptParam(@ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") Integer scriptId,
-                                          @RequestBody @NotNull UpdateScriptParamReq updateScriptParamReq) {
+    public Result<Void> updateScriptParam(
+            @ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId")
+                    Integer scriptId,
+            @RequestBody @NotNull UpdateScriptParamReq updateScriptParamReq) {
         updateScriptParamReq.setScriptId(scriptId);
         iScriptService.updateScriptParam(updateScriptParamReq);
         return Result.success();
@@ -120,14 +132,18 @@ public class ScriptController {
 
     @GetMapping("/{scriptId}/param")
     @ApiOperation(value = "fetch script param", httpMethod = "GET")
-    public Result<List<ScriptParamRes>> fetchScriptParam(@ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") Integer scriptId) {
+    public Result<List<ScriptParamRes>> fetchScriptParam(
+            @ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId")
+                    Integer scriptId) {
         return Result.success(iScriptService.fetchScriptParam(scriptId));
     }
 
     @PatchMapping("/{scriptId}/publish")
     @ApiOperation(value = "publish script", httpMethod = "PATCH")
-    public Result<Void> publish(@ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId") Integer scriptId,
-                                @ApiIgnore @UserId Integer operatorId) {
+    public Result<Void> publish(
+            @ApiParam(value = "script id", required = true) @PathVariable(value = "scriptId")
+                    Integer scriptId,
+            @ApiIgnore @UserId Integer operatorId) {
 
         final PublishScriptReq req = new PublishScriptReq();
         req.setScriptId(scriptId);
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TaskController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TaskController.java
index b66de54d..7fc63526 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TaskController.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/TaskController.java
@@ -29,8 +29,6 @@ import org.apache.seatunnel.app.domain.response.task.InstanceSimpleInfoRes;
 import org.apache.seatunnel.app.domain.response.task.JobSimpleInfoRes;
 import org.apache.seatunnel.app.service.ITaskService;
 
-import io.swagger.annotations.ApiOperation;
-import io.swagger.annotations.ApiParam;
 import org.springframework.web.bind.annotation.GetMapping;
 import org.springframework.web.bind.annotation.PatchMapping;
 import org.springframework.web.bind.annotation.PathVariable;
@@ -39,6 +37,9 @@ import org.springframework.web.bind.annotation.RequestBody;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestParam;
 import org.springframework.web.bind.annotation.RestController;
+
+import io.swagger.annotations.ApiOperation;
+import io.swagger.annotations.ApiParam;
 import springfox.documentation.annotations.ApiIgnore;
 
 import javax.annotation.Resource;
@@ -48,13 +49,13 @@ import javax.validation.constraints.NotNull;
 @RestController
 public class TaskController {
 
-    @Resource
-    private ITaskService iTaskService;
+    @Resource private ITaskService iTaskService;
 
     @PatchMapping("/{jobId}/recycle")
     @ApiOperation(value = "recycle job", httpMethod = "PATCH")
-    Result<Void> recycle(@ApiParam(value = "job id", required = true) @PathVariable(value = "jobId") Long jobId,
-                         @ApiIgnore @UserId Integer operatorId) {
+    Result<Void> recycle(
+            @ApiParam(value = "job id", required = true) @PathVariable(value = "jobId") Long jobId,
+            @ApiIgnore @UserId Integer operatorId) {
         final RecycleScriptReq req = new RecycleScriptReq();
         req.setJobId(jobId);
         req.setOperatorId(operatorId);
@@ -65,9 +66,10 @@ public class TaskController {
 
     @GetMapping("/job")
     @ApiOperation(value = "list job", httpMethod = "GET")
-    Result<PageInfo<JobSimpleInfoRes>> listJob(@ApiParam(value = "job name") @RequestParam(required = false) String name,
-                                               @ApiParam(value = "page num", required = true) @RequestParam Integer pageNo,
-                                               @ApiParam(value = "page size", required = true) @RequestParam Integer pageSize) {
+    Result<PageInfo<JobSimpleInfoRes>> listJob(
+            @ApiParam(value = "job name") @RequestParam(required = false) String name,
+            @ApiParam(value = "page num", required = true) @RequestParam Integer pageNo,
+            @ApiParam(value = "page size", required = true) @RequestParam Integer pageSize) {
         final JobListReq req = new JobListReq();
         req.setName(name);
         req.setPageNo(pageNo);
@@ -78,9 +80,11 @@ public class TaskController {
 
     @GetMapping("/instance")
     @ApiOperation(value = "list instance", httpMethod = "GET")
-    Result<PageInfo<InstanceSimpleInfoRes>> listInstance(@ApiParam(value = "job name", required = false) @RequestParam(required = false) String name,
-                                                         @ApiParam(value = "page num", required = true) @RequestParam Integer pageNo,
-                                                         @ApiParam(value = "page size", required = true) @RequestParam Integer pageSize) {
+    Result<PageInfo<InstanceSimpleInfoRes>> listInstance(
+            @ApiParam(value = "job name", required = false) @RequestParam(required = false)
+                    String name,
+            @ApiParam(value = "page num", required = true) @RequestParam Integer pageNo,
+            @ApiParam(value = "page size", required = true) @RequestParam Integer pageSize) {
         final InstanceListReq req = new InstanceListReq();
         req.setName(name);
         req.setPageNo(pageNo);
@@ -91,9 +95,11 @@ public class TaskController {
 
     @PostMapping("/{objectId}/execute")
     @ApiOperation(value = "execute script temporary", httpMethod = "POST")
-    Result<InstanceSimpleInfoRes> tmpExecute(@ApiParam(value = "object id", required = true) @PathVariable(value = "objectId") Long objectId,
-                                             @RequestBody @NotNull ExecuteReq req,
-                                             @ApiIgnore @UserId Integer operatorId) {
+    Result<InstanceSimpleInfoRes> tmpExecute(
+            @ApiParam(value = "object id", required = true) @PathVariable(value = "objectId")
+                    Long objectId,
+            @RequestBody @NotNull ExecuteReq req,
+            @ApiIgnore @UserId Integer operatorId) {
         req.setObjectId(objectId);
         req.setOperatorId(operatorId);
 
@@ -102,13 +108,19 @@ public class TaskController {
 
     @GetMapping("/{taskInstanceId}")
     @ApiOperation(value = "query instance log", httpMethod = "GET")
-    Result<InstanceLogRes> queryInstanceLog(@ApiParam(value = "task instance id", required = true) @PathVariable(value = "taskInstanceId") Long taskInstanceId) {
+    Result<InstanceLogRes> queryInstanceLog(
+            @ApiParam(value = "task instance id", required = true)
+                    @PathVariable(value = "taskInstanceId")
+                    Long taskInstanceId) {
         return Result.success(iTaskService.queryInstanceLog(taskInstanceId));
     }
 
     @PatchMapping("/{taskInstanceId}")
     @ApiOperation(value = "kill running instance", httpMethod = "POST")
-    Result<Void> kill(@ApiParam(value = "task instance id", required = true) @PathVariable(value = "taskInstanceId") Long taskInstanceId) {
+    Result<Void> kill(
+            @ApiParam(value = "task instance id", required = true)
+                    @PathVariable(value = "taskInstanceId")
+                    Long taskInstanceId) {
         iTaskService.kill(taskInstanceId);
         return Result.success();
     }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/UserController.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/UserController.java
index 6a818496..106f23ff 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/UserController.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/controller/UserController.java
@@ -27,8 +27,6 @@ import org.apache.seatunnel.app.domain.response.user.AddUserRes;
 import org.apache.seatunnel.app.domain.response.user.UserSimpleInfoRes;
 import org.apache.seatunnel.app.service.IUserService;
 
-import io.swagger.annotations.ApiOperation;
-import io.swagger.annotations.ApiParam;
 import org.springframework.web.bind.annotation.DeleteMapping;
 import org.springframework.web.bind.annotation.GetMapping;
 import org.springframework.web.bind.annotation.PatchMapping;
@@ -40,6 +38,9 @@ import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RequestParam;
 import org.springframework.web.bind.annotation.RestController;
 
+import io.swagger.annotations.ApiOperation;
+import io.swagger.annotations.ApiParam;
+
 import javax.annotation.Resource;
 import javax.validation.constraints.NotNull;
 
@@ -47,8 +48,7 @@ import javax.validation.constraints.NotNull;
 @RestController
 public class UserController {
 
-    @Resource
-    private IUserService iUserService;
+    @Resource private IUserService iUserService;
 
     @PostMapping
     @ApiOperation(value = "add user", httpMethod = "POST")
@@ -58,8 +58,10 @@ public class UserController {
 
     @PutMapping("/{userId}")
     @ApiOperation(value = "update user", httpMethod = "PUT")
-    public Result<Void> update(@ApiParam(value = "user id", required = true) @PathVariable(value = "userId") Integer userId,
-                               @RequestBody @NotNull UpdateUserReq updateReq) {
+    public Result<Void> update(
+            @ApiParam(value = "user id", required = true) @PathVariable(value = "userId")
+                    Integer userId,
+            @RequestBody @NotNull UpdateUserReq updateReq) {
         updateReq.setUserId(userId);
 
         iUserService.update(updateReq);
@@ -68,16 +70,19 @@ public class UserController {
 
     @DeleteMapping("/{userId}")
     @ApiOperation(value = "delete user", httpMethod = "DELETE")
-    public Result<Void> delete(@ApiParam(value = "user id", required = true) @PathVariable(value = "userId") Integer userId) {
+    public Result<Void> delete(
+            @ApiParam(value = "user id", required = true) @PathVariable(value = "userId")
+                    Integer userId) {
         iUserService.delete(userId);
         return Result.success();
     }
 
     @GetMapping
     @ApiOperation(value = "user list", httpMethod = "GET")
-    public Result<PageInfo<UserSimpleInfoRes>> list(@ApiParam(value = "user name") @RequestParam(required = false) String name,
-                                                    @ApiParam(value = "page num", required = true) @RequestParam Integer pageNo,
-                                                    @ApiParam(value = "page size", required = true) @RequestParam Integer pageSize) {
+    public Result<PageInfo<UserSimpleInfoRes>> list(
+            @ApiParam(value = "user name") @RequestParam(required = false) String name,
+            @ApiParam(value = "page num", required = true) @RequestParam Integer pageNo,
+            @ApiParam(value = "page size", required = true) @RequestParam Integer pageSize) {
         final UserListReq req = new UserListReq();
         req.setName(name);
         req.setPageNo(pageNo);
@@ -88,14 +93,18 @@ public class UserController {
 
     @PatchMapping("/{userId}/enable")
     @ApiOperation(value = "enable a user", httpMethod = "PATCH")
-    public Result<Void> enable(@ApiParam(value = "user id", required = true) @PathVariable(value = "userId") Integer userId) {
+    public Result<Void> enable(
+            @ApiParam(value = "user id", required = true) @PathVariable(value = "userId")
+                    Integer userId) {
         iUserService.enable(userId);
         return Result.success();
     }
 
     @PutMapping("/{userId}/disable")
     @ApiOperation(value = "disable a user", httpMethod = "PUT")
-    public Result<Void> disable(@ApiParam(value = "user id", required = true) @PathVariable(value = "userId") Integer userId) {
+    public Result<Void> disable(
+            @ApiParam(value = "user id", required = true) @PathVariable(value = "userId")
+                    Integer userId) {
         iUserService.disable(userId);
         return Result.success();
     }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/RoleDaoImpl.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/RoleDaoImpl.java
index ae0b3262..227f8f2b 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/RoleDaoImpl.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/RoleDaoImpl.java
@@ -28,11 +28,10 @@ import javax.annotation.Resource;
 @Repository
 public class RoleDaoImpl implements IRoleDao {
 
-    @Resource
-    private RoleMapper roleMapper;
+    @Resource private RoleMapper roleMapper;
 
     @Override
-    public void add(Role role){
+    public void add(Role role) {
         roleMapper.insert(role);
     }
 
@@ -40,5 +39,4 @@ public class RoleDaoImpl implements IRoleDao {
     public Role getByRoleName(String roleName) {
         return roleMapper.selectByRole(roleName);
     }
-
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/RoleUserRelationDaoImpl.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/RoleUserRelationDaoImpl.java
index 8bc66ddc..bee2cd7b 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/RoleUserRelationDaoImpl.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/RoleUserRelationDaoImpl.java
@@ -28,17 +28,17 @@ import javax.annotation.Resource;
 @Repository
 public class RoleUserRelationDaoImpl implements IRoleUserRelationDao {
 
-    @Resource
-    private RoleUserRelationMapper roleUserRelationMapper;
+    @Resource private RoleUserRelationMapper roleUserRelationMapper;
 
     @Override
-    public void add(RoleUserRelation roleUserRelation){
+    public void add(RoleUserRelation roleUserRelation) {
         roleUserRelationMapper.insert(roleUserRelation);
     }
 
     @Override
     public RoleUserRelation getByUserAndRole(Integer userId, Integer roleId) {
-        final RoleUserRelation roleUserRelation = roleUserRelationMapper.selectByUserIdAndRoleId(userId, roleId);
+        final RoleUserRelation roleUserRelation =
+                roleUserRelationMapper.selectByUserIdAndRoleId(userId, roleId);
         return roleUserRelation;
     }
 
@@ -46,5 +46,4 @@ public class RoleUserRelationDaoImpl implements IRoleUserRelationDao {
     public void deleteByUserId(Integer userId) {
         roleUserRelationMapper.deleteByUserId(userId);
     }
-
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/SchedulerConfigDaoImpl.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/SchedulerConfigDaoImpl.java
index fa78042b..1d6ac1fd 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/SchedulerConfigDaoImpl.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/SchedulerConfigDaoImpl.java
@@ -29,8 +29,7 @@ import java.util.Objects;
 
 @Repository
 public class SchedulerConfigDaoImpl implements ISchedulerConfigDao {
-    @Resource
-    private SchedulerConfigMapper schedulerConfigMapper;
+    @Resource private SchedulerConfigMapper schedulerConfigMapper;
 
     @Override
     public boolean exists(int scriptId) {
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/ScriptDaoImpl.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/ScriptDaoImpl.java
index 7f18048d..9e2ebdc5 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/ScriptDaoImpl.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/ScriptDaoImpl.java
@@ -17,9 +17,6 @@
 
 package org.apache.seatunnel.app.dal.dao.impl;
 
-import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.SCRIPT_ALREADY_EXIST;
-import static com.google.common.base.Preconditions.checkState;
-
 import org.apache.seatunnel.app.common.ScriptStatusEnum;
 import org.apache.seatunnel.app.dal.dao.IScriptDao;
 import org.apache.seatunnel.app.dal.entity.Script;
@@ -37,16 +34,24 @@ import javax.annotation.Resource;
 import java.util.List;
 import java.util.Objects;
 
+import static com.google.common.base.Preconditions.checkState;
+import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.SCRIPT_ALREADY_EXIST;
+
 @Repository
 public class ScriptDaoImpl implements IScriptDao {
 
-    @Resource
-    private ScriptMapper scriptMapper;
+    @Resource private ScriptMapper scriptMapper;
 
     @Override
     public void checkScriptDuplicate(CheckScriptDuplicateDto dto) {
-        final Script script = scriptMapper.selectByNameAndCreatorAndStatusNotEq(dto.getName(), dto.getCreatorId(), (byte) ScriptStatusEnum.DELETED.getCode());
-        checkState(Objects.isNull(script), String.format(SCRIPT_ALREADY_EXIST.getTemplate(), dto.getName()));
+        final Script script =
+                scriptMapper.selectByNameAndCreatorAndStatusNotEq(
+                        dto.getName(),
+                        dto.getCreatorId(),
+                        (byte) ScriptStatusEnum.DELETED.getCode());
+        checkState(
+                Objects.isNull(script),
+                String.format(SCRIPT_ALREADY_EXIST.getTemplate(), dto.getName()));
     }
 
     @Override
@@ -69,7 +74,8 @@ public class ScriptDaoImpl implements IScriptDao {
 
     @Override
     public void updateScriptContent(UpdateScriptContentDto dto) {
-        scriptMapper.updateContentByPrimaryKey(dto.getId(), dto.getContent(), dto.getContentMd5(), dto.getMenderId());
+        scriptMapper.updateContentByPrimaryKey(
+                dto.getId(), dto.getContent(), dto.getContentMd5(), dto.getMenderId());
     }
 
     @Override
@@ -82,7 +88,8 @@ public class ScriptDaoImpl implements IScriptDao {
         final Script script = new Script();
         script.setName(dto.getName());
 
-        final List<Script> scripts = scriptMapper.selectBySelectiveAndPage(script, pageNo * pageSize, pageSize);
+        final List<Script> scripts =
+                scriptMapper.selectBySelectiveAndPage(script, pageNo * pageSize, pageSize);
         int count = scriptMapper.countBySelectiveAndPage(script);
 
         return new PageData<Script>(count, scripts);
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/ScriptJobApplyDaoImpl.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/ScriptJobApplyDaoImpl.java
index 0aab6d50..286ae7f0 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/ScriptJobApplyDaoImpl.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/ScriptJobApplyDaoImpl.java
@@ -33,8 +33,7 @@ import java.util.Objects;
 @Repository
 public class ScriptJobApplyDaoImpl implements IScriptJobApplyDao {
 
-    @Resource
-    private ScriptJobApplyMapper scriptJobApplyMapper;
+    @Resource private ScriptJobApplyMapper scriptJobApplyMapper;
 
     @Override
     public void insertOrUpdate(ScriptJobApplyDto dto) {
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/ScriptParamDaoImpl.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/ScriptParamDaoImpl.java
index fad680cb..0c1f0f6f 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/ScriptParamDaoImpl.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/ScriptParamDaoImpl.java
@@ -23,9 +23,10 @@ import org.apache.seatunnel.app.dal.entity.ScriptParam;
 import org.apache.seatunnel.app.dal.mapper.ScriptParamMapper;
 import org.apache.seatunnel.app.domain.dto.script.UpdateScriptParamDto;
 
-import com.google.common.collect.Lists;
 import org.springframework.stereotype.Repository;
 
+import com.google.common.collect.Lists;
+
 import javax.annotation.Resource;
 
 import java.util.List;
@@ -33,8 +34,7 @@ import java.util.Map;
 
 @Repository
 public class ScriptParamDaoImpl implements IScriptParamDao {
-    @Resource
-    private ScriptParamMapper scriptParamMapper;
+    @Resource private ScriptParamMapper scriptParamMapper;
 
     @Override
     public List<ScriptParam> getParamsByScriptId(int id) {
@@ -50,14 +50,15 @@ public class ScriptParamDaoImpl implements IScriptParamDao {
     public void batchInsert(UpdateScriptParamDto dto) {
         final Map<String, String> keyAndValue = dto.getParams();
         final List<ScriptParam> scriptParams = Lists.newArrayListWithCapacity(keyAndValue.size());
-        keyAndValue.forEach((k, v) -> {
-            final ScriptParam scriptParam = new ScriptParam();
-            scriptParam.setStatus((byte) ScriptParamStatusEnum.NORMAL.getCode());
-            scriptParam.setKey(k);
-            scriptParam.setValue(v);
-            scriptParam.setScriptId(dto.getScriptId());
-            scriptParams.add(scriptParam);
-        });
+        keyAndValue.forEach(
+                (k, v) -> {
+                    final ScriptParam scriptParam = new ScriptParam();
+                    scriptParam.setStatus((byte) ScriptParamStatusEnum.NORMAL.getCode());
+                    scriptParam.setKey(k);
+                    scriptParam.setValue(v);
+                    scriptParam.setScriptId(dto.getScriptId());
+                    scriptParams.add(scriptParam);
+                });
 
         scriptParamMapper.batchInsert(scriptParams);
     }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/UserDaoImpl.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/UserDaoImpl.java
index a94ee5ea..c2831aa8 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/UserDaoImpl.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/dao/impl/UserDaoImpl.java
@@ -17,10 +17,6 @@
 
 package org.apache.seatunnel.app.dal.dao.impl;
 
-import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.NO_SUCH_USER;
-import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.USER_ALREADY_EXISTS;
-import static com.google.common.base.Preconditions.checkState;
-
 import org.apache.seatunnel.app.common.UserStatusEnum;
 import org.apache.seatunnel.app.common.UserTokenStatusEnum;
 import org.apache.seatunnel.app.dal.dao.IUserDao;
@@ -40,12 +36,14 @@ import javax.annotation.Resource;
 import java.util.List;
 import java.util.Objects;
 
+import static com.google.common.base.Preconditions.checkState;
+import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.NO_SUCH_USER;
+import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.USER_ALREADY_EXISTS;
+
 @Repository
 public class UserDaoImpl implements IUserDao {
-    @Resource
-    private UserMapper userMapper;
-    @Resource
-    private UserLoginLogMapper userLoginLogMapper;
+    @Resource private UserMapper userMapper;
+    @Resource private UserLoginLogMapper userLoginLogMapper;
 
     @Override
     public int add(UpdateUserDto dto) {
@@ -62,7 +60,8 @@ public class UserDaoImpl implements IUserDao {
     @Override
     public void checkUserExists(String username) {
         User user = userMapper.selectByName(username);
-        checkState(Objects.isNull(user), String.format(USER_ALREADY_EXISTS.getTemplate(), username));
+        checkState(
+                Objects.isNull(user), String.format(USER_ALREADY_EXISTS.getTemplate(), username));
     }
 
     @Override
@@ -99,7 +98,8 @@ public class UserDaoImpl implements IUserDao {
         user.setUsername(dto.getName());
 
         int count = userMapper.countBySelective(user);
-        final List<User> userList = userMapper.selectBySelectiveAndPage(user, pageNo * pageSize, pageSize);
+        final List<User> userList =
+                userMapper.selectBySelectiveAndPage(user, pageNo * pageSize, pageSize);
         return new PageData<User>(count, userList);
     }
 
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/RoleUserRelationMapper.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/RoleUserRelationMapper.java
index 5ba02dc6..2d56aeaf 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/RoleUserRelationMapper.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/RoleUserRelationMapper.java
@@ -25,8 +25,8 @@ public interface RoleUserRelationMapper {
 
     int insert(RoleUserRelation record);
 
-    RoleUserRelation selectByUserIdAndRoleId(@Param("userId")Integer userId, @Param("roleId")Integer roleId);
-
-    void deleteByUserId(@Param("userId")Integer userId);
+    RoleUserRelation selectByUserIdAndRoleId(
+            @Param("userId") Integer userId, @Param("roleId") Integer roleId);
 
+    void deleteByUserId(@Param("userId") Integer userId);
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/ScriptMapper.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/ScriptMapper.java
index 0732ad0d..253a7745 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/ScriptMapper.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/ScriptMapper.java
@@ -28,13 +28,21 @@ public interface ScriptMapper {
 
     void insert(Script script);
 
-    void updateContentByPrimaryKey(@Param("id") int id, @Param("content") String content, @Param("contentMd5") String contentMd5, @Param("menderId") int menderId);
+    void updateContentByPrimaryKey(
+            @Param("id") int id,
+            @Param("content") String content,
+            @Param("contentMd5") String contentMd5,
+            @Param("menderId") int menderId);
 
     void updateStatus(@Param("id") int id, @Param("code") byte code);
 
-    List<Script> selectBySelectiveAndPage(@Param("script") Script script, @Param("start") int start, @Param("offset") int offset);
+    List<Script> selectBySelectiveAndPage(
+            @Param("script") Script script, @Param("start") int start, @Param("offset") int offset);
 
-    Script selectByNameAndCreatorAndStatusNotEq(@Param("name") String name, @Param("creatorId") int creatorId, @Param("status") byte status);
+    Script selectByNameAndCreatorAndStatusNotEq(
+            @Param("name") String name,
+            @Param("creatorId") int creatorId,
+            @Param("status") byte status);
 
     int countBySelectiveAndPage(@Param("script") Script script);
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/UserLoginLogMapper.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/UserLoginLogMapper.java
index 44801169..b5850ab0 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/UserLoginLogMapper.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/UserLoginLogMapper.java
@@ -28,7 +28,3 @@ public interface UserLoginLogMapper {
 
     UserLoginLog checkLastTokenEnable(@Param("userId") Integer userId);
 }
-
-
-
-
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/UserMapper.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/UserMapper.java
index 3bd74f22..e270ade0 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/UserMapper.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/dal/mapper/UserMapper.java
@@ -32,7 +32,8 @@ public interface UserMapper {
 
     void deleteByPrimaryKey(@Param("id") int id);
 
-    List<User> selectBySelectiveAndPage(@Param("user") User user, @Param("start") int start, @Param("offset") int offset);
+    List<User> selectBySelectiveAndPage(
+            @Param("user") User user, @Param("start") int start, @Param("offset") int offset);
 
     void updateStatus(@Param("id") int id, @Param("status") byte status);
 
@@ -40,5 +41,6 @@ public interface UserMapper {
 
     int countBySelective(@Param("user") User user);
 
-    User selectByNameAndPasswd(@Param("username") String username, @Param("password") String password);
+    User selectByNameAndPasswd(
+            @Param("username") String username, @Param("password") String password);
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/BasePageReq.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/BasePageReq.java
index a04f8af2..bbc4c1e4 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/BasePageReq.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/BasePageReq.java
@@ -24,6 +24,7 @@ import lombok.Data;
 public class BasePageReq {
     @ApiModelProperty(value = "page number", required = true, dataType = "Integer")
     private Integer pageNo;
+
     @ApiModelProperty(value = "page size", required = true, dataType = "Integer")
     private Integer pageSize;
 
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/script/CreateScriptReq.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/script/CreateScriptReq.java
index a5511321..d5e68d30 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/script/CreateScriptReq.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/script/CreateScriptReq.java
@@ -28,12 +28,17 @@ import javax.validation.constraints.NotNull;
 public class CreateScriptReq {
     @ApiModelProperty(value = "script name", required = true, dataType = "String")
     private String name;
+
     @ApiModelProperty(value = "script type", required = true, dataType = "Byte")
-    @NotNull
-    private Byte type;
-    @ApiModelProperty(value = "script creator id", required = true, dataType = "Integer", hidden = true)
-    @NotNull
-    private Integer creatorId;
+    @NotNull private Byte type;
+
+    @ApiModelProperty(
+            value = "script creator id",
+            required = true,
+            dataType = "Integer",
+            hidden = true)
+    @NotNull private Integer creatorId;
+
     @ApiModelProperty(value = "script content", required = true, dataType = "String")
     private String content;
 }
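
These request-DTO hunks mainly show the formatter's convention of keeping a single short annotation such as @NotNull on the same line as the field, with a blank line between annotated fields. A minimal sketch of a DTO in the same style (illustrative only; the class and field names are assumptions, not code from this commit):

    import io.swagger.annotations.ApiModel;
    import io.swagger.annotations.ApiModelProperty;

    import lombok.Data;

    import javax.validation.constraints.NotNull;

    @Data
    @ApiModel(value = "exampleReq", description = "illustrative request body")
    public class ExampleReq {
        @ApiModelProperty(value = "example name", required = true, dataType = "String")
        private String name;

        // Short annotations stay inline with the field after formatting.
        @ApiModelProperty(value = "example type", required = true, dataType = "Byte")
        @NotNull private Byte type;
    }
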
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/script/PublishScriptReq.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/script/PublishScriptReq.java
index 1b1352f4..15d4e8c8 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/script/PublishScriptReq.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/script/PublishScriptReq.java
@@ -27,9 +27,8 @@ import javax.validation.constraints.NotNull;
 @ApiModel(value = "publishScriptReq", description = "publish script")
 public class PublishScriptReq {
     @ApiModelProperty(value = "script id", required = true, dataType = "Integer")
-    @NotNull
-    private int scriptId;
+    @NotNull private int scriptId;
+
     @ApiModelProperty(value = "operator id", required = true, dataType = "Integer")
-    @NotNull
-    private int operatorId;
+    @NotNull private int operatorId;
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/script/UpdateScriptContentReq.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/script/UpdateScriptContentReq.java
index 5a70d69f..b6291334 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/script/UpdateScriptContentReq.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/script/UpdateScriptContentReq.java
@@ -29,10 +29,15 @@ import javax.validation.constraints.NotNull;
 public class UpdateScriptContentReq {
     @ApiModelProperty(value = "script id", required = true, dataType = "Integer", hidden = true)
     private Integer scriptId;
+
     @ApiModelProperty(value = "script id", required = true, dataType = "Integer")
     @NotBlank
     private String content;
-    @ApiModelProperty(value = "script mender id", required = true, dataType = "Integer", hidden = true)
-    @NotNull
-    private Integer menderId;
+
+    @ApiModelProperty(
+            value = "script mender id",
+            required = true,
+            dataType = "Integer",
+            hidden = true)
+    @NotNull private Integer menderId;
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/script/UpdateScriptParamReq.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/script/UpdateScriptParamReq.java
index df105703..05abde8c 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/script/UpdateScriptParamReq.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/script/UpdateScriptParamReq.java
@@ -30,6 +30,7 @@ import java.util.Map;
 public class UpdateScriptParamReq {
     @ApiModelProperty(value = "script id", required = true, dataType = "Integer", hidden = true)
     private Integer scriptId;
+
     @ApiModelProperty(value = "script params", required = true, dataType = "Map")
     @NotEmpty
     private Map<String, String> params;
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/task/ExecuteReq.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/task/ExecuteReq.java
index 4f77ca79..65bbb972 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/task/ExecuteReq.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/task/ExecuteReq.java
@@ -29,21 +29,37 @@ import java.util.Map;
 public class ExecuteReq {
     @ApiModelProperty(value = "script id", required = true, dataType = "Integer", hidden = true)
     private Long objectId;
+
     @ApiModelProperty(value = "object type", required = true, dataType = "Integer")
     private Integer objectType;
+
     @ApiModelProperty(value = "execute content", required = true, dataType = "String")
     private String content;
+
     @ApiModelProperty(value = "operator id", required = true, dataType = "Integer", hidden = true)
     private Integer operatorId;
+
     @ApiModelProperty(value = "script params", required = true, dataType = "Map")
     @NotEmpty
     private Map<String, Object> params;
-    @ApiModelProperty(value = "execute type", required = true, dataType = "Integer", allowableValues = "0, 1, 2, 3")
+
+    @ApiModelProperty(
+            value = "execute type",
+            required = true,
+            dataType = "Integer",
+            allowableValues = "0, 1, 2, 3")
     private Integer executeType;
+
     @ApiModelProperty(value = "start time", required = false, dataType = "Date", hidden = true)
     private Date startTime = new Date();
+
     @ApiModelProperty(value = "end time", required = false, dataType = "Date", hidden = true)
     private Date endTime = new Date();
-    @ApiModelProperty(value = "parallelism number", required = false, dataType = "Integer", hidden = true)
+
+    @ApiModelProperty(
+            value = "parallelism number",
+            required = false,
+            dataType = "Integer",
+            hidden = true)
     private Integer parallelismNum = 1;
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/task/RecycleScriptReq.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/task/RecycleScriptReq.java
index 754949da..24d8ef9d 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/task/RecycleScriptReq.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/task/RecycleScriptReq.java
@@ -27,9 +27,8 @@ import javax.validation.constraints.NotNull;
 @ApiModel(value = "recycleScriptReq", description = "recycle script")
 public class RecycleScriptReq {
     @ApiModelProperty(value = "script id", required = true, dataType = "Integer")
-    @NotNull
-    private long jobId;
+    @NotNull private long jobId;
+
     @ApiModelProperty(value = "operator id", required = true, dataType = "Integer")
-    @NotNull
-    private int operatorId;
+    @NotNull private int operatorId;
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/user/AddUserReq.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/user/AddUserReq.java
index fab6512d..ef1d74ef 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/user/AddUserReq.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/user/AddUserReq.java
@@ -20,5 +20,4 @@ package org.apache.seatunnel.app.domain.request.user;
 import io.swagger.annotations.ApiModel;
 
 @ApiModel(value = "addUserReq", description = "add a new user request")
-public class AddUserReq extends BaseUserReq{
-}
+public class AddUserReq extends BaseUserReq {}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/user/BaseUserReq.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/user/BaseUserReq.java
index 87e44887..2541f756 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/user/BaseUserReq.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/user/BaseUserReq.java
@@ -26,12 +26,13 @@ import javax.validation.constraints.NotNull;
 public class BaseUserReq {
     @ApiModelProperty(value = "user name", required = true, dataType = "String")
     private String username;
+
     @ApiModelProperty(value = "user password", required = true, dataType = "String")
     private String password;
+
     @ApiModelProperty(value = "user status", required = true, dataType = "Integer")
-    @NotNull
-    private Byte status;
+    @NotNull private Byte status;
+
     @ApiModelProperty(value = "user type", required = true, dataType = "Integer")
-    @NotNull
-    private Byte type;
+    @NotNull private Byte type;
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/user/UpdateUserReq.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/user/UpdateUserReq.java
index 29fae94c..69a0f1f1 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/user/UpdateUserReq.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/request/user/UpdateUserReq.java
@@ -25,8 +25,7 @@ import javax.validation.constraints.NotNull;
 
 @ApiModel(value = "updateUserReq", description = "update an exists user request")
 @Data
-public class UpdateUserReq extends BaseUserReq{
+public class UpdateUserReq extends BaseUserReq {
     @ApiModelProperty(value = "user id", required = true, dataType = "Integer", hidden = true)
-    @NotNull
-    private Integer userId;
+    @NotNull private Integer userId;
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/PageInfo.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/PageInfo.java
index fb757c1e..e9c70cfe 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/PageInfo.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/PageInfo.java
@@ -50,7 +50,8 @@ public class PageInfo<T> {
             pageSize = 20;
         }
         if (this.totalCount % this.pageSize == 0) {
-            this.totalPage = this.totalCount / this.pageSize == 0 ? 1 : this.totalCount / this.pageSize;
+            this.totalPage =
+                    this.totalCount / this.pageSize == 0 ? 1 : this.totalCount / this.pageSize;
             return;
         }
         this.totalPage = this.totalCount / this.pageSize + 1;
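
The PageInfo hunk only re-wraps the total-page calculation; the logic is unchanged. Spelled out as a standalone sketch (PageInfo also normalizes pageSize to a default of 20, a guard not shown in this hunk):

    // Mirrors the totalPage computation in PageInfo above (illustrative only).
    static int totalPages(int totalCount, int pageSize) {
        if (totalCount % pageSize == 0) {
            // exactly divisible; an empty result set still reports a single page
            return totalCount / pageSize == 0 ? 1 : totalCount / pageSize;
        }
        return totalCount / pageSize + 1;
    }

For example, totalPages(40, 20) is 2, totalPages(41, 20) is 3, and totalPages(0, 20) is 1.
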
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/script/BaseScriptInfoRes.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/script/BaseScriptInfoRes.java
index d8c5032a..f54208cd 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/script/BaseScriptInfoRes.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/script/BaseScriptInfoRes.java
@@ -26,18 +26,25 @@ import java.util.Date;
 public class BaseScriptInfoRes {
     @ApiModelProperty(value = "script id", dataType = "int")
     private int id;
+
     @ApiModelProperty(value = "script name", dataType = "String")
     private String name;
+
     @ApiModelProperty(value = "script status", dataType = "String")
     private String status;
+
     @ApiModelProperty(value = "script type", dataType = "type")
     private byte type;
+
     @ApiModelProperty(value = "script creator id", required = true, dataType = "Integer")
     private Integer creatorId;
+
     @ApiModelProperty(value = "script mender id", required = true, dataType = "Integer")
     private Integer menderId;
+
     @ApiModelProperty(value = "script create time", dataType = "Date")
     private Date createTime;
+
     @ApiModelProperty(value = "script update time", dataType = "Date")
     private Date updateTime;
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/script/ScriptFullInfoRes.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/script/ScriptFullInfoRes.java
index 5253da02..550ccac7 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/script/ScriptFullInfoRes.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/script/ScriptFullInfoRes.java
@@ -23,7 +23,7 @@ import lombok.Data;
 
 @Data
 @ApiModel(value = "scriptFullInfoRes", description = "script full info")
-public class ScriptFullInfoRes extends BaseScriptInfoRes{
+public class ScriptFullInfoRes extends BaseScriptInfoRes {
     @ApiModelProperty(value = "script content", dataType = "String")
     private String content;
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/script/ScriptParamRes.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/script/ScriptParamRes.java
index ae9e6161..d599d439 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/script/ScriptParamRes.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/script/ScriptParamRes.java
@@ -26,8 +26,10 @@ import lombok.Data;
 public class ScriptParamRes {
     @ApiModelProperty(value = "script param id", dataType = "String")
     private int id;
+
     @ApiModelProperty(value = "script param key", dataType = "String")
     private String key;
+
     @ApiModelProperty(value = "script param value", dataType = "String")
     private String value;
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/script/ScriptSimpleInfoRes.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/script/ScriptSimpleInfoRes.java
index 82de8886..47a27ca2 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/script/ScriptSimpleInfoRes.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/script/ScriptSimpleInfoRes.java
@@ -22,5 +22,4 @@ import lombok.Data;
 
 @ApiModel(value = "scriptSimpleInfoRes", description = "script simple information")
 @Data
-public class ScriptSimpleInfoRes extends BaseScriptInfoRes {
-}
+public class ScriptSimpleInfoRes extends BaseScriptInfoRes {}
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/task/InstanceSimpleInfoRes.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/task/InstanceSimpleInfoRes.java
index 1bf4038a..515d8d65 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/task/InstanceSimpleInfoRes.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/task/InstanceSimpleInfoRes.java
@@ -30,24 +30,34 @@ import java.util.Date;
 public class InstanceSimpleInfoRes {
     @ApiModelProperty(value = "instance id", dataType = "Long")
     private long instanceId;
+
     @ApiModelProperty(value = "job id", dataType = "Long")
     private long jobId;
+
     @ApiModelProperty(value = "instance name", dataType = "String")
     private String instanceName;
+
     @ApiModelProperty(value = "submit time", dataType = "Date")
     private Date submitTime;
+
     @ApiModelProperty(value = "start time", dataType = "Date")
     private Date startTime;
+
     @ApiModelProperty(value = "end time", dataType = "Date")
     private Date endTime;
+
     @ApiModelProperty(value = "next execution time", dataType = "Date")
     private Date nextExecutionTime;
+
     @ApiModelProperty(value = "task status", dataType = "String")
     private String status;
+
     @ApiModelProperty(value = "execution duration", dataType = "String")
     private String executionDuration;
+
     @ApiModelProperty(value = "retry times", dataType = "Long")
     private long retryTimes;
+
     @ApiModelProperty(value = "run frequency", dataType = "String")
     private String runFrequency;
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/task/JobSimpleInfoRes.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/task/JobSimpleInfoRes.java
index 90e8a4f5..22fda68c 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/task/JobSimpleInfoRes.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/task/JobSimpleInfoRes.java
@@ -31,23 +31,32 @@ import java.util.Date;
 public class JobSimpleInfoRes {
     @ApiModelProperty(value = "job id", dataType = "Long")
     private Long jobId;
+
     @ApiModelProperty(value = "script id", dataType = "Long")
     private Integer scriptId;
+
     @ApiModelProperty(value = "job name", dataType = "String")
     @JsonProperty("datapipeName")
     private String jobName;
+
     @ApiModelProperty(value = "job status", dataType = "String")
     private String jobStatus;
+
     @ApiModelProperty(value = "job plan", dataType = "String")
     private String jobPlan;
+
     @ApiModelProperty(value = "job is publish", dataType = "Boolean")
     private Boolean publish;
+
     @ApiModelProperty(value = "job creator", dataType = "String")
     private String creatorName;
+
     @ApiModelProperty(value = "job mender", dataType = "String")
     private String menderName;
+
     @ApiModelProperty(value = "job create time", dataType = "String")
     private Date createTime;
+
     @ApiModelProperty(value = "job update time", dataType = "String")
     private Date updateTime;
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/user/BaseUserInfoRes.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/user/BaseUserInfoRes.java
index f59ddae0..792003e9 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/user/BaseUserInfoRes.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/domain/response/user/BaseUserInfoRes.java
@@ -26,16 +26,22 @@ import java.util.Date;
 public class BaseUserInfoRes {
     @ApiModelProperty(value = "user id", dataType = "int")
     private int id;
+
     @ApiModelProperty(value = "user name", dataType = "String")
     private String name;
+
     @ApiModelProperty(value = "user token", dataType = "String")
     private String token;
+
     @ApiModelProperty(value = "user status", dataType = "type")
     private byte status;
+
     @ApiModelProperty(value = "user type", dataType = "type")
     private byte type;
+
     @ApiModelProperty(value = "user create time", dataType = "Date")
     private Date createTime;
+
     @ApiModelProperty(value = "user update time", dataType = "Date")
     private Date updateTime;
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/interceptor/AuthenticationInterceptor.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/interceptor/AuthenticationInterceptor.java
index 05a54fa3..b76de658 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/interceptor/AuthenticationInterceptor.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/interceptor/AuthenticationInterceptor.java
@@ -17,21 +17,18 @@
 
 package org.apache.seatunnel.app.interceptor;
 
-import static org.apache.seatunnel.server.common.Constants.OPTIONS;
-import static org.apache.seatunnel.server.common.Constants.TOKEN;
-import static org.apache.seatunnel.server.common.Constants.USER_ID;
-import static io.jsonwebtoken.Claims.EXPIRATION;
-
 import org.apache.seatunnel.app.dal.dao.IUserDao;
 import org.apache.seatunnel.app.dal.entity.UserLoginLog;
 import org.apache.seatunnel.app.security.JwtUtils;
 
-import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.StringUtils;
+
 import org.eclipse.jetty.http.HttpStatus;
 import org.springframework.web.servlet.HandlerInterceptor;
 import org.springframework.web.servlet.ModelAndView;
 
+import lombok.extern.slf4j.Slf4j;
+
 import javax.annotation.Resource;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
@@ -39,18 +36,23 @@ import javax.servlet.http.HttpServletResponse;
 import java.util.Map;
 import java.util.Objects;
 
+import static io.jsonwebtoken.Claims.EXPIRATION;
+import static org.apache.seatunnel.server.common.Constants.OPTIONS;
+import static org.apache.seatunnel.server.common.Constants.TOKEN;
+import static org.apache.seatunnel.server.common.Constants.USER_ID;
+
 @Slf4j
 public class AuthenticationInterceptor implements HandlerInterceptor {
 
-    @Resource
-    private IUserDao userDaoImpl;
+    @Resource private IUserDao userDaoImpl;
 
-    @Resource
-    private JwtUtils jwtUtils;
+    @Resource private JwtUtils jwtUtils;
 
     @Override
     @SuppressWarnings("MagicNumber")
-    public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) throws Exception {
+    public boolean preHandle(
+            HttpServletRequest request, HttpServletResponse response, Object handler)
+            throws Exception {
         if (request.getMethod().equals(OPTIONS)) {
             response.setHeader("Access-Control-Allow-Origin", "*");
             response.setHeader("Access-Control-Allow-Headers", "*");
@@ -94,12 +96,19 @@ public class AuthenticationInterceptor implements HandlerInterceptor {
     }
 
     @Override
-    public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, ModelAndView modelAndView) throws Exception {
+    public void postHandle(
+            HttpServletRequest request,
+            HttpServletResponse response,
+            Object handler,
+            ModelAndView modelAndView)
+            throws Exception {
         // do nothing
     }
 
     @Override
-    public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex) throws Exception {
+    public void afterCompletion(
+            HttpServletRequest request, HttpServletResponse response, Object handler, Exception ex)
+            throws Exception {
         // do nothing
     }
 }
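
The interceptor hunks only regroup imports and re-wrap the HandlerInterceptor method signatures. For orientation, a minimal Spring MVC interceptor with the same wrapped preHandle shape (a sketch, not the project's interceptor; the header name and the check are assumptions, and it relies on Spring 5's default methods so only preHandle is overridden):

    import org.springframework.web.servlet.HandlerInterceptor;

    import javax.servlet.http.HttpServletRequest;
    import javax.servlet.http.HttpServletResponse;

    public class ExampleAuthInterceptor implements HandlerInterceptor {

        @Override
        public boolean preHandle(
                HttpServletRequest request, HttpServletResponse response, Object handler)
                throws Exception {
            // Returning false short-circuits the controller. This illustrative check
            // only requires a non-empty "token" header; the real interceptor above
            // validates the JWT and the stored login log.
            String token = request.getHeader("token");
            return token != null && !token.isEmpty();
        }
    }
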
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/resolver/UserIdMethodArgumentResolver.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/resolver/UserIdMethodArgumentResolver.java
index 3ebfca7a..37ad8b08 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/resolver/UserIdMethodArgumentResolver.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/resolver/UserIdMethodArgumentResolver.java
@@ -17,8 +17,6 @@
 
 package org.apache.seatunnel.app.resolver;
 
-import static org.apache.seatunnel.server.common.Constants.USER_ID;
-
 import org.apache.seatunnel.app.aspect.UserId;
 
 import org.springframework.core.MethodParameter;
@@ -29,6 +27,8 @@ import org.springframework.web.context.request.RequestAttributes;
 import org.springframework.web.method.support.HandlerMethodArgumentResolver;
 import org.springframework.web.method.support.ModelAndViewContainer;
 
+import static org.apache.seatunnel.server.common.Constants.USER_ID;
+
 @Component
 public class UserIdMethodArgumentResolver implements HandlerMethodArgumentResolver {
     @Override
@@ -38,8 +38,12 @@ public class UserIdMethodArgumentResolver implements HandlerMethodArgumentResolv
     }
 
     @Override
-    public Object resolveArgument(MethodParameter parameter, ModelAndViewContainer mavContainer, NativeWebRequest webRequest, WebDataBinderFactory binderFactory) throws Exception {
+    public Object resolveArgument(
+            MethodParameter parameter,
+            ModelAndViewContainer mavContainer,
+            NativeWebRequest webRequest,
+            WebDataBinderFactory binderFactory)
+            throws Exception {
         return (Integer) webRequest.getAttribute(USER_ID, RequestAttributes.SCOPE_REQUEST);
     }
-
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/security/JwtUtils.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/security/JwtUtils.java
index 9ddc9b40..7b866c71 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/security/JwtUtils.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/security/JwtUtils.java
@@ -17,15 +17,17 @@
 
 package org.apache.seatunnel.app.security;
 
-import io.jsonwebtoken.Claims;
-import io.jsonwebtoken.Jws;
-import io.jsonwebtoken.Jwts;
-import io.jsonwebtoken.SignatureAlgorithm;
 import org.apache.commons.lang3.time.DateUtils;
+
 import org.springframework.beans.factory.InitializingBean;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.stereotype.Component;
 
+import io.jsonwebtoken.Claims;
+import io.jsonwebtoken.Jws;
+import io.jsonwebtoken.Jwts;
+import io.jsonwebtoken.SignatureAlgorithm;
+
 import java.nio.charset.StandardCharsets;
 import java.util.Date;
 import java.util.Map;
@@ -35,10 +37,13 @@ import java.util.UUID;
 public class JwtUtils implements InitializingBean {
     @Value("${jwt.expireTime}")
     private int expireTime;
+
     @Value("${jwt.secretKey}")
     private String secretKey;
+
     @Value("${jwt.algorithm}")
     private String algorithmString;
+
     private SignatureAlgorithm algorithm = null;
 
     @Override
@@ -60,7 +65,10 @@ public class JwtUtils implements InitializingBean {
     }
 
     public Map<String, Object> parseToken(String token) {
-        final Jws<Claims> claims = Jwts.parser().setSigningKey(secretKey.getBytes(StandardCharsets.UTF_8)).parseClaimsJws(token);
+        final Jws<Claims> claims =
+                Jwts.parser()
+                        .setSigningKey(secretKey.getBytes(StandardCharsets.UTF_8))
+                        .parseClaimsJws(token);
         return claims.getBody();
     }
 }
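
The JwtUtils hunk re-wraps the jjwt parsing chain without changing it. A minimal round trip using the same 0.9.x-style jjwt API the file relies on (a sketch; the secret, claim, and expiry below are placeholders, not the project's configuration):

    import io.jsonwebtoken.Claims;
    import io.jsonwebtoken.Jwts;
    import io.jsonwebtoken.SignatureAlgorithm;

    import java.nio.charset.StandardCharsets;
    import java.util.Date;

    public class JwtSketch {
        public static void main(String[] args) {
            byte[] key = "change-me-please-a-longer-demo-secret".getBytes(StandardCharsets.UTF_8);

            // Issue a token that expires in one hour.
            String token =
                    Jwts.builder()
                            .claim("userId", 42)
                            .setExpiration(new Date(System.currentTimeMillis() + 3_600_000L))
                            .signWith(SignatureAlgorithm.HS256, key)
                            .compact();

            // Parse it back, the same call chain as JwtUtils#parseToken above.
            Claims claims = Jwts.parser().setSigningKey(key).parseClaimsJws(token).getBody();
            System.out.println(claims.get("userId"));
        }
    }
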
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/RoleServiceImpl.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/RoleServiceImpl.java
index e0f31716..8c950f90 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/RoleServiceImpl.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/RoleServiceImpl.java
@@ -17,9 +17,6 @@
 
 package org.apache.seatunnel.app.service.impl;
 
-import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.NO_SUCH_USER;
-import static com.google.common.base.Preconditions.checkState;
-
 import org.apache.seatunnel.app.common.RoleTypeEnum;
 import org.apache.seatunnel.app.dal.dao.IRoleDao;
 import org.apache.seatunnel.app.dal.dao.IRoleUserRelationDao;
@@ -29,44 +26,46 @@ import org.apache.seatunnel.app.dal.entity.RoleUserRelation;
 import org.apache.seatunnel.app.dal.entity.User;
 import org.apache.seatunnel.app.service.IRoleService;
 
-import lombok.extern.slf4j.Slf4j;
 import org.springframework.stereotype.Service;
 
+import lombok.extern.slf4j.Slf4j;
+
 import javax.annotation.Resource;
 
 import java.util.Objects;
 
+import static com.google.common.base.Preconditions.checkState;
+import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.NO_SUCH_USER;
+
 @Service
 @Slf4j
 public class RoleServiceImpl implements IRoleService {
 
-    @Resource
-    private IRoleDao roleDaoImpl;
+    @Resource private IRoleDao roleDaoImpl;
 
-    @Resource
-    private IRoleUserRelationDao roleUserRelationDaoImpl;
+    @Resource private IRoleUserRelationDao roleUserRelationDaoImpl;
 
-    @Resource
-    private IUserDao userDaoImpl;
+    @Resource private IUserDao userDaoImpl;
 
     @Override
-    public boolean addUserToRole(Integer userId, Integer type){
+    public boolean addUserToRole(Integer userId, Integer type) {
 
-        String roleName = type == RoleTypeEnum.ADMIN.getCode() ? RoleTypeEnum.ADMIN.getDescription() : RoleTypeEnum.NORMAL.getDescription();
+        String roleName =
+                type == RoleTypeEnum.ADMIN.getCode()
+                        ? RoleTypeEnum.ADMIN.getDescription()
+                        : RoleTypeEnum.NORMAL.getDescription();
 
         final Role role = roleDaoImpl.getByRoleName(roleName);
 
-        final RoleUserRelation build = RoleUserRelation.builder()
-                .roleId(role.getId())
-                .userId(userId)
-                .build();
+        final RoleUserRelation build =
+                RoleUserRelation.builder().roleId(role.getId()).userId(userId).build();
 
         roleUserRelationDaoImpl.add(build);
         return true;
     }
 
     @Override
-    public boolean checkUserRole(String username, String roleName){
+    public boolean checkUserRole(String username, String roleName) {
 
         final User user = userDaoImpl.getByName(username);
 
@@ -74,15 +73,14 @@ public class RoleServiceImpl implements IRoleService {
 
         final Role role = roleDaoImpl.getByRoleName(roleName);
 
-        final RoleUserRelation byUserAndRole = roleUserRelationDaoImpl.getByUserAndRole(user.getId(), role.getId());
+        final RoleUserRelation byUserAndRole =
+                roleUserRelationDaoImpl.getByUserAndRole(user.getId(), role.getId());
 
         return !Objects.isNull(byUserAndRole);
-
     }
 
     @Override
     public void deleteByUserId(Integer userId) {
         roleUserRelationDaoImpl.deleteByUserId(userId);
     }
-
 }
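
The RoleServiceImpl hunks condense Lombok builder chains: short chains now fit on one line, long ones wrap one call per line. A minimal sketch of that builder pattern (the class below is a stand-in invented for illustration, not the project's RoleUserRelation):

    import lombok.Builder;
    import lombok.Data;

    public class BuilderSketch {
        @Data
        @Builder
        static class RoleUserLink { // stand-in entity, illustration only
            private Integer roleId;
            private Integer userId;
        }

        public static void main(String[] args) {
            // Spotless keeps a chain this short on a single line.
            RoleUserLink link = RoleUserLink.builder().roleId(1).userId(42).build();
            System.out.println(link);
        }
    }
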
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/ScriptServiceImpl.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/ScriptServiceImpl.java
index e0c274ad..2ed88f2a 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/ScriptServiceImpl.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/ScriptServiceImpl.java
@@ -17,9 +17,6 @@
 
 package org.apache.seatunnel.app.service.impl;
 
-import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.NO_SUCH_SCRIPT;
-import static com.google.common.base.Preconditions.checkState;
-
 import org.apache.seatunnel.app.common.ScriptParamStatusEnum;
 import org.apache.seatunnel.app.common.ScriptStatusEnum;
 import org.apache.seatunnel.app.dal.dao.IScriptDao;
@@ -49,11 +46,12 @@ import org.apache.seatunnel.app.utils.Md5Utils;
 import org.apache.seatunnel.scheduler.dolphinscheduler.impl.InstanceServiceImpl;
 import org.apache.seatunnel.server.common.PageData;
 
-import com.google.common.base.Strings;
 import org.springframework.stereotype.Component;
 import org.springframework.transaction.annotation.Transactional;
 import org.springframework.util.CollectionUtils;
 
+import com.google.common.base.Strings;
+
 import javax.annotation.Resource;
 
 import java.util.Collections;
@@ -61,53 +59,57 @@ import java.util.List;
 import java.util.Objects;
 import java.util.stream.Collectors;
 
+import static com.google.common.base.Preconditions.checkState;
+import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.NO_SUCH_SCRIPT;
+
 @Component
 public class ScriptServiceImpl implements IScriptService {
 
-    @Resource
-    private IScriptDao scriptDaoImpl;
+    @Resource private IScriptDao scriptDaoImpl;
 
-    @Resource
-    private IScriptParamDao scriptParamDaoImpl;
+    @Resource private IScriptParamDao scriptParamDaoImpl;
 
-    @Resource
-    private InstanceServiceImpl instanceService;
+    @Resource private InstanceServiceImpl instanceService;
 
-    @Resource
-    private IUserDao userDaoImpl;
+    @Resource private IUserDao userDaoImpl;
 
-    @Resource
-    private ITaskService iTaskService;
+    @Resource private ITaskService iTaskService;
 
     @Override
     public CreateScriptRes createScript(CreateScriptReq createScriptReq) {
         // 1. check script name.
         checkDuplicate(createScriptReq.getName(), createScriptReq.getCreatorId());
         // 2. create  script
-        int scriptId = translate(createScriptReq.getName(), createScriptReq.getCreatorId(), createScriptReq.getCreatorId(), createScriptReq.getType(), createScriptReq.getContent());
+        int scriptId =
+                translate(
+                        createScriptReq.getName(),
+                        createScriptReq.getCreatorId(),
+                        createScriptReq.getCreatorId(),
+                        createScriptReq.getType(),
+                        createScriptReq.getContent());
 
         final CreateScriptRes res = new CreateScriptRes();
         res.setId(scriptId);
         return res;
     }
 
-    private int translate(String name, Integer creatorId, Integer menderId, Byte type, String content) {
-        final CreateScriptDto dto = CreateScriptDto.builder()
-                .name(name)
-                .menderId(creatorId)
-                .creatorId(menderId)
-                .type(type)
-                .status((byte) ScriptStatusEnum.UNPUBLISHED.getCode())
-                .content(content)
-                .build();
+    private int translate(
+            String name, Integer creatorId, Integer menderId, Byte type, String content) {
+        final CreateScriptDto dto =
+                CreateScriptDto.builder()
+                        .name(name)
+                        .menderId(creatorId)
+                        .creatorId(menderId)
+                        .type(type)
+                        .status((byte) ScriptStatusEnum.UNPUBLISHED.getCode())
+                        .content(content)
+                        .build();
         return scriptDaoImpl.createScript(dto);
     }
 
     private void checkDuplicate(String name, Integer creatorId) {
-        final CheckScriptDuplicateDto dto = CheckScriptDuplicateDto.builder()
-                .creatorId(creatorId)
-                .name(name)
-                .build();
+        final CheckScriptDuplicateDto dto =
+                CheckScriptDuplicateDto.builder().creatorId(creatorId).name(name).build();
         scriptDaoImpl.checkScriptDuplicate(dto);
     }
 
@@ -121,21 +123,26 @@ public class ScriptServiceImpl implements IScriptService {
         final boolean needSave = checkIfNeedSave(updateScriptContentReq.getScriptId(), contentMd5);
 
         if (needSave) {
-            final UpdateScriptContentDto dto = UpdateScriptContentDto.builder()
-                    .id(updateScriptContentReq.getScriptId())
-                    .content(content)
-                    .contentMd5(contentMd5)
-                    .menderId(updateScriptContentReq.getMenderId())
-                    .build();
+            final UpdateScriptContentDto dto =
+                    UpdateScriptContentDto.builder()
+                            .id(updateScriptContentReq.getScriptId())
+                            .content(content)
+                            .contentMd5(contentMd5)
+                            .menderId(updateScriptContentReq.getMenderId())
+                            .build();
             scriptDaoImpl.updateScriptContent(dto);
         }
     }
 
     private boolean checkIfNeedSave(int id, String newContentMd5) {
         Script script = scriptDaoImpl.getScript(id);
-        checkState(Objects.nonNull(script) && (int) script.getStatus() != ScriptStatusEnum.DELETED.getCode(), NO_SUCH_SCRIPT.getTemplate());
+        checkState(
+                Objects.nonNull(script)
+                        && (int) script.getStatus() != ScriptStatusEnum.DELETED.getCode(),
+                NO_SUCH_SCRIPT.getTemplate());
 
-        final String oldContentMd5 = Strings.isNullOrEmpty(script.getContentMd5()) ? "" : script.getContentMd5();
+        final String oldContentMd5 =
+                Strings.isNullOrEmpty(script.getContentMd5()) ? "" : script.getContentMd5();
         return !newContentMd5.equals(oldContentMd5);
     }
 
@@ -149,12 +156,12 @@ public class ScriptServiceImpl implements IScriptService {
     @Override
     public PageInfo<ScriptSimpleInfoRes> list(ScriptListReq scriptListReq) {
 
-        final ListScriptsDto dto = ListScriptsDto.builder()
-                .name(scriptListReq.getName())
-                .build();
+        final ListScriptsDto dto = ListScriptsDto.builder().name(scriptListReq.getName()).build();
 
-        PageData<Script> scriptPageData = scriptDaoImpl.list(dto, scriptListReq.getRealPageNo(), scriptListReq.getPageSize());
-        final List<ScriptSimpleInfoRes> data = scriptPageData.getData().stream().map(this::translate).collect(Collectors.toList());
+        PageData<Script> scriptPageData =
+                scriptDaoImpl.list(dto, scriptListReq.getRealPageNo(), scriptListReq.getPageSize());
+        final List<ScriptSimpleInfoRes> data =
+                scriptPageData.getData().stream().map(this::translate).collect(Collectors.toList());
 
         final PageInfo<ScriptSimpleInfoRes> pageInfo = new PageInfo<>();
         pageInfo.setPageNo(scriptListReq.getPageNo());
@@ -185,22 +192,25 @@ public class ScriptServiceImpl implements IScriptService {
     public void updateScriptParam(UpdateScriptParamReq updateScriptParamReq) {
         // 1. delete all old params first.
         // 2. save new params. (check params correctness)
-        scriptParamDaoImpl.updateStatusByScriptId(updateScriptParamReq.getScriptId(), ScriptParamStatusEnum.DELETED.getCode());
+        scriptParamDaoImpl.updateStatusByScriptId(
+                updateScriptParamReq.getScriptId(), ScriptParamStatusEnum.DELETED.getCode());
 
-        UpdateScriptParamDto dto = UpdateScriptParamDto.builder()
-                .scriptId(updateScriptParamReq.getScriptId())
-                .params(updateScriptParamReq.getParams())
-                .build();
+        UpdateScriptParamDto dto =
+                UpdateScriptParamDto.builder()
+                        .scriptId(updateScriptParamReq.getScriptId())
+                        .params(updateScriptParamReq.getParams())
+                        .build();
 
         scriptParamDaoImpl.batchInsert(dto);
     }
 
     @Override
     public void publishScript(PublishScriptReq req) {
-        final PushScriptDto dto = PushScriptDto.builder()
-                .scriptId(req.getScriptId())
-                .userId(req.getOperatorId())
-                .build();
+        final PushScriptDto dto =
+                PushScriptDto.builder()
+                        .scriptId(req.getScriptId())
+                        .userId(req.getOperatorId())
+                        .build();
         iTaskService.pushScriptToScheduler(dto);
     }
 
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/TaskServiceImpl.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/TaskServiceImpl.java
index 04c88f41..6d005982 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/TaskServiceImpl.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/TaskServiceImpl.java
@@ -17,20 +17,6 @@
 
 package org.apache.seatunnel.app.service.impl;
 
-import static org.apache.seatunnel.app.common.ObjectTypeEnum.INSTANCE;
-import static org.apache.seatunnel.app.common.ObjectTypeEnum.JOB;
-import static org.apache.seatunnel.app.common.ObjectTypeEnum.SCRIPT;
-import static org.apache.seatunnel.server.common.Constants.UNDERLINE;
-import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.NO_SUCH_ELEMENT;
-import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.NO_SUCH_JOB;
-import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.NO_SUCH_SCRIPT;
-import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.SCHEDULER_CONFIG_NOT_EXIST;
-import static org.apache.seatunnel.spi.scheduler.constants.SchedulerConstant.NEVER_TRIGGER_EXPRESSION;
-import static org.apache.seatunnel.spi.scheduler.constants.SchedulerConstant.RETRY_INTERVAL_DEFAULT;
-import static org.apache.seatunnel.spi.scheduler.constants.SchedulerConstant.RETRY_TIMES_DEFAULT;
-import static com.cronutils.model.CronType.QUARTZ;
-import static java.util.Objects.requireNonNull;
-
 import org.apache.seatunnel.app.common.ObjectTypeEnum;
 import org.apache.seatunnel.app.common.ScriptStatusEnum;
 import org.apache.seatunnel.app.dal.dao.ISchedulerConfigDao;
@@ -67,14 +53,15 @@ import org.apache.seatunnel.spi.scheduler.dto.JobSimpleInfoDto;
 import org.apache.seatunnel.spi.scheduler.dto.SchedulerConfigDto;
 import org.apache.seatunnel.spi.scheduler.enums.ExecuteTypeEnum;
 
+import org.springframework.stereotype.Component;
+import org.springframework.util.CollectionUtils;
+
 import com.cronutils.model.definition.CronDefinition;
 import com.cronutils.model.definition.CronDefinitionBuilder;
 import com.cronutils.model.time.ExecutionTime;
 import com.cronutils.parser.CronParser;
 import com.google.common.collect.Maps;
 import lombok.extern.slf4j.Slf4j;
-import org.springframework.stereotype.Component;
-import org.springframework.util.CollectionUtils;
 
 import javax.annotation.PostConstruct;
 import javax.annotation.Resource;
@@ -89,34 +76,44 @@ import java.util.concurrent.CompletableFuture;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 
+import static com.cronutils.model.CronType.QUARTZ;
+import static java.util.Objects.requireNonNull;
+import static org.apache.seatunnel.app.common.ObjectTypeEnum.INSTANCE;
+import static org.apache.seatunnel.app.common.ObjectTypeEnum.JOB;
+import static org.apache.seatunnel.app.common.ObjectTypeEnum.SCRIPT;
+import static org.apache.seatunnel.server.common.Constants.UNDERLINE;
+import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.NO_SUCH_ELEMENT;
+import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.NO_SUCH_JOB;
+import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.NO_SUCH_SCRIPT;
+import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.SCHEDULER_CONFIG_NOT_EXIST;
+import static org.apache.seatunnel.spi.scheduler.constants.SchedulerConstant.NEVER_TRIGGER_EXPRESSION;
+import static org.apache.seatunnel.spi.scheduler.constants.SchedulerConstant.RETRY_INTERVAL_DEFAULT;
+import static org.apache.seatunnel.spi.scheduler.constants.SchedulerConstant.RETRY_TIMES_DEFAULT;
+
 @Component
 @Slf4j
 public class TaskServiceImpl implements ITaskService {
 
-    @Resource
-    private IJobService iJobService;
+    @Resource private IJobService iJobService;
 
-    @Resource
-    private IInstanceService iInstanceService;
+    @Resource private IInstanceService iInstanceService;
 
-    @Resource
-    private IScriptDao scriptDaoImpl;
+    @Resource private IScriptDao scriptDaoImpl;
 
-    @Resource
-    private IScriptParamDao scriptParamDaoImpl;
+    @Resource private IScriptParamDao scriptParamDaoImpl;
 
-    @Resource
-    private ISchedulerConfigDao schedulerConfigDaoImpl;
+    @Resource private ISchedulerConfigDao schedulerConfigDaoImpl;
 
-    @Resource
-    private IScriptJobApplyDao scriptJobApplyDaoImpl;
+    @Resource private IScriptJobApplyDao scriptJobApplyDaoImpl;
 
-    private Map<ObjectTypeEnum, Function<ExecuteReq, ExecuteDto>> executeFuncMap = Maps.newHashMapWithExpectedSize(ObjectTypeEnum.values().length);
+    private Map<ObjectTypeEnum, Function<ExecuteReq, ExecuteDto>> executeFuncMap =
+            Maps.newHashMapWithExpectedSize(ObjectTypeEnum.values().length);
 
-    private static final CronDefinition CRON_DEFINITION = CronDefinitionBuilder.instanceDefinitionFor(QUARTZ);
+    private static final CronDefinition CRON_DEFINITION =
+            CronDefinitionBuilder.instanceDefinitionFor(QUARTZ);
 
     @PostConstruct
-    public void initFuncMap(){
+    public void initFuncMap() {
         executeFuncMap.put(SCRIPT, this::getExecuteDtoByScriptId);
         executeFuncMap.put(JOB, this::getExecuteDtoByJobId);
         executeFuncMap.put(INSTANCE, this::getExecuteDtoByInstanceId);
@@ -137,24 +134,26 @@ public class TaskServiceImpl implements ITaskService {
         final List<ScriptParam> scriptParams = scriptParamDaoImpl.getParamsByScriptId(scriptId);
         Map<String, Object> params = getScriptParamMap(scriptParams);
 
-        final SchedulerConfigDto schedulerConfigDto = SchedulerConfigDto.builder()
-                .retryInterval(config.getRetryInterval())
-                .retryTimes(config.getRetryTimes())
-                .startTime(config.getActiveStartTime())
-                .endTime(config.getActiveEndTime())
-                .triggerExpression(config.getTriggerExpression())
-                .build();
-
-        final JobDto jobDto = JobDto.builder()
-                .jobName(script.getName())
-                .jobContent(script.getContent())
-                .params(params)
-                .operatorId(userId)
-                .schedulerConfigDto(schedulerConfigDto)
-                //todo fix to real execute script
-                .executorScript(script.getContent())
-                .jobId(null)
-                .build();
+        final SchedulerConfigDto schedulerConfigDto =
+                SchedulerConfigDto.builder()
+                        .retryInterval(config.getRetryInterval())
+                        .retryTimes(config.getRetryTimes())
+                        .startTime(config.getActiveStartTime())
+                        .endTime(config.getActiveEndTime())
+                        .triggerExpression(config.getTriggerExpression())
+                        .build();
+
+        final JobDto jobDto =
+                JobDto.builder()
+                        .jobName(script.getName())
+                        .jobContent(script.getContent())
+                        .params(params)
+                        .operatorId(userId)
+                        .schedulerConfigDto(schedulerConfigDto)
+                        // todo fix to real execute script
+                        .executorScript(script.getContent())
+                        .jobId(null)
+                        .build();
 
         ScriptJobApply apply = scriptJobApplyDaoImpl.getByScriptId(script.getId());
         if (Objects.nonNull(apply)) {
@@ -175,43 +174,61 @@ public class TaskServiceImpl implements ITaskService {
 
     @Override
     public void recycleScriptFromScheduler(RecycleScriptReq req) {
-        final ScriptJobApply apply = requireNonNull(scriptJobApplyDaoImpl.getByJobId(req.getJobId()), NO_SUCH_JOB::getTemplate);
+        final ScriptJobApply apply =
+                requireNonNull(
+                        scriptJobApplyDaoImpl.getByJobId(req.getJobId()), NO_SUCH_JOB::getTemplate);
 
-        final Script script = requireNonNull(scriptDaoImpl.getScript(apply.getScriptId()), NO_SUCH_JOB::getTemplate);
+        final Script script =
+                requireNonNull(
+                        scriptDaoImpl.getScript(apply.getScriptId()), NO_SUCH_JOB::getTemplate);
 
-        final JobDto jobDto = JobDto.builder()
-                .jobId(apply.getJobId())
-                .jobName(script.getName())
-                .operatorId(req.getOperatorId())
-                .build();
+        final JobDto jobDto =
+                JobDto.builder()
+                        .jobId(apply.getJobId())
+                        .jobName(script.getName())
+                        .operatorId(req.getOperatorId())
+                        .build();
 
         iJobService.offlineJob(jobDto);
 
-        syncScriptJobMapping(script.getId(), req.getOperatorId(), apply.getSchedulerConfigId(), apply.getJobId());
+        syncScriptJobMapping(
+                script.getId(),
+                req.getOperatorId(),
+                apply.getSchedulerConfigId(),
+                apply.getJobId());
     }
 
     @Override
     public PageInfo<JobSimpleInfoRes> listJob(JobListReq req) {
         // Search from scheduler.
-        final JobListDto dto = JobListDto.builder()
-                .name(req.getName())
-                .pageNo(req.getPageNo())
-                .pageSize(req.getPageSize())
-                .build();
+        final JobListDto dto =
+                JobListDto.builder()
+                        .name(req.getName())
+                        .pageNo(req.getPageNo())
+                        .pageSize(req.getPageSize())
+                        .build();
         final PageData<JobSimpleInfoDto> jobPageData = iJobService.list(dto);
-        final List<JobSimpleInfoRes> data = jobPageData.getData().stream().map(this::translate).collect(Collectors.toList());
+        final List<JobSimpleInfoRes> data =
+                jobPageData.getData().stream().map(this::translate).collect(Collectors.toList());
 
         final PageInfo<JobSimpleInfoRes> pageInfo = new PageInfo<>();
 
         if (!CollectionUtils.isEmpty(data)) {
-            final List<JobDefine> jobDefines = scriptJobApplyDaoImpl.selectJobDefineByJobIds(data.stream().map(JobSimpleInfoRes::getJobId).collect(Collectors.toList()));
-            final Map<Long, JobDefine> mapping = jobDefines.stream().collect(Collectors.toMap(JobDefine::getJobId, Function.identity()));
-
-            data.forEach(d -> {
-                final JobDefine jf = mapping.getOrDefault(d.getJobId(), new JobDefine());
-                d.setJobPlan(jf.getTriggerExpression());
-                d.setScriptId(jf.getScriptId());
-            });
+            final List<JobDefine> jobDefines =
+                    scriptJobApplyDaoImpl.selectJobDefineByJobIds(
+                            data.stream()
+                                    .map(JobSimpleInfoRes::getJobId)
+                                    .collect(Collectors.toList()));
+            final Map<Long, JobDefine> mapping =
+                    jobDefines.stream()
+                            .collect(Collectors.toMap(JobDefine::getJobId, Function.identity()));
+
+            data.forEach(
+                    d -> {
+                        final JobDefine jf = mapping.getOrDefault(d.getJobId(), new JobDefine());
+                        d.setJobPlan(jf.getTriggerExpression());
+                        d.setScriptId(jf.getScriptId());
+                    });
 
             pageInfo.setData(data);
             pageInfo.setPageNo(req.getPageNo());
@@ -225,32 +242,46 @@ public class TaskServiceImpl implements ITaskService {
     @Override
     public PageInfo<InstanceSimpleInfoRes> listInstance(InstanceListReq req) {
         // Search from scheduler.
-        final InstanceListDto dto = InstanceListDto.builder()
-                .name(req.getName())
-                .pageNo(req.getPageNo())
-                .pageSize(req.getPageSize())
-                .build();
+        final InstanceListDto dto =
+                InstanceListDto.builder()
+                        .name(req.getName())
+                        .pageNo(req.getPageNo())
+                        .pageSize(req.getPageSize())
+                        .build();
         final PageData<InstanceDto> instancePageData = iInstanceService.list(dto);
-        final List<InstanceSimpleInfoRes> data = instancePageData.getData().stream().map(this::translate).collect(Collectors.toList());
+        final List<InstanceSimpleInfoRes> data =
+                instancePageData.getData().stream()
+                        .map(this::translate)
+                        .collect(Collectors.toList());
 
         if (!CollectionUtils.isEmpty(data)) {
-            final List<JobDefine> jobDefines = scriptJobApplyDaoImpl.selectJobDefineByJobIds(data.stream().map(InstanceSimpleInfoRes::getJobId).collect(Collectors.toList()));
-            final Map<Long, JobDefine> mapping = jobDefines.stream().collect(Collectors.toMap(JobDefine::getJobId, Function.identity()));
-
-            data.forEach(d -> {
-                final JobDefine jobDefine = mapping.get(d.getJobId());
-                CronParser parser = new CronParser(CRON_DEFINITION);
-
-                if (Objects.nonNull(jobDefine)) {
-                    ExecutionTime executionTime = ExecutionTime.forCron(parser.parse(jobDefine.getTriggerExpression()));
-                    Optional<ZonedDateTime> nextExecution = executionTime.nextExecution(ZonedDateTime.now());
-
-                    if (nextExecution.isPresent()) {
-                        final ZonedDateTime next = nextExecution.get();
-                        d.setNextExecutionTime(Date.from(next.toInstant()));
-                    }
-                }
-            });
+            final List<JobDefine> jobDefines =
+                    scriptJobApplyDaoImpl.selectJobDefineByJobIds(
+                            data.stream()
+                                    .map(InstanceSimpleInfoRes::getJobId)
+                                    .collect(Collectors.toList()));
+            final Map<Long, JobDefine> mapping =
+                    jobDefines.stream()
+                            .collect(Collectors.toMap(JobDefine::getJobId, Function.identity()));
+
+            data.forEach(
+                    d -> {
+                        final JobDefine jobDefine = mapping.get(d.getJobId());
+                        CronParser parser = new CronParser(CRON_DEFINITION);
+
+                        if (Objects.nonNull(jobDefine)) {
+                            ExecutionTime executionTime =
+                                    ExecutionTime.forCron(
+                                            parser.parse(jobDefine.getTriggerExpression()));
+                            Optional<ZonedDateTime> nextExecution =
+                                    executionTime.nextExecution(ZonedDateTime.now());
+
+                            if (nextExecution.isPresent()) {
+                                final ZonedDateTime next = nextExecution.get();
+                                d.setNextExecutionTime(Date.from(next.toInstant()));
+                            }
+                        }
+                    });
         }
 
         final PageInfo<InstanceSimpleInfoRes> pageInfo = new PageInfo<>();
@@ -270,7 +301,8 @@ public class TaskServiceImpl implements ITaskService {
         final ObjectTypeEnum parse = ObjectTypeEnum.parse(objectType);
 
         final Function<ExecuteReq, ExecuteDto> executeFunc =
-                Optional.ofNullable(executeFuncMap.get(parse)).orElseThrow(() -> new SeatunnelException(NO_SUCH_ELEMENT));
+                Optional.ofNullable(executeFuncMap.get(parse))
+                        .orElseThrow(() -> new SeatunnelException(NO_SUCH_ELEMENT));
 
         final ExecuteDto dto = executeFunc.apply(req);
 
@@ -280,18 +312,14 @@ public class TaskServiceImpl implements ITaskService {
     private ExecuteDto getExecuteDtoByInstanceId(ExecuteReq req) {
         // objectId of instance is jobId
         return ExecuteDto.builder()
-                .jobDto(JobDto.builder()
-                        .jobId(req.getObjectId())
-                        .build())
+                .jobDto(JobDto.builder().jobId(req.getObjectId()).build())
                 .executeTypeEnum(ExecuteTypeEnum.RERUN)
                 .build();
     }
 
     private ExecuteDto getExecuteDtoByJobId(ExecuteReq req) {
         return ExecuteDto.builder()
-                .jobDto(JobDto.builder()
-                        .jobId(req.getObjectId())
-                        .build())
+                .jobDto(JobDto.builder().jobId(req.getObjectId()).build())
                 .executeTypeEnum(ExecuteTypeEnum.parse(req.getExecuteType()))
                 .build();
     }
@@ -299,30 +327,36 @@ public class TaskServiceImpl implements ITaskService {
     private ExecuteDto getExecuteDtoByScriptId(ExecuteReq req) {
         final Script script = checkAndGetScript(Math.toIntExact(req.getObjectId()));
 
-        final SchedulerConfigDto schedulerConfigDto = SchedulerConfigDto.builder()
-                .retryInterval(RETRY_INTERVAL_DEFAULT)
-                .retryTimes(RETRY_TIMES_DEFAULT)
-                .startTime(new Date())
-                .endTime(new Date())
-                .triggerExpression(NEVER_TRIGGER_EXPRESSION)
-                .build();
-
-        final JobDto jobDto = JobDto.builder()
-                .jobName(script.getName().concat(UNDERLINE).concat(String.valueOf(System.currentTimeMillis())))
-                .jobContent(req.getContent())
-                .params(req.getParams())
-                .operatorId(req.getOperatorId())
-                .schedulerConfigDto(schedulerConfigDto)
-                //todo fix to real execute script
-                .executorScript(script.getContent())
-                .jobId(null)
-                .build();
-
-        final ExecuteDto dto = ExecuteDto.builder()
-                .jobDto(jobDto)
-                .executeTypeEnum(ExecuteTypeEnum.parse(req.getExecuteType()))
-                .complementDataDto(null)
-                .build();
+        final SchedulerConfigDto schedulerConfigDto =
+                SchedulerConfigDto.builder()
+                        .retryInterval(RETRY_INTERVAL_DEFAULT)
+                        .retryTimes(RETRY_TIMES_DEFAULT)
+                        .startTime(new Date())
+                        .endTime(new Date())
+                        .triggerExpression(NEVER_TRIGGER_EXPRESSION)
+                        .build();
+
+        final JobDto jobDto =
+                JobDto.builder()
+                        .jobName(
+                                script.getName()
+                                        .concat(UNDERLINE)
+                                        .concat(String.valueOf(System.currentTimeMillis())))
+                        .jobContent(req.getContent())
+                        .params(req.getParams())
+                        .operatorId(req.getOperatorId())
+                        .schedulerConfigDto(schedulerConfigDto)
+                        // todo fix to real execute script
+                        .executorScript(script.getContent())
+                        .jobId(null)
+                        .build();
+
+        final ExecuteDto dto =
+                ExecuteDto.builder()
+                        .jobDto(jobDto)
+                        .executeTypeEnum(ExecuteTypeEnum.parse(req.getExecuteType()))
+                        .complementDataDto(null)
+                        .build();
         return dto;
     }
 
@@ -331,9 +365,9 @@ public class TaskServiceImpl implements ITaskService {
         final InstanceLogDto dto = iInstanceService.queryInstanceLog(instanceId);
 
         return InstanceLogRes.builder()
-            .instanceId(instanceId)
-            .logContent(dto.getLogContent())
-            .build();
+                .instanceId(instanceId)
+                .logContent(dto.getLogContent())
+                .build();
     }
 
     @Override
@@ -381,28 +415,39 @@ public class TaskServiceImpl implements ITaskService {
     }
 
     private void syncScriptJobMapping(int scriptId, int userId, int schedulerConfigId, long jobId) {
-        CompletableFuture.runAsync(() -> {
-            // store script and job mapping
-            final ScriptJobApplyDto dto = ScriptJobApplyDto.builder()
-                    .scriptId(scriptId)
-                    .schedulerConfigId(schedulerConfigId)
-                    .jobId(jobId)
-                    .userId(userId)
-                    .build();
-            scriptJobApplyDaoImpl.insertOrUpdate(dto);
-        }).whenComplete((_return, e) -> {
-            if (Objects.nonNull(e)) {
-                log.error("Store script and job mapping failed, please maintain this mapping manually. \n" +
-                        "scriptId [{}], schedulerConfigId [{}], jobId [{}], userId [{}]", scriptId, schedulerConfigId, jobId, userId, e);
-            }
-        });
+        CompletableFuture.runAsync(
+                        () -> {
+                            // store script and job mapping
+                            final ScriptJobApplyDto dto =
+                                    ScriptJobApplyDto.builder()
+                                            .scriptId(scriptId)
+                                            .schedulerConfigId(schedulerConfigId)
+                                            .jobId(jobId)
+                                            .userId(userId)
+                                            .build();
+                            scriptJobApplyDaoImpl.insertOrUpdate(dto);
+                        })
+                .whenComplete(
+                        (_return, e) -> {
+                            if (Objects.nonNull(e)) {
+                                log.error(
+                                        "Store script and job mapping failed, please maintain this mapping manually. \n"
+                                                + "scriptId [{}], schedulerConfigId [{}], jobId [{}], userId [{}]",
+                                        scriptId,
+                                        schedulerConfigId,
+                                        jobId,
+                                        userId,
+                                        e);
+                            }
+                        });
     }
 
     private Map<String, Object> getScriptParamMap(List<ScriptParam> scriptParams) {
         Map<String, Object> params = Maps.newHashMap();
 
         if (!CollectionUtils.isEmpty(params)) {
-            scriptParams.forEach(scriptParam -> params.put(scriptParam.getKey(), scriptParam.getValue()));
+            scriptParams.forEach(
+                    scriptParam -> params.put(scriptParam.getKey(), scriptParam.getValue()));
         }
         return params;
     }
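
    (Side note, not part of the patch: the nextExecutionTime block near the top of this hunk uses the
    cron-utils library. A minimal standalone sketch of that calculation, assuming a Quartz-style cron
    definition — the parser in TaskServiceImpl may be configured with a different CronType:)

    import com.cronutils.model.CronType;
    import com.cronutils.model.definition.CronDefinitionBuilder;
    import com.cronutils.model.time.ExecutionTime;
    import com.cronutils.parser.CronParser;

    import java.time.ZonedDateTime;
    import java.util.Date;
    import java.util.Optional;

    public class NextExecutionSketch {
        public static void main(String[] args) {
            // Assumption: Quartz-style expressions; the actual trigger expression comes from jobDefine.
            CronParser parser =
                    new CronParser(CronDefinitionBuilder.instanceDefinitionFor(CronType.QUARTZ));
            ExecutionTime executionTime =
                    ExecutionTime.forCron(parser.parse("0 0/5 * * * ?")); // every 5 minutes
            Optional<ZonedDateTime> next = executionTime.nextExecution(ZonedDateTime.now());
            // Mirrors the patch: convert to java.util.Date only when a next run exists.
            next.map(ZonedDateTime::toInstant).map(Date::from).ifPresent(System.out::println);
        }
    }
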
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/UserServiceImpl.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/UserServiceImpl.java
index 636eb0fc..95f2321f 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/UserServiceImpl.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/service/impl/UserServiceImpl.java
@@ -17,8 +17,6 @@
 
 package org.apache.seatunnel.app.service.impl;
 
-import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.USERNAME_PASSWORD_NO_MATCHED;
-
 import org.apache.seatunnel.app.common.UserTokenStatusEnum;
 import org.apache.seatunnel.app.dal.dao.IUserDao;
 import org.apache.seatunnel.app.dal.entity.User;
@@ -49,16 +47,15 @@ import java.util.List;
 import java.util.Objects;
 import java.util.stream.Collectors;
 
+import static org.apache.seatunnel.server.common.SeatunnelErrorEnum.USERNAME_PASSWORD_NO_MATCHED;
+
 @Component
 public class UserServiceImpl implements IUserService {
-    @Resource
-    private IUserDao userDaoImpl;
+    @Resource private IUserDao userDaoImpl;
 
-    @Resource
-    private IRoleService roleServiceImpl;
+    @Resource private IRoleService roleServiceImpl;
 
-    @Resource
-    private JwtUtils jwtUtils;
+    @Resource private JwtUtils jwtUtils;
 
     @Value("${user.default.passwordSalt:seatunnel}")
     private String defaultSalt;
@@ -70,14 +67,15 @@ public class UserServiceImpl implements IUserService {
         userDaoImpl.checkUserExists(addReq.getUsername());
 
         // 2. add a new user.
-        final UpdateUserDto dto = UpdateUserDto.builder()
-                .id(null)
-                .username(addReq.getUsername())
-                // encryption user's password
-                .password(PasswordUtils.encryptWithSalt(defaultSalt, addReq.getPassword()))
-                .status(addReq.getStatus())
-                .type(addReq.getType())
-                .build();
+        final UpdateUserDto dto =
+                UpdateUserDto.builder()
+                        .id(null)
+                        .username(addReq.getUsername())
+                        // encryption user's password
+                        .password(PasswordUtils.encryptWithSalt(defaultSalt, addReq.getPassword()))
+                        .status(addReq.getStatus())
+                        .type(addReq.getType())
+                        .build();
 
         final int userId = userDaoImpl.add(dto);
         final AddUserRes res = new AddUserRes();
@@ -90,14 +88,16 @@ public class UserServiceImpl implements IUserService {
 
     @Override
     public void update(UpdateUserReq updateReq) {
-        final UpdateUserDto dto = UpdateUserDto.builder()
-                .id(updateReq.getUserId())
-                .username(updateReq.getUsername())
-                // encryption user's password
-                .password(PasswordUtils.encryptWithSalt(defaultSalt, updateReq.getPassword()))
-                .status(updateReq.getStatus())
-                .type(updateReq.getType())
-                .build();
+        final UpdateUserDto dto =
+                UpdateUserDto.builder()
+                        .id(updateReq.getUserId())
+                        .username(updateReq.getUsername())
+                        // encryption user's password
+                        .password(
+                                PasswordUtils.encryptWithSalt(defaultSalt, updateReq.getPassword()))
+                        .status(updateReq.getStatus())
+                        .type(updateReq.getType())
+                        .build();
 
         userDaoImpl.update(dto);
     }
@@ -112,13 +112,13 @@ public class UserServiceImpl implements IUserService {
     @Override
     public PageInfo<UserSimpleInfoRes> list(UserListReq userListReq) {
 
-        final ListUserDto dto = ListUserDto.builder()
-                .name(userListReq.getName())
-                .build();
+        final ListUserDto dto = ListUserDto.builder().name(userListReq.getName()).build();
 
-        final PageData<User> userPageData = userDaoImpl.list(dto, userListReq.getRealPageNo(), userListReq.getPageSize());
+        final PageData<User> userPageData =
+                userDaoImpl.list(dto, userListReq.getRealPageNo(), userListReq.getPageSize());
 
-        final List<UserSimpleInfoRes> data = userPageData.getData().stream().map(this::translate).collect(Collectors.toList());
+        final List<UserSimpleInfoRes> data =
+                userPageData.getData().stream().map(this::translate).collect(Collectors.toList());
         final PageInfo<UserSimpleInfoRes> pageInfo = new PageInfo<>();
         pageInfo.setPageNo(userListReq.getPageNo());
         pageInfo.setPageSize(userListReq.getPageSize());
@@ -153,11 +153,12 @@ public class UserServiceImpl implements IUserService {
         final String token = jwtUtils.genToken(translate.toMap());
         translate.setToken(token);
 
-        final UserLoginLogDto logDto = UserLoginLogDto.builder()
-                .token(token)
-                .tokenStatus(UserTokenStatusEnum.ENABLE.enable())
-                .userId(user.getId())
-                .build();
+        final UserLoginLogDto logDto =
+                UserLoginLogDto.builder()
+                        .token(token)
+                        .tokenStatus(UserTokenStatusEnum.ENABLE.enable())
+                        .userId(user.getId())
+                        .build();
         userDaoImpl.insertLoginLog(logDto);
 
         return translate;
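
    (Side note, not part of the patch: the add/update paths above call
    PasswordUtils.encryptWithSalt(defaultSalt, password). The sketch below is a hypothetical
    stand-in for that helper, only to illustrate the salt-plus-password hashing pattern; the
    project's real PasswordUtils may use a different digest or encoding.)

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    public class SaltedHashSketch {
        // Hypothetical stand-in for PasswordUtils.encryptWithSalt.
        static String encryptWithSalt(String salt, String password) throws NoSuchAlgorithmException {
            MessageDigest md = MessageDigest.getInstance("SHA-256");
            byte[] digest = md.digest((salt + password).getBytes(StandardCharsets.UTF_8));
            StringBuilder hex = new StringBuilder();
            for (byte b : digest) {
                hex.append(String.format("%02x", b));
            }
            return hex.toString();
        }

        public static void main(String[] args) throws NoSuchAlgorithmException {
            // "seatunnel" mirrors the user.default.passwordSalt default shown in the diff above.
            System.out.println(encryptWithSalt("seatunnel", "example-password"));
        }
    }
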
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/AbstractDataSourceConfigSwitcher.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/AbstractDataSourceConfigSwitcher.java
index 576b55a8..5e0e9e85 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/AbstractDataSourceConfigSwitcher.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/AbstractDataSourceConfigSwitcher.java
@@ -17,6 +17,9 @@
 
 package org.apache.seatunnel.app.thridparty.datasource;
 
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
+import org.apache.seatunnel.shade.com.typesafe.config.ConfigValue;
+
 import org.apache.seatunnel.api.configuration.Option;
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.api.configuration.util.RequiredOption;
@@ -29,9 +32,6 @@ import org.apache.seatunnel.app.thridparty.exceptions.UnSupportWrapperException;
 import org.apache.seatunnel.app.thridparty.framework.SeaTunnelOptionRuleWrapper;
 import org.apache.seatunnel.common.constants.PluginType;
 
-import org.apache.seatunnel.shade.com.typesafe.config.Config;
-import org.apache.seatunnel.shade.com.typesafe.config.ConfigValue;
-
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -41,116 +41,124 @@ import java.util.stream.Stream;
 public abstract class AbstractDataSourceConfigSwitcher implements DataSourceConfigSwitcher {
     @Override
     public FormStructure filterOptionRule(
-        String connectorName,
-        OptionRule dataSourceOptionRule,
-        OptionRule virtualTableOptionRule,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        OptionRule connectorOptionRule,
-        List<String> excludedKeys) {
+            String connectorName,
+            OptionRule dataSourceOptionRule,
+            OptionRule virtualTableOptionRule,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            OptionRule connectorOptionRule,
+            List<String> excludedKeys) {
 
         List<String> dataSourceRequiredAllKey =
-            Stream.concat(
-                    dataSourceOptionRule.getRequiredOptions().stream(),
-                    virtualTableOptionRule.getRequiredOptions().stream())
-                .flatMap(ro -> ro.getOptions().stream().map(Option::key))
-                .collect(Collectors.toList());
+                Stream.concat(
+                                dataSourceOptionRule.getRequiredOptions().stream(),
+                                virtualTableOptionRule.getRequiredOptions().stream())
+                        .flatMap(ro -> ro.getOptions().stream().map(Option::key))
+                        .collect(Collectors.toList());
 
         dataSourceRequiredAllKey.addAll(excludedKeys);
 
         List<RequiredOption> requiredOptions =
-            connectorOptionRule.getRequiredOptions().stream()
-                .map(
-                    requiredOption -> {
-                        if (requiredOption instanceof RequiredOption.AbsolutelyRequiredOptions) {
-                            RequiredOption.AbsolutelyRequiredOptions
-                                absolutelyRequiredOptions =
-                                (RequiredOption.AbsolutelyRequiredOptions)
-                                    requiredOption;
-                            List<Option<?>> requiredOpList =
-                                absolutelyRequiredOptions.getOptions().stream()
-                                    .filter(
-                                        op -> {
-                                            return !dataSourceRequiredAllKey
-                                                .contains(op.key());
-                                        })
-                                    .collect(Collectors.toList());
-                            return requiredOpList.isEmpty() ? null
-                                : OptionRule.builder()
-                                .required(
-                                    requiredOpList.toArray(
-                                        new Option<?>[0]))
-                                .build()
-                                .getRequiredOptions()
-                                .get(0);
-                        }
-
-                        if (requiredOption instanceof RequiredOption.BundledRequiredOptions) {
-                            List<Option<?>> bundledRequiredOptions =
-                                requiredOption.getOptions();
-                            return bundledRequiredOptions.stream()
-                                .anyMatch(
-                                    op ->
-                                        dataSourceRequiredAllKey
-                                            .contains(op.key())) ? null
-                                : requiredOption;
-                        }
-
-                        if (requiredOption instanceof RequiredOption.ExclusiveRequiredOptions) {
-                            List<Option<?>> exclusiveOptions =
-                                requiredOption.getOptions();
-                            return exclusiveOptions.stream()
-                                .anyMatch(
-                                    op ->
-                                        dataSourceRequiredAllKey
-                                            .contains(op.key())) ? null
-                                : requiredOption;
-                        }
-
-                        if (requiredOption instanceof RequiredOption.ConditionalRequiredOptions) {
-                            List<Option<?>> conditionalRequiredOptions =
-                                requiredOption.getOptions();
-                            return conditionalRequiredOptions.stream()
-                                .anyMatch(
-                                    op ->
-                                        dataSourceRequiredAllKey
-                                            .contains(op.key())) ? null
-                                : requiredOption;
-                        }
-
-                        throw new UnSupportWrapperException(
-                            connectorName, "Unknown", requiredOption.toString());
-                    })
-                .filter(Objects::nonNull)
-                .collect(Collectors.toList());
+                connectorOptionRule.getRequiredOptions().stream()
+                        .map(
+                                requiredOption -> {
+                                    if (requiredOption
+                                            instanceof RequiredOption.AbsolutelyRequiredOptions) {
+                                        RequiredOption.AbsolutelyRequiredOptions
+                                                absolutelyRequiredOptions =
+                                                        (RequiredOption.AbsolutelyRequiredOptions)
+                                                                requiredOption;
+                                        List<Option<?>> requiredOpList =
+                                                absolutelyRequiredOptions.getOptions().stream()
+                                                        .filter(
+                                                                op -> {
+                                                                    return !dataSourceRequiredAllKey
+                                                                            .contains(op.key());
+                                                                })
+                                                        .collect(Collectors.toList());
+                                        return requiredOpList.isEmpty()
+                                                ? null
+                                                : OptionRule.builder()
+                                                        .required(
+                                                                requiredOpList.toArray(
+                                                                        new Option<?>[0]))
+                                                        .build()
+                                                        .getRequiredOptions()
+                                                        .get(0);
+                                    }
+
+                                    if (requiredOption
+                                            instanceof RequiredOption.BundledRequiredOptions) {
+                                        List<Option<?>> bundledRequiredOptions =
+                                                requiredOption.getOptions();
+                                        return bundledRequiredOptions.stream()
+                                                        .anyMatch(
+                                                                op ->
+                                                                        dataSourceRequiredAllKey
+                                                                                .contains(op.key()))
+                                                ? null
+                                                : requiredOption;
+                                    }
+
+                                    if (requiredOption
+                                            instanceof RequiredOption.ExclusiveRequiredOptions) {
+                                        List<Option<?>> exclusiveOptions =
+                                                requiredOption.getOptions();
+                                        return exclusiveOptions.stream()
+                                                        .anyMatch(
+                                                                op ->
+                                                                        dataSourceRequiredAllKey
+                                                                                .contains(op.key()))
+                                                ? null
+                                                : requiredOption;
+                                    }
+
+                                    if (requiredOption
+                                            instanceof RequiredOption.ConditionalRequiredOptions) {
+                                        List<Option<?>> conditionalRequiredOptions =
+                                                requiredOption.getOptions();
+                                        return conditionalRequiredOptions.stream()
+                                                        .anyMatch(
+                                                                op ->
+                                                                        dataSourceRequiredAllKey
+                                                                                .contains(op.key()))
+                                                ? null
+                                                : requiredOption;
+                                    }
+
+                                    throw new UnSupportWrapperException(
+                                            connectorName, "Unknown", requiredOption.toString());
+                                })
+                        .filter(Objects::nonNull)
+                        .collect(Collectors.toList());
 
         List<String> dataSourceOptionAllKey =
-            Stream.concat(
-                    dataSourceOptionRule.getOptionalOptions().stream(),
-                    virtualTableOptionRule.getOptionalOptions().stream())
-                .map(Option::key)
-                .collect(Collectors.toList());
+                Stream.concat(
+                                dataSourceOptionRule.getOptionalOptions().stream(),
+                                virtualTableOptionRule.getOptionalOptions().stream())
+                        .map(Option::key)
+                        .collect(Collectors.toList());
 
         dataSourceOptionAllKey.addAll(excludedKeys);
 
         List<Option<?>> optionList =
-            connectorOptionRule.getOptionalOptions().stream()
-                .filter(option -> !dataSourceOptionAllKey.contains(option.key()))
-                .collect(Collectors.toList());
+                connectorOptionRule.getOptionalOptions().stream()
+                        .filter(option -> !dataSourceOptionAllKey.contains(option.key()))
+                        .collect(Collectors.toList());
 
         return SeaTunnelOptionRuleWrapper.wrapper(
-            optionList, requiredOptions, connectorName, pluginType);
+                optionList, requiredOptions, connectorName, pluginType);
     }
 
     @Override
     public Config mergeDatasourceConfig(
-        Config dataSourceInstanceConfig,
-        VirtualTableDetailRes virtualTableDetail,
-        DataSourceOption dataSourceOption,
-        SelectTableFields selectTableFields,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        Config connectorConfig) {
+            Config dataSourceInstanceConfig,
+            VirtualTableDetailRes virtualTableDetail,
+            DataSourceOption dataSourceOption,
+            SelectTableFields selectTableFields,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            Config connectorConfig) {
 
         Config mergedConfig = connectorConfig;
 
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/DataSourceConfigSwitcher.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/DataSourceConfigSwitcher.java
index d7a4de8f..8d950e54 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/DataSourceConfigSwitcher.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/DataSourceConfigSwitcher.java
@@ -17,6 +17,8 @@
 
 package org.apache.seatunnel.app.thridparty.datasource;
 
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
+
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.app.domain.request.connector.BusinessMode;
 import org.apache.seatunnel.app.domain.request.job.DataSourceOption;
@@ -25,33 +27,30 @@ import org.apache.seatunnel.app.domain.response.datasource.VirtualTableDetailRes
 import org.apache.seatunnel.app.dynamicforms.FormStructure;
 import org.apache.seatunnel.common.constants.PluginType;
 
-import org.apache.seatunnel.shade.com.typesafe.config.Config;
-
 import java.util.List;
 
 public interface DataSourceConfigSwitcher {
 
-    /**
-     * Use the OptionRule of the data source to filter the OptionRule of the connector
-     */
+    /** Use the OptionRule of the data source to filter the OptionRule of the connector */
     FormStructure filterOptionRule(
-        String connectorName,
-        OptionRule dataSourceOptionRule,
-        OptionRule virtualTableOptionRule,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        OptionRule connectorOptionRule,
-        List<String> excludedKeys);
+            String connectorName,
+            OptionRule dataSourceOptionRule,
+            OptionRule virtualTableOptionRule,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            OptionRule connectorOptionRule,
+            List<String> excludedKeys);
 
     /**
-     * Merge the parameters of the data source instance and connector configuration into the final connector parameters
+     * Merge the parameters of the data source instance and connector configuration into the final
+     * connector parameters
      */
     Config mergeDatasourceConfig(
-        Config dataSourceInstanceConfig,
-        VirtualTableDetailRes virtualTableDetail,
-        DataSourceOption dataSourceOption,
-        SelectTableFields selectTableFields,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        Config connectorConfig);
+            Config dataSourceInstanceConfig,
+            VirtualTableDetailRes virtualTableDetail,
+            DataSourceOption dataSourceOption,
+            SelectTableFields selectTableFields,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            Config connectorConfig);
 }
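
    (Side note, not part of the patch: the javadoc above describes filtering the connector's
    OptionRule by what the datasource or virtual table already provides. A simplified, hypothetical
    sketch of that idea over plain string keys — the real implementation in
    AbstractDataSourceConfigSwitcher operates on OptionRule/RequiredOption objects and also honors
    the excludedKeys list:)

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class OptionFilterSketch {
        public static void main(String[] args) {
            // Keys the datasource / virtual table already supply.
            List<String> dataSourceKeys = Arrays.asList("url", "driver", "user", "password");
            // Keys declared by the connector's OptionRule.
            List<String> connectorKeys = Arrays.asList("url", "driver", "query", "parallelism");
            // Keep only connector options the datasource does not already provide.
            List<String> remaining =
                    connectorKeys.stream()
                            .filter(key -> !dataSourceKeys.contains(key))
                            .collect(Collectors.toList());
            System.out.println(remaining); // [query, parallelism]
        }
    }
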
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/DataSourceConfigSwitcherUtils.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/DataSourceConfigSwitcherUtils.java
index 2e5b3071..8e250708 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/DataSourceConfigSwitcherUtils.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/DataSourceConfigSwitcherUtils.java
@@ -17,7 +17,7 @@
 
 package org.apache.seatunnel.app.thridparty.datasource;
 
-import static com.google.common.base.Preconditions.checkNotNull;
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
 
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.app.domain.request.connector.BusinessMode;
@@ -43,51 +43,51 @@ import org.apache.seatunnel.app.thridparty.datasource.impl.TidbDataSourceConfigS
 import org.apache.seatunnel.common.constants.PluginType;
 import org.apache.seatunnel.common.utils.SeaTunnelException;
 
-import org.apache.seatunnel.shade.com.typesafe.config.Config;
-
 import java.util.ArrayList;
 
+import static com.google.common.base.Preconditions.checkNotNull;
+
 public class DataSourceConfigSwitcherUtils {
 
     public static FormStructure filterOptionRule(
-        String datasourceName,
-        String connectorName,
-        OptionRule dataSourceOptionRule,
-        OptionRule virtualTableOptionRule,
-        PluginType pluginType,
-        BusinessMode businessMode,
-        OptionRule connectorOptionRule) {
+            String datasourceName,
+            String connectorName,
+            OptionRule dataSourceOptionRule,
+            OptionRule virtualTableOptionRule,
+            PluginType pluginType,
+            BusinessMode businessMode,
+            OptionRule connectorOptionRule) {
         DataSourceConfigSwitcher dataSourceConfigSwitcher =
-            getDataSourceConfigSwitcher(datasourceName.toUpperCase());
+                getDataSourceConfigSwitcher(datasourceName.toUpperCase());
         return dataSourceConfigSwitcher.filterOptionRule(
-            connectorName,
-            dataSourceOptionRule,
-            virtualTableOptionRule,
-            businessMode,
-            pluginType,
-            connectorOptionRule,
-            new ArrayList<>());
+                connectorName,
+                dataSourceOptionRule,
+                virtualTableOptionRule,
+                businessMode,
+                pluginType,
+                connectorOptionRule,
+                new ArrayList<>());
     }
 
     public static Config mergeDatasourceConfig(
-        String datasourceName,
-        Config dataSourceInstanceConfig,
-        VirtualTableDetailRes virtualTableDetail,
-        DataSourceOption dataSourceOption,
-        SelectTableFields selectTableFields,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        Config connectorConfig) {
+            String datasourceName,
+            Config dataSourceInstanceConfig,
+            VirtualTableDetailRes virtualTableDetail,
+            DataSourceOption dataSourceOption,
+            SelectTableFields selectTableFields,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            Config connectorConfig) {
         DataSourceConfigSwitcher dataSourceConfigSwitcher =
-            getDataSourceConfigSwitcher(datasourceName.toUpperCase());
+                getDataSourceConfigSwitcher(datasourceName.toUpperCase());
         return dataSourceConfigSwitcher.mergeDatasourceConfig(
-            dataSourceInstanceConfig,
-            virtualTableDetail,
-            dataSourceOption,
-            selectTableFields,
-            businessMode,
-            pluginType,
-            connectorConfig);
+                dataSourceInstanceConfig,
+                virtualTableDetail,
+                dataSourceOption,
+                selectTableFields,
+                businessMode,
+                pluginType,
+                connectorConfig);
     }
 
     private static DataSourceConfigSwitcher getDataSourceConfigSwitcher(String datasourceName) {
@@ -127,9 +127,9 @@ public class DataSourceConfigSwitcherUtils {
 
             default:
                 throw new SeaTunnelException(
-                    "data source : "
-                        + datasourceName
-                        + " is no implementation class for DataSourceConfigSwitcher");
+                        "data source : "
+                                + datasourceName
+                                + " is no implementation class for DataSourceConfigSwitcher");
         }
     }
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/SchemaGenerator.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/SchemaGenerator.java
index 730ee658..4002fa96 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/SchemaGenerator.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/SchemaGenerator.java
@@ -17,29 +17,29 @@
 
 package org.apache.seatunnel.app.thridparty.datasource;
 
-import static com.google.common.base.Preconditions.checkArgument;
-import static com.google.common.base.Preconditions.checkNotNull;
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
+import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory;
+import org.apache.seatunnel.shade.com.typesafe.config.ConfigValueFactory;
 
 import org.apache.seatunnel.app.domain.request.job.SelectTableFields;
 import org.apache.seatunnel.app.domain.response.datasource.VirtualTableDetailRes;
 import org.apache.seatunnel.app.domain.response.datasource.VirtualTableFieldRes;
 
-import org.apache.seatunnel.shade.com.typesafe.config.Config;
-import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory;
-import org.apache.seatunnel.shade.com.typesafe.config.ConfigValueFactory;
+import org.apache.commons.collections4.CollectionUtils;
 
 import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.collections4.CollectionUtils;
 
 import java.util.Map;
 import java.util.function.Function;
 import java.util.stream.Collectors;
 
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+
 @Slf4j
 public class SchemaGenerator {
 
-    private SchemaGenerator() {
-    }
+    private SchemaGenerator() {}
 
     /**
      * Generate the schema of the table.
@@ -52,38 +52,38 @@ public class SchemaGenerator {
      * </pre>
      *
      * @param virtualTableDetailRes virtual table detail.
-     * @param selectTableFields     select table fields which need to be placed in the schema.
+     * @param selectTableFields select table fields which need to be placed in the schema.
      * @return schema.
      */
     public static Config generateSchemaBySelectTableFields(
-        VirtualTableDetailRes virtualTableDetailRes, SelectTableFields selectTableFields) {
+            VirtualTableDetailRes virtualTableDetailRes, SelectTableFields selectTableFields) {
         checkNotNull(selectTableFields, "selectTableFields cannot be null");
         checkArgument(
-            CollectionUtils.isNotEmpty(selectTableFields.getTableFields()),
-            "selectTableFields.tableFields cannot be empty");
+                CollectionUtils.isNotEmpty(selectTableFields.getTableFields()),
+                "selectTableFields.tableFields cannot be empty");
 
         checkNotNull(virtualTableDetailRes, "virtualTableDetailRes cannot be null");
         checkArgument(
-            CollectionUtils.isNotEmpty(virtualTableDetailRes.getFields()),
-            "virtualTableDetailRes.fields cannot be empty");
+                CollectionUtils.isNotEmpty(virtualTableDetailRes.getFields()),
+                "virtualTableDetailRes.fields cannot be empty");
 
         Map<String, VirtualTableFieldRes> fieldTypeMap =
-            virtualTableDetailRes.getFields().stream()
-                .collect(
-                    Collectors.toMap(
-                        VirtualTableFieldRes::getFieldName, Function.identity()));
+                virtualTableDetailRes.getFields().stream()
+                        .collect(
+                                Collectors.toMap(
+                                        VirtualTableFieldRes::getFieldName, Function.identity()));
 
         Config schema = ConfigFactory.empty();
         for (String fieldName : selectTableFields.getTableFields()) {
             VirtualTableFieldRes virtualTableFieldRes =
-                checkNotNull(
-                    fieldTypeMap.get(fieldName),
-                    String.format(
-                        "Cannot find the field: %s from virtual table", fieldName));
+                    checkNotNull(
+                            fieldTypeMap.get(fieldName),
+                            String.format(
+                                    "Cannot find the field: %s from virtual table", fieldName));
             schema =
-                schema.withValue(
-                    fieldName,
-                    ConfigValueFactory.fromAnyRef(virtualTableFieldRes.getFieldType()));
+                    schema.withValue(
+                            fieldName,
+                            ConfigValueFactory.fromAnyRef(virtualTableFieldRes.getFieldType()));
         }
         return schema.atKey("fields");
     }
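
    (Side note, not part of the patch: generateSchemaBySelectTableFields above builds the schema by
    folding withValue calls and nesting the result under "fields". A small sketch of the same
    construction pattern, using the unshaded typesafe-config artifact instead of the
    org.apache.seatunnel.shade copy, with field names and types chosen only for illustration:)

    import com.typesafe.config.Config;
    import com.typesafe.config.ConfigFactory;
    import com.typesafe.config.ConfigValueFactory;

    public class SchemaSketch {
        public static void main(String[] args) {
            Config schema = ConfigFactory.empty();
            schema = schema.withValue("name", ConfigValueFactory.fromAnyRef("string"));
            schema = schema.withValue("age", ConfigValueFactory.fromAnyRef("int"));
            // atKey("fields") nests the map under a "fields" key,
            // e.g. fields { name = "string", age = "int" }
            System.out.println(schema.atKey("fields").root().render());
        }
    }
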
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/BaseJdbcDataSourceConfigSwitcher.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/BaseJdbcDataSourceConfigSwitcher.java
index f8bba3dc..ce00e110 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/BaseJdbcDataSourceConfigSwitcher.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/BaseJdbcDataSourceConfigSwitcher.java
@@ -17,6 +17,9 @@
 
 package org.apache.seatunnel.app.thridparty.datasource.impl;
 
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
+import org.apache.seatunnel.shade.com.typesafe.config.ConfigValueFactory;
+
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.app.domain.request.connector.BusinessMode;
 import org.apache.seatunnel.app.domain.request.job.DataSourceOption;
@@ -26,9 +29,6 @@ import org.apache.seatunnel.app.dynamicforms.FormStructure;
 import org.apache.seatunnel.app.thridparty.datasource.AbstractDataSourceConfigSwitcher;
 import org.apache.seatunnel.common.constants.PluginType;
 
-import org.apache.seatunnel.shade.com.typesafe.config.Config;
-import org.apache.seatunnel.shade.com.typesafe.config.ConfigValueFactory;
-
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
@@ -47,39 +47,39 @@ public abstract class BaseJdbcDataSourceConfigSwitcher extends AbstractDataSourc
 
     @Override
     public FormStructure filterOptionRule(
-        String connectorName,
-        OptionRule dataSourceOptionRule,
-        OptionRule virtualTableOptionRule,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        OptionRule connectorOptionRule,
-        List<String> excludedKeys) {
+            String connectorName,
+            OptionRule dataSourceOptionRule,
+            OptionRule virtualTableOptionRule,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            OptionRule connectorOptionRule,
+            List<String> excludedKeys) {
         Map<PluginType, List<String>> filterFieldMap = new HashMap<>();
 
         filterFieldMap.put(
-            PluginType.SINK,
-            Arrays.asList(QUERY_KEY, TABLE_KEY, DATABASE_KEY, GENERATE_SINK_SQL));
+                PluginType.SINK,
+                Arrays.asList(QUERY_KEY, TABLE_KEY, DATABASE_KEY, GENERATE_SINK_SQL));
         filterFieldMap.put(PluginType.SOURCE, Collections.singletonList(QUERY_KEY));
 
         return super.filterOptionRule(
-            connectorName,
-            dataSourceOptionRule,
-            virtualTableOptionRule,
-            businessMode,
-            pluginType,
-            connectorOptionRule,
-            filterFieldMap.get(pluginType));
+                connectorName,
+                dataSourceOptionRule,
+                virtualTableOptionRule,
+                businessMode,
+                pluginType,
+                connectorOptionRule,
+                filterFieldMap.get(pluginType));
     }
 
     @Override
     public Config mergeDatasourceConfig(
-        Config dataSourceInstanceConfig,
-        VirtualTableDetailRes virtualTableDetail,
-        DataSourceOption dataSourceOption,
-        SelectTableFields selectTableFields,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        Config connectorConfig) {
+            Config dataSourceInstanceConfig,
+            VirtualTableDetailRes virtualTableDetail,
+            DataSourceOption dataSourceOption,
+            SelectTableFields selectTableFields,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            Config connectorConfig) {
 
         // replace database in url
         if (dataSourceOption.getDatabases().size() == 1) {
@@ -87,13 +87,13 @@ public abstract class BaseJdbcDataSourceConfigSwitcher extends AbstractDataSourc
             String url = dataSourceInstanceConfig.getString(URL_KEY);
             String newUrl = replaceDatabaseNameInUrl(url, databaseName);
             dataSourceInstanceConfig =
-                dataSourceInstanceConfig.withValue(
-                    URL_KEY, ConfigValueFactory.fromAnyRef(newUrl));
+                    dataSourceInstanceConfig.withValue(
+                            URL_KEY, ConfigValueFactory.fromAnyRef(newUrl));
         }
         if (pluginType.equals(PluginType.SINK)) {
             connectorConfig =
-                connectorConfig.withValue(
-                    GENERATE_SINK_SQL, ConfigValueFactory.fromAnyRef(false));
+                    connectorConfig.withValue(
+                            GENERATE_SINK_SQL, ConfigValueFactory.fromAnyRef(false));
         }
         if (businessMode.equals(BusinessMode.DATA_INTEGRATION)) {
 
@@ -109,33 +109,19 @@ public abstract class BaseJdbcDataSourceConfigSwitcher extends AbstractDataSourc
                 String sql = tableFieldsToSql(tableFields, databaseName, tableName);
 
                 connectorConfig =
-                    connectorConfig.withValue(QUERY_KEY, ConfigValueFactory.fromAnyRef(sql));
+                        connectorConfig.withValue(QUERY_KEY, ConfigValueFactory.fromAnyRef(sql));
             } else if (pluginType.equals(PluginType.SINK)) {
                 connectorConfig =
-                    connectorConfig.withValue(
-                        DATABASE_KEY, ConfigValueFactory.fromAnyRef(databaseName));
+                        connectorConfig.withValue(
+                                DATABASE_KEY, ConfigValueFactory.fromAnyRef(databaseName));
                 connectorConfig =
-                    connectorConfig.withValue(
-                        TABLE_KEY, ConfigValueFactory.fromAnyRef(tableName));
+                        connectorConfig.withValue(
+                                TABLE_KEY, ConfigValueFactory.fromAnyRef(tableName));
             } else {
                 throw new UnsupportedOperationException("Unsupported plugin type: " + pluginType);
             }
 
             return super.mergeDatasourceConfig(
-                dataSourceInstanceConfig,
-                virtualTableDetail,
-                dataSourceOption,
-                selectTableFields,
-                businessMode,
-                pluginType,
-                connectorConfig);
-        } else if (businessMode.equals(BusinessMode.DATA_REPLICA)) {
-            String databaseName = dataSourceOption.getDatabases().get(0);
-            if (pluginType.equals(PluginType.SINK)) {
-                connectorConfig =
-                    connectorConfig.withValue(
-                        DATABASE_KEY, ConfigValueFactory.fromAnyRef(databaseName));
-                return super.mergeDatasourceConfig(
                     dataSourceInstanceConfig,
                     virtualTableDetail,
                     dataSourceOption,
@@ -143,9 +129,23 @@ public abstract class BaseJdbcDataSourceConfigSwitcher extends AbstractDataSourc
                     businessMode,
                     pluginType,
                     connectorConfig);
+        } else if (businessMode.equals(BusinessMode.DATA_REPLICA)) {
+            String databaseName = dataSourceOption.getDatabases().get(0);
+            if (pluginType.equals(PluginType.SINK)) {
+                connectorConfig =
+                        connectorConfig.withValue(
+                                DATABASE_KEY, ConfigValueFactory.fromAnyRef(databaseName));
+                return super.mergeDatasourceConfig(
+                        dataSourceInstanceConfig,
+                        virtualTableDetail,
+                        dataSourceOption,
+                        selectTableFields,
+                        businessMode,
+                        pluginType,
+                        connectorConfig);
             } else {
                 throw new UnsupportedOperationException(
-                    "JDBC DATA_REPLICA Unsupported plugin type: " + pluginType);
+                        "JDBC DATA_REPLICA Unsupported plugin type: " + pluginType);
             }
 
         } else {
@@ -154,7 +154,7 @@ public abstract class BaseJdbcDataSourceConfigSwitcher extends AbstractDataSourc
     }
 
     protected String generateSql(
-        List<String> tableFields, String database, String schema, String table) {
+            List<String> tableFields, String database, String schema, String table) {
         StringBuilder sb = new StringBuilder();
         sb.append("SELECT ");
         for (int i = 0; i < tableFields.size(); i++) {
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/ClickhouseDataSourceConfigSwitcher.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/ClickhouseDataSourceConfigSwitcher.java
index 72fb36b7..5908a747 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/ClickhouseDataSourceConfigSwitcher.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/ClickhouseDataSourceConfigSwitcher.java
@@ -17,6 +17,9 @@
 
 package org.apache.seatunnel.app.thridparty.datasource.impl;
 
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
+import org.apache.seatunnel.shade.com.typesafe.config.ConfigValueFactory;
+
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.app.domain.request.connector.BusinessMode;
 import org.apache.seatunnel.app.domain.request.job.DataSourceOption;
@@ -27,9 +30,6 @@ import org.apache.seatunnel.app.thridparty.datasource.AbstractDataSourceConfigSw
 import org.apache.seatunnel.app.utils.JdbcUtils;
 import org.apache.seatunnel.common.constants.PluginType;
 
-import org.apache.seatunnel.shade.com.typesafe.config.Config;
-import org.apache.seatunnel.shade.com.typesafe.config.ConfigValueFactory;
-
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Lists;
 
@@ -38,7 +38,7 @@ import java.util.Map;
 
 public class ClickhouseDataSourceConfigSwitcher extends AbstractDataSourceConfigSwitcher {
     private static final ClickhouseDataSourceConfigSwitcher INSTANCE =
-        new ClickhouseDataSourceConfigSwitcher();
+            new ClickhouseDataSourceConfigSwitcher();
 
     private static final String HOST = "host";
     private static final String URL = "url";
@@ -47,51 +47,51 @@ public class ClickhouseDataSourceConfigSwitcher extends AbstractDataSourceConfig
     private static final String TABLE = "table";
 
     private static final Map<PluginType, List<String>> FILTER_FIELD_MAP =
-        new ImmutableMap.Builder<PluginType, List<String>>()
-            .put(PluginType.SOURCE, Lists.newArrayList(SQL, HOST))
-            .put(PluginType.SINK, Lists.newArrayList(HOST, DATABASE, TABLE))
-            .build();
+            new ImmutableMap.Builder<PluginType, List<String>>()
+                    .put(PluginType.SOURCE, Lists.newArrayList(SQL, HOST))
+                    .put(PluginType.SINK, Lists.newArrayList(HOST, DATABASE, TABLE))
+                    .build();
 
     @Override
     public FormStructure filterOptionRule(
-        String connectorName,
-        OptionRule dataSourceOptionRule,
-        OptionRule virtualTableOptionRule,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        OptionRule connectorOptionRule,
-        List<String> excludedKeys) {
+            String connectorName,
+            OptionRule dataSourceOptionRule,
+            OptionRule virtualTableOptionRule,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            OptionRule connectorOptionRule,
+            List<String> excludedKeys) {
         return super.filterOptionRule(
-            connectorName,
-            dataSourceOptionRule,
-            virtualTableOptionRule,
-            businessMode,
-            pluginType,
-            connectorOptionRule,
-            FILTER_FIELD_MAP.get(pluginType));
+                connectorName,
+                dataSourceOptionRule,
+                virtualTableOptionRule,
+                businessMode,
+                pluginType,
+                connectorOptionRule,
+                FILTER_FIELD_MAP.get(pluginType));
     }
 
     @Override
     public Config mergeDatasourceConfig(
-        Config dataSourceInstanceConfig,
-        VirtualTableDetailRes virtualTableDetail,
-        DataSourceOption dataSourceOption,
-        SelectTableFields selectTableFields,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        Config connectorConfig) {
+            Config dataSourceInstanceConfig,
+            VirtualTableDetailRes virtualTableDetail,
+            DataSourceOption dataSourceOption,
+            SelectTableFields selectTableFields,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            Config connectorConfig) {
         switch (businessMode) {
             case DATA_REPLICA:
                 // We only support sink in data replica mode
                 if (pluginType.equals(PluginType.SINK)) {
                     connectorConfig =
-                        connectorConfig.withValue(
-                            DATABASE,
-                            ConfigValueFactory.fromAnyRef(
-                                dataSourceOption.getDatabases().get(0)));
+                            connectorConfig.withValue(
+                                    DATABASE,
+                                    ConfigValueFactory.fromAnyRef(
+                                            dataSourceOption.getDatabases().get(0)));
                 } else {
                     throw new UnsupportedOperationException(
-                        "Clickhouse DATA_REPLICA Unsupported plugin type: " + pluginType);
+                            "Clickhouse DATA_REPLICA Unsupported plugin type: " + pluginType);
                 }
                 break;
             case DATA_INTEGRATION:
@@ -99,54 +99,53 @@ public class ClickhouseDataSourceConfigSwitcher extends AbstractDataSourceConfig
                 if (pluginType.equals(PluginType.SOURCE)) {
                     List<String> tableFields = selectTableFields.getTableFields();
                     String sql =
-                        String.format(
-                            "SELECT %s FROM %s",
-                            String.join(",", tableFields),
                             String.format(
-                                "`%s`.`%s`",
-                                dataSourceOption.getDatabases().get(0),
-                                dataSourceOption.getTables().get(0)));
+                                    "SELECT %s FROM %s",
+                                    String.join(",", tableFields),
+                                    String.format(
+                                            "`%s`.`%s`",
+                                            dataSourceOption.getDatabases().get(0),
+                                            dataSourceOption.getTables().get(0)));
                     connectorConfig =
-                        connectorConfig.withValue(SQL, ConfigValueFactory.fromAnyRef(sql));
+                            connectorConfig.withValue(SQL, ConfigValueFactory.fromAnyRef(sql));
                 } else if (pluginType.equals(PluginType.SINK)) {
                     connectorConfig =
-                        connectorConfig.withValue(
-                            DATABASE,
-                            ConfigValueFactory.fromAnyRef(
-                                dataSourceOption.getDatabases().get(0)));
+                            connectorConfig.withValue(
+                                    DATABASE,
+                                    ConfigValueFactory.fromAnyRef(
+                                            dataSourceOption.getDatabases().get(0)));
                     connectorConfig =
-                        connectorConfig.withValue(
-                            TABLE,
-                            ConfigValueFactory.fromAnyRef(
-                                dataSourceOption.getTables().get(0)));
+                            connectorConfig.withValue(
+                                    TABLE,
+                                    ConfigValueFactory.fromAnyRef(
+                                            dataSourceOption.getTables().get(0)));
                 } else {
                     throw new UnsupportedOperationException(
-                        "Unsupported plugin type: " + pluginType);
+                            "Unsupported plugin type: " + pluginType);
                 }
                 break;
             default:
                 break;
         }
         connectorConfig =
-            connectorConfig.withValue(
-                HOST,
-                ConfigValueFactory.fromAnyRef(
-                    JdbcUtils.getAddressFromUrl(
-                        dataSourceInstanceConfig.getString(URL))));
+                connectorConfig.withValue(
+                        HOST,
+                        ConfigValueFactory.fromAnyRef(
+                                JdbcUtils.getAddressFromUrl(
+                                        dataSourceInstanceConfig.getString(URL))));
         return super.mergeDatasourceConfig(
-            dataSourceInstanceConfig,
-            virtualTableDetail,
-            dataSourceOption,
-            selectTableFields,
-            businessMode,
-            pluginType,
-            connectorConfig);
+                dataSourceInstanceConfig,
+                virtualTableDetail,
+                dataSourceOption,
+                selectTableFields,
+                businessMode,
+                pluginType,
+                connectorConfig);
     }
 
     public static ClickhouseDataSourceConfigSwitcher getInstance() {
         return INSTANCE;
     }
 
-    private ClickhouseDataSourceConfigSwitcher() {
-    }
+    private ClickhouseDataSourceConfigSwitcher() {}
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/ElasticSearchDataSourceConfigSwitcher.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/ElasticSearchDataSourceConfigSwitcher.java
index a378fae7..b5e72276 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/ElasticSearchDataSourceConfigSwitcher.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/ElasticSearchDataSourceConfigSwitcher.java
@@ -17,6 +17,9 @@
 
 package org.apache.seatunnel.app.thridparty.datasource.impl;
 
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
+import org.apache.seatunnel.shade.com.typesafe.config.ConfigValueFactory;
+
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.app.domain.request.connector.BusinessMode;
 import org.apache.seatunnel.app.domain.request.job.DataSourceOption;
@@ -26,34 +29,30 @@ import org.apache.seatunnel.app.dynamicforms.FormStructure;
 import org.apache.seatunnel.app.thridparty.datasource.AbstractDataSourceConfigSwitcher;
 import org.apache.seatunnel.common.constants.PluginType;
 
-import org.apache.seatunnel.shade.com.typesafe.config.Config;
-import org.apache.seatunnel.shade.com.typesafe.config.ConfigValueFactory;
-
 import java.util.Arrays;
 import java.util.List;
 
 public class ElasticSearchDataSourceConfigSwitcher extends AbstractDataSourceConfigSwitcher {
 
     public static final ElasticSearchDataSourceConfigSwitcher INSTANCE =
-        new ElasticSearchDataSourceConfigSwitcher();
+            new ElasticSearchDataSourceConfigSwitcher();
 
     private static final String SOURCE = "source";
     private static final String SCHEMA = "schema";
     private static final String PRIMARY_KEYS = "primary_keys";
     private static final String INDEX = "index";
 
-    private ElasticSearchDataSourceConfigSwitcher() {
-    }
+    private ElasticSearchDataSourceConfigSwitcher() {}
 
     @Override
     public FormStructure filterOptionRule(
-        String connectorName,
-        OptionRule dataSourceOptionRule,
-        OptionRule virtualTableOptionRule,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        OptionRule connectorOptionRule,
-        List<String> excludedKeys) {
+            String connectorName,
+            OptionRule dataSourceOptionRule,
+            OptionRule virtualTableOptionRule,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            OptionRule connectorOptionRule,
+            List<String> excludedKeys) {
         if (PluginType.SOURCE.equals(pluginType)) {
             // DELETE source/schema
             excludedKeys.addAll(Arrays.asList(SOURCE, SCHEMA, INDEX));
@@ -67,59 +66,59 @@ public class ElasticSearchDataSourceConfigSwitcher extends AbstractDataSourceCon
             throw new UnsupportedOperationException("Unsupported plugin type: " + pluginType);
         }
         return super.filterOptionRule(
-            connectorName,
-            dataSourceOptionRule,
-            virtualTableOptionRule,
-            businessMode,
-            pluginType,
-            connectorOptionRule,
-            excludedKeys);
+                connectorName,
+                dataSourceOptionRule,
+                virtualTableOptionRule,
+                businessMode,
+                pluginType,
+                connectorOptionRule,
+                excludedKeys);
     }
 
     @Override
     public Config mergeDatasourceConfig(
-        Config dataSourceInstanceConfig,
-        VirtualTableDetailRes virtualTableDetail,
-        DataSourceOption dataSourceOption,
-        SelectTableFields selectTableFields,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        Config connectorConfig) {
+            Config dataSourceInstanceConfig,
+            VirtualTableDetailRes virtualTableDetail,
+            DataSourceOption dataSourceOption,
+            SelectTableFields selectTableFields,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            Config connectorConfig) {
         if (PluginType.SOURCE.equals(pluginType)) {
             if (businessMode.equals(BusinessMode.DATA_INTEGRATION)) {
                 // Add source
                 connectorConfig =
-                    connectorConfig.withValue(
-                        INDEX,
-                        ConfigValueFactory.fromAnyRef(dataSourceOption.getTables().get(0)));
+                        connectorConfig.withValue(
+                                INDEX,
+                                ConfigValueFactory.fromAnyRef(dataSourceOption.getTables().get(0)));
                 connectorConfig =
-                    connectorConfig.withValue(
-                        SOURCE,
-                        ConfigValueFactory.fromIterable(
-                            selectTableFields.getTableFields()));
+                        connectorConfig.withValue(
+                                SOURCE,
+                                ConfigValueFactory.fromIterable(
+                                        selectTableFields.getTableFields()));
             } else {
                 throw new UnsupportedOperationException(
-                    "Unsupported business mode: " + businessMode);
+                        "Unsupported business mode: " + businessMode);
             }
         } else if (PluginType.SINK.equals(pluginType)) {
             // TODO Add primary_keys
             if (businessMode.equals(BusinessMode.DATA_INTEGRATION)) {
                 // Add Index
                 connectorConfig =
-                    connectorConfig.withValue(
-                        INDEX,
-                        ConfigValueFactory.fromAnyRef(dataSourceOption.getTables().get(0)));
+                        connectorConfig.withValue(
+                                INDEX,
+                                ConfigValueFactory.fromAnyRef(dataSourceOption.getTables().get(0)));
             }
         } else {
             throw new UnsupportedOperationException("Unsupported plugin type: " + pluginType);
         }
         return super.mergeDatasourceConfig(
-            dataSourceInstanceConfig,
-            virtualTableDetail,
-            dataSourceOption,
-            selectTableFields,
-            businessMode,
-            pluginType,
-            connectorConfig);
+                dataSourceInstanceConfig,
+                virtualTableDetail,
+                dataSourceOption,
+                selectTableFields,
+                businessMode,
+                pluginType,
+                connectorConfig);
     }
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/KafkaDataSourceConfigSwitcher.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/KafkaDataSourceConfigSwitcher.java
index 3e7b5fdd..5e7b28be 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/KafkaDataSourceConfigSwitcher.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/KafkaDataSourceConfigSwitcher.java
@@ -17,6 +17,9 @@
 
 package org.apache.seatunnel.app.thridparty.datasource.impl;
 
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
+import org.apache.seatunnel.shade.com.typesafe.config.ConfigValueFactory;
+
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.app.domain.request.connector.BusinessMode;
 import org.apache.seatunnel.app.domain.request.job.DataSourceOption;
@@ -27,15 +30,12 @@ import org.apache.seatunnel.app.thridparty.datasource.AbstractDataSourceConfigSw
 import org.apache.seatunnel.app.thridparty.datasource.SchemaGenerator;
 import org.apache.seatunnel.common.constants.PluginType;
 
-import org.apache.seatunnel.shade.com.typesafe.config.Config;
-import org.apache.seatunnel.shade.com.typesafe.config.ConfigValueFactory;
-
 import java.util.List;
 
 public class KafkaDataSourceConfigSwitcher extends AbstractDataSourceConfigSwitcher {
 
     private static final KafkaDataSourceConfigSwitcher INSTANCE =
-        new KafkaDataSourceConfigSwitcher();
+            new KafkaDataSourceConfigSwitcher();
 
     private static final String SCHEMA = "schema";
     private static final String TOPIC = "topic";
@@ -46,13 +46,13 @@ public class KafkaDataSourceConfigSwitcher extends AbstractDataSourceConfigSwitc
 
     @Override
     public FormStructure filterOptionRule(
-        String connectorName,
-        OptionRule dataSourceOptionRule,
-        OptionRule virtualTableOptionRule,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        OptionRule connectorOptionRule,
-        List<String> excludedKeys) {
+            String connectorName,
+            OptionRule dataSourceOptionRule,
+            OptionRule virtualTableOptionRule,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            OptionRule connectorOptionRule,
+            List<String> excludedKeys) {
         if (pluginType == PluginType.SOURCE) {
             excludedKeys.add(SCHEMA);
             excludedKeys.add(TOPIC);
@@ -61,67 +61,66 @@ public class KafkaDataSourceConfigSwitcher extends AbstractDataSourceConfigSwitc
             excludedKeys.add(FORMAT);
         }
         return super.filterOptionRule(
-            connectorName,
-            dataSourceOptionRule,
-            virtualTableOptionRule,
-            businessMode,
-            pluginType,
-            connectorOptionRule,
-            excludedKeys);
+                connectorName,
+                dataSourceOptionRule,
+                virtualTableOptionRule,
+                businessMode,
+                pluginType,
+                connectorOptionRule,
+                excludedKeys);
     }
 
     @Override
     public Config mergeDatasourceConfig(
-        Config dataSourceInstanceConfig,
-        VirtualTableDetailRes virtualTableDetail,
-        DataSourceOption dataSourceOption,
-        SelectTableFields selectTableFields,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        Config connectorConfig) {
+            Config dataSourceInstanceConfig,
+            VirtualTableDetailRes virtualTableDetail,
+            DataSourceOption dataSourceOption,
+            SelectTableFields selectTableFields,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            Config connectorConfig) {
         if (pluginType == PluginType.SOURCE) {
             // Use field to generate the schema
             connectorConfig =
-                connectorConfig.withValue(
-                    TOPIC,
-                    ConfigValueFactory.fromAnyRef(
-                        virtualTableDetail.getDatasourceProperties().get(TOPIC)));
+                    connectorConfig.withValue(
+                            TOPIC,
+                            ConfigValueFactory.fromAnyRef(
+                                    virtualTableDetail.getDatasourceProperties().get(TOPIC)));
             connectorConfig =
-                connectorConfig.withValue(
-                    SCHEMA,
-                    SchemaGenerator.generateSchemaBySelectTableFields(
-                            virtualTableDetail, selectTableFields)
-                        .root());
+                    connectorConfig.withValue(
+                            SCHEMA,
+                            SchemaGenerator.generateSchemaBySelectTableFields(
+                                            virtualTableDetail, selectTableFields)
+                                    .root());
         } else if (pluginType == PluginType.SINK) {
             if (businessMode.equals(BusinessMode.DATA_INTEGRATION)) {
                 // Set the table name to topic
                 connectorConfig =
-                    connectorConfig.withValue(
-                        TOPIC,
-                        ConfigValueFactory.fromAnyRef(
-                            virtualTableDetail.getDatasourceProperties().get(TOPIC)));
+                        connectorConfig.withValue(
+                                TOPIC,
+                                ConfigValueFactory.fromAnyRef(
+                                        virtualTableDetail.getDatasourceProperties().get(TOPIC)));
             }
             if (businessMode.equals(BusinessMode.DATA_REPLICA)) {
                 connectorConfig =
-                    connectorConfig.withValue(
-                        FORMAT, ConfigValueFactory.fromAnyRef(DEBEZIUM_FORMAT));
+                        connectorConfig.withValue(
+                                FORMAT, ConfigValueFactory.fromAnyRef(DEBEZIUM_FORMAT));
             }
         } else {
             throw new UnsupportedOperationException("Unsupported plugin type: " + pluginType);
         }
 
         return super.mergeDatasourceConfig(
-            dataSourceInstanceConfig,
-            virtualTableDetail,
-            dataSourceOption,
-            selectTableFields,
-            businessMode,
-            pluginType,
-            connectorConfig);
+                dataSourceInstanceConfig,
+                virtualTableDetail,
+                dataSourceOption,
+                selectTableFields,
+                businessMode,
+                pluginType,
+                connectorConfig);
     }
 
-    private KafkaDataSourceConfigSwitcher() {
-    }
+    private KafkaDataSourceConfigSwitcher() {}
 
     public static KafkaDataSourceConfigSwitcher getInstance() {
         return INSTANCE;
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/MysqlCDCDataSourceConfigSwitcher.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/MysqlCDCDataSourceConfigSwitcher.java
index 2d99e9bd..1ea6d897 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/MysqlCDCDataSourceConfigSwitcher.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/MysqlCDCDataSourceConfigSwitcher.java
@@ -17,6 +17,10 @@
 
 package org.apache.seatunnel.app.thridparty.datasource.impl;
 
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
+import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory;
+import org.apache.seatunnel.shade.com.typesafe.config.ConfigValueFactory;
+
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.app.domain.request.connector.BusinessMode;
 import org.apache.seatunnel.app.domain.request.job.DataSourceOption;
@@ -27,21 +31,16 @@ import org.apache.seatunnel.app.dynamicforms.FormStructure;
 import org.apache.seatunnel.app.thridparty.datasource.AbstractDataSourceConfigSwitcher;
 import org.apache.seatunnel.common.constants.PluginType;
 
-import org.apache.seatunnel.shade.com.typesafe.config.Config;
-import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory;
-import org.apache.seatunnel.shade.com.typesafe.config.ConfigValueFactory;
-
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
 
 public class MysqlCDCDataSourceConfigSwitcher extends AbstractDataSourceConfigSwitcher {
 
-    private MysqlCDCDataSourceConfigSwitcher() {
-    }
+    private MysqlCDCDataSourceConfigSwitcher() {}
 
     public static final MysqlCDCDataSourceConfigSwitcher INSTANCE =
-        new MysqlCDCDataSourceConfigSwitcher();
+            new MysqlCDCDataSourceConfigSwitcher();
 
     private static final String FACTORY = "factory";
 
@@ -61,13 +60,13 @@ public class MysqlCDCDataSourceConfigSwitcher extends AbstractDataSourceConfigSw
 
     @Override
     public FormStructure filterOptionRule(
-        String connectorName,
-        OptionRule dataSourceOptionRule,
-        OptionRule virtualTableOptionRule,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        OptionRule connectorOptionRule,
-        List<String> excludedKeys) {
+            String connectorName,
+            OptionRule dataSourceOptionRule,
+            OptionRule virtualTableOptionRule,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            OptionRule connectorOptionRule,
+            List<String> excludedKeys) {
         if (PluginType.SOURCE.equals(pluginType)) {
             excludedKeys.add(DATABASE_NAMES);
             excludedKeys.add(TABLE_NAMES);
@@ -78,62 +77,62 @@ public class MysqlCDCDataSourceConfigSwitcher extends AbstractDataSourceConfigSw
             throw new UnsupportedOperationException("Unsupported plugin type: " + pluginType);
         }
         return super.filterOptionRule(
-            connectorName,
-            dataSourceOptionRule,
-            virtualTableOptionRule,
-            businessMode,
-            pluginType,
-            connectorOptionRule,
-            excludedKeys);
+                connectorName,
+                dataSourceOptionRule,
+                virtualTableOptionRule,
+                businessMode,
+                pluginType,
+                connectorOptionRule,
+                excludedKeys);
     }
 
     @Override
     public Config mergeDatasourceConfig(
-        Config dataSourceInstanceConfig,
-        VirtualTableDetailRes virtualTableDetail,
-        DataSourceOption dataSourceOption,
-        SelectTableFields selectTableFields,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        Config connectorConfig) {
+            Config dataSourceInstanceConfig,
+            VirtualTableDetailRes virtualTableDetail,
+            DataSourceOption dataSourceOption,
+            SelectTableFields selectTableFields,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            Config connectorConfig) {
         if (PluginType.SOURCE.equals(pluginType)) {
             // Add table-names
             Config config = ConfigFactory.empty();
             config = config.withValue(FACTORY, ConfigValueFactory.fromAnyRef("Mysql"));
             connectorConfig = connectorConfig.withValue(CATALOG, config.root());
             connectorConfig =
-                connectorConfig.withValue(
-                    DATABASE_NAMES,
-                    ConfigValueFactory.fromIterable(dataSourceOption.getDatabases()));
+                    connectorConfig.withValue(
+                            DATABASE_NAMES,
+                            ConfigValueFactory.fromIterable(dataSourceOption.getDatabases()));
             connectorConfig =
-                connectorConfig.withValue(
-                    TABLE_NAMES,
-                    ConfigValueFactory.fromIterable(
-                        mergeDatabaseAndTables(dataSourceOption)));
+                    connectorConfig.withValue(
+                            TABLE_NAMES,
+                            ConfigValueFactory.fromIterable(
+                                    mergeDatabaseAndTables(dataSourceOption)));
 
             if (businessMode.equals(BusinessMode.DATA_INTEGRATION)) {
                 connectorConfig =
-                    connectorConfig.withValue(
-                        FORMAT_KEY, ConfigValueFactory.fromAnyRef(DEFAULT_FORMAT));
+                        connectorConfig.withValue(
+                                FORMAT_KEY, ConfigValueFactory.fromAnyRef(DEFAULT_FORMAT));
             } else if (businessMode.equals(BusinessMode.DATA_REPLICA)
-                && connectorConfig
-                .getString(FORMAT_KEY)
-                .toUpperCase(Locale.ROOT)
-                .equals(DEBEZIUM_FORMAT)) {
+                    && connectorConfig
+                            .getString(FORMAT_KEY)
+                            .toUpperCase(Locale.ROOT)
+                            .equals(DEBEZIUM_FORMAT)) {
                 connectorConfig =
-                    connectorConfig.withValue(SCHEMA, generateDebeziumFormatSchema().root());
+                        connectorConfig.withValue(SCHEMA, generateDebeziumFormatSchema().root());
             }
         } else {
             throw new UnsupportedOperationException("Unsupported plugin type: " + pluginType);
         }
         return super.mergeDatasourceConfig(
-            dataSourceInstanceConfig,
-            virtualTableDetail,
-            dataSourceOption,
-            selectTableFields,
-            businessMode,
-            pluginType,
-            connectorConfig);
+                dataSourceInstanceConfig,
+                virtualTableDetail,
+                dataSourceOption,
+                selectTableFields,
+                businessMode,
+                pluginType,
+                connectorConfig);
     }
 
     private Config generateDebeziumFormatSchema() {
@@ -145,9 +144,9 @@ public class MysqlCDCDataSourceConfigSwitcher extends AbstractDataSourceConfigSw
         Config schema = ConfigFactory.empty();
         for (VirtualTableFieldRes virtualTableFieldRes : fieldResList) {
             schema =
-                schema.withValue(
-                    virtualTableFieldRes.getFieldName(),
-                    ConfigValueFactory.fromAnyRef(virtualTableFieldRes.getFieldType()));
+                    schema.withValue(
+                            virtualTableFieldRes.getFieldName(),
+                            ConfigValueFactory.fromAnyRef(virtualTableFieldRes.getFieldType()));
         }
         return schema.atKey("fields");
     }
@@ -155,21 +154,21 @@ public class MysqlCDCDataSourceConfigSwitcher extends AbstractDataSourceConfigSw
     private List<String> mergeDatabaseAndTables(DataSourceOption dataSourceOption) {
         List<String> tables = new ArrayList<>();
         dataSourceOption
-            .getDatabases()
-            .forEach(
-                database -> {
-                    dataSourceOption
-                        .getTables()
-                        .forEach(
-                            table -> {
-                                if (table.contains(".")) {
-                                    tables.add(table);
-                                } else {
-                                    tables.add(
-                                        getDatabaseAndTable(database, table));
-                                }
-                            });
-                });
+                .getDatabases()
+                .forEach(
+                        database -> {
+                            dataSourceOption
+                                    .getTables()
+                                    .forEach(
+                                            table -> {
+                                                if (table.contains(".")) {
+                                                    tables.add(table);
+                                                } else {
+                                                    tables.add(
+                                                            getDatabaseAndTable(database, table));
+                                                }
+                                            });
+                        });
         return tables;
     }
 
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/MysqlDatasourceConfigSwitcher.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/MysqlDatasourceConfigSwitcher.java
index c27d323d..7da09161 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/MysqlDatasourceConfigSwitcher.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/MysqlDatasourceConfigSwitcher.java
@@ -20,6 +20,5 @@ package org.apache.seatunnel.app.thridparty.datasource.impl;
 public class MysqlDatasourceConfigSwitcher extends BaseJdbcDataSourceConfigSwitcher {
     public static MysqlDatasourceConfigSwitcher INSTANCE = new MysqlDatasourceConfigSwitcher();
 
-    private MysqlDatasourceConfigSwitcher() {
-    }
+    private MysqlDatasourceConfigSwitcher() {}
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/PostgresCDCDataSourceConfigSwitcher.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/PostgresCDCDataSourceConfigSwitcher.java
index 1b282dc0..6ee6e64e 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/PostgresCDCDataSourceConfigSwitcher.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/PostgresCDCDataSourceConfigSwitcher.java
@@ -17,6 +17,10 @@
 
 package org.apache.seatunnel.app.thridparty.datasource.impl;
 
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
+import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory;
+import org.apache.seatunnel.shade.com.typesafe.config.ConfigValueFactory;
+
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.app.domain.request.connector.BusinessMode;
 import org.apache.seatunnel.app.domain.request.job.DataSourceOption;
@@ -27,21 +31,16 @@ import org.apache.seatunnel.app.dynamicforms.FormStructure;
 import org.apache.seatunnel.app.thridparty.datasource.AbstractDataSourceConfigSwitcher;
 import org.apache.seatunnel.common.constants.PluginType;
 
-import org.apache.seatunnel.shade.com.typesafe.config.Config;
-import org.apache.seatunnel.shade.com.typesafe.config.ConfigFactory;
-import org.apache.seatunnel.shade.com.typesafe.config.ConfigValueFactory;
-
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
 
 public class PostgresCDCDataSourceConfigSwitcher extends AbstractDataSourceConfigSwitcher {
 
-    private PostgresCDCDataSourceConfigSwitcher() {
-    }
+    private PostgresCDCDataSourceConfigSwitcher() {}
 
     public static final PostgresCDCDataSourceConfigSwitcher INSTANCE =
-        new PostgresCDCDataSourceConfigSwitcher();
+            new PostgresCDCDataSourceConfigSwitcher();
 
     private static final String FACTORY = "factory";
 
@@ -61,13 +60,13 @@ public class PostgresCDCDataSourceConfigSwitcher extends AbstractDataSourceConfi
 
     @Override
     public FormStructure filterOptionRule(
-        String connectorName,
-        OptionRule dataSourceOptionRule,
-        OptionRule virtualTableOptionRule,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        OptionRule connectorOptionRule,
-        List<String> excludedKeys) {
+            String connectorName,
+            OptionRule dataSourceOptionRule,
+            OptionRule virtualTableOptionRule,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            OptionRule connectorOptionRule,
+            List<String> excludedKeys) {
         if (PluginType.SOURCE.equals(pluginType)) {
             excludedKeys.add(DATABASE_NAMES);
             excludedKeys.add(TABLE_NAMES);
@@ -78,62 +77,62 @@ public class PostgresCDCDataSourceConfigSwitcher extends AbstractDataSourceConfi
             throw new UnsupportedOperationException("Unsupported plugin type: " + pluginType);
         }
         return super.filterOptionRule(
-            connectorName,
-            dataSourceOptionRule,
-            virtualTableOptionRule,
-            businessMode,
-            pluginType,
-            connectorOptionRule,
-            excludedKeys);
+                connectorName,
+                dataSourceOptionRule,
+                virtualTableOptionRule,
+                businessMode,
+                pluginType,
+                connectorOptionRule,
+                excludedKeys);
     }
 
     @Override
     public Config mergeDatasourceConfig(
-        Config dataSourceInstanceConfig,
-        VirtualTableDetailRes virtualTableDetail,
-        DataSourceOption dataSourceOption,
-        SelectTableFields selectTableFields,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        Config connectorConfig) {
+            Config dataSourceInstanceConfig,
+            VirtualTableDetailRes virtualTableDetail,
+            DataSourceOption dataSourceOption,
+            SelectTableFields selectTableFields,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            Config connectorConfig) {
         if (PluginType.SOURCE.equals(pluginType)) {
             // Add table-names
             Config config = ConfigFactory.empty();
             config = config.withValue(FACTORY, ConfigValueFactory.fromAnyRef("Postgres"));
             connectorConfig = connectorConfig.withValue(CATALOG, config.root());
             connectorConfig =
-                connectorConfig.withValue(
-                    DATABASE_NAMES,
-                    ConfigValueFactory.fromIterable(dataSourceOption.getDatabases()));
+                    connectorConfig.withValue(
+                            DATABASE_NAMES,
+                            ConfigValueFactory.fromIterable(dataSourceOption.getDatabases()));
             connectorConfig =
-                connectorConfig.withValue(
-                    TABLE_NAMES,
-                    ConfigValueFactory.fromIterable(
-                        mergeDatabaseAndTables(dataSourceOption)));
+                    connectorConfig.withValue(
+                            TABLE_NAMES,
+                            ConfigValueFactory.fromIterable(
+                                    mergeDatabaseAndTables(dataSourceOption)));
 
             if (businessMode.equals(BusinessMode.DATA_INTEGRATION)) {
                 connectorConfig =
-                    connectorConfig.withValue(
-                        FORMAT_KEY, ConfigValueFactory.fromAnyRef(DEFAULT_FORMAT));
+                        connectorConfig.withValue(
+                                FORMAT_KEY, ConfigValueFactory.fromAnyRef(DEFAULT_FORMAT));
             } else if (businessMode.equals(BusinessMode.DATA_REPLICA)
-                && connectorConfig
-                .getString(FORMAT_KEY)
-                .toUpperCase(Locale.ROOT)
-                .equals(DEBEZIUM_FORMAT)) {
+                    && connectorConfig
+                            .getString(FORMAT_KEY)
+                            .toUpperCase(Locale.ROOT)
+                            .equals(DEBEZIUM_FORMAT)) {
                 connectorConfig =
-                    connectorConfig.withValue(SCHEMA, generateDebeziumFormatSchema().root());
+                        connectorConfig.withValue(SCHEMA, generateDebeziumFormatSchema().root());
             }
         } else {
             throw new UnsupportedOperationException("Unsupported plugin type: " + pluginType);
         }
         return super.mergeDatasourceConfig(
-            dataSourceInstanceConfig,
-            virtualTableDetail,
-            dataSourceOption,
-            selectTableFields,
-            businessMode,
-            pluginType,
-            connectorConfig);
+                dataSourceInstanceConfig,
+                virtualTableDetail,
+                dataSourceOption,
+                selectTableFields,
+                businessMode,
+                pluginType,
+                connectorConfig);
     }
 
     private Config generateDebeziumFormatSchema() {
@@ -145,9 +144,9 @@ public class PostgresCDCDataSourceConfigSwitcher extends AbstractDataSourceConfi
         Config schema = ConfigFactory.empty();
         for (VirtualTableFieldRes virtualTableFieldRes : fieldResList) {
             schema =
-                schema.withValue(
-                    virtualTableFieldRes.getFieldName(),
-                    ConfigValueFactory.fromAnyRef(virtualTableFieldRes.getFieldType()));
+                    schema.withValue(
+                            virtualTableFieldRes.getFieldName(),
+                            ConfigValueFactory.fromAnyRef(virtualTableFieldRes.getFieldType()));
         }
         return schema.atKey("fields");
     }
@@ -156,26 +155,26 @@ public class PostgresCDCDataSourceConfigSwitcher extends AbstractDataSourceConfi
     private List<String> mergeDatabaseAndTables(DataSourceOption dataSourceOption) {
         List<String> tables = new ArrayList<>();
         dataSourceOption
-            .getDatabases()
-            .forEach(
-                database -> {
-                    dataSourceOption
-                        .getTables()
-                        .forEach(
-                            table -> {
-                                final String[] tableFragments = table.split("\\.");
-                                if (tableFragments.length == 3) {
-                                    tables.add(table);
-                                } else if (tableFragments.length == 2) {
-                                    tables.add(
-                                        getDatabaseAndTable(database, table));
-                                } else {
-                                    throw new IllegalArgumentException(
-                                        "Illegal postgres table-name: "
-                                            + table);
-                                }
-                            });
-                });
+                .getDatabases()
+                .forEach(
+                        database -> {
+                            dataSourceOption
+                                    .getTables()
+                                    .forEach(
+                                            table -> {
+                                                final String[] tableFragments = table.split("\\.");
+                                                if (tableFragments.length == 3) {
+                                                    tables.add(table);
+                                                } else if (tableFragments.length == 2) {
+                                                    tables.add(
+                                                            getDatabaseAndTable(database, table));
+                                                } else {
+                                                    throw new IllegalArgumentException(
+                                                            "Illegal postgres table-name: "
+                                                                    + table);
+                                                }
+                                            });
+                        });
         return tables;
     }
 
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/PostgresqlDataSourceConfigSwitcher.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/PostgresqlDataSourceConfigSwitcher.java
index 90a983d9..ac307bb0 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/PostgresqlDataSourceConfigSwitcher.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/PostgresqlDataSourceConfigSwitcher.java
@@ -26,18 +26,17 @@ import java.util.List;
 public class PostgresqlDataSourceConfigSwitcher extends BaseJdbcDataSourceConfigSwitcher {
 
     private static final PostgresqlDataSourceConfigSwitcher INSTANCE =
-        new PostgresqlDataSourceConfigSwitcher();
+            new PostgresqlDataSourceConfigSwitcher();
 
-    private PostgresqlDataSourceConfigSwitcher() {
-    }
+    private PostgresqlDataSourceConfigSwitcher() {}
 
     protected String tableFieldsToSql(List<String> tableFields, String database, String fullTable) {
 
         String[] split = fullTable.split("\\.");
         if (split.length != 2) {
             throw new SeaTunnelException(
-                "The tableName for postgres must be schemaName.tableName, but tableName is "
-                    + fullTable);
+                    "The tableName for postgres must be schemaName.tableName, but tableName is "
+                            + fullTable);
         }
 
         String schemaName = split[0];
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/RedshiftDataSourceConfigSwitcher.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/RedshiftDataSourceConfigSwitcher.java
index 98da884c..d01a843a 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/RedshiftDataSourceConfigSwitcher.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/RedshiftDataSourceConfigSwitcher.java
@@ -24,7 +24,7 @@ import java.util.List;
 
 public class RedshiftDataSourceConfigSwitcher extends BaseJdbcDataSourceConfigSwitcher {
     private static final RedshiftDataSourceConfigSwitcher INSTANCE =
-        new RedshiftDataSourceConfigSwitcher();
+            new RedshiftDataSourceConfigSwitcher();
 
     public static final RedshiftDataSourceConfigSwitcher getInstance() {
         return INSTANCE;
@@ -35,8 +35,8 @@ public class RedshiftDataSourceConfigSwitcher extends BaseJdbcDataSourceConfigSw
         String[] split = fullTable.split("\\.");
         if (split.length != 2) {
             throw new SeaTunnelException(
-                "The tableName for postgres must be schemaName.tableName, but tableName is "
-                    + fullTable);
+                    "The tableName for postgres must be schemaName.tableName, but tableName is "
+                            + fullTable);
         }
 
         String schemaName = split[0];
@@ -53,6 +53,5 @@ public class RedshiftDataSourceConfigSwitcher extends BaseJdbcDataSourceConfigSw
         return JdbcUtils.replaceDatabase(url, databaseName);
     }
 
-    private RedshiftDataSourceConfigSwitcher() {
-    }
+    private RedshiftDataSourceConfigSwitcher() {}
 }
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/S3DataSourceConfigSwitcher.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/S3DataSourceConfigSwitcher.java
index d0c30cf0..b673d039 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/S3DataSourceConfigSwitcher.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/S3DataSourceConfigSwitcher.java
@@ -17,6 +17,9 @@
 
 package org.apache.seatunnel.app.thridparty.datasource.impl;
 
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
+import org.apache.seatunnel.shade.com.typesafe.config.ConfigValueFactory;
+
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.app.domain.request.connector.BusinessMode;
 import org.apache.seatunnel.app.domain.request.job.DataSourceOption;
@@ -28,9 +31,6 @@ import org.apache.seatunnel.app.thridparty.datasource.SchemaGenerator;
 import org.apache.seatunnel.common.constants.PluginType;
 import org.apache.seatunnel.datasource.plugin.s3.S3OptionRule;
 
-import org.apache.seatunnel.shade.com.typesafe.config.Config;
-import org.apache.seatunnel.shade.com.typesafe.config.ConfigValueFactory;
-
 import lombok.extern.slf4j.Slf4j;
 
 import java.util.List;
@@ -38,145 +38,145 @@ import java.util.List;
 @Slf4j
 public class S3DataSourceConfigSwitcher extends AbstractDataSourceConfigSwitcher {
 
-    private S3DataSourceConfigSwitcher() {
-    }
+    private S3DataSourceConfigSwitcher() {}
 
     private static final S3DataSourceConfigSwitcher INSTANCE = new S3DataSourceConfigSwitcher();
 
     @Override
     public FormStructure filterOptionRule(
-        String connectorName,
-        OptionRule dataSourceOptionRule,
-        OptionRule virtualTableOptionRule,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        OptionRule connectorOptionRule,
-        List<String> excludedKeys) {
+            String connectorName,
+            OptionRule dataSourceOptionRule,
+            OptionRule virtualTableOptionRule,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            OptionRule connectorOptionRule,
+            List<String> excludedKeys) {
         excludedKeys.add(S3OptionRule.PATH.key());
         if (PluginType.SOURCE.equals(pluginType)) {
             excludedKeys.add(S3OptionRule.SCHEMA.key());
         }
 
         return super.filterOptionRule(
-            connectorName,
-            dataSourceOptionRule,
-            virtualTableOptionRule,
-            businessMode,
-            pluginType,
-            connectorOptionRule,
-            excludedKeys);
+                connectorName,
+                dataSourceOptionRule,
+                virtualTableOptionRule,
+                businessMode,
+                pluginType,
+                connectorOptionRule,
+                excludedKeys);
     }
 
     @Override
     public Config mergeDatasourceConfig(
-        Config dataSourceInstanceConfig,
-        VirtualTableDetailRes virtualTableDetail,
-        DataSourceOption dataSourceOption,
-        SelectTableFields selectTableFields,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        Config connectorConfig) {
+            Config dataSourceInstanceConfig,
+            VirtualTableDetailRes virtualTableDetail,
+            DataSourceOption dataSourceOption,
+            SelectTableFields selectTableFields,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            Config connectorConfig) {
         if (PluginType.SOURCE.equals(pluginType)) {
             connectorConfig =
-                connectorConfig
-                    .withValue(
-                        S3OptionRule.SCHEMA.key(),
-                        SchemaGenerator.generateSchemaBySelectTableFields(
-                                virtualTableDetail, selectTableFields)
-                            .root())
-                    .withValue(
-                        S3OptionRule.PATH.key(),
-                        ConfigValueFactory.fromAnyRef(
-                            virtualTableDetail
-                                .getDatasourceProperties()
-                                .get(S3OptionRule.PATH.key())))
-                    .withValue(
-                        S3OptionRule.TYPE.key(),
-                        ConfigValueFactory.fromAnyRef(
-                            virtualTableDetail
-                                .getDatasourceProperties()
-                                .get(S3OptionRule.TYPE.key())))
-                    .withValue(
-                        S3OptionRule.PARSE_PARSE_PARTITION_FROM_PATH.key(),
-                        ConfigValueFactory.fromAnyRef(
-                            virtualTableDetail
-                                .getDatasourceProperties()
-                                .get(
-                                    S3OptionRule
-                                        .PARSE_PARSE_PARTITION_FROM_PATH
-                                        .key())))
-                    .withValue(
-                        S3OptionRule.DATE_FORMAT.key(),
-                        ConfigValueFactory.fromAnyRef(
-                            virtualTableDetail
-                                .getDatasourceProperties()
-                                .get(S3OptionRule.DATE_FORMAT.key())))
-                    .withValue(
-                        S3OptionRule.DATETIME_FORMAT.key(),
-                        ConfigValueFactory.fromAnyRef(
-                            virtualTableDetail
-                                .getDatasourceProperties()
-                                .get(S3OptionRule.DATETIME_FORMAT.key())))
-                    .withValue(
-                        S3OptionRule.TIME_FORMAT.key(),
-                        ConfigValueFactory.fromAnyRef(
-                            virtualTableDetail
-                                .getDatasourceProperties()
-                                .get(S3OptionRule.TIME_FORMAT.key())));
+                    connectorConfig
+                            .withValue(
+                                    S3OptionRule.SCHEMA.key(),
+                                    SchemaGenerator.generateSchemaBySelectTableFields(
+                                                    virtualTableDetail, selectTableFields)
+                                            .root())
+                            .withValue(
+                                    S3OptionRule.PATH.key(),
+                                    ConfigValueFactory.fromAnyRef(
+                                            virtualTableDetail
+                                                    .getDatasourceProperties()
+                                                    .get(S3OptionRule.PATH.key())))
+                            .withValue(
+                                    S3OptionRule.TYPE.key(),
+                                    ConfigValueFactory.fromAnyRef(
+                                            virtualTableDetail
+                                                    .getDatasourceProperties()
+                                                    .get(S3OptionRule.TYPE.key())))
+                            .withValue(
+                                    S3OptionRule.PARSE_PARSE_PARTITION_FROM_PATH.key(),
+                                    ConfigValueFactory.fromAnyRef(
+                                            virtualTableDetail
+                                                    .getDatasourceProperties()
+                                                    .get(
+                                                            S3OptionRule
+                                                                    .PARSE_PARSE_PARTITION_FROM_PATH
+                                                                    .key())))
+                            .withValue(
+                                    S3OptionRule.DATE_FORMAT.key(),
+                                    ConfigValueFactory.fromAnyRef(
+                                            virtualTableDetail
+                                                    .getDatasourceProperties()
+                                                    .get(S3OptionRule.DATE_FORMAT.key())))
+                            .withValue(
+                                    S3OptionRule.DATETIME_FORMAT.key(),
+                                    ConfigValueFactory.fromAnyRef(
+                                            virtualTableDetail
+                                                    .getDatasourceProperties()
+                                                    .get(S3OptionRule.DATETIME_FORMAT.key())))
+                            .withValue(
+                                    S3OptionRule.TIME_FORMAT.key(),
+                                    ConfigValueFactory.fromAnyRef(
+                                            virtualTableDetail
+                                                    .getDatasourceProperties()
+                                                    .get(S3OptionRule.TIME_FORMAT.key())));
         } else if (PluginType.SINK.equals(pluginType)) {
-            if (virtualTableDetail.getDatasourceProperties().get(S3OptionRule.TIME_FORMAT.key()) == null) {
+            if (virtualTableDetail.getDatasourceProperties().get(S3OptionRule.TIME_FORMAT.key())
+                    == null) {
                 throw new IllegalArgumentException("S3 virtual table path is null");
             }
             connectorConfig =
-                connectorConfig
-                    .withValue(
-                        S3OptionRule.PATH.key(),
-                        ConfigValueFactory.fromAnyRef(
-                            virtualTableDetail
-                                .getDatasourceProperties()
-                                .get(S3OptionRule.PATH.key())))
-                    .withValue(
-                        S3OptionRule.TYPE.key(),
-                        ConfigValueFactory.fromAnyRef(
-                            virtualTableDetail
-                                .getDatasourceProperties()
-                                .get(S3OptionRule.TYPE.key())))
-                    .withValue(
-                        S3OptionRule.PARSE_PARSE_PARTITION_FROM_PATH.key(),
-                        ConfigValueFactory.fromAnyRef(
-                            virtualTableDetail
-                                .getDatasourceProperties()
-                                .get(
-                                    S3OptionRule
-                                        .PARSE_PARSE_PARTITION_FROM_PATH
-                                        .key())))
-                    .withValue(
-                        S3OptionRule.DATE_FORMAT.key(),
-                        ConfigValueFactory.fromAnyRef(
-                            virtualTableDetail
-                                .getDatasourceProperties()
-                                .get(S3OptionRule.DATE_FORMAT.key())))
-                    .withValue(
-                        S3OptionRule.DATETIME_FORMAT.key(),
-                        ConfigValueFactory.fromAnyRef(
-                            virtualTableDetail
-                                .getDatasourceProperties()
-                                .get(S3OptionRule.DATETIME_FORMAT.key())))
-                    .withValue(
-                        S3OptionRule.TIME_FORMAT.key(),
-                        ConfigValueFactory.fromAnyRef(
-                            virtualTableDetail
-                                .getDatasourceProperties()
-                                .get(S3OptionRule.TIME_FORMAT.key())));
+                    connectorConfig
+                            .withValue(
+                                    S3OptionRule.PATH.key(),
+                                    ConfigValueFactory.fromAnyRef(
+                                            virtualTableDetail
+                                                    .getDatasourceProperties()
+                                                    .get(S3OptionRule.PATH.key())))
+                            .withValue(
+                                    S3OptionRule.TYPE.key(),
+                                    ConfigValueFactory.fromAnyRef(
+                                            virtualTableDetail
+                                                    .getDatasourceProperties()
+                                                    .get(S3OptionRule.TYPE.key())))
+                            .withValue(
+                                    S3OptionRule.PARSE_PARSE_PARTITION_FROM_PATH.key(),
+                                    ConfigValueFactory.fromAnyRef(
+                                            virtualTableDetail
+                                                    .getDatasourceProperties()
+                                                    .get(
+                                                            S3OptionRule
+                                                                    .PARSE_PARSE_PARTITION_FROM_PATH
+                                                                    .key())))
+                            .withValue(
+                                    S3OptionRule.DATE_FORMAT.key(),
+                                    ConfigValueFactory.fromAnyRef(
+                                            virtualTableDetail
+                                                    .getDatasourceProperties()
+                                                    .get(S3OptionRule.DATE_FORMAT.key())))
+                            .withValue(
+                                    S3OptionRule.DATETIME_FORMAT.key(),
+                                    ConfigValueFactory.fromAnyRef(
+                                            virtualTableDetail
+                                                    .getDatasourceProperties()
+                                                    .get(S3OptionRule.DATETIME_FORMAT.key())))
+                            .withValue(
+                                    S3OptionRule.TIME_FORMAT.key(),
+                                    ConfigValueFactory.fromAnyRef(
+                                            virtualTableDetail
+                                                    .getDatasourceProperties()
+                                                    .get(S3OptionRule.TIME_FORMAT.key())));
         }
         return super.mergeDatasourceConfig(
-            dataSourceInstanceConfig,
-            virtualTableDetail,
-            dataSourceOption,
-            selectTableFields,
-            businessMode,
-            pluginType,
-            connectorConfig);
+                dataSourceInstanceConfig,
+                virtualTableDetail,
+                dataSourceOption,
+                selectTableFields,
+                businessMode,
+                pluginType,
+                connectorConfig);
     }
 
     public static S3DataSourceConfigSwitcher getInstance() {
diff --git a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/S3RedshiftDataSourceConfigSwitcher.java b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/S3RedshiftDataSourceConfigSwitcher.java
index dd7b208a..3c816612 100644
--- a/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/S3RedshiftDataSourceConfigSwitcher.java
+++ b/seatunnel-server/seatunnel-app/src/main/java/org/apache/seatunnel/app/thridparty/datasource/impl/S3RedshiftDataSourceConfigSwitcher.java
@@ -17,6 +17,8 @@
 
 package org.apache.seatunnel.app.thridparty.datasource.impl;
 
+import org.apache.seatunnel.shade.com.typesafe.config.Config;
+
 import org.apache.seatunnel.api.configuration.util.OptionRule;
 import org.apache.seatunnel.app.domain.request.connector.BusinessMode;
 import org.apache.seatunnel.app.domain.request.job.DataSourceOption;
@@ -26,56 +28,53 @@ import org.apache.seatunnel.app.dynamicforms.FormStructure;
 import org.apache.seatunnel.app.thridparty.datasource.AbstractDataSourceConfigSwitcher;
 import org.apache.seatunnel.common.constants.PluginType;
 
-import org.apache.seatunnel.shade.com.typesafe.config.Config;
-
 import java.util.List;
 
 public class S3RedshiftDataSourceConfigSwitcher extends AbstractDataSourceConfigSwitcher {
 
-    private S3RedshiftDataSourceConfigSwitcher() {
-    }
+    private S3RedshiftDataSourceConfigSwitcher() {}
 
     private static final S3RedshiftDataSourceConfigSwitcher INSTANCE =
-        new S3RedshiftDataSourceConfigSwitcher();
+            new S3RedshiftDataSourceConfigSwitcher();
 
     @Override
     public FormStructure filterOptionRule(
-        String connectorName,
-        OptionRule dataSourceOptionRule,
-        OptionRule virtualTableOptionRule,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        OptionRule connectorOptionRule,
-        List<String> excludedKeys) {
+            String connectorName,
+            OptionRule dataSourceOptionRule,
+            OptionRule virtualTableOptionRule,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            OptionRule connectorOptionRule,
+            List<String> excludedKeys) {
         excludedKeys.add("access_key");
         excludedKeys.add("secret_key");
         return super.filterOptionRule(
-            connectorName,
-            dataSourceOptionRule,
-            virtualTableOptionRule,
-            businessMode,
-            pluginType,
-            connectorOptionRule,
-            excludedKeys);
+                connectorName,
+                dataSourceOptionRule,
+                virtualTableOptionRule,
+                businessMode,
+                pluginType,
+                connectorOptionRule,
+                excludedKeys);
     }
 
     @Override
     public Config mergeDatasourceConfig(
-        Config dataSourceInstanceConfig,
-        VirtualTableDetailRes virtualTableDetail,
-        DataSourceOption dataSourceOption,
-        SelectTableFields selectTableFields,
-        BusinessMode businessMode,
-        PluginType pluginType,
-        Config connectorConfig) {
+            Config dataSourceInstanceConfig,
+            VirtualTableDetailRes virtualTableDetail,
+            DataSourceOption dataSourceOption,
+            SelectTableFields selectTableFields,
+            BusinessMode businessMode,
+            PluginType pluginType,
+            Config connectorConfig) {
         return super.mergeDatasourceConfig(
-            dataSourceInstanceConfig,
-            virtualTableDetail,
-            dataSourceOption,
-            selectTableFields,
-            businessMode,
-            pluginType,
-            connectorConfig);
... 4604 lines suppressed ...