Posted to notifications@shardingsphere.apache.org by zh...@apache.org on 2022/08/25 07:43:56 UTC

[shardingsphere] branch master updated: Reopen openGauss IT and fix the problem at IT cases. (#20498)

This is an automated email from the ASF dual-hosted git repository.

zhonghongsheng pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git


The following commit(s) were added to refs/heads/master by this push:
     new 15e2c442a0a Reopen openGauss IT and fix the problem at IT cases. (#20498)
15e2c442a0a is described below

commit 15e2c442a0a5f9894dac2c9f7a6b57ef29acd0ec
Author: Xinze Guo <10...@users.noreply.github.com>
AuthorDate: Thu Aug 25 15:43:46 2022 +0800

    Reopen openGauss IT and fix the problem at IT cases. (#20498)
    
    * Reopen opengauss IT and fix the problem at IT cases.
    
    * Add openGauss data source checker
    
    * Remove unused
    
    * Fix create table sql generator IT error
    
    * Change refer url
---
 .github/workflows/it-scaling.yml                   |  4 ++
 .../metadata/generator/PipelineDDLGenerator.java   | 14 +++---
 .../scenario/migration/MigrationJobPreparer.java   |  8 ++--
 .../datasource/OpenGaussDataSourceChecker.java     | 51 +++++++++++++++++++++-
 .../impl/ShardingSphereProxyClusterContainer.java  |  2 +-
 .../atomic/storage/DockerStorageContainer.java     |  7 +++
 .../pipeline/cases/base/BaseExtraSQLITCase.java    |  9 ++--
 .../data/pipeline/cases/base/BaseITCase.java       | 44 ++++++++++---------
 .../cases/general/CreateTableSQLGeneratorIT.java   |  6 ++-
 .../cases/general/MySQLMigrationGeneralIT.java     |  5 ++-
 .../general/PostgreSQLMigrationGeneralIT.java      |  7 ++-
 .../src/test/resources/env/opengauss/01-initdb.sql |  1 -
 .../src/test/resources/env/opengauss/pg_hba.conf   |  2 +-
 .../src/test/resources/env/opengauss/server.yaml   |  3 +-
 14 files changed, 118 insertions(+), 45 deletions(-)

diff --git a/.github/workflows/it-scaling.yml b/.github/workflows/it-scaling.yml
index 4a8d932c0a6..aeddfa4138f 100644
--- a/.github/workflows/it-scaling.yml
+++ b/.github/workflows/it-scaling.yml
@@ -98,6 +98,8 @@ jobs:
         run: ./mvnw -nsu -B install -f shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/pom.xml -Dscaling.it.env.type=docker -Dscaling.it.docker.mysql.version=${{ env.mysql_version }}
       - name: Run Scaling PostgreSQL Integration Test
         run: ./mvnw -nsu -B install -f shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/pom.xml -Dscaling.it.env.type=docker -Dscaling.it.docker.postgresql.version=${{ env.postgresql_version }}
+      - name: Run Scaling openGauss Integration Test
+        run: ./mvnw -nsu -B install -f shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/pom.xml -Dscaling.it.env.type=docker -Dscaling.it.docker.opengauss.version=${{ env.opengauss_version }}
 
   scaling-daily-it-test:
     if: (github.event_name == 'schedule' && github.repository == 'apache/shardingsphere')
@@ -126,3 +128,5 @@ jobs:
         run: ./mvnw -nsu -B install -f shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/pom.xml -Dscaling.it.env.type=docker -Dscaling.it.docker.mysql.version=${{ env.mysql_version }}
       - name: Run Scaling Daily PostgreSQL Integration Test
         run: ./mvnw -nsu -B install -f shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/pom.xml -Dscaling.it.env.type=docker -Dscaling.it.docker.postgresql.version=${{ env.postgresql_version }}
+      - name: Run Scaling Daily openGauss Integration Test
+        run: ./mvnw -nsu -B install -f shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/pom.xml -Dscaling.it.env.type=docker -Dscaling.it.docker.opengauss.version=${{ env.opengauss_version }}
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/metadata/generator/PipelineDDLGenerator.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/metadata/generator/PipelineDDLGenerator.java
index 449e31ed25b..ecff9462e74 100644
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/metadata/generator/PipelineDDLGenerator.java
+++ b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/core/metadata/generator/PipelineDDLGenerator.java
@@ -182,12 +182,16 @@ public final class PipelineDDLGenerator {
         SQLStatementContext<?> sqlStatementContext = queryContext.getSqlStatementContext();
         if (sqlStatementContext instanceof CreateTableStatementContext || sqlStatementContext instanceof CommentStatementContext
                 || sqlStatementContext instanceof CreateIndexStatementContext || sqlStatementContext instanceof AlterTableStatementContext) {
-            if (!sqlStatementContext.getTablesContext().getTables().isEmpty()) {
-                TableNameSegment tableNameSegment = sqlStatementContext.getTablesContext().getTables().iterator().next().getTableName();
-                Map<SQLSegment, String> replaceMap = new TreeMap<>(Comparator.comparing(SQLSegment::getStartIndex));
-                replaceMap.put(tableNameSegment, prefix + tableNameSegment.getIdentifier().getValue());
-                return doDecorateActualTable(replaceMap, sql);
+            if (sqlStatementContext.getTablesContext().getTables().isEmpty()) {
+                return sql;
             }
+            TableNameSegment tableNameSegment = sqlStatementContext.getTablesContext().getTables().iterator().next().getTableName();
+            if (sqlStatementContext.getTablesContext().getSchemaName().isPresent()) {
+                return sql;
+            }
+            Map<SQLSegment, String> replaceMap = new TreeMap<>(Comparator.comparing(SQLSegment::getStartIndex));
+            replaceMap.put(tableNameSegment, prefix + tableNameSegment.getIdentifier().getValue());
+            return doDecorateActualTable(replaceMap, sql);
         }
         return sql;
     }
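
Note: the refactored branch above now short-circuits before decorating — SQL with no table to decorate, or SQL that already carries an explicit schema name, is returned unchanged. A simplified, self-contained sketch of that control flow (illustrative names, not the project's API):

    // Simplified illustration of the decoration flow after this change:
    // return the SQL untouched when there is no table to decorate or when
    // an explicit schema name is already present; otherwise prefix the table name.
    static String decorateActualTableSketch(final String sql, final String prefix, final String tableName, final String schemaName) {
        if (null == tableName || tableName.isEmpty()) {
            return sql;
        }
        if (null != schemaName && !schemaName.isEmpty()) {
            return sql;
        }
        return sql.replace(tableName, prefix + tableName);
    }
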
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationJobPreparer.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationJobPreparer.java
index b0b6e428de7..b11f3deb02a 100644
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationJobPreparer.java
+++ b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-core/src/main/java/org/apache/shardingsphere/data/pipeline/scenario/migration/MigrationJobPreparer.java
@@ -100,15 +100,17 @@ public final class MigrationJobPreparer {
         String lockName = "prepare-" + jobConfig.getJobId();
         LockContext lockContext = PipelineContext.getContextManager().getInstanceContext().getLockContext();
         LockDefinition lockDefinition = new ExclusiveLockDefinition(lockName);
-        JOB_API.persistJobItemProgress(jobItemContext);
+        if (null == JOB_API.getJobItemProgress(jobItemContext.getJobId(), jobItemContext.getShardingItem())) {
+            JOB_API.persistJobItemProgress(jobItemContext);
+        }
         if (lockContext.tryLock(lockDefinition, 180000)) {
             log.info("try lock success, jobId={}, shardingItem={}", jobConfig.getJobId(), jobItemContext.getShardingItem());
             try {
                 InventoryIncrementalJobItemProgress jobItemProgress = JOB_API.getJobItemProgress(jobItemContext.getJobId(), jobItemContext.getShardingItem());
-                boolean prepareFlag = null == jobItemProgress || JobStatus.PREPARING.equals(jobItemProgress.getStatus()) || JobStatus.RUNNING.equals(jobItemProgress.getStatus())
+                boolean prepareFlag = JobStatus.PREPARING.equals(jobItemProgress.getStatus()) || JobStatus.RUNNING.equals(jobItemProgress.getStatus())
                         || JobStatus.PREPARING_FAILURE.equals(jobItemProgress.getStatus());
                 if (prepareFlag) {
-                    log.info("execute prepare, jobId={}, shardingItem={}", jobConfig.getJobId(), jobItemContext.getShardingItem());
+                    log.info("execute prepare, jobId={}, shardingItem={}, jobStatus={}", jobConfig.getJobId(), jobItemContext.getShardingItem(), jobItemProgress.getStatus());
                     jobItemContext.setStatus(JobStatus.PREPARING);
                     JOB_API.updateJobItemStatus(jobConfig.getJobId(), jobItemContext.getShardingItem(), JobStatus.PREPARING);
                     prepareAndCheckTarget(jobItemContext);
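
Note: with this change the preparer persists the initial job item progress only when none has been recorded yet, so a re-entered preparation no longer overwrites progress from an earlier run, and the prepare flag can rely on the status being non-null. A minimal sketch of that guard (repository names are assumed for the example):

    // Persist the initial progress only on the first preparation of this sharding item.
    if (null == progressRepository.get(jobId, shardingItem)) {
        progressRepository.persist(jobId, shardingItem, initialProgress);
    }
    // Afterwards the status is guaranteed to exist, so the prepare decision can be
    // made directly from it (PREPARING, RUNNING or PREPARING_FAILURE).
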
diff --git a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-dialect/shardingsphere-data-pipeline-opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/check/datasource/OpenGaussDataSourceChecker.java b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-dialect/shardingsphere-data-pipeline-opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/check/datasource/OpenGaussDataSourceChecker.java
index f1753588dbc..e545d49a39c 100644
--- a/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-dialect/shardingsphere-data-pipeline-opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/check/datasource/OpenGaussDataSourceChecker.java
+++ b/shardingsphere-kernel/shardingsphere-data-pipeline/shardingsphere-data-pipeline-dialect/shardingsphere-data-pipeline-opengauss/src/main/java/org/apache/shardingsphere/data/pipeline/opengauss/check/datasource/OpenGaussDataSourceChecker.java
@@ -17,12 +17,59 @@
 
 package org.apache.shardingsphere.data.pipeline.opengauss.check.datasource;
 
-import org.apache.shardingsphere.data.pipeline.postgresql.check.datasource.PostgreSQLDataSourceChecker;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.shardingsphere.data.pipeline.core.check.datasource.AbstractDataSourceChecker;
+import org.apache.shardingsphere.data.pipeline.core.exception.PipelineJobPrepareFailedException;
+
+import javax.sql.DataSource;
+import java.sql.Connection;
+import java.sql.DatabaseMetaData;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.Collection;
 
 /**
  * Data source checker of openGauss.
  */
-public final class OpenGaussDataSourceChecker extends PostgreSQLDataSourceChecker {
+@Slf4j
+public final class OpenGaussDataSourceChecker extends AbstractDataSourceChecker {
+    
+    private static final String SHOW_GRANTS_SQL = "SELECT * FROM pg_roles WHERE rolname = ?";
+    
+    @Override
+    public void checkPrivilege(final Collection<? extends DataSource> dataSources) {
+        for (DataSource each : dataSources) {
+            checkPrivilege(each);
+        }
+    }
+    
+    private void checkPrivilege(final DataSource dataSource) {
+        try (Connection connection = dataSource.getConnection(); PreparedStatement preparedStatement = connection.prepareStatement(SHOW_GRANTS_SQL)) {
+            DatabaseMetaData metaData = connection.getMetaData();
+            preparedStatement.setString(1, metaData.getUserName());
+            try (ResultSet resultSet = preparedStatement.executeQuery()) {
+                if (!resultSet.next()) {
+                    throw new PipelineJobPrepareFailedException(String.format("No role exists, rolname: %s.", metaData.getUserName()));
+                }
+                String isSuperRole = resultSet.getString("rolsuper");
+                String isReplicationRole = resultSet.getString("rolreplication");
+                String isSystemAdminRole = resultSet.getString("rolsystemadmin");
+                log.info("checkPrivilege: isSuperRole: {}, isReplicationRole: {}, isSystemAdminRole: {}", isSuperRole, isReplicationRole, isSystemAdminRole);
+                if (!StringUtils.equalsAnyIgnoreCase("t", isSuperRole, isReplicationRole, isSystemAdminRole)) {
+                    throw new PipelineJobPrepareFailedException(String.format("Source data source is lack of REPLICATION privileges, you could try `ALTER ROLE \"%s\" REPLICATION;`.",
+                            metaData.getUserName()));
+                }
+            }
+        } catch (final SQLException ex) {
+            throw new PipelineJobPrepareFailedException("Source data source check privileges failed.", ex);
+        }
+    }
+    
+    @Override
+    public void checkVariable(final Collection<? extends DataSource> dataSources) {
+    }
     
     @Override
     protected String getDatabaseType() {
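
Note: the new checker accepts a role when any of rolsuper, rolreplication or rolsystemadmin is 't', which is what the StringUtils.equalsAnyIgnoreCase call above expresses. A compact sketch of that predicate outside the project's class hierarchy (names are illustrative):

    // True when at least one openGauss role flag grants sufficient privilege for migration.
    static boolean hasMigrationPrivilege(final String rolsuper, final String rolreplication, final String rolsystemadmin) {
        return "t".equalsIgnoreCase(rolsuper) || "t".equalsIgnoreCase(rolreplication) || "t".equalsIgnoreCase(rolsystemadmin);
    }
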
diff --git a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-env/src/test/java/org/apache/shardingsphere/test/integration/env/container/atomic/adapter/impl/ShardingSphereProxyClusterContainer.java b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-env/src/test/java/org/apache/shardingsphere/test/integration/env/container/atomic/adapter/impl/ShardingSphereProxyClusterContainer.java
index 423cbf26c6c..d669455b864 100644
--- a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-env/src/test/java/org/apache/shardingsphere/test/integration/env/container/atomic/adapter/impl/ShardingSphereProxyClusterContainer.java
+++ b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-env/src/test/java/org/apache/shardingsphere/test/integration/env/container/atomic/adapter/impl/ShardingSphereProxyClusterContainer.java
@@ -65,7 +65,7 @@ public final class ShardingSphereProxyClusterContainer extends DockerITContainer
     
     @Override
     protected void configure() {
-        withExposedPorts(3307);
+        withExposedPorts(3307, 3308);
         mountConfigurationFiles();
         setWaitStrategy(new JdbcConnectionWaitStrategy(() -> DriverManager.getConnection(DataSourceEnvironment.getURL(databaseType,
                 getHost(), getMappedPort(3307), config.getProxyDataSourceName()), ProxyContainerConstants.USERNAME, ProxyContainerConstants.PASSWORD)));
diff --git a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-env/src/test/java/org/apache/shardingsphere/test/integration/env/container/atomic/storage/DockerStorageContainer.java b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-env/src/test/java/org/apache/shardingsphere/test/integration/env/container/atomic/storage/DockerStorageContainer.java
index 6dc29fddb5a..09203cb8cb9 100644
--- a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-env/src/test/java/org/apache/shardingsphere/test/integration/env/container/atomic/storage/DockerStorageContainer.java
+++ b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-env/src/test/java/org/apache/shardingsphere/test/integration/env/container/atomic/storage/DockerStorageContainer.java
@@ -22,6 +22,8 @@ import com.zaxxer.hikari.HikariDataSource;
 import lombok.AccessLevel;
 import lombok.Getter;
 import lombok.SneakyThrows;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.shardingsphere.infra.database.type.DatabaseType;
 import org.apache.shardingsphere.test.integration.env.container.atomic.DockerITContainer;
 import org.apache.shardingsphere.test.integration.env.container.atomic.constants.StorageContainerConstants;
@@ -44,6 +46,7 @@ import java.util.Optional;
  * Docker storage container.
  */
 @Getter
+@Slf4j
 public abstract class DockerStorageContainer extends DockerITContainer implements StorageContainer {
     
     @Getter
@@ -81,6 +84,10 @@ public abstract class DockerStorageContainer extends DockerITContainer implement
     }
     
     protected final void setCommands(final String command) {
+        if (StringUtils.isBlank(command)) {
+            log.info("command is blank, not set");
+            return;
+        }
         setCommand(command);
     }
     
diff --git a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/base/BaseExtraSQLITCase.java b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/base/BaseExtraSQLITCase.java
index 9ac98ddad79..f95ded78a51 100644
--- a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/base/BaseExtraSQLITCase.java
+++ b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/base/BaseExtraSQLITCase.java
@@ -24,6 +24,7 @@ import org.apache.shardingsphere.integration.data.pipeline.framework.param.Scali
 import org.apache.shardingsphere.test.integration.env.container.atomic.util.DatabaseTypeUtil;
 
 import javax.xml.bind.JAXB;
+import java.sql.SQLException;
 import java.util.Objects;
 
 @Slf4j
@@ -37,11 +38,11 @@ public abstract class BaseExtraSQLITCase extends BaseITCase {
         extraSQLCommand = JAXB.unmarshal(Objects.requireNonNull(BaseExtraSQLITCase.class.getClassLoader().getResource(parameterized.getScenario())), ExtraSQLCommand.class);
     }
     
-    protected void createSourceOrderTable() {
+    protected void createSourceOrderTable() throws SQLException {
         sourceExecuteWithLog(extraSQLCommand.getCreateTableOrder());
     }
     
-    protected void createSourceTableIndexList(final String schema) {
+    protected void createSourceTableIndexList(final String schema) throws SQLException {
         if (DatabaseTypeUtil.isPostgreSQL(getDatabaseType())) {
             sourceExecuteWithLog(String.format("CREATE INDEX IF NOT EXISTS idx_user_id ON %s.t_order ( user_id )", schema));
         } else if (DatabaseTypeUtil.isOpenGauss(getDatabaseType())) {
@@ -49,11 +50,11 @@ public abstract class BaseExtraSQLITCase extends BaseITCase {
         }
     }
     
-    protected void createSourceCommentOnList(final String schema) {
+    protected void createSourceCommentOnList(final String schema) throws SQLException {
         sourceExecuteWithLog(String.format("COMMENT ON COLUMN %s.t_order.user_id IS 'user id'", schema));
     }
     
-    protected void createSourceOrderItemTable() {
+    protected void createSourceOrderItemTable() throws SQLException {
         sourceExecuteWithLog(extraSQLCommand.getCreateTableOrderItem());
     }
 }
diff --git a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/base/BaseITCase.java b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/base/BaseITCase.java
index 53b3b6c45f4..7092d4bc933 100644
--- a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/base/BaseITCase.java
+++ b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/base/BaseITCase.java
@@ -42,7 +42,6 @@ import org.apache.shardingsphere.test.integration.env.container.atomic.util.Data
 import org.apache.shardingsphere.test.integration.env.runtime.DataSourceEnvironment;
 import org.junit.Rule;
 import org.opengauss.util.PSQLException;
-import org.springframework.jdbc.BadSqlGrammarException;
 
 import javax.sql.DataSource;
 import javax.xml.bind.JAXB;
@@ -195,8 +194,7 @@ public abstract class BaseITCase {
         connectionExecuteWithLog(connection, addSourceResource);
     }
     
-    @SneakyThrows
-    protected void addTargetResource() {
+    protected void addTargetResource() throws SQLException {
         String addTargetResource = migrationDistSQLCommand.getAddMigrationTargetResourceTemplate().replace("${user}", username)
                 .replace("${password}", password)
                 .replace("${ds2}", getActualJdbcUrlTemplate(DS_2, true))
@@ -219,15 +217,15 @@ public abstract class BaseITCase {
         return DataSourceEnvironment.getURL(getDatabaseType(), "127.0.0.1", ENV.getActualDataSourceDefaultPort(databaseType), databaseName);
     }
     
-    protected void createTargetOrderTableRule() {
+    protected void createTargetOrderTableRule() throws SQLException {
         proxyExecuteWithLog(migrationDistSQLCommand.getCreateTargetOrderTableRule(), 3);
     }
     
-    protected void createTargetOrderItemTableRule() {
+    protected void createTargetOrderItemTableRule() throws SQLException {
         proxyExecuteWithLog(migrationDistSQLCommand.getCreateTargetOrderItemTableRule(), 3);
     }
     
-    protected void startMigrationOrder(final boolean withSchema) {
+    protected void startMigrationOrder(final boolean withSchema) throws SQLException {
         if (withSchema) {
             proxyExecuteWithLog(migrationDistSQLCommand.getMigrationOrderSingleTableWithSchema(), 5);
         } else {
@@ -235,7 +233,7 @@ public abstract class BaseITCase {
         }
     }
     
-    protected void startMigrationOrderItem(final boolean withSchema) {
+    protected void startMigrationOrderItem(final boolean withSchema) throws SQLException {
         if (withSchema) {
             proxyExecuteWithLog(migrationDistSQLCommand.getMigrationOrderItemSingleTableWithSchema(), 5);
         } else {
@@ -243,12 +241,18 @@ public abstract class BaseITCase {
         }
     }
     
-    // TODO use new DistSQL
-    protected void addMigrationProcessConfig() {
-        proxyExecuteWithLog(migrationDistSQLCommand.getAddMigrationProcessConfig(), 0);
+    protected void addMigrationProcessConfig() throws SQLException {
+        try {
+            proxyExecuteWithLog(migrationDistSQLCommand.getAddMigrationProcessConfig(), 0);
+        } catch (final SQLException ex) {
+            if ("58000".equals(ex.getSQLState())) {
+                log.warn(ex.getMessage());
+            }
+            throw ex;
+        }
     }
     
-    protected void createSourceSchema(final String schemaName) {
+    protected void createSourceSchema(final String schemaName) throws SQLException {
         if (DatabaseTypeUtil.isPostgreSQL(databaseType)) {
             sourceExecuteWithLog(String.format("CREATE SCHEMA IF NOT EXISTS %s", schemaName));
             return;
@@ -256,9 +260,9 @@ public abstract class BaseITCase {
         if (DatabaseTypeUtil.isOpenGauss(databaseType)) {
             try {
                 sourceExecuteWithLog(String.format("CREATE SCHEMA %s", schemaName));
-            } catch (final BadSqlGrammarException ex) {
+            } catch (final SQLException ex) {
                 // only used for native mode.
-                if (ex.getCause() instanceof PSQLException && "42P06".equals(((PSQLException) ex.getCause()).getSQLState())) {
+                if (ex instanceof PSQLException && "42P06".equals(ex.getSQLState())) {
                     log.info("Schema {} already exists.", schemaName);
                 } else {
                     throw ex;
@@ -267,16 +271,14 @@ public abstract class BaseITCase {
         }
     }
     
-    @SneakyThrows(SQLException.class)
-    protected void sourceExecuteWithLog(final String sql) {
+    protected void sourceExecuteWithLog(final String sql) throws SQLException {
         log.info("source execute :{}", sql);
         try (Connection connection = sourceDataSource.getConnection()) {
             connection.createStatement().execute(sql);
         }
     }
     
-    @SneakyThrows(SQLException.class)
-    protected void proxyExecuteWithLog(final String sql, final int sleepSeconds) {
+    protected void proxyExecuteWithLog(final String sql, final int sleepSeconds) throws SQLException {
         log.info("proxy execute :{}", sql);
         try (Connection connection = proxyDataSource.getConnection()) {
             connection.createStatement().execute(sql);
@@ -325,16 +327,16 @@ public abstract class BaseITCase {
         getIncreaseTaskThread().start();
     }
     
-    protected void stopMigrationByJobId(final String jobId) {
+    protected void stopMigrationByJobId(final String jobId) throws SQLException {
         proxyExecuteWithLog(String.format("STOP MIGRATION '%s'", jobId), 5);
     }
     
     // TODO reopen later
-    protected void startMigrationByJobId(final String jobId) {
+    protected void startMigrationByJobId(final String jobId) throws SQLException {
         proxyExecuteWithLog(String.format("START MIGRATION '%s'", jobId), 10);
     }
     
-    protected void cleanMigrationByJobId(final String jobId) {
+    protected void cleanMigrationByJobId(final String jobId) throws SQLException {
         proxyExecuteWithLog(String.format("CLEAN MIGRATION '%s'", jobId), 1);
     }
     
@@ -370,7 +372,7 @@ public abstract class BaseITCase {
         }
     }
     
-    protected void assertGreaterThanOrderTableInitRows(final int tableInitRows, final String schema) {
+    protected void assertGreaterThanOrderTableInitRows(final int tableInitRows, final String schema) throws SQLException {
         proxyExecuteWithLog("REFRESH TABLE METADATA", 2);
         String countSQL = StringUtils.isBlank(schema) ? "SELECT COUNT(*) as count FROM t_order" : String.format("SELECT COUNT(*) as count FROM %s.t_order", schema);
         Map<String, Object> actual = queryForListWithLog(countSQL).get(0);
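
Note: across the test base class, Lombok's @SneakyThrows and Spring's BadSqlGrammarException are dropped in favor of declaring the checked SQLException, so dialect-specific conditions are matched by SQLState rather than by wrapped exception type. A minimal sketch of that pattern for the "schema already exists" case (standalone example, names assumed):

    // Handle "schema already exists" (SQLState 42P06) by state code rather than exception class.
    // Requires java.sql.Connection, java.sql.SQLException, java.sql.Statement and javax.sql.DataSource.
    static void createSchemaIfAbsent(final DataSource dataSource, final String schemaName) throws SQLException {
        try (Connection connection = dataSource.getConnection(); Statement statement = connection.createStatement()) {
            statement.execute("CREATE SCHEMA " + schemaName);
        } catch (final SQLException ex) {
            if (!"42P06".equals(ex.getSQLState())) {
                throw ex;
            }
            // duplicate_schema: the schema already exists, nothing to do
        }
    }
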
diff --git a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/general/CreateTableSQLGeneratorIT.java b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/general/CreateTableSQLGeneratorIT.java
index e90e280f7d7..6d0c0dddb0b 100644
--- a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/general/CreateTableSQLGeneratorIT.java
+++ b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/general/CreateTableSQLGeneratorIT.java
@@ -57,7 +57,7 @@ public final class CreateTableSQLGeneratorIT {
     
     private static final String MYSQL_CASE_FILE_PATH = "mysql/create-table-sql-generator.xml";
     
-    private static final String OPEN_GAUSS_CASE_FILE_PATH = "openGauss/create-table-sql-generator.xml";
+    private static final String OPEN_GAUSS_CASE_FILE_PATH = "opengauss/create-table-sql-generator.xml";
     
     private static final String PARENT_PATH = "env/scenario/createtablegenerator";
     
@@ -119,7 +119,9 @@ public final class CreateTableSQLGeneratorIT {
     }
     
     private void initData() throws SQLException {
-        storageContainer.createAccessDataSource("").getConnection().createStatement().execute("CREATE DATABASE " + DEFAULT_DATABASE);
+        try (Statement statement = storageContainer.createAccessDataSource("").getConnection().createStatement()) {
+            statement.execute("CREATE DATABASE " + DEFAULT_DATABASE);
+        }
     }
     
     private void assertIsCorrect(final Collection<String> actualSQL, final Collection<String> expectedSQL) {
diff --git a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/general/MySQLMigrationGeneralIT.java b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/general/MySQLMigrationGeneralIT.java
index e5f90c14bfe..d54f9b67e49 100644
--- a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/general/MySQLMigrationGeneralIT.java
+++ b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/general/MySQLMigrationGeneralIT.java
@@ -17,6 +17,7 @@
 
 package org.apache.shardingsphere.integration.data.pipeline.cases.general;
 
+import lombok.SneakyThrows;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.shardingsphere.infra.database.type.dialect.MySQLDatabaseType;
@@ -32,6 +33,7 @@ import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
 import org.springframework.jdbc.core.JdbcTemplate;
 
+import java.sql.SQLException;
 import java.util.Collection;
 import java.util.LinkedList;
 import java.util.List;
@@ -68,6 +70,7 @@ public final class MySQLMigrationGeneralIT extends BaseExtraSQLITCase {
     }
     
     @Test
+    @SneakyThrows
     public void assertMigrationSuccess() {
         addMigrationProcessConfig();
         createSourceOrderTable();
@@ -98,7 +101,7 @@ public final class MySQLMigrationGeneralIT extends BaseExtraSQLITCase {
         assertGreaterThanOrderTableInitRows(TABLE_INIT_ROW_COUNT, "");
     }
     
-    private void assertMigrationSuccessById(final String jobId) {
+    private void assertMigrationSuccessById(final String jobId) throws SQLException {
         waitMigrationFinished(jobId);
         assertCheckMigrationSuccess(jobId);
         stopMigrationByJobId(jobId);
diff --git a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/general/PostgreSQLMigrationGeneralIT.java b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/general/PostgreSQLMigrationGeneralIT.java
index 641fff7b910..79103f8fd0f 100644
--- a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/general/PostgreSQLMigrationGeneralIT.java
+++ b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/general/PostgreSQLMigrationGeneralIT.java
@@ -17,6 +17,7 @@
 
 package org.apache.shardingsphere.integration.data.pipeline.cases.general;
 
+import lombok.SneakyThrows;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.shardingsphere.infra.database.type.dialect.OpenGaussDatabaseType;
@@ -33,6 +34,7 @@ import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
 import org.springframework.jdbc.core.JdbcTemplate;
 
+import java.sql.SQLException;
 import java.util.Collection;
 import java.util.LinkedList;
 import java.util.List;
@@ -73,6 +75,7 @@ public final class PostgreSQLMigrationGeneralIT extends BaseExtraSQLITCase {
     }
     
     @Test
+    @SneakyThrows
     public void assertMigrationSuccess() {
         addMigrationProcessConfig();
         createSourceSchema(SCHEMA_NAME);
@@ -98,7 +101,7 @@ public final class PostgreSQLMigrationGeneralIT extends BaseExtraSQLITCase {
         assertGreaterThanOrderTableInitRows(TABLE_INIT_ROW_COUNT, SCHEMA_NAME);
     }
     
-    private void checkOrderMigration(final JdbcTemplate jdbcTemplate) {
+    private void checkOrderMigration(final JdbcTemplate jdbcTemplate) throws SQLException {
         startMigrationOrder(true);
         startIncrementTask(new PostgreSQLIncrementTask(jdbcTemplate, SCHEMA_NAME, false, 20));
         String jobId = getJobIdByTableName("t_order");
@@ -111,7 +114,7 @@ public final class PostgreSQLMigrationGeneralIT extends BaseExtraSQLITCase {
         stopMigrationByJobId(jobId);
     }
     
-    private void checkOrderItemMigration() {
+    private void checkOrderItemMigration() throws SQLException {
         startMigrationOrderItem(true);
         String jobId = getJobIdByTableName("t_order_item");
         waitMigrationFinished(jobId);
diff --git a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/resources/env/opengauss/01-initdb.sql b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/resources/env/opengauss/01-initdb.sql
index ee81d0cf561..62ce620fa4d 100644
--- a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/resources/env/opengauss/01-initdb.sql
+++ b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/resources/env/opengauss/01-initdb.sql
@@ -20,7 +20,6 @@ CREATE DATABASE scaling_it_1;
 CREATE DATABASE scaling_it_2;
 CREATE DATABASE scaling_it_3;
 CREATE DATABASE scaling_it_4;
-CREATE DATABASE scaling;
 
 GRANT CREATE, CONNECT ON DATABASE scaling_it_0 TO test_user;
 GRANT CREATE, CONNECT ON DATABASE scaling_it_1 TO test_user;
diff --git a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/resources/env/opengauss/pg_hba.conf b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/resources/env/opengauss/pg_hba.conf
index 28319b9acb0..8e16bdf15ee 100644
--- a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/resources/env/opengauss/pg_hba.conf
+++ b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/resources/env/opengauss/pg_hba.conf
@@ -30,4 +30,4 @@ host    all             all             ::1/128                 @authmethodhost@
 #host    replication     @default_username@        ::1/128                 @authmethodhost@
 #
 
-host replication scaling 0.0.0.0/0 md5
+host replication test_user 0.0.0.0/0 md5
diff --git a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/resources/env/opengauss/server.yaml b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/resources/env/opengauss/server.yaml
index 066dd9f4bb9..94c23d20771 100644
--- a/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/resources/env/opengauss/server.yaml
+++ b/shardingsphere-test/shardingsphere-integration-test/shardingsphere-integration-test-scaling/src/test/resources/env/opengauss/server.yaml
@@ -31,8 +31,7 @@ mode:
 rules:
   - !AUTHORITY
     users:
-      - root@:Root@123
-      - sharding@:sharding
+      - proxy@:Proxy@123
     provider:
       type: ALL_PERMITTED