Posted to notifications@shardingsphere.apache.org by su...@apache.org on 2022/10/11 12:32:27 UTC

[shardingsphere] branch master updated: Remove usage of ObjectUtils (#21507)

This is an automated email from the ASF dual-hosted git repository.

sunnianjun pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/shardingsphere.git


The following commit(s) were added to refs/heads/master by this push:
     new f7e397bf59e Remove usage of ObjectUtils (#21507)
f7e397bf59e is described below

commit f7e397bf59ee207618db68ca9d99aa6110277e26
Author: Liang Zhang <zh...@apache.org>
AuthorDate: Tue Oct 11 20:32:14 2022 +0800

    Remove usage of ObjectUtils (#21507)
---
 .../ShowMigrationCheckStatusQueryResultSet.java    | 15 +++++------
 .../handler/update/MigrateTableUpdater.java        |  7 +++---
 .../pipeline/api/config/ImporterConfiguration.java | 12 +++------
 .../data/pipeline/api/metadata/TableName.java      |  1 +
 .../core/api/impl/AbstractPipelineJobAPIImpl.java  |  3 +--
 .../mysql/MySQLContainerConfigurationFactory.java  |  7 +++---
 .../data/pipeline/cases/base/BaseITCase.java       | 29 +++++++++++-----------
 .../core/util/JobConfigurationBuilder.java         | 14 ++++++++++-
 8 files changed, 45 insertions(+), 43 deletions(-)

diff --git a/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckStatusQueryResultSet.java b/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckStatusQueryResultSet.java
index e0cd171743d..d998b57b408 100644
--- a/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckStatusQueryResultSet.java
+++ b/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/query/ShowMigrationCheckStatusQueryResultSet.java
@@ -17,7 +17,6 @@
 
 package org.apache.shardingsphere.migration.distsql.handler.query;
 
-import org.apache.commons.lang3.ObjectUtils;
 import org.apache.shardingsphere.data.pipeline.api.ConsistencyCheckJobPublicAPI;
 import org.apache.shardingsphere.data.pipeline.api.PipelineJobPublicAPIFactory;
 import org.apache.shardingsphere.data.pipeline.api.pojo.ConsistencyCheckJobProgressInfo;
@@ -31,6 +30,7 @@ import java.util.Collection;
 import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.List;
+import java.util.Optional;
 
 /**
  * Show migration check status query result set.
@@ -47,17 +47,14 @@ public final class ShowMigrationCheckStatusQueryResultSet implements DatabaseDis
         ConsistencyCheckJobProgressInfo progressInfo = JOB_API.getJobProgressInfo(checkMigrationStatement.getJobId());
         List<Collection<Object>> result = new LinkedList<>();
         String checkResult = null == progressInfo.getCheckSuccess() ? "" : progressInfo.getCheckSuccess().toString();
-        result.add(Arrays.asList(emptyIfNull(progressInfo.getTableNames()), checkResult, String.valueOf(progressInfo.getFinishedPercentage()),
-                emptyIfNull(progressInfo.getRemainingSeconds()),
-                emptyIfNull(progressInfo.getCheckBeginTime()), emptyIfNull(progressInfo.getCheckEndTime()),
-                emptyIfNull(progressInfo.getDurationSeconds()), emptyIfNull(progressInfo.getErrorMessage())));
+        result.add(Arrays.asList(Optional.ofNullable(progressInfo.getTableNames()).orElse(""), checkResult, String.valueOf(progressInfo.getFinishedPercentage()),
+                progressInfo.getRemainingSeconds(),
+                Optional.ofNullable(progressInfo.getCheckBeginTime()).orElse(""),
+                Optional.ofNullable(progressInfo.getCheckEndTime()).orElse(""),
+                progressInfo.getDurationSeconds(), Optional.ofNullable(progressInfo.getErrorMessage()).orElse("")));
         data = result.iterator();
     }
     
-    private Object emptyIfNull(final Object object) {
-        return ObjectUtils.defaultIfNull(object, "");
-    }
-    
     @Override
     public Collection<String> getColumnNames() {
         return Arrays.asList("tables", "result", "finished_percentage", "remaining_seconds", "check_begin_time", "check_end_time", "duration_seconds", "error_message");
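For reference, Optional.ofNullable(value).orElse(defaultValue) is a drop-in replacement for the removed emptyIfNull helper (and for ObjectUtils.defaultIfNull) whenever the default is non-null. A minimal standalone sketch, not part of the commit, with illustrative values:

    import java.util.Optional;

    public final class DefaultIfNullExample {

        public static void main(final String[] args) {
            String tableNames = null;
            // Same result as ObjectUtils.defaultIfNull(tableNames, ""), without the Commons Lang 3 dependency.
            Object cell = Optional.ofNullable(tableNames).orElse("");
            System.out.println("[" + cell + "]"); // prints []
        }
    }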
diff --git a/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/MigrateTableUpdater.java b/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/MigrateTableUpdater.java
index 23f41628a5d..bee94607911 100644
--- a/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/MigrateTableUpdater.java
+++ b/features/sharding/distsql/handler/src/main/java/org/apache/shardingsphere/migration/distsql/handler/update/MigrateTableUpdater.java
@@ -19,7 +19,6 @@ package org.apache.shardingsphere.migration.distsql.handler.update;
 
 import com.google.common.base.Preconditions;
 import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang3.ObjectUtils;
 import org.apache.shardingsphere.data.pipeline.api.MigrationJobPublicAPI;
 import org.apache.shardingsphere.data.pipeline.api.PipelineJobPublicAPIFactory;
 import org.apache.shardingsphere.data.pipeline.api.pojo.CreateMigrationJobParameter;
@@ -37,10 +36,10 @@ public final class MigrateTableUpdater implements RALUpdater<MigrateTableStateme
     @Override
     public void executeUpdate(final String databaseName, final MigrateTableStatement sqlStatement) {
         log.info("start migrate job by {}", sqlStatement);
-        String targetDatabaseName = ObjectUtils.defaultIfNull(sqlStatement.getTargetDatabaseName(), databaseName);
+        String targetDatabaseName = null == sqlStatement.getTargetDatabaseName() ? databaseName : sqlStatement.getTargetDatabaseName();
         Preconditions.checkNotNull(targetDatabaseName, "Target database name is null. You could define it in DistSQL or select a database.");
-        CreateMigrationJobParameter createMigrationJobParameter = new CreateMigrationJobParameter(sqlStatement.getSourceResourceName(), sqlStatement.getSourceSchemaName(),
-                sqlStatement.getSourceTableName(), targetDatabaseName, sqlStatement.getTargetTableName());
+        CreateMigrationJobParameter createMigrationJobParameter = new CreateMigrationJobParameter(
+                sqlStatement.getSourceResourceName(), sqlStatement.getSourceSchemaName(), sqlStatement.getSourceTableName(), targetDatabaseName, sqlStatement.getTargetTableName());
         JOB_API.createJobAndStart(createMigrationJobParameter);
     }
     
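Where a single nullable value needs a fallback, as in MigrateTableUpdater above, a plain ternary achieves the same effect with no extra dependency or Optional allocation. A small sketch of the same pattern (the method and variable names are illustrative only):

    public final class TargetDatabaseNameExample {

        // Resolve the target database name, falling back to the current database when none is declared.
        private static String resolveTargetDatabaseName(final String declaredName, final String currentDatabaseName) {
            return null == declaredName ? currentDatabaseName : declaredName;
        }

        public static void main(final String[] args) {
            System.out.println(resolveTargetDatabaseName(null, "sharding_db"));
            System.out.println(resolveTargetDatabaseName("target_db", "sharding_db"));
        }
    }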
diff --git a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/config/ImporterConfiguration.java b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/config/ImporterConfiguration.java
index 700c0e5348c..3646d887368 100644
--- a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/config/ImporterConfiguration.java
+++ b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/config/ImporterConfiguration.java
@@ -19,9 +19,7 @@ package org.apache.shardingsphere.data.pipeline.api.config;
 
 import lombok.Getter;
 import lombok.RequiredArgsConstructor;
-import lombok.Setter;
 import lombok.ToString;
-import org.apache.commons.lang3.ObjectUtils;
 import org.apache.shardingsphere.data.pipeline.api.datasource.config.PipelineDataSourceConfiguration;
 import org.apache.shardingsphere.data.pipeline.api.metadata.LogicTableName;
 import org.apache.shardingsphere.data.pipeline.spi.ratelimit.JobRateLimitAlgorithm;
@@ -29,7 +27,6 @@ import org.apache.shardingsphere.infra.database.type.DatabaseTypeFactory;
 
 import java.util.Collection;
 import java.util.Collections;
-import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
@@ -39,7 +36,6 @@ import java.util.stream.Collectors;
  */
 @RequiredArgsConstructor
 @Getter
-@Setter
 @ToString(exclude = "dataSourceConfig")
 public final class ImporterConfiguration {
     
@@ -64,8 +60,7 @@ public final class ImporterConfiguration {
      * @return logic table names
      */
     public Collection<String> getLogicTableNames() {
-        List<String> result = shardingColumnsMap.keySet().stream().map(LogicTableName::getLowercase).collect(Collectors.toList());
-        return Collections.unmodifiableList(result);
+        return Collections.unmodifiableList(shardingColumnsMap.keySet().stream().map(LogicTableName::getLowercase).collect(Collectors.toList()));
     }
     
     /**
@@ -75,7 +70,7 @@ public final class ImporterConfiguration {
      * @return sharding columns
      */
     public Set<String> getShardingColumns(final String logicTableName) {
-        return ObjectUtils.defaultIfNull(shardingColumnsMap.get(new LogicTableName(logicTableName)), Collections.emptySet());
+        return shardingColumnsMap.getOrDefault(new LogicTableName(logicTableName), Collections.emptySet());
     }
     
     /**
@@ -85,7 +80,6 @@ public final class ImporterConfiguration {
      * @return schema name. nullable
      */
     public String getSchemaName(final LogicTableName logicTableName) {
-        String databaseType = dataSourceConfig.getDatabaseType().getType();
-        return DatabaseTypeFactory.getInstance(databaseType).isSchemaAvailable() ? tableNameSchemaNameMapping.getSchemaName(logicTableName) : null;
+        return DatabaseTypeFactory.getInstance(dataSourceConfig.getDatabaseType().getType()).isSchemaAvailable() ? tableNameSchemaNameMapping.getSchemaName(logicTableName) : null;
     }
 }
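The Map.getOrDefault call above covers the null-handling previously done by ObjectUtils.defaultIfNull(shardingColumnsMap.get(key), Collections.emptySet()). A minimal sketch with hypothetical keys, not taken from the project:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.Set;

    public final class GetOrDefaultExample {

        public static void main(final String[] args) {
            Map<String, Set<String>> shardingColumnsMap = new HashMap<>();
            shardingColumnsMap.put("t_order", Collections.singleton("order_id"));
            // Present key: returns the stored set.
            System.out.println(shardingColumnsMap.getOrDefault("t_order", Collections.emptySet()));
            // Absent key: returns the supplied default instead of null.
            System.out.println(shardingColumnsMap.getOrDefault("t_order_item", Collections.emptySet()));
        }
    }

Note that getOrDefault only substitutes the default when the key is absent; a key explicitly mapped to null still yields null, a case defaultIfNull also covered.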
diff --git a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/metadata/TableName.java b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/metadata/TableName.java
index b48332797dd..b7b0f5f9428 100644
--- a/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/metadata/TableName.java
+++ b/kernel/data-pipeline/api/src/main/java/org/apache/shardingsphere/data/pipeline/api/metadata/TableName.java
@@ -21,6 +21,7 @@ import lombok.NonNull;
 
 /**
  * Table name.
+ * 
  * <p>It might be logic table name or actual table name.</p>
  * <p>It's case-insensitive.</p>
  */
diff --git a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/api/impl/AbstractPipelineJobAPIImpl.java b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/api/impl/AbstractPipelineJobAPIImpl.java
index 6812e54efaf..b519298f5aa 100644
--- a/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/api/impl/AbstractPipelineJobAPIImpl.java
+++ b/kernel/data-pipeline/core/src/main/java/org/apache/shardingsphere/data/pipeline/core/api/impl/AbstractPipelineJobAPIImpl.java
@@ -20,7 +20,6 @@ package org.apache.shardingsphere.data.pipeline.core.api.impl;
 import com.google.common.base.Preconditions;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.lang.exception.ExceptionUtils;
-import org.apache.commons.lang3.ObjectUtils;
 import org.apache.shardingsphere.data.pipeline.api.config.job.PipelineJobConfiguration;
 import org.apache.shardingsphere.data.pipeline.api.config.job.yaml.YamlPipelineJobConfiguration;
 import org.apache.shardingsphere.data.pipeline.api.job.PipelineJobId;
@@ -164,7 +163,7 @@ public abstract class AbstractPipelineJobAPIImpl implements PipelineJobAPI {
     
     @Override
     public String getJobItemErrorMessage(final String jobId, final int shardingItem) {
-        return ObjectUtils.defaultIfNull(PipelineAPIFactory.getGovernanceRepositoryAPI().getJobItemErrorMessage(jobId, shardingItem), "");
+        return Optional.ofNullable(PipelineAPIFactory.getGovernanceRepositoryAPI().getJobItemErrorMessage(jobId, shardingItem)).orElse("");
     }
     
     @Override
diff --git a/test/integration-test/env/src/test/java/org/apache/shardingsphere/test/integration/env/container/atomic/storage/config/impl/mysql/MySQLContainerConfigurationFactory.java b/test/integration-test/env/src/test/java/org/apache/shardingsphere/test/integration/env/container/atomic/storage/config/impl/mysql/MySQLContainerConfigurationFactory.java
index 8f70bc1aacc..f03d1f25fa7 100644
--- a/test/integration-test/env/src/test/java/org/apache/shardingsphere/test/integration/env/container/atomic/storage/config/impl/mysql/MySQLContainerConfigurationFactory.java
+++ b/test/integration-test/env/src/test/java/org/apache/shardingsphere/test/integration/env/container/atomic/storage/config/impl/mysql/MySQLContainerConfigurationFactory.java
@@ -19,7 +19,6 @@ package org.apache.shardingsphere.test.integration.env.container.atomic.storage.
 
 import lombok.AccessLevel;
 import lombok.NoArgsConstructor;
-import org.apache.commons.lang3.ObjectUtils;
 import org.apache.shardingsphere.test.integration.env.container.atomic.constants.StorageContainerConstants;
 import org.apache.shardingsphere.test.integration.env.container.atomic.storage.config.StorageContainerConfiguration;
 import org.apache.shardingsphere.test.integration.env.container.atomic.util.ContainerUtil;
@@ -27,6 +26,7 @@ import org.apache.shardingsphere.test.integration.env.container.atomic.util.Cont
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.Optional;
 
 /**
  * MySQL container configuration factory.
@@ -52,8 +52,9 @@ public final class MySQLContainerConfigurationFactory {
      * @return created instance
      */
     public static StorageContainerConfiguration newInstance(final String command, final Map<String, String> containerEnvironments, final Map<String, String> mountedResources) {
-        return new StorageContainerConfiguration(ObjectUtils.defaultIfNull(command, getCommand()), ObjectUtils.defaultIfNull(containerEnvironments, getContainerEnvironments()),
-                ObjectUtils.defaultIfNull(mountedResources, getMountedResources()));
+        return new StorageContainerConfiguration(Optional.ofNullable(command).orElseGet(MySQLContainerConfigurationFactory::getCommand),
+                Optional.ofNullable(containerEnvironments).orElseGet(MySQLContainerConfigurationFactory::getContainerEnvironments),
+                Optional.ofNullable(mountedResources).orElseGet(MySQLContainerConfigurationFactory::getMountedResources));
     }
     
     private static String getCommand() {
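The orElseGet calls above take method references, so the fallback factory methods run only when the corresponding argument is actually null; orElse(getCommand()) would evaluate the fallback eagerly on every call. A small sketch of that distinction, with illustrative values:

    import java.util.Optional;

    public final class OrElseGetExample {

        private static String buildDefaultCommand() {
            System.out.println("building default command");
            return "--character-set-server=utf8mb4";
        }

        public static void main(final String[] args) {
            // Non-null value: the supplier is never invoked, so buildDefaultCommand prints nothing here.
            System.out.println(Optional.ofNullable("--skip-name-resolve").orElseGet(OrElseGetExample::buildDefaultCommand));
            // Null value: the supplier is invoked and its result is returned.
            System.out.println(Optional.ofNullable((String) null).orElseGet(OrElseGetExample::buildDefaultCommand));
        }
    }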
diff --git a/test/integration-test/scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/base/BaseITCase.java b/test/integration-test/scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/base/BaseITCase.java
index 328bbfc0b1d..777984e4c06 100644
--- a/test/integration-test/scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/base/BaseITCase.java
+++ b/test/integration-test/scaling/src/test/java/org/apache/shardingsphere/integration/data/pipeline/cases/base/BaseITCase.java
@@ -17,13 +17,12 @@
 
 package org.apache.shardingsphere.integration.data.pipeline.cases.base;
 
+import com.google.common.base.Strings;
 import lombok.AccessLevel;
 import lombok.Getter;
 import lombok.Setter;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.collections4.CollectionUtils;
-import org.apache.commons.lang3.ObjectUtils;
-import org.apache.commons.lang3.StringUtils;
 import org.apache.shardingsphere.data.pipeline.api.job.JobStatus;
 import org.apache.shardingsphere.data.pipeline.core.util.ThreadUtil;
 import org.apache.shardingsphere.infra.database.metadata.url.JdbcUrlAppender;
@@ -237,9 +236,9 @@ public abstract class BaseITCase {
         while (retryNumber <= 3) {
             try (Connection connection = proxyDataSource.getConnection()) {
                 ResultSet resultSet = connection.createStatement().executeQuery(sql);
-                List<Map<String, Object>> result = resultSetToList(resultSet);
+                List<Map<String, Object>> result = transformResultSetToList(resultSet);
                 log.info("proxy query for list, sql: {}, result: {}", sql, result);
-                return ObjectUtils.defaultIfNull(result, Collections.emptyList());
+                return result;
             } catch (final SQLException ex) {
                 log.error("data access error", ex);
             }
@@ -249,18 +248,18 @@ public abstract class BaseITCase {
         throw new RuntimeException("can't get result from proxy");
     }
     
-    protected List<Map<String, Object>> resultSetToList(final ResultSet rs) throws SQLException {
-        ResultSetMetaData md = rs.getMetaData();
-        int columns = md.getColumnCount();
-        List<Map<String, Object>> results = new ArrayList<>();
-        while (rs.next()) {
+    private List<Map<String, Object>> transformResultSetToList(final ResultSet resultSet) throws SQLException {
+        ResultSetMetaData resultSetMetaData = resultSet.getMetaData();
+        int columns = resultSetMetaData.getColumnCount();
+        List<Map<String, Object>> result = new ArrayList<>();
+        while (resultSet.next()) {
             Map<String, Object> row = new HashMap<>();
             for (int i = 1; i <= columns; i++) {
-                row.put(md.getColumnLabel(i).toLowerCase(), rs.getObject(i));
+                row.put(resultSetMetaData.getColumnLabel(i).toLowerCase(), resultSet.getObject(i));
             }
-            results.add(row);
+            result.add(row);
         }
-        return results;
+        return result;
     }
     
     protected void startIncrementTask(final BaseIncrementTask baseIncrementTask) {
@@ -279,10 +278,10 @@ public abstract class BaseITCase {
             Set<String> actualStatus = new HashSet<>();
             List<Integer> incrementalIdleSecondsList = new ArrayList<>();
             for (Map<String, Object> each : listJobStatus) {
-                assertTrue(StringUtils.isBlank(each.get("error_message").toString()));
+                assertTrue(Strings.isNullOrEmpty(each.get("error_message").toString()));
                 actualStatus.add(each.get("status").toString());
                 String incrementalIdleSeconds = each.get("incremental_idle_seconds").toString();
-                incrementalIdleSecondsList.add(StringUtils.isBlank(incrementalIdleSeconds) ? 0 : Integer.parseInt(incrementalIdleSeconds));
+                incrementalIdleSecondsList.add(Strings.isNullOrEmpty(incrementalIdleSeconds) ? 0 : Integer.parseInt(incrementalIdleSeconds));
             }
             assertFalse(CollectionUtils.containsAny(actualStatus, Arrays.asList(JobStatus.PREPARING_FAILURE.name(), JobStatus.EXECUTE_INVENTORY_TASK_FAILURE.name(),
                     JobStatus.EXECUTE_INCREMENTAL_TASK_FAILURE.name())));
@@ -303,7 +302,7 @@ public abstract class BaseITCase {
     
     protected void assertGreaterThanOrderTableInitRows(final int tableInitRows, final String schema) throws SQLException {
         proxyExecuteWithLog("REFRESH TABLE METADATA", 2);
-        String countSQL = StringUtils.isBlank(schema) ? "SELECT COUNT(*) as count FROM t_order" : String.format("SELECT COUNT(*) as count FROM %s.t_order", schema);
+        String countSQL = Strings.isNullOrEmpty(schema) ? "SELECT COUNT(*) as count FROM t_order" : String.format("SELECT COUNT(*) as count FROM %s.t_order", schema);
         Map<String, Object> actual = queryForListWithLog(countSQL).get(0);
         assertTrue("actual count " + actual.get("count"), Integer.parseInt(actual.get("count").toString()) > tableInitRows);
     }
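One behavioral difference worth noting: Guava's Strings.isNullOrEmpty is not a strict equivalent of Commons Lang's StringUtils.isBlank, because it returns false for whitespace-only input. A minimal sketch (assuming Guava on the classpath, which the import above already implies):

    import com.google.common.base.Strings;

    public final class IsNullOrEmptyExample {

        public static void main(final String[] args) {
            System.out.println(Strings.isNullOrEmpty(null)); // true
            System.out.println(Strings.isNullOrEmpty(""));   // true
            System.out.println(Strings.isNullOrEmpty(" "));  // false, whereas StringUtils.isBlank(" ") is true
        }
    }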
diff --git a/test/pipeline/src/test/java/org/apache/shardingsphere/data/pipeline/core/util/JobConfigurationBuilder.java b/test/pipeline/src/test/java/org/apache/shardingsphere/data/pipeline/core/util/JobConfigurationBuilder.java
index 72f4bb695d3..254f90c2e2f 100644
--- a/test/pipeline/src/test/java/org/apache/shardingsphere/data/pipeline/core/util/JobConfigurationBuilder.java
+++ b/test/pipeline/src/test/java/org/apache/shardingsphere/data/pipeline/core/util/JobConfigurationBuilder.java
@@ -54,11 +54,23 @@ public final class JobConfigurationBuilder {
         result.setSource(createYamlPipelineDataSourceConfiguration(new StandardPipelineDataSourceConfiguration(ConfigurationFileUtil.readFile("migration_standard_jdbc_source.yaml"))));
         result.setTarget(createYamlPipelineDataSourceConfiguration(new ShardingSpherePipelineDataSourceConfiguration(
                 ConfigurationFileUtil.readFile("migration_sharding_sphere_jdbc_target.yaml"))));
-        result.setUniqueKeyColumn(new YamlPipelineColumnMetaData(1, "order_id", 4, "", false, true, true));
+        result.setUniqueKeyColumn(createYamlPipelineColumnMetaData());
         PipelineAPIFactory.getPipelineJobAPI(JobType.MIGRATION).extendYamlJobConfiguration(result);
         return new YamlMigrationJobConfigurationSwapper().swapToObject(result);
     }
     
+    private static YamlPipelineColumnMetaData createYamlPipelineColumnMetaData() {
+        YamlPipelineColumnMetaData result = new YamlPipelineColumnMetaData();
+        result.setOrdinalPosition(1);
+        result.setName("order_id");
+        result.setDataType(4);
+        result.setDataTypeName("");
+        result.setNullable(false);
+        result.setPrimaryKey(true);
+        result.setUniqueKey(true);
+        return result;
+    }
+    
     private static String generateJobId(final YamlMigrationJobConfiguration yamlJobConfig) {
         String sourceTableName = RandomStringUtils.randomAlphabetic(32);
         MigrationJobId migrationJobId = new MigrationJobId(yamlJobConfig.getSourceResourceName(), yamlJobConfig.getSourceSchemaName(), sourceTableName,