You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@dolphinscheduler.apache.org by ki...@apache.org on 2021/05/06 01:32:36 UTC

[dolphinscheduler] branch dev updated: [Improvement][DAO] Add Unique key in t_ds_alertgroup and t_ds_datasource (#5373)

This is an automated email from the ASF dual-hosted git repository.

kirs pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/dolphinscheduler.git


The following commit(s) were added to refs/heads/dev by this push:
     new 0bdf15e  [Improvement][DAO] Add Unique key in t_ds_alertgroup and t_ds_datasource (#5373)
0bdf15e is described below

commit 0bdf15efb9774ee11afab61f79d5f5547457e402
Author: ruanwenjun <86...@qq.com>
AuthorDate: Thu May 6 09:32:28 2021 +0800

    [Improvement][DAO] Add Unique key in t_ds_alertgroup and t_ds_datasource (#5373)
    
    * [Improvement][DAO] Add Unique key to t_ds_worker_group t_ds_datasource
    
    * Add exception catch
    
    * add ddl
    
    * fix ut
---
 .../api/service/impl/AlertGroupServiceImpl.java    | 27 +++++--
 .../api/service/impl/DataSourceServiceImpl.java    | 20 +++--
 .../api/service/AlertGroupServiceTest.java         | 22 ++++++
 .../dao/mapper/AlertGroupMapperTest.java           | 10 +--
 .../dao/mapper/DataSourceMapperTest.java           | 90 ++++++++++++----------
 sql/dolphinscheduler_mysql.sql                     |  6 +-
 sql/dolphinscheduler_postgre.sql                   |  6 +-
 .../1.4.0_schema/mysql/dolphinscheduler_ddl.sql    | 39 ++++++++++
 .../postgresql/dolphinscheduler_ddl.sql            | 36 +++++++++
 9 files changed, 193 insertions(+), 63 deletions(-)

diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java
index 3831ad0..02a564a 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/AlertGroupServiceImpl.java
@@ -32,7 +32,10 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.dao.DuplicateKeyException;
 import org.springframework.stereotype.Service;
 import org.springframework.transaction.annotation.Transactional;
 
@@ -45,6 +48,8 @@ import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
 @Service
 public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroupService {
 
+    private Logger logger = LoggerFactory.getLogger(AlertGroupServiceImpl.class);
+
     @Autowired
     private AlertGroupMapper alertGroupMapper;
 
@@ -121,13 +126,14 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup
         alertGroup.setCreateUserId(loginUser.getId());
 
         // insert
-        int insert = alertGroupMapper.insert(alertGroup);
-
-        if (insert > 0) {
-            putMsg(result, Status.SUCCESS);
-        } else {
-            putMsg(result, Status.CREATE_ALERT_GROUP_ERROR);
+        try {
+            int insert = alertGroupMapper.insert(alertGroup);
+            putMsg(result, insert > 0 ? Status.SUCCESS : Status.CREATE_ALERT_GROUP_ERROR);
+        } catch (DuplicateKeyException ex) {
+            logger.error("Create alert group error.", ex);
+            putMsg(result, Status.ALERT_GROUP_EXIST);
         }
+
         return result;
     }
 
@@ -166,8 +172,13 @@ public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroup
         alertGroup.setUpdateTime(now);
         alertGroup.setCreateUserId(loginUser.getId());
         alertGroup.setAlertInstanceIds(alertInstanceIds);
-        alertGroupMapper.updateById(alertGroup);
-        putMsg(result, Status.SUCCESS);
+        try {
+            alertGroupMapper.updateById(alertGroup);
+            putMsg(result, Status.SUCCESS);
+        } catch (DuplicateKeyException ex) {
+            logger.error("Update alert group error.", ex);
+            putMsg(result, Status.ALERT_GROUP_EXIST);
+        }
         return result;
     }
 
diff --git a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java
index 8d86690..8869db3 100644
--- a/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java
+++ b/dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataSourceServiceImpl.java
@@ -46,6 +46,7 @@ import java.util.Set;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.dao.DuplicateKeyException;
 import org.springframework.stereotype.Service;
 import org.springframework.transaction.annotation.Transactional;
 
@@ -103,9 +104,13 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
         dataSource.setConnectionParams(JSONUtils.toJsonString(connectionParam));
         dataSource.setCreateTime(now);
         dataSource.setUpdateTime(now);
-        dataSourceMapper.insert(dataSource);
-
-        putMsg(result, Status.SUCCESS);
+        try {
+            dataSourceMapper.insert(dataSource);
+            putMsg(result, Status.SUCCESS);
+        } catch (DuplicateKeyException ex) {
+            logger.error("Create datasource error.", ex);
+            putMsg(result, Status.DATASOURCE_EXIST);
+        }
 
         return result;
     }
@@ -164,8 +169,13 @@ public class DataSourceServiceImpl extends BaseServiceImpl implements DataSource
         dataSource.setType(dataSource.getType());
         dataSource.setConnectionParams(JSONUtils.toJsonString(connectionParam));
         dataSource.setUpdateTime(now);
-        dataSourceMapper.updateById(dataSource);
-        putMsg(result, Status.SUCCESS);
+        try {
+            dataSourceMapper.updateById(dataSource);
+            putMsg(result, Status.SUCCESS);
+        } catch (DuplicateKeyException ex) {
+            logger.error("Update datasource error.", ex);
+            putMsg(result, Status.DATASOURCE_EXIST);
+        }
         return result;
     }
 
diff --git a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java
index 0f5843b..8466bee 100644
--- a/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java
+++ b/dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/AlertGroupServiceTest.java
@@ -43,6 +43,7 @@ import org.mockito.Mockito;
 import org.mockito.junit.MockitoJUnitRunner;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
+import org.springframework.dao.DuplicateKeyException;
 
 import com.baomidou.mybatisplus.core.metadata.IPage;
 import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
@@ -110,6 +111,17 @@ public class AlertGroupServiceTest {
     }
 
     @Test
+    public void testCreateAlertgroupDuplicate() {
+
+        Mockito.when(alertGroupMapper.insert(any(AlertGroup.class))).thenThrow(new DuplicateKeyException("group name exist"));
+        User user = new User();
+        user.setUserType(UserType.ADMIN_USER);
+        Map<String, Object> result = alertGroupService.createAlertgroup(user, groupName, groupName, null);
+        logger.info(result.toString());
+        Assert.assertEquals(Status.ALERT_GROUP_EXIST, result.get(Constants.STATUS));
+    }
+
+    @Test
     public void testUpdateAlertgroup() {
 
         User user = new User();
@@ -131,6 +143,16 @@ public class AlertGroupServiceTest {
     }
 
     @Test
+    public void testUpdateAlertgroupDuplicate() {
+        User user = new User();
+        user.setUserType(UserType.ADMIN_USER);
+        Mockito.when(alertGroupMapper.selectById(2)).thenReturn(getEntity());
+        Mockito.when(alertGroupMapper.updateById(Mockito.any())).thenThrow(new DuplicateKeyException("group name exist"));
+        Map<String, Object> result = alertGroupService.updateAlertgroup(user, 2, groupName, groupName, null);
+        Assert.assertEquals(Status.ALERT_GROUP_EXIST, result.get(Constants.STATUS));
+    }
+
+    @Test
     public void testDelAlertgroupById() {
 
         User user = new User();
diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapperTest.java
index 9db5d2b..f2bf291 100644
--- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapperTest.java
+++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AlertGroupMapperTest.java
@@ -88,10 +88,10 @@ public class AlertGroupMapperTest {
 
         String groupName = "testGroup";
 
-        Integer count = 4;
+        Integer count = 1;
 
-        Integer offset = 2;
-        Integer size = 2;
+        Integer offset = 0;
+        Integer size = 1;
 
         Map<Integer, AlertGroup> alertGroupMap = createAlertGroups(count, groupName);
 
@@ -152,7 +152,7 @@ public class AlertGroupMapperTest {
      */
     @Test
     public void testQueryByGroupName() {
-        Integer count = 4;
+        Integer count = 1;
         String groupName = "testGroup";
 
         Map<Integer, AlertGroup> alertGroupMap = createAlertGroups(count, groupName);
@@ -175,7 +175,7 @@ public class AlertGroupMapperTest {
      */
     @Test
     public void testQueryAllGroupList() {
-        Integer count = 4;
+        Integer count = 1;
         Map<Integer, AlertGroup> alertGroupMap = createAlertGroups(count);
 
         List<AlertGroup> alertGroupList = alertGroupMapper.queryAllGroupList();
diff --git a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapperTest.java b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapperTest.java
index 04d726e..a45c520 100644
--- a/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapperTest.java
+++ b/dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/DataSourceMapperTest.java
@@ -14,17 +14,31 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.dolphinscheduler.dao.mapper;
 
+import static java.util.stream.Collectors.toList;
+
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.greaterThanOrEqualTo;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertThat;
 
-import com.baomidou.mybatisplus.core.metadata.IPage;
-import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
 import org.apache.dolphinscheduler.common.enums.DbType;
 import org.apache.dolphinscheduler.common.enums.UserType;
 import org.apache.dolphinscheduler.common.utils.DateUtils;
 import org.apache.dolphinscheduler.dao.entity.DataSource;
 import org.apache.dolphinscheduler.dao.entity.DatasourceUser;
 import org.apache.dolphinscheduler.dao.entity.User;
+
+import java.util.Arrays;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
 import org.junit.Assert;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -34,12 +48,8 @@ import org.springframework.test.annotation.Rollback;
 import org.springframework.test.context.junit4.SpringRunner;
 import org.springframework.transaction.annotation.Transactional;
 
-import java.util.*;
-
-import static java.util.stream.Collectors.toList;
-import static org.hamcrest.Matchers.greaterThan;
-import static org.hamcrest.Matchers.greaterThanOrEqualTo;
-import static org.junit.Assert.*;
+import com.baomidou.mybatisplus.core.metadata.IPage;
+import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
 
 /**
  *  datasource mapper test
@@ -69,7 +79,7 @@ public class DataSourceMapperTest {
      * test insert
      */
     @Test
-    public void testInsert(){
+    public void testInsert() {
         DataSource dataSource = createDataSource();
         assertThat(dataSource.getId(), greaterThan(0));
     }
@@ -111,7 +121,7 @@ public class DataSourceMapperTest {
      * test delete
      */
     @Test
-    public void testDelete(){
+    public void testDelete() {
         DataSource expectedDataSource = createDataSource();
 
         dataSourceMapper.deleteById(expectedDataSource.getId());
@@ -137,9 +147,9 @@ public class DataSourceMapperTest {
 
         assertThat(actualDataSources.size(), greaterThanOrEqualTo(2));
 
-        for (DataSource actualDataSource : actualDataSources){
+        for (DataSource actualDataSource : actualDataSources) {
             DataSource expectedDataSource = datasourceMap.get(actualDataSource.getId());
-            if (expectedDataSource != null){
+            if (expectedDataSource != null) {
                 assertEquals(expectedDataSource,actualDataSource);
             }
         }
@@ -161,9 +171,9 @@ public class DataSourceMapperTest {
         IPage<DataSource> dataSourceIPage = dataSourceMapper.selectPaging(page, userId, name);
         List<DataSource> actualDataSources = dataSourceIPage.getRecords();
 
-        for (DataSource actualDataSource : actualDataSources){
+        for (DataSource actualDataSource : actualDataSources) {
             DataSource expectedDataSource = expectedDataSourceMap.get(actualDataSource.getId());
-            if (expectedDataSource != null){
+            if (expectedDataSource != null) {
                 assertEquals(expectedDataSource,actualDataSource);
             }
         }
@@ -180,8 +190,8 @@ public class DataSourceMapperTest {
 
         List<DataSource> actualDataSources = dataSourceMapper.queryDataSourceByName(name);
 
-        for (DataSource actualDataSource : actualDataSources){
-            if (expectedDataSource.getId() == actualDataSource.getId()){
+        for (DataSource actualDataSource : actualDataSources) {
+            if (expectedDataSource.getId() == actualDataSource.getId()) {
                 assertEquals(expectedDataSource,actualDataSource);
             }
         }
@@ -200,9 +210,9 @@ public class DataSourceMapperTest {
 
         List<DataSource> actualDataSources = dataSourceMapper.queryAuthedDatasource(userId);
 
-        for (DataSource actualDataSource : actualDataSources){
+        for (DataSource actualDataSource : actualDataSources) {
             DataSource expectedDataSource = expectedDataSourceMap.get(actualDataSource.getId());
-            if (expectedDataSource != null){
+            if (expectedDataSource != null) {
                 assertEquals(expectedDataSource,actualDataSource);
             }
         }
@@ -221,9 +231,9 @@ public class DataSourceMapperTest {
 
         List<DataSource> actualDataSources = dataSourceMapper.queryDatasourceExceptUserId(userId);
 
-        for (DataSource actualDataSource : actualDataSources){
+        for (DataSource actualDataSource : actualDataSources) {
             DataSource expectedDataSource = expectedDataSourceMap.get(actualDataSource.getId());
-            if (expectedDataSource != null){
+            if (expectedDataSource != null) {
                 assertEquals(expectedDataSource,actualDataSource);
             }
         }
@@ -234,7 +244,7 @@ public class DataSourceMapperTest {
      */
     @Test
     public void testListAllDataSourceByType() {
-        Integer count = 10;
+        Integer count = 1;
 
         Map<Integer, DataSource> expectedDataSourceMap = createDataSourceMap(count);
 
@@ -242,16 +252,16 @@ public class DataSourceMapperTest {
 
         assertThat(actualDataSources.size(), greaterThanOrEqualTo(count));
 
-        for (DataSource actualDataSource : actualDataSources){
+        for (DataSource actualDataSource : actualDataSources) {
             DataSource expectedDataSource = expectedDataSourceMap.get(actualDataSource.getId());
-            if (expectedDataSource != null){
+            if (expectedDataSource != null) {
                 assertEquals(expectedDataSource,actualDataSource);
             }
         }
     }
 
     @Test
-    public void testListAuthorizedDataSource(){
+    public void testListAuthorizedDataSource() {
         //create general user
         User generalUser1 = createGeneralUser("user1");
         User generalUser2 = createGeneralUser("user2");
@@ -260,14 +270,13 @@ public class DataSourceMapperTest {
         DataSource dataSource = createDataSource(generalUser1.getId(), "ds-1");
         DataSource unauthorizdDataSource = createDataSource(generalUser2.getId(), "ds-2");
 
-
         //data source ids
-        Integer[] dataSourceIds = new Integer[]{dataSource.getId(),unauthorizdDataSource.getId()};
+        Integer[] dataSourceIds = new Integer[]{dataSource.getId(), unauthorizdDataSource.getId()};
 
         List<DataSource> authorizedDataSource = dataSourceMapper.listAuthorizedDataSource(generalUser1.getId(), dataSourceIds);
 
-        Assert.assertEquals(generalUser1.getId(),dataSource.getUserId());
-        Assert.assertNotEquals(generalUser1.getId(),unauthorizdDataSource.getUserId());
+        assertEquals(generalUser1.getId(), dataSource.getUserId());
+        Assert.assertNotEquals(generalUser1.getId(), unauthorizdDataSource.getUserId());
         Assert.assertFalse(authorizedDataSource.stream().map(t -> t.getId()).collect(toList()).containsAll(Arrays.asList(dataSourceIds)));
 
         //authorize object unauthorizdDataSource to generalUser1
@@ -281,7 +290,7 @@ public class DataSourceMapperTest {
      * create datasource relation
      * @param userId
      */
-    private Map<Integer,DataSource> createDataSourceMap(Integer userId,String name){
+    private Map<Integer,DataSource> createDataSourceMap(Integer userId,String name) {
 
         Map<Integer,DataSource> dataSourceMap = new HashMap<>();
 
@@ -289,7 +298,7 @@ public class DataSourceMapperTest {
 
         dataSourceMap.put(dataSource.getId(),dataSource);
 
-        DataSource otherDataSource = createDataSource(userId + 1,name);
+        DataSource otherDataSource = createDataSource(userId + 1, name + "1");
 
         DatasourceUser datasourceUser = new DatasourceUser();
 
@@ -311,10 +320,10 @@ public class DataSourceMapperTest {
      * @param count datasource count
      * @return datasource map
      */
-    private Map<Integer,DataSource> createDataSourceMap(Integer count){
+    private Map<Integer,DataSource> createDataSourceMap(Integer count) {
         Map<Integer,DataSource> dataSourceMap = new HashMap<>();
 
-        for (int i = 0 ; i < count ;i++){
+        for (int i = 0; i < count; i++) {
             DataSource dataSource = createDataSource("test");
             dataSourceMap.put(dataSource.getId(),dataSource);
         }
@@ -326,17 +335,16 @@ public class DataSourceMapperTest {
      * create datasource
      * @return datasource
      */
-    private DataSource createDataSource(){
+    private DataSource createDataSource() {
         return createDataSource(1,"test");
     }
 
-
     /**
      * create datasource
      * @param name name
      * @return datasource
      */
-    private DataSource createDataSource(String name){
+    private DataSource createDataSource(String name) {
         return createDataSource(1,name);
     }
 
@@ -346,7 +354,7 @@ public class DataSourceMapperTest {
      * @param name name
      * @return datasource
      */
-    private DataSource createDataSource(Integer userId,String name){
+    private DataSource createDataSource(Integer userId,String name) {
         Random random = new Random();
         DataSource dataSource = new DataSource();
         dataSource.setUserId(userId);
@@ -366,7 +374,7 @@ public class DataSourceMapperTest {
      * create general user
      * @return User
      */
-    private User createGeneralUser(String userName){
+    private User createGeneralUser(String userName) {
         User user = new User();
         user.setUserName(userName);
         user.setUserPassword("1");
@@ -381,11 +389,12 @@ public class DataSourceMapperTest {
 
     /**
      * create the relation of user and data source
-     * @param user          user
-     * @param dataSource    data source
+     *
+     * @param user user
+     * @param dataSource data source
      * @return DatasourceUser
      */
-    private DatasourceUser createUserDataSource(User user,DataSource dataSource){
+    private DatasourceUser createUserDataSource(User user, DataSource dataSource) {
         DatasourceUser datasourceUser = new DatasourceUser();
 
         datasourceUser.setDatasourceId(dataSource.getId());
@@ -398,5 +407,4 @@ public class DataSourceMapperTest {
         return datasourceUser;
     }
 
-
 }
\ No newline at end of file
diff --git a/sql/dolphinscheduler_mysql.sql b/sql/dolphinscheduler_mysql.sql
index 1b7fd32..fa860ea 100644
--- a/sql/dolphinscheduler_mysql.sql
+++ b/sql/dolphinscheduler_mysql.sql
@@ -304,7 +304,8 @@ CREATE TABLE `t_ds_alertgroup`(
   `description`    varchar(255) DEFAULT NULL,
   `create_time`    datetime     DEFAULT NULL COMMENT 'create time',
   `update_time`    datetime     DEFAULT NULL COMMENT 'update time',
-  PRIMARY KEY (`id`)
+  PRIMARY KEY (`id`),
+  UNIQUE KEY `t_ds_alertgroup_name_UN` (`group_name`)
 ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8;
 
 -- ----------------------------
@@ -350,7 +351,8 @@ CREATE TABLE `t_ds_datasource` (
   `connection_params` text NOT NULL COMMENT 'json connection params',
   `create_time` datetime NOT NULL COMMENT 'create time',
   `update_time` datetime DEFAULT NULL COMMENT 'update time',
-  PRIMARY KEY (`id`)
+  PRIMARY KEY (`id`),
+  UNIQUE KEY `t_ds_datasource_name_UN` (`name`, `type`)
 ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8;
 
 -- ----------------------------
diff --git a/sql/dolphinscheduler_postgre.sql b/sql/dolphinscheduler_postgre.sql
index 50f30ad..842f4c5 100644
--- a/sql/dolphinscheduler_postgre.sql
+++ b/sql/dolphinscheduler_postgre.sql
@@ -208,7 +208,8 @@ CREATE TABLE t_ds_alertgroup(
   description    varchar(255) DEFAULT NULL,
   create_time    timestamp    DEFAULT NULL,
   update_time    timestamp    DEFAULT NULL,
-  PRIMARY KEY (id)
+  PRIMARY KEY (id),
+  CONSTRAINT t_ds_alertgroup_name_UN UNIQUE (group_name)
 ) ;
 
 --
@@ -248,7 +249,8 @@ CREATE TABLE t_ds_datasource (
   connection_params text NOT NULL ,
   create_time timestamp NOT NULL ,
   update_time timestamp DEFAULT NULL ,
-  PRIMARY KEY (id)
+  PRIMARY KEY (id),
+  CONSTRAINT t_ds_datasource_name_UN UNIQUE (name, type)
 ) ;
 
 --
diff --git a/sql/upgrade/1.4.0_schema/mysql/dolphinscheduler_ddl.sql b/sql/upgrade/1.4.0_schema/mysql/dolphinscheduler_ddl.sql
index 148ca33..744294a 100644
--- a/sql/upgrade/1.4.0_schema/mysql/dolphinscheduler_ddl.sql
+++ b/sql/upgrade/1.4.0_schema/mysql/dolphinscheduler_ddl.sql
@@ -277,6 +277,45 @@ delimiter ;
 CALL uc_dolphin_T_t_ds_alertgroup_A_create_user_id();
 DROP PROCEDURE uc_dolphin_T_t_ds_alertgroup_A_create_user_id;
 
+-- uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName
+drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName;
+delimiter d//
+CREATE PROCEDURE uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName()
+BEGIN
+    IF NOT EXISTS (SELECT 1 FROM information_schema.STATISTICS
+                   WHERE TABLE_NAME='t_ds_alertgroup'
+                     AND TABLE_SCHEMA=(SELECT DATABASE())
+                     AND INDEX_NAME ='t_ds_alertgroup_name_UN')
+    THEN
+        ALTER TABLE t_ds_alertgroup ADD UNIQUE KEY `t_ds_alertgroup_name_UN` (`group_name`);
+    END IF;
+END;
+
+d//
+
+delimiter ;
+CALL uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName();
+DROP PROCEDURE uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName;
+
+-- uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName
+drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName;
+delimiter d//
+CREATE PROCEDURE uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName()
+BEGIN
+    IF NOT EXISTS (SELECT 1 FROM information_schema.STATISTICS
+                   WHERE TABLE_NAME='t_ds_datasource'
+                     AND TABLE_SCHEMA=(SELECT DATABASE())
+                     AND INDEX_NAME ='t_ds_datasource_name_UN')
+    THEN
+        ALTER TABLE t_ds_datasource ADD UNIQUE KEY `t_ds_datasource_name_UN` (`name`, `type`);
+    END IF;
+END;
+
+d//
+
+delimiter ;
+CALL uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName();
+DROP PROCEDURE uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName;
 -- ----------------------------
 -- These columns will not be used in the new version,if you determine that the historical data is useless, you can delete it using the sql below
 -- ----------------------------
diff --git a/sql/upgrade/1.4.0_schema/postgresql/dolphinscheduler_ddl.sql b/sql/upgrade/1.4.0_schema/postgresql/dolphinscheduler_ddl.sql
index 882bd05..13aa298 100644
--- a/sql/upgrade/1.4.0_schema/postgresql/dolphinscheduler_ddl.sql
+++ b/sql/upgrade/1.4.0_schema/postgresql/dolphinscheduler_ddl.sql
@@ -268,6 +268,42 @@ delimiter ;
 SELECT uc_dolphin_T_t_ds_alertgroup_A_create_user_id();
 DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_alertgroup_A_create_user_id();
 
+-- uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName
+delimiter d//
+CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName() RETURNS void AS $$
+BEGIN
+    IF NOT EXISTS (SELECT 1 FROM pg_stat_all_indexes
+          WHERE relname='t_ds_alertgroup'
+                            AND indexrelname ='t_ds_alertgroup_name_UN')
+      THEN
+         ALTER TABLE t_ds_alertgroup ADD CONSTRAINT t_ds_alertgroup_name_UN UNIQUE (group_name);
+       END IF;
+END;
+$$ LANGUAGE plpgsql;
+d//
+
+delimiter ;
+SELECT uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName();
+DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_alertgroup_A_add_UN_groupName();
+
+-- uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName
+delimiter d//
+CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName() RETURNS void AS $$
+BEGIN
+    IF NOT EXISTS (SELECT 1 FROM pg_stat_all_indexes
+          WHERE relname='t_ds_datasource'
+                            AND indexrelname ='t_ds_datasource_name_UN')
+      THEN
+         ALTER TABLE t_ds_datasource ADD CONSTRAINT t_ds_datasource_name_UN UNIQUE (name, type);
+       END IF;
+END;
+$$ LANGUAGE plpgsql;
+d//
+
+delimiter ;
+SELECT uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName();
+DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_datasource_A_add_UN_datasourceName();
+
 -- ----------------------------
 -- These columns will not be used in the new version,if you determine that the historical data is useless, you can delete it using the sql below
 -- ----------------------------