Posted to commits@linkis.apache.org by ca...@apache.org on 2022/09/17 07:11:53 UTC

[incubator-linkis] branch dev-1.3.1 updated: feat: udf-service unit test (#3434)

This is an automated email from the ASF dual-hosted git repository.

casion pushed a commit to branch dev-1.3.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git


The following commit(s) were added to refs/heads/dev-1.3.1 by this push:
     new 4b9a69e8e feat: udf-service unit test (#3434)
4b9a69e8e is described below

commit 4b9a69e8e1b99bff6102dc23d2f84ebb2ca2d9f1
Author: ruY <43...@users.noreply.github.com>
AuthorDate: Sat Sep 17 15:11:46 2022 +0800

    feat: udf-service unit test (#3434)
    
    * feat: udf-service unit test
    
    * feat: remove personal information and add query verification for insert/update tests
---
 .../src/test/java/org/apache/linkis/udf/Scan.java  |  26 ++
 .../apache/linkis/udf/WebApplicationServer.java    |  34 ++
 .../apache/linkis/udf/api/UDFRestfulApiTest.java   | 445 +++++++++++++++++++++
 .../org/apache/linkis/udf/dao/BaseDaoTest.java     |  31 ++
 .../java/org/apache/linkis/udf/dao/UDFDaoTest.java | 341 ++++++++++++++++
 .../org/apache/linkis/udf/dao/UDFTreeDaoTest.java  | 127 ++++++
 .../apache/linkis/udf/dao/UDFVersionDaoTest.java   | 148 +++++++
 .../apache/linkis/udf/service/UDFServiceTest.java  | 251 ++++++++++++
 .../linkis/udf/service/UDFTreeServiceTest.java     | 126 ++++++
 .../src/test/resources/application.properties      |  62 +++
 .../src/test/resources/create.sql                  |  88 ++++
 .../linkis-udf-service/src/test/resources/data.sql |  61 +++
 .../src/test/resources/linkis.properties           |  21 +
 .../linkis/udf/utils/UdfConfigurationTest.scala    |  38 ++
 14 files changed, 1799 insertions(+)
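
For readers skimming the diff: the REST tests added here all follow the same MockMvc pattern -- boot the test
context with WebApplicationServer and Scan, replace the service layer with @MockBean mocks, call the endpoint,
deserialize the Linkis Message envelope, and assert SUCCESS. The following is a minimal sketch of that pattern,
not part of the commit itself; the class name UdfEndpointSketchTest is illustrative, all other names are taken
from the files in the diff below.

    package org.apache.linkis.udf.api;

    import org.apache.linkis.common.utils.JsonUtils;
    import org.apache.linkis.server.Message;
    import org.apache.linkis.server.MessageStatus;
    import org.apache.linkis.udf.Scan;
    import org.apache.linkis.udf.WebApplicationServer;
    import org.apache.linkis.udf.service.UDFService;

    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
    import org.springframework.boot.test.context.SpringBootTest;
    import org.springframework.boot.test.mock.mockito.MockBean;
    import org.springframework.http.MediaType;
    import org.springframework.test.web.servlet.MockMvc;
    import org.springframework.test.web.servlet.MvcResult;

    import org.junit.jupiter.api.Test;

    import static org.junit.jupiter.api.Assertions.assertEquals;
    import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
    import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

    @SpringBootTest(classes = {WebApplicationServer.class, Scan.class})
    @AutoConfigureMockMvc
    public class UdfEndpointSketchTest {

      @Autowired private MockMvc mockMvc;

      // Mock the controller's collaborators so only the web layer is exercised.
      @MockBean private UDFService udfService;

      @Test
      public void endpointReturnsSuccessMessage() throws Exception {
        // Call the endpoint, then unwrap the Linkis Message envelope and check its status.
        MvcResult result =
            mockMvc
                .perform(post("/udf/all").contentType(MediaType.APPLICATION_JSON))
                .andExpect(status().isOk())
                .andReturn();

        Message res =
            JsonUtils.jackson().readValue(result.getResponse().getContentAsString(), Message.class);
        assertEquals(MessageStatus.SUCCESS(), res.getStatus());
      }
    }

The DAO tests below follow an analogous pattern, but run against an in-memory H2 database that is created from
create.sql and seeded from data.sql via the test application.properties at the end of the diff.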

diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/Scan.java b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/Scan.java
new file mode 100644
index 000000000..270d5c6e0
--- /dev/null
+++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/Scan.java
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.udf;
+
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+
+import org.mybatis.spring.annotation.MapperScan;
+
+@EnableAutoConfiguration
+@MapperScan("org.apache.linkis.udf")
+public class Scan {}
diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/WebApplicationServer.java b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/WebApplicationServer.java
new file mode 100644
index 000000000..1b8a1768d
--- /dev/null
+++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/WebApplicationServer.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.udf;
+
+import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
+import org.springframework.boot.builder.SpringApplicationBuilder;
+import org.springframework.boot.web.servlet.ServletComponentScan;
+import org.springframework.boot.web.servlet.support.SpringBootServletInitializer;
+import org.springframework.context.annotation.ComponentScan;
+
+@EnableAutoConfiguration
+@ServletComponentScan
+@ComponentScan
+public class WebApplicationServer extends SpringBootServletInitializer {
+
+  public static void main(String[] args) {
+    new SpringApplicationBuilder(WebApplicationServer.class).run(args);
+  }
+}
diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/api/UDFRestfulApiTest.java b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/api/UDFRestfulApiTest.java
new file mode 100644
index 000000000..627acdc86
--- /dev/null
+++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/api/UDFRestfulApiTest.java
@@ -0,0 +1,445 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.udf.api;
+
+import org.apache.linkis.common.utils.JsonUtils;
+import org.apache.linkis.publicservice.common.lock.service.CommonLockService;
+import org.apache.linkis.server.Message;
+import org.apache.linkis.server.MessageStatus;
+import org.apache.linkis.udf.Scan;
+import org.apache.linkis.udf.WebApplicationServer;
+import org.apache.linkis.udf.dao.UDFDao;
+import org.apache.linkis.udf.dao.UDFTreeDao;
+import org.apache.linkis.udf.entity.UDFInfo;
+import org.apache.linkis.udf.entity.UDFTree;
+import org.apache.linkis.udf.service.UDFService;
+import org.apache.linkis.udf.service.UDFTreeService;
+import org.apache.linkis.udf.vo.UDFAddVo;
+import org.apache.linkis.udf.vo.UDFUpdateVo;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.boot.test.mock.mockito.MockBean;
+import org.springframework.http.MediaType;
+import org.springframework.test.context.junit.jupiter.SpringExtension;
+import org.springframework.test.web.servlet.MockMvc;
+import org.springframework.test.web.servlet.MvcResult;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+
+import com.github.pagehelper.PageInfo;
+import com.google.gson.Gson;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.Mockito;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
+import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content;
+import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
+
+@ExtendWith(SpringExtension.class)
+@SpringBootTest(classes = {WebApplicationServer.class, Scan.class})
+@AutoConfigureMockMvc
+public class UDFRestfulApiTest {
+
+  private static final Logger LOG = LoggerFactory.getLogger(UDFRestfulApiTest.class);
+
+  @Autowired protected MockMvc mockMvc;
+
+  @MockBean(name = "udfService")
+  private UDFService udfService;
+
+  @MockBean(name = "udfTreeService")
+  private UDFTreeService udfTreeService;
+
+  @MockBean(name = "CommonLockService")
+  private CommonLockService commonLockService;
+
+  @MockBean(name = "udfDao")
+  private UDFDao udfDao;
+
+  @MockBean(name = "udfTreeDao")
+  private UDFTreeDao udfTreeDao;
+
+  @Test
+  @DisplayName("allUDFTest")
+  public void allUDFTest() throws Exception {
+
+    MvcResult mvcResult =
+        mockMvc
+            .perform(post("/udf/all"))
+            .andExpect(status().isOk())
+            .andExpect(content().contentType(MediaType.APPLICATION_JSON))
+            .andReturn();
+
+    Message res =
+        JsonUtils.jackson().readValue(mvcResult.getResponse().getContentAsString(), Message.class);
+
+    assertEquals(MessageStatus.SUCCESS(), res.getStatus());
+    LOG.info(mvcResult.getResponse().getContentAsString());
+
+    Map<String, Object> param = new HashMap<>();
+    param.put("type", "self");
+    param.put("treeId", -1);
+    param.put("category", "all");
+    String jsonString = new Gson().toJson(param);
+
+    Mockito.when(
+            udfTreeService.getTreeById(
+                Mockito.anyLong(), Mockito.anyString(), Mockito.anyString(), Mockito.anyString()))
+        .thenReturn(new UDFTree());
+
+    mvcResult =
+        mockMvc
+            .perform(post("/udf/all").param("jsonString", jsonString))
+            .andExpect(status().isOk())
+            .andExpect(content().contentType(MediaType.APPLICATION_JSON))
+            .andReturn();
+    res =
+        JsonUtils.jackson().readValue(mvcResult.getResponse().getContentAsString(), Message.class);
+
+    assertEquals(MessageStatus.SUCCESS(), res.getStatus());
+    LOG.info(mvcResult.getResponse().getContentAsString());
+  }
+
+  @Test
+  @DisplayName("listUDFTest")
+  public void listUDFTest() throws Exception {
+
+    Map<String, Object> json = new HashMap<>();
+    json.put("type", "self");
+    json.put("treeId", -1);
+    json.put("category", "all");
+
+    MvcResult mvcResult =
+        mockMvc
+            .perform(post("/udf/list").contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().is4xxClientError())
+            .andReturn();
+
+    Assertions.assertTrue(mvcResult.getResponse().getStatus() == 400);
+
+    mvcResult =
+        mockMvc
+            .perform(
+                post("/udf/list")
+                    .content(new Gson().toJson(json))
+                    .contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().isOk())
+            .andReturn();
+
+    Message res =
+        JsonUtils.jackson().readValue(mvcResult.getResponse().getContentAsString(), Message.class);
+
+    assertEquals(MessageStatus.SUCCESS(), res.getStatus());
+    LOG.info(mvcResult.getResponse().getContentAsString());
+  }
+
+  @Test
+  @DisplayName("addUDFTest")
+  public void addUDFTest() throws Exception {
+
+    UDFAddVo udfAddVo = new UDFAddVo();
+    udfAddVo.setCreateUser("hadoop");
+    udfAddVo.setUdfName("test");
+    udfAddVo.setUdfType(3);
+    udfAddVo.setTreeId(13L);
+    udfAddVo.setSys("IDE");
+    udfAddVo.setClusterName("all");
+    Map<String, UDFAddVo> paramMap = new HashMap<>();
+    paramMap.put("udfAddVo", udfAddVo);
+
+    MvcResult mvcResult =
+        mockMvc
+            .perform(post("/udf/add").contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().is4xxClientError())
+            .andReturn();
+    Assertions.assertTrue(mvcResult.getResponse().getStatus() == 400);
+
+    mvcResult =
+        mockMvc
+            .perform(
+                post("/udf/add")
+                    .content(new Gson().toJson(paramMap))
+                    .contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().isOk())
+            .andReturn();
+
+    Message res =
+        JsonUtils.jackson().readValue(mvcResult.getResponse().getContentAsString(), Message.class);
+
+    assertEquals(MessageStatus.SUCCESS(), res.getStatus());
+    LOG.info(mvcResult.getResponse().getContentAsString());
+  }
+
+  @Test
+  @DisplayName("updateUDFTest")
+  public void updateUDFTest() throws Exception {
+
+    UDFUpdateVo udfUpdateVo = new UDFUpdateVo();
+    udfUpdateVo.setId(3L);
+    udfUpdateVo.setUdfName("test");
+    udfUpdateVo.setUdfType(3);
+    udfUpdateVo.setPath("file:///home/hadoop/logs/linkis/hadoop/baoyang/udf/scalaUdf.scala");
+
+    MvcResult mvcResult =
+        mockMvc
+            .perform(post("/udf/update").contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().is4xxClientError())
+            .andReturn();
+    Assertions.assertTrue(mvcResult.getResponse().getStatus() == 400);
+
+    mvcResult =
+        mockMvc
+            .perform(
+                post("/udf/update")
+                    .content(new Gson().toJson(udfUpdateVo))
+                    .contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().isOk())
+            .andReturn();
+    Message res =
+        JsonUtils.jackson().readValue(mvcResult.getResponse().getContentAsString(), Message.class);
+    assertEquals(MessageStatus.SUCCESS(), res.getStatus());
+    LOG.info(mvcResult.getResponse().getContentAsString());
+  }
+
+  @Test
+  @DisplayName("deleteUDFTest")
+  public void deleteUDFTest() throws Exception {
+
+    MvcResult mvcResult =
+        mockMvc
+            .perform(post("/udf/delete").contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().is4xxClientError())
+            .andReturn();
+    Assertions.assertTrue(mvcResult.getResponse().getStatus() == 404);
+
+    UDFInfo udfInfo = new UDFInfo();
+    udfInfo.setCreateUser("hadoop");
+    Mockito.when(udfService.getUDFById(Mockito.anyLong(), Mockito.anyString())).thenReturn(udfInfo);
+
+    Long id = 3L;
+    mvcResult =
+        mockMvc
+            .perform(post("/udf/delete/{id}", id).contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().isOk())
+            .andReturn();
+
+    Message res =
+        JsonUtils.jackson().readValue(mvcResult.getResponse().getContentAsString(), Message.class);
+    assertEquals(MessageStatus.SUCCESS(), res.getStatus());
+    LOG.info(mvcResult.getResponse().getContentAsString());
+  }
+
+  @Test
+  @DisplayName("publishUDFTest")
+  public void publishUDFTest() throws Exception {
+
+    MvcResult mvcResult =
+        mockMvc
+            .perform(post("/udf/publish").contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().is4xxClientError())
+            .andReturn();
+    Assertions.assertTrue(mvcResult.getResponse().getStatus() == 400);
+
+    Map<String, Object> param = new HashMap<>();
+    param.put("udfId", 3L);
+    param.put("version", "v000001");
+
+    UDFInfo udfInfo = new UDFInfo();
+    udfInfo.setCreateUser("hadoop");
+    Mockito.when(udfService.getUDFById(Mockito.anyLong(), Mockito.anyString())).thenReturn(udfInfo);
+    Mockito.when(udfService.isUDFManager("hadoop")).thenReturn(true);
+
+    mvcResult =
+        mockMvc
+            .perform(
+                post("/udf/publish")
+                    .content(new Gson().toJson(param))
+                    .contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().isOk())
+            .andReturn();
+    Message res =
+        JsonUtils.jackson().readValue(mvcResult.getResponse().getContentAsString(), Message.class);
+    assertEquals(MessageStatus.SUCCESS(), res.getStatus());
+    LOG.info(mvcResult.getResponse().getContentAsString());
+  }
+
+  @Test
+  @DisplayName("rollbackUDFTest")
+  public void rollbackUDFTest() throws Exception {
+
+    MvcResult mvcResult =
+        mockMvc
+            .perform(post("/udf/rollback").contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().is4xxClientError())
+            .andReturn();
+    Assertions.assertTrue(mvcResult.getResponse().getStatus() == 400);
+
+    Map<String, Object> param = new HashMap<>();
+    param.put("udfId", 3L);
+    param.put("version", "v000001");
+
+    UDFInfo udfInfo = new UDFInfo();
+    udfInfo.setCreateUser("hadoop");
+    Mockito.when(udfService.getUDFById(Mockito.anyLong(), Mockito.anyString())).thenReturn(udfInfo);
+
+    mvcResult =
+        mockMvc
+            .perform(
+                post("/udf/rollback")
+                    .content(new Gson().toJson(param))
+                    .contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().isOk())
+            .andReturn();
+    Message res =
+        JsonUtils.jackson().readValue(mvcResult.getResponse().getContentAsString(), Message.class);
+    assertEquals(MessageStatus.SUCCESS(), res.getStatus());
+    LOG.info(mvcResult.getResponse().getContentAsString());
+  }
+
+  @Test
+  @DisplayName("versionListTest")
+  public void versionListTest() throws Exception {
+
+    MvcResult mvcResult =
+        mockMvc
+            .perform(get("/udf/versionList").contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().is4xxClientError())
+            .andReturn();
+    Assertions.assertTrue(mvcResult.getResponse().getStatus() == 400);
+
+    mvcResult =
+        mockMvc
+            .perform(
+                get("/udf/versionList").contentType(MediaType.APPLICATION_JSON).param("udfId", "3"))
+            .andExpect(status().isOk())
+            .andReturn();
+    Message res =
+        JsonUtils.jackson().readValue(mvcResult.getResponse().getContentAsString(), Message.class);
+    assertEquals(MessageStatus.SUCCESS(), res.getStatus());
+    LOG.info(mvcResult.getResponse().getContentAsString());
+  }
+
+  @Test
+  @DisplayName("managerPagesTest")
+  public void managerPagesTest() throws Exception {
+
+    MvcResult mvcResult =
+        mockMvc
+            .perform(post("/udf/managerPages").contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().is4xxClientError())
+            .andReturn();
+    Assertions.assertTrue(mvcResult.getResponse().getStatus() == 400);
+
+    HashMap<String, Object> paramMap = new HashMap<>();
+    paramMap.put("udfName", "test");
+    paramMap.put("udfType", "3");
+    paramMap.put("createUser", "hadoop");
+    paramMap.put("curPage", 0);
+    paramMap.put("pageSize", 10);
+
+    PageInfo<UDFAddVo> pageInfo = new PageInfo<>();
+    pageInfo.setList(new ArrayList<>());
+    pageInfo.setPages(10);
+    pageInfo.setTotal(100);
+    Mockito.when(
+            udfService.getManagerPages(
+                Mockito.anyString(),
+                Mockito.anyCollection(),
+                Mockito.anyString(),
+                Mockito.anyInt(),
+                Mockito.anyInt()))
+        .thenReturn(pageInfo);
+    mvcResult =
+        mockMvc
+            .perform(
+                post("/udf/managerPages")
+                    .content(new Gson().toJson(paramMap))
+                    .contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().isOk())
+            .andReturn();
+    Message res =
+        JsonUtils.jackson().readValue(mvcResult.getResponse().getContentAsString(), Message.class);
+    assertEquals(MessageStatus.SUCCESS(), res.getStatus());
+    LOG.info(mvcResult.getResponse().getContentAsString());
+  }
+
+  @Test
+  @DisplayName("allUdfUsersTest")
+  public void allUdfUsersTest() throws Exception {
+
+    MvcResult mvcResult =
+        mockMvc
+            .perform(get("/udf/allUdfUsers").contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().is4xxClientError())
+            .andReturn();
+    Assertions.assertTrue(mvcResult.getResponse().getStatus() == 400);
+
+    HashMap<String, Object> paramMap = new HashMap<>();
+    paramMap.put("userName", "hadoop");
+
+    mvcResult =
+        mockMvc
+            .perform(
+                get("/udf/allUdfUsers")
+                    .content(new Gson().toJson(paramMap))
+                    .contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().isOk())
+            .andReturn();
+    Message res =
+        JsonUtils.jackson().readValue(mvcResult.getResponse().getContentAsString(), Message.class);
+    assertEquals(MessageStatus.SUCCESS(), res.getStatus());
+    LOG.info(mvcResult.getResponse().getContentAsString());
+  }
+
+  @Test
+  @DisplayName("getUserDirectoryTest")
+  public void getUserDirectoryTest() throws Exception {
+
+    MvcResult mvcResult =
+        mockMvc
+            .perform(get("/udf/userDirectory").contentType(MediaType.APPLICATION_JSON))
+            .andExpect(status().is4xxClientError())
+            .andReturn();
+    Assertions.assertTrue(mvcResult.getResponse().getStatus() == 400);
+
+    String category = "function";
+    mvcResult =
+        mockMvc
+            .perform(
+                get("/udf/userDirectory")
+                    .contentType(MediaType.APPLICATION_JSON)
+                    .param("category", category))
+            .andExpect(status().isOk())
+            .andReturn();
+    Message res =
+        JsonUtils.jackson().readValue(mvcResult.getResponse().getContentAsString(), Message.class);
+    assertEquals(MessageStatus.SUCCESS(), res.getStatus());
+    LOG.info(mvcResult.getResponse().getContentAsString());
+  }
+}
diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/BaseDaoTest.java b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/BaseDaoTest.java
new file mode 100644
index 000000000..bc7dfef51
--- /dev/null
+++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/BaseDaoTest.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.udf.dao;
+
+import org.apache.linkis.udf.Scan;
+
+import org.springframework.boot.test.context.SpringBootTest;
+import org.springframework.test.annotation.Rollback;
+import org.springframework.transaction.annotation.EnableTransactionManagement;
+import org.springframework.transaction.annotation.Transactional;
+
+@SpringBootTest(classes = Scan.class)
+@Transactional
+@Rollback(true)
+@EnableTransactionManagement
+public abstract class BaseDaoTest {}
diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/UDFDaoTest.java b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/UDFDaoTest.java
new file mode 100644
index 000000000..3e4168bad
--- /dev/null
+++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/UDFDaoTest.java
@@ -0,0 +1,341 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.udf.dao;
+
+import org.apache.linkis.udf.entity.UDFInfo;
+import org.apache.linkis.udf.entity.UDFManager;
+import org.apache.linkis.udf.vo.UDFAddVo;
+import org.apache.linkis.udf.vo.UDFInfoVo;
+
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.List;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class UDFDaoTest extends BaseDaoTest {
+
+  private static final Logger LOG = LoggerFactory.getLogger(UDFDaoTest.class);
+
+  @Autowired private UDFDao udfDao;
+
+  @Test
+  @DisplayName("addUDFTest")
+  public void addUDFTest() {
+    UDFInfo udfInfo = new UDFInfo();
+    udfInfo.setId(6L);
+    udfInfo.setCreateUser("hadoop");
+    udfInfo.setUdfType(3);
+    udfInfo.setTreeId(13L);
+    udfInfo.setCreateTime(new Date());
+    udfInfo.setUpdateTime(new Date());
+    udfInfo.setSys("IDE");
+    udfInfo.setClusterName("ALL");
+    udfInfo.setUdfName("udfNameTest");
+    udfInfo.setExpire(null);
+    udfInfo.setShared(null);
+
+    udfDao.addUDF(udfInfo);
+    UDFInfo info = udfDao.getUDFById(6L);
+
+    Assertions.assertNotNull(info);
+  }
+
+  @Test
+  @DisplayName("updateUDFTest")
+  public void updateUDFTest() {
+
+    UDFInfo udfInfo = new UDFInfo();
+    udfInfo.setId(4L);
+    udfInfo.setCreateUser("hadoops");
+    udfInfo.setUdfName("updateTest");
+    udfInfo.setUdfType(3);
+    udfInfo.setTreeId(13L);
+    udfInfo.setUpdateTime(new Date());
+
+    udfDao.updateUDF(udfInfo);
+    UDFInfo info = udfDao.getUDFById(4L);
+
+    Assertions.assertEquals(udfInfo.getCreateUser(), info.getCreateUser());
+  }
+
+  @Test
+  @DisplayName("deleteUDFTest")
+  public void deleteUDFTest() {
+    udfDao.deleteUDF(4L, "hadoop");
+    UDFInfo info = udfDao.getUDFById(4L);
+    Assertions.assertNull(info);
+  }
+
+  @Test
+  @DisplayName("getUDFByIdTest")
+  public void getUDFByIdTest() {
+    UDFInfo udfInfo = udfDao.getUDFById(4L);
+    Assertions.assertNotNull(udfInfo);
+  }
+
+  @Test
+  @DisplayName("deleteLoadInfoTest")
+  public void deleteLoadInfoTest() {
+    udfDao.deleteLoadInfo(3L, "hadoop");
+    List<Long> udfIds = udfDao.getLoadedUDFIds("hadoop");
+
+    Assertions.assertTrue(udfIds.size() == 0);
+  }
+
+  @Test
+  @DisplayName("addLoadInfoTest")
+  public void addLoadInfoTest() {
+    udfDao.addLoadInfo(7L, "hadoops");
+
+    List<Long> udfIds = udfDao.getLoadedUDFIds("hadoops");
+    Assertions.assertTrue(udfIds.size() == 1);
+  }
+
+  @Test
+  @DisplayName("getUDFSByUserNameTest")
+  public void getUDFSByUserNameTest() {
+    List<UDFInfo> udfInfoList = udfDao.getUDFSByUserName("hadoop");
+    Assertions.assertTrue(udfInfoList.size() > 0);
+  }
+
+  @Test
+  @DisplayName("getUDFSByTreeIdAndUserTest")
+  public void getUDFSByTreeIdAndUserTest() {
+    Collection<Integer> categoryCodes = new ArrayList<>();
+    categoryCodes.add(3);
+    categoryCodes.add(4);
+    List<UDFInfoVo> udfInfoVoList = udfDao.getUDFSByTreeIdAndUser(13L, "hadoop", categoryCodes);
+    Assertions.assertTrue(udfInfoVoList.size() == 2);
+  }
+
+  @Test
+  @DisplayName("getUDFSByUsersTest")
+  public void getUDFSByUsersTest() {
+    Collection<String> users = new ArrayList<>();
+    users.add("hadoop");
+    List<UDFInfoVo> udfInfoVoList = udfDao.getUDFSByUsers(users);
+    Assertions.assertTrue(udfInfoVoList.size() == 4);
+  }
+
+  @Test
+  @DisplayName("getSharedUDFByUserTest")
+  public void getSharedUDFByUserTest() {
+    List<UDFInfoVo> udfInfoVos = udfDao.getSharedUDFByUser("hadoop");
+    Assertions.assertTrue(udfInfoVos.size() == 1);
+  }
+
+  @Test
+  @DisplayName("getUDFInfoByTreeIdTest")
+  public void getUDFInfoByTreeIdTest() {
+    Collection<Integer> categoryCodes = new ArrayList<>();
+    categoryCodes.add(3);
+    categoryCodes.add(4);
+    Exception exception =
+        Assertions.assertThrows(
+            Exception.class, () -> udfDao.getUDFInfoByTreeId(13L, "hadoop", categoryCodes));
+    LOG.info("assertThrows pass, the error message: {} ", exception.getMessage());
+  }
+
+  @Test
+  @DisplayName("getLoadedUDFsTest")
+  public void getLoadedUDFsTest() {
+
+    Exception exception =
+        Assertions.assertThrows(Exception.class, () -> udfDao.getLoadedUDFs("hadoop"));
+    LOG.info("assertThrows pass, the error message: {} ", exception.getMessage());
+  }
+
+  @Test
+  @DisplayName("getLoadedUDFIdsTest")
+  public void getLoadedUDFIdsTest() {
+    List<Long> loadedUDFIds = udfDao.getLoadedUDFIds("hadoop");
+    Assertions.assertTrue(loadedUDFIds.size() == 1);
+  }
+
+  @Test
+  @DisplayName("getSameLoadCountTest")
+  public void getSameLoadCountTest() {
+
+    long loadCount = udfDao.getSameLoadCount("hadoop", "test");
+    Assertions.assertTrue(loadCount == 1L);
+  }
+
+  @Test
+  @DisplayName("getSameJarUDFTest")
+  public void getSameJarUDFTest() {
+    Exception exception =
+        Assertions.assertThrows(
+            Exception.class,
+            () ->
+                udfDao.getSameJarUDF(
+                    "hadoop", "file:///home/hadoop/logs/linkis/hadoop/hadoops/udf/udfPy.py"));
+    LOG.info("assertThrows pass, the error message: {} ", exception.getMessage());
+  }
+
+  @Test
+  @DisplayName("getSameNameCountByUserTest")
+  public void getSameNameCountByUserTest() {
+
+    long counts = udfDao.getSameNameCountByUser("test", "hadoop");
+    Assertions.assertTrue(counts == 1L);
+  }
+
+  @Test
+  @DisplayName("selectSharedUDFInfosByTreeIdAndUserNameTest")
+  public void selectSharedUDFInfosByTreeIdAndUserNameTest() {
+    Collection<Integer> categoryCodes = new ArrayList<>();
+    categoryCodes.add(3);
+    categoryCodes.add(4);
+    Exception exception =
+        Assertions.assertThrows(
+            Exception.class,
+            () -> udfDao.selectSharedUDFInfosByTreeIdAndUserName(10L, "hadoop", null));
+    LOG.info("assertThrows pass, the error message: {} ", exception.getMessage());
+  }
+
+  @Test
+  @DisplayName("selectUDFManagerTest")
+  public void selectUDFManagerTest() {
+    UDFManager udfManager = udfDao.selectUDFManager("hadoop");
+    Assertions.assertNotNull(udfManager);
+  }
+
+  @Test
+  @DisplayName("selectAllUserTest")
+  public void selectAllUserTest() {
+    List<String> allUser = udfDao.selectAllUser();
+    Assertions.assertTrue(allUser.size() > 0);
+  }
+
+  @Test
+  @DisplayName("getShareSameNameCountByUserTest")
+  public void getShareSameNameCountByUserTest() {
+    long count = udfDao.getShareSameNameCountByUser("test", "hadoop");
+    Assertions.assertTrue(count == 1L);
+  }
+
+  @Test
+  @DisplayName("getShareSameNameCountExcludeUserTest")
+  public void getShareSameNameCountExcludeUserTest() {
+    long count = udfDao.getShareSameNameCountExcludeUser("test", "hadoop", "baoyang");
+    Assertions.assertTrue(count == 1L);
+  }
+
+  @Test
+  @DisplayName("insertUDFSharedUserTest")
+  public void insertUDFSharedUserTest() {
+    udfDao.insertUDFSharedUser(4L, "hadoop");
+
+    long sharedCount = udfDao.getSharedUserCountByUdfId(4L);
+    Assertions.assertTrue(sharedCount == 1L);
+  }
+
+  @Test
+  @DisplayName("updateUDFIsSharedTest")
+  public void updateUDFIsSharedTest() {
+
+    udfDao.updateUDFIsShared(true, 3L);
+    UDFInfo udf = udfDao.getUDFById(3L);
+    Assertions.assertTrue(udf.getShared().booleanValue());
+  }
+
+  @Test
+  @DisplayName("selectAllShareUDFInfoIdByUDFIdTest")
+  public void selectAllShareUDFInfoIdByUDFIdTest() {
+
+    Long udfId = udfDao.selectAllShareUDFInfoIdByUDFId("hadoop", "test");
+
+    Assertions.assertNotNull(udfId);
+  }
+
+  @Test
+  @DisplayName("insertSharedUserTest")
+  public void insertSharedUserTest() {
+    udfDao.insertSharedUser("hadoops", 4L);
+    long udfId = udfDao.getSharedUserCountByUdfId(4L);
+
+    Assertions.assertNotNull(udfId);
+  }
+
+  @Test
+  @DisplayName("deleteSharedUserTest")
+  public void deleteSharedUserTest() {
+    udfDao.deleteSharedUser("hadoop", 3L);
+    long udfId = udfDao.getSharedUserCountByUdfId(3L);
+    Assertions.assertTrue(udfId == 0L);
+  }
+
+  @Test
+  @DisplayName("deleteAllSharedUserTest")
+  public void deleteAllSharedUserTest() {
+    udfDao.deleteAllSharedUser(3L);
+    long udfId = udfDao.getSharedUserCountByUdfId(3L);
+    Assertions.assertTrue(udfId == 0L);
+  }
+
+  @Test
+  @DisplayName("getSharedUserCountByUdfIdTest")
+  public void getSharedUserCountByUdfIdTest() {
+    long counts = udfDao.getSharedUserCountByUdfId(3L);
+    Assertions.assertTrue(counts == 1L);
+  }
+
+  @Test
+  @DisplayName("getUserLoadCountByUdfIdTest")
+  public void getUserLoadCountByUdfIdTest() {
+    long count = udfDao.getUserLoadCountByUdfId(3L, "baoyang");
+    Assertions.assertTrue(count == 1L);
+  }
+
+  @Test
+  @DisplayName("updateLoadUserTest")
+  public void updateLoadUserTest() {
+    udfDao.updateLoadUser(3L, "hadoop", "hadoops");
+    long udfCount = udfDao.getUserLoadCountByUdfId(3L, "hadoop");
+    Assertions.assertTrue(udfCount == 1L);
+  }
+
+  @Test
+  @DisplayName("getUdfInfoByPagesTest")
+  public void getUdfInfoByPagesTest() {
+    Collection<Integer> udfTypes = new ArrayList<>();
+    udfTypes.add(3);
+    udfTypes.add(4);
+    List<UDFAddVo> udfAddVos = udfDao.getUdfInfoByPages("test", udfTypes, "hadoop");
+    Assertions.assertTrue(udfAddVos.size() > 0);
+  }
+
+  @Test
+  public void getLatesetPublishedUDF() {
+    Collection<Integer> udfTypes = new ArrayList<>();
+    udfTypes.add(3);
+    udfTypes.add(4);
+    Exception exception =
+        Assertions.assertThrows(
+            Exception.class, () -> udfDao.getLatesetPublishedUDF("hadoop", udfTypes));
+    LOG.info("assertThrows pass, the error message: {} ", exception.getMessage());
+  }
+}
diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/UDFTreeDaoTest.java b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/UDFTreeDaoTest.java
new file mode 100644
index 000000000..3e48c5bab
--- /dev/null
+++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/UDFTreeDaoTest.java
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.udf.dao;
+
+import org.apache.linkis.udf.entity.UDFTree;
+
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+
+public class UDFTreeDaoTest extends BaseDaoTest {
+
+  @Autowired private UDFTreeDao udfTreeDao;
+
+  @Test
+  @DisplayName("addTreeTest")
+  public void addTreeTest() {
+
+    UDFTree udfTree = new UDFTree();
+    udfTree.setId(15L);
+    udfTree.setParent(10L);
+    udfTree.setName("jarTest");
+    udfTree.setUserName("hadoop");
+    udfTree.setDescription("test descs");
+    udfTree.setCreateTime(new Date());
+    udfTree.setUpdateTime(new Date());
+    udfTree.setCategory("function");
+    udfTreeDao.addTree(udfTree);
+
+    UDFTree tree = udfTreeDao.getTreeById(15L);
+    Assertions.assertNotNull(tree);
+  }
+
+  @Test
+  @DisplayName("updateTreeTest")
+  public void updateTreeTest() {
+
+    UDFTree udfTree = new UDFTree();
+    udfTree.setId(13L);
+    udfTree.setParent(10L);
+    udfTree.setName("udfTreeUpdates");
+    udfTree.setUserName("hadoop");
+    udfTree.setDescription("test descs");
+    udfTree.setUpdateTime(new Date());
+    udfTreeDao.updateTree(udfTree);
+
+    UDFTree tree = udfTreeDao.getTreeById(13L);
+    Assertions.assertEquals(udfTree.getName(), tree.getName());
+  }
+
+  @Test
+  @DisplayName("deleteTreeTest")
+  public void deleteTreeTest() {
+
+    udfTreeDao.deleteTree(13L, "hadoop");
+    UDFTree tree = udfTreeDao.getTreeById(13L);
+    Assertions.assertNull(tree);
+  }
+
+  @Test
+  @DisplayName("getTreeByIdAndCategoryTest")
+  public void getTreeByIdAndCategoryTest() {
+    UDFTree udfTree = udfTreeDao.getTreeByIdAndCategory(13L, "function");
+    Assertions.assertNotNull(udfTree);
+  }
+
+  @Test
+  @DisplayName("getTreeByIdAndCategoryAndUserNameTest")
+  public void getTreeByIdAndCategoryAndUserNameTest() {
+    UDFTree udfTree = udfTreeDao.getTreeByIdAndCategoryAndUserName(13L, "function", "hadoop");
+    Assertions.assertNotNull(udfTree);
+  }
+
+  @Test
+  @DisplayName("getTreeByIdTest")
+  public void getTreeByIdTest() {
+    UDFTree udfTree = udfTreeDao.getTreeById(13L);
+    Assertions.assertNotNull(udfTree);
+  }
+
+  @Test
+  @DisplayName("getTreesByParentIdTest")
+  public void getTreesByParentIdTest() {
+    Map<String, Object> params = new HashMap<>();
+    params.put("parent", 10L);
+    params.put("userName", "hadoop");
+    params.put("category", "function");
+    List<UDFTree> udfTreeList = udfTreeDao.getTreesByParentId(params);
+    Assertions.assertTrue(udfTreeList.size() > 0);
+  }
+
+  @Test
+  @DisplayName("getTreeByNameAndUserTest")
+  public void getTreeByNameAndUserTest() {
+    UDFTree udfTree = udfTreeDao.getTreeByNameAndUser("baoyang", "hadoop", "function");
+    Assertions.assertNotNull(udfTree);
+  }
+
+  @Test
+  @DisplayName("getUserDirectoryTest")
+  public void getUserDirectoryTest() {
+    List<String> userDirectoryList = udfTreeDao.getUserDirectory("hadoop", "function");
+    Assertions.assertTrue(userDirectoryList.size() == 1);
+  }
+}
diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/UDFVersionDaoTest.java b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/UDFVersionDaoTest.java
new file mode 100644
index 000000000..4790a7797
--- /dev/null
+++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/dao/UDFVersionDaoTest.java
@@ -0,0 +1,148 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.udf.dao;
+
+import org.apache.linkis.udf.entity.UDFVersion;
+import org.apache.linkis.udf.vo.UDFVersionVo;
+
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.Date;
+import java.util.List;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class UDFVersionDaoTest extends BaseDaoTest {
+
+  private static final Logger LOG = LoggerFactory.getLogger(UDFVersionDaoTest.class);
+
+  @Autowired private UDFVersionDao udfVersionDao;
+
+  @Test
+  @DisplayName("addUdfVersionTest")
+  public void addUdfVersionTest() {
+    UDFVersion udfVersion = new UDFVersion();
+    udfVersion.setId(99L);
+    udfVersion.setUdfId(1L);
+    udfVersion.setPath("file:///home/hadoop/logs/linkis/hadoop/hadoops/udf/udfPy.py");
+    udfVersion.setBmlResourceId("fe124e5e-4fdd-4509-aa93-10c3748ba34a");
+    udfVersion.setBmlResourceVersion("v000006");
+    udfVersion.setPublished(true);
+    udfVersion.setRegisterFormat("udf.register(\"pyUdfTest\",test)");
+    udfVersion.setUseFormat("int pyUdfTest(api)");
+    udfVersion.setDescription("test it");
+    udfVersion.setCreateTime(new Date());
+    udfVersion.setMd5("0774ebbaef1efae6e7554ad569235d2f");
+    udfVersionDao.addUdfVersion(udfVersion);
+
+    UDFVersion udfIdAndVersion = udfVersionDao.selectByUdfIdAndVersion(1L, "v000006");
+    Assertions.assertNotNull(udfIdAndVersion);
+  }
+
+  @Test
+  @DisplayName("selectLatestByUdfIdTest")
+  public void selectLatestByUdfIdTest() {
+    UDFVersion udfVersion = udfVersionDao.selectLatestByUdfId(1L);
+    Assertions.assertNotNull(udfVersion);
+  }
+
+  @Test
+  @DisplayName("selectByUdfIdAndVersionTest")
+  public void selectByUdfIdAndVersionTest() {
+    UDFVersion udfVersion = udfVersionDao.selectByUdfIdAndVersion(1L, "v000001");
+    Assertions.assertNotNull(udfVersion);
+  }
+
+  @Test
+  @DisplayName("updatePublishStatusTest")
+  public void updatePublishStatusTest() {
+    udfVersionDao.updatePublishStatus(3L, "v000001", false);
+    List<UDFVersionVo> versionVos = udfVersionDao.getAllVersionByUdfId(2L);
+    Assertions.assertTrue(versionVos.size() == 1);
+    Assertions.assertFalse(versionVos.get(0).getPublished());
+  }
+
+  @Test
+  @DisplayName("getAllVersionsTest")
+  public void getAllVersionsTest() {
+    List<UDFVersion> allVersions = udfVersionDao.getAllVersions(1L);
+    Assertions.assertTrue(allVersions.size() > 0);
+  }
+
+  @Test
+  @DisplayName("deleteVersionByUdfIdTest")
+  public void deleteVersionByUdfIdTest() {
+    udfVersionDao.deleteVersionByUdfId(4L);
+    List<UDFVersion> allVersions = udfVersionDao.getAllVersions(4L);
+    Assertions.assertTrue(allVersions.size() == 0);
+  }
+
+  @Test
+  @DisplayName("getSameJarCountTest")
+  public void getSameJarCountTest() {
+    Exception exception =
+        Assertions.assertThrows(
+            Exception.class, () -> udfVersionDao.getSameJarCount("hadoop", "activation.jar"));
+    LOG.info("assertThrows pass, the error message: {} ", exception.getMessage());
+  }
+
+  @Test
+  @DisplayName("getOtherSameJarCountTest")
+  public void getOtherSameJarCountTest() {
+
+    Exception exception =
+        Assertions.assertThrows(
+            Exception.class,
+            () -> udfVersionDao.getOtherSameJarCount("hadoop", "activation.jar", 2L));
+    LOG.info("assertThrows pass, the error message: {} ", exception.getMessage());
+  }
+
+  @Test
+  @DisplayName("updateResourceIdByUdfIdTest")
+  public void updateResourceIdByUdfIdTest() {
+
+    udfVersionDao.updateResourceIdByUdfId(
+        2L, "0de8c361-22ce-4402-bf6f-xxxxxxxxx", "hadoop", "hadoop");
+    List<UDFVersionVo> versionVos = udfVersionDao.getAllVersionByUdfId(2L);
+    Assertions.assertTrue(versionVos.size() == 1);
+
+    Assertions.assertEquals(
+        "0de8c361-22ce-4402-bf6f-xxxxxxxxx", versionVos.get(0).getBmlResourceId());
+  }
+
+  @Test
+  @DisplayName("updateResourceIdByUdfIdTest")
+  public void updateUDFVersionTest() {
+
+    UDFVersion udfVersion = new UDFVersion();
+    udfVersion.setId(3L);
+    udfVersion.setPath("file:///home/hadoop/logs/linkis/hadoop/hadoops/udf/activation.jar");
+    udfVersion.setRegisterFormat("0");
+    udfVersion.setUseFormat("string jarUdf(name)");
+    udfVersion.setDescription("updateTests");
+    udfVersionDao.updateUDFVersion(udfVersion);
+
+    List<UDFVersionVo> versionVos = udfVersionDao.getAllVersionByUdfId(2L);
+    Assertions.assertTrue(versionVos.size() == 1);
+    Assertions.assertEquals("updateTests", versionVos.get(0).getDescription());
+  }
+}
diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/service/UDFServiceTest.java b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/service/UDFServiceTest.java
new file mode 100644
index 000000000..b2cdcb23e
--- /dev/null
+++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/service/UDFServiceTest.java
@@ -0,0 +1,251 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.udf.service;
+
+import org.apache.linkis.publicservice.common.lock.service.CommonLockService;
+import org.apache.linkis.udf.dao.UDFDao;
+import org.apache.linkis.udf.dao.UDFTreeDao;
+import org.apache.linkis.udf.dao.UDFVersionDao;
+import org.apache.linkis.udf.entity.UDFInfo;
+import org.apache.linkis.udf.entity.UDFVersion;
+import org.apache.linkis.udf.service.impl.UDFServiceImpl;
+import org.apache.linkis.udf.vo.UDFAddVo;
+import org.apache.linkis.udf.vo.UDFInfoVo;
+import org.apache.linkis.udf.vo.UDFVersionVo;
+
+import java.util.*;
+
+import com.github.pagehelper.PageInfo;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@ExtendWith(MockitoExtension.class)
+public class UDFServiceTest {
+
+  private static final Logger LOG = LoggerFactory.getLogger(UDFServiceTest.class);
+
+  @InjectMocks private UDFServiceImpl udfServiceImpl;
+
+  @Mock private UDFDao udfDao;
+
+  @Mock private UDFTreeDao udfTreeDao;
+
+  @Mock private UDFVersionDao udfVersionDao;
+
+  @Mock private CommonLockService commonLockService;
+
+  @Test
+  @DisplayName("deleteUDFTest")
+  public void deleteUDFTest() {
+    UDFInfo udfInfo = new UDFInfo();
+    udfInfo.setShared(false);
+    Mockito.when(udfDao.getUDFById(Mockito.anyLong())).thenReturn(udfInfo);
+    Assertions.assertAll(
+        () -> {
+          Boolean deleteUDF = udfServiceImpl.deleteUDF(4L, "hadoop");
+          Assertions.assertTrue(deleteUDF.booleanValue());
+        });
+  }
+
+  @Test
+  @DisplayName("getUDFByIdTest")
+  public void getUDFByIdTest() {
+
+    Assertions.assertAll(
+        () -> {
+          UDFInfo udfInfo = udfServiceImpl.getUDFById(4L, "hadoop");
+          Assertions.assertNull(udfInfo);
+        });
+  }
+
+  @Test
+  @DisplayName("deleteLoadInfoTest")
+  public void deleteLoadInfoTest() {
+
+    Assertions.assertAll(
+        () -> {
+          Boolean deleteLoadInfo = udfServiceImpl.deleteLoadInfo(4L, "hadoop");
+          Assertions.assertTrue(deleteLoadInfo.booleanValue());
+        });
+  }
+
+  @Test
+  @DisplayName("addLoadInfoTest")
+  public void addLoadInfoTest() {
+    UDFInfo udfInfo = new UDFInfo();
+    udfInfo.setUdfType(2);
+    Mockito.when(udfDao.getUDFById(Mockito.anyLong())).thenReturn(udfInfo);
+    Assertions.assertAll(
+        () -> {
+          Boolean addLoadInfo = udfServiceImpl.addLoadInfo(7L, "hadoop");
+          Assertions.assertTrue(addLoadInfo.booleanValue());
+        });
+  }
+
+  @Test
+  @DisplayName("getUDFSByTreeIdAndUserTest")
+  public void getUDFSByTreeIdAndUserTest() {
+    List<UDFInfoVo> udfInfoVoList = udfServiceImpl.getUDFSByTreeIdAndUser(13L, "hadoop", "all");
+    Assertions.assertTrue(udfInfoVoList.size() == 0);
+  }
+
+  @Test
+  @DisplayName("getUDFInfoByTreeIdTest")
+  public void getUDFInfoByTreeIdTest() {
+    List<UDFInfoVo> udfInfoVos = udfServiceImpl.getUDFInfoByTreeId(13L, "hadoop", "all");
+    Assertions.assertTrue(udfInfoVos.size() == 0);
+  }
+
+  @Test
+  @DisplayName("getSharedUDFsTest")
+  public void getSharedUDFsTest() {
+    List<UDFInfoVo> sharedUDFs = udfServiceImpl.getSharedUDFs("hadoop", "all");
+    Assertions.assertTrue(sharedUDFs.size() == 0);
+  }
+
+  @Test
+  @DisplayName("getExpiredUDFsTest")
+  public void getExpiredUDFsTest() {
+    List<UDFInfoVo> expiredUDFs = udfServiceImpl.getExpiredUDFs("hadoop", "all");
+    Assertions.assertTrue(expiredUDFs.size() == 0);
+  }
+
+  @Test
+  @DisplayName("isUDFManagerTest")
+  public void isUDFManagerTest() {
+    Boolean isUdfManager = udfServiceImpl.isUDFManager("hadoop");
+    Assertions.assertFalse(isUdfManager.booleanValue());
+  }
+
+  @Test
+  @DisplayName("setUDFSharedInfoTest")
+  public void setUDFSharedInfoTest() {
+
+    Assertions.assertAll(() -> udfServiceImpl.setUDFSharedInfo(true, 13L));
+  }
+
+  @Test
+  @DisplayName("setUdfExpireTest")
+  public void setUdfExpireTest() {
+    UDFInfo udfInfo = new UDFInfo();
+    udfInfo.setShared(true);
+    Mockito.when(udfDao.getUDFById(Mockito.anyLong())).thenReturn(udfInfo);
+    Mockito.when(udfDao.getUserLoadCountByUdfId(Mockito.anyLong(), Mockito.anyString()))
+        .thenReturn(2L);
+    Assertions.assertAll(
+        () -> {
+          udfServiceImpl.setUdfExpire(13L, "hadoop");
+        });
+  }
+
+  @Test
+  @DisplayName("getAllSharedUsersByUdfIdTest")
+  public void getAllSharedUsersByUdfIdTest() {
+    List<String> users = udfServiceImpl.getAllSharedUsersByUdfId("hadoop", 13L);
+    Assertions.assertTrue(users.size() == 0);
+  }
+
+  @Test
+  @DisplayName("addSharedUserTest")
+  public void addSharedUserTest() {
+    Set<String> sharedUsers = new HashSet<>();
+    sharedUsers.add("tangxr");
+    sharedUsers.add("baoyang");
+    Assertions.assertAll(() -> udfServiceImpl.addSharedUser(sharedUsers, 4L));
+  }
+
+  @Test
+  @DisplayName("publishUdfTest")
+  public void publishUdfTest() {
+    UDFInfo udfInfo = new UDFInfo();
+    udfInfo.setShared(true);
+    Mockito.when(udfDao.getUDFById(Mockito.anyLong())).thenReturn(udfInfo);
+    Assertions.assertAll(
+        () -> {
+          udfServiceImpl.publishUdf(3L, "v000001");
+        });
+  }
+
+  @Test
+  @DisplayName("publishLatestUdfTest")
+  public void publishLatestUdfTest() {
+
+    UDFVersion udfVersion = new UDFVersion();
+    udfVersion.setBmlResourceVersion("v000001");
+    Mockito.when(udfVersionDao.selectLatestByUdfId(Mockito.anyLong())).thenReturn(udfVersion);
+    Assertions.assertAll(
+        () -> {
+          udfServiceImpl.publishLatestUdf(4L);
+        });
+  }
+
+  @Test
+  @DisplayName("getUdfVersionListTest")
+  public void getUdfVersionListTest() {
+
+    List<UDFVersionVo> udfVersionList = udfServiceImpl.getUdfVersionList(4L);
+    Assertions.assertTrue(udfVersionList.size() == 0);
+  }
+
+  @Test
+  @DisplayName("getManagerPagesTest")
+  public void getManagerPagesTest() {
+    Collection<Integer> udfType = new ArrayList<>();
+    udfType.add(3);
+    udfType.add(4);
+    Assertions.assertAll(
+        () -> {
+          PageInfo<UDFAddVo> managerPages =
+              udfServiceImpl.getManagerPages("test", udfType, "hadoop", 0, 10);
+          Assertions.assertTrue(managerPages.getSize() == 0);
+        });
+  }
+
+  @Test
+  @DisplayName("allUdfUsersTest")
+  public void allUdfUsersTest() {
+
+    List<String> users = udfServiceImpl.allUdfUsers();
+    Assertions.assertTrue(users.size() == 0);
+  }
+
+  @Test
+  @DisplayName("getUserDirectoryTest")
+  public void getUserDirectoryTest() {
+    List<String> userDirectory = udfServiceImpl.getUserDirectory("hadoop", "function");
+    Assertions.assertTrue(userDirectory.size() == 0);
+  }
+
+  @Test
+  @DisplayName("getAllUDFSByUserNameTest")
+  public void getAllUDFSByUserNameTest() {
+    Assertions.assertAll(
+        () -> {
+          List<UDFInfoVo> udfs = udfServiceImpl.getAllUDFSByUserName("hadoop");
+          Assertions.assertTrue(udfs.size() == 0);
+        });
+  }
+}
diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/service/UDFTreeServiceTest.java b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/service/UDFTreeServiceTest.java
new file mode 100644
index 000000000..dbeee5d4b
--- /dev/null
+++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/java/org/apache/linkis/udf/service/UDFTreeServiceTest.java
@@ -0,0 +1,126 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.udf.service;
+
+import org.apache.linkis.udf.dao.UDFTreeDao;
+import org.apache.linkis.udf.entity.UDFTree;
+import org.apache.linkis.udf.service.impl.UDFTreeServiceImpl;
+
+import org.apache.commons.collections.map.HashedMap;
+
+import java.util.*;
+
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.DisplayName;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.junit.jupiter.MockitoExtension;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@ExtendWith(MockitoExtension.class)
+public class UDFTreeServiceTest {
+
+  private static final Logger LOG = LoggerFactory.getLogger(UDFTreeServiceTest.class);
+
+  @InjectMocks private UDFTreeServiceImpl udfTreeServiceImpl;
+
+  @Mock private UDFTreeDao udfTreeDao;
+
+  @Test
+  @DisplayName("initTreeTest")
+  public void initTreeTest() {
+
+    UDFTree udfTree = new UDFTree();
+    udfTree.setId(13L);
+    List<UDFTree> first = new ArrayList<>();
+    first.add(udfTree);
+
+    Map<String, Object> params = new HashedMap();
+    params.put("parent", -1L);
+    params.put("userName", "hadoop");
+    params.put("category", "all");
+
+    Mockito.when(udfTreeDao.getTreesByParentId(Mockito.anyMap())).thenReturn(first);
+    Assertions.assertAll(
+        () -> {
+          UDFTree initTree = udfTreeServiceImpl.initTree("hadoop", "all");
+          Assertions.assertNotNull(initTree);
+        });
+  }
+
+  @Test
+  @DisplayName("addTreeTest")
+  public void addTreeTest() {
+    UDFTree udfTree = new UDFTree();
+    udfTree.setId(15L);
+    udfTree.setParent(10L);
+    udfTree.setName("jarTest");
+    udfTree.setUserName("hadoop");
+    udfTree.setDescription("test descs");
+    udfTree.setCreateTime(new Date());
+    udfTree.setUpdateTime(new Date());
+    udfTree.setCategory("function");
+
+    Assertions.assertAll(
+        () -> {
+          udfTreeServiceImpl.addTree(udfTree, "hadoop");
+        });
+  }
+
+  @Test
+  @DisplayName("updateTreeTest")
+  public void updateTreeTest() {
+    UDFTree udfTree = new UDFTree();
+    udfTree.setId(13L);
+    udfTree.setParent(10L);
+    udfTree.setName("udfTreeUpdate");
+    udfTree.setUserName("hadoop");
+    udfTree.setDescription("test descs");
+    udfTree.setUpdateTime(new Date());
+
+    Assertions.assertAll(
+        () -> {
+          udfTreeServiceImpl.updateTree(udfTree, "hadoop");
+        });
+  }
+
+  @Test
+  @DisplayName("deleteTreeTest")
+  public void deleteTreeTest() {
+
+    Assertions.assertAll(
+        () -> {
+          udfTreeServiceImpl.deleteTree(13L, "hadoop");
+        });
+  }
+
+  @Test
+  @DisplayName("getTreeByIdTest")
+  public void getTreeByIdTest() {
+
+    Assertions.assertAll(
+        () -> {
+          UDFTree udfTree = udfTreeServiceImpl.getTreeById(13L, "hadoop", "sys", "all");
+          Assertions.assertNull(udfTree);
+        });
+  }
+}
diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/application.properties b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/application.properties
new file mode 100644
index 000000000..943d6a048
--- /dev/null
+++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/application.properties
@@ -0,0 +1,62 @@
+# 
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#wds.linkis.test.mode=true
+wds.linkis.server.version=v1
+
+#test
+wds.linkis.test.mode=true
+wds.linkis.test.user=hadoop
+
+wds.linkis.is.gateway=true
+wds.linkis.server.web.session.timeout=1h
+wds.linkis.gateway.conf.enable.proxy.user=false
+wds.linkis.gateway.conf.url.pass.auth=/dss/
+wds.linkis.gateway.conf.enable.token.auth=true
+wds.linkis.login_encrypt.enable=false
+#logging.level.root=debug
+#logging.file=./test.log
+#debug=true
+
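+# In-memory H2 datasource in MySQL mode; schema and seed data load from create.sql and data.sql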
+spring.datasource.driver-class-name=org.h2.Driver
+spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true
+spring.datasource.schema=classpath:create.sql
+spring.datasource.data=classpath:data.sql
+spring.datasource.username=sa
+spring.datasource.password=
+spring.datasource.hikari.connection-test-query=select 1
+spring.datasource.hikari.minimum-idle=5
+spring.datasource.hikari.auto-commit=true
+spring.datasource.hikari.validation-timeout=3000
+spring.datasource.hikari.pool-name=linkis-test
+spring.datasource.hikari.maximum-pool-size=50
+spring.datasource.hikari.connection-timeout=30000
+spring.datasource.hikari.idle-timeout=600000
+spring.datasource.hikari.leak-detection-threshold=0
+spring.datasource.hikari.initialization-fail-timeout=1
+
+spring.main.web-application-type=servlet
+server.port=1234
+spring.h2.console.enabled=true
+
+#disable eureka discovery client
+spring.cloud.service-registry.auto-registration.enabled=false
+eureka.client.enabled=false
+eureka.client.serviceUrl.registerWithEureka=false
+
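+# MyBatis-Plus mapper XML locations and entity package used by the UDF DAO tests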
+mybatis-plus.mapper-locations=classpath*:org/apache/linkis/udf/dao/impl/*.xml
+mybatis-plus.type-aliases-package=org.apache.linkis.udf.entity
+mybatis-plus.configuration.log-impl=org.apache.ibatis.logging.stdout.StdOutImpl
+
diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/create.sql b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/create.sql
new file mode 100644
index 000000000..c22d27b52
--- /dev/null
+++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/create.sql
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
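+-- Relax integrity checks so the test tables can be dropped and recreated in any order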
+SET FOREIGN_KEY_CHECKS=0;
+SET REFERENTIAL_INTEGRITY FALSE;
+
+DROP TABLE IF EXISTS linkis_ps_udf_user_load CASCADE;
+CREATE TABLE IF NOT EXISTS linkis_ps_udf_user_load (
+  id bigint(20) NOT NULL AUTO_INCREMENT,
+  udf_id bigint(20) NOT NULL,
+  user_name varchar(50) NOT NULL,
+  PRIMARY KEY (id)
+) ;
+
+DROP TABLE IF EXISTS linkis_ps_udf_baseinfo CASCADE;
+CREATE TABLE IF NOT EXISTS linkis_ps_udf_baseinfo (
+  id numeric(20) NOT NULL AUTO_INCREMENT,
+  create_user varchar(50) NOT NULL,
+  udf_name varchar(255) NOT NULL,
+  udf_type numeric(11) DEFAULT '0',
+  tree_id bigint(20) NOT NULL,
+  create_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  update_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
+  sys varchar(255) NOT NULL DEFAULT 'ide' COMMENT 'source system',
+  cluster_name varchar(255) NOT NULL,
+  is_expire numeric(1) DEFAULT NULL,
+  is_shared numeric(1) DEFAULT NULL,
+  PRIMARY KEY (id)
+) ;
+
+DROP TABLE IF EXISTS linkis_ps_udf_tree CASCADE;
+CREATE TABLE IF NOT EXISTS linkis_ps_udf_tree (
+  id bigint(20) NOT NULL AUTO_INCREMENT,
+  parent bigint(20) NOT NULL,
+  name varchar(100) DEFAULT NULL COMMENT 'Category name of the function. It would be displayed in the front-end',
+  user_name varchar(50) NOT NULL,
+  description varchar(255) DEFAULT NULL,
+  create_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  update_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
+  category varchar(50) DEFAULT NULL COMMENT 'Used to distinguish between udf and function',
+  PRIMARY KEY (id)
+) ;
+
+DROP TABLE IF EXISTS linkis_ps_udf_version CASCADE;
+CREATE TABLE IF NOT EXISTS linkis_ps_udf_version (
+  id bigint(20) NOT NULL AUTO_INCREMENT,
+  udf_id bigint(20) NOT NULL,
+  path varchar(255) NOT NULL COMMENT 'Source path for uploading files',
+  bml_resource_id varchar(50) NOT NULL,
+  bml_resource_version varchar(20) NOT NULL,
+  is_published bit(1) DEFAULT NULL COMMENT 'is published',
+  register_format varchar(255) DEFAULT NULL,
+  use_format varchar(255) DEFAULT NULL,
+  description varchar(255) NOT NULL COMMENT 'version desc',
+  create_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+  md5 varchar(100) DEFAULT NULL,
+  PRIMARY KEY (id)
+) ;
+
+DROP TABLE IF EXISTS linkis_ps_udf_shared_info CASCADE;
+CREATE TABLE IF NOT EXISTS linkis_ps_udf_shared_info (
+  id bigint(20) NOT NULL AUTO_INCREMENT,
+  udf_id bigint(20) NOT NULL,
+  user_name varchar(50) NOT NULL,
+  PRIMARY KEY (id)
+) ;
+
+
+DROP TABLE IF EXISTS linkis_ps_udf_manager CASCADE;
+CREATE TABLE IF NOT EXISTS linkis_ps_udf_manager (
+  id bigint(20) NOT NULL AUTO_INCREMENT,
+  user_name varchar(20) DEFAULT NULL,
+  PRIMARY KEY (id)
+) ;
\ No newline at end of file
diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/data.sql b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/data.sql
new file mode 100644
index 000000000..14caf065e
--- /dev/null
+++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/data.sql
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+*/
+
+DELETE FROM linkis_ps_udf_user_load;
+-- ----------------------------
+-- Default UDF test data
+-- ----------------------------
+INSERT INTO linkis_ps_udf_user_load (id,udf_id, user_name) VALUES(1,3, 'hadoop');
+
+DELETE FROM linkis_ps_udf_shared_info;
+INSERT INTO linkis_ps_udf_shared_info (id,udf_id, user_name) VALUES(1,3, 'hadoop');
+
+DELETE FROM linkis_ps_udf_manager;
+INSERT INTO linkis_ps_udf_manager (id,user_name) VALUES (1, 'hadoop');
+
+DELETE FROM linkis_ps_udf_baseinfo;
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name,is_expire,is_shared) VALUES
+	 ('hadoop','pyUdfTest',1,14,'2022-09-08 11:43:20','2022-09-08 11:43:20','IDE','all',NULL,NULL),
+	 ('hadoop','jarUdf',0,14,'2022-09-08 14:53:56','2022-09-08 14:53:56','IDE','all',NULL,NULL),
+	 ('hadoop','test',3,13,'2022-09-08 14:54:30','2022-09-08 14:54:30','IDE','all',NULL,NULL),
+	 ('hadoop','scalaUdf1',4,13,'2022-09-08 14:55:57','2022-09-08 14:55:57','IDE','all',NULL,NULL);
+
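+-- Seed the UDF/function tree: default category roots plus two personal sub-trees under hadoop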
+DELETE FROM linkis_ps_udf_tree;
+INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES
+	 (-1,'系统函数','sys','','2022-07-14 18:58:50','2022-07-14 18:58:50','udf'),
+	 (-1,'BDAP函数','bdp','','2022-07-14 18:58:50','2022-07-14 18:58:50','udf'),
+	 (-1,'共享函数','share','','2022-07-14 18:58:50','2022-07-14 18:58:50','udf'),
+	 (-1,'过期函数','expire','','2022-07-14 18:58:50','2022-07-14 18:58:50','udf'),
+	 (-1,'个人函数','hadoop','','2022-07-14 18:58:50','2022-07-14 18:58:50','udf'),
+	 (-1,'系统函数','sys','','2022-07-14 20:28:34','2022-07-14 20:28:34','function'),
+	 (-1,'BDAP函数','bdp','','2022-07-14 20:28:35','2022-07-14 20:28:35','function'),
+	 (-1,'共享函数','share','','2022-07-14 20:28:35','2022-07-14 20:28:35','function'),
+	 (-1,'过期函数','expire','','2022-07-14 20:28:35','2022-07-14 20:28:35','function'),
+	 (-1,'个人函数','hadoop','','2022-07-14 20:28:35','2022-07-14 20:28:35','function');
+INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES
+	 (-1,'个人函数','','','2022-07-29 09:46:18','2022-07-29 09:46:18','udf'),
+	 (-1,'个人函数','','','2022-07-29 09:46:19','2022-07-29 09:46:19','function'),
+	 (10,'baoyang','hadoop','testBaoYang','2022-07-29 16:30:36','2022-07-29 16:30:36','function'),
+	 (5,'pySpark','hadoop','','2022-09-08 11:43:20','2022-09-08 11:43:20','udf');
+
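+-- Version rows for each seeded UDF; udf_id 1 has two versions (v000001 and v000002)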
+DELETE FROM linkis_ps_udf_version;
+INSERT INTO linkis_ps_udf_version (udf_id,`path`,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5) VALUES
+	 (1,'file:///home/hadoop/logs/linkis/hadoop/hadoops/udf/udfPy.py','ede1985f-b594-421f-9e58-7e3d7d8603ef','v000001',0,'udf.register("pyUdfTest",test)','int pyUdfTest(api)','测试使用','2022-09-08 11:43:20','0774ebbaef1efae6e7554ad569235d2f'),
+	 (1,'file:///home/hadoop/logs/linkis/hadoop/hadoops/udf/udfPy.py','ede1985f-b594-421f-9e58-7e3d7d8603ef','v000002',0,'udf.register("pyUdfTest",test)','int pyUdfTest(api)','测试使用','2022-09-08 11:43:26','0774ebbaef1efae6e7554ad569235d2f'),
+	 (2,'file:///home/hadoop/logs/linkis/hadoop/hadoops/udf/activation.jar','0de8c361-22ce-4402-bf6f-098b4021deca','v000001',0,'create temporary function jarUdf as "test"','string jarUdf(name)','','2022-09-08 14:53:56','8ae38e87cd4f86059c0294a8fe3e0b18'),
+	 (3,'file:///home/hadoop/logs/linkis/hadoop/hadoops/udf/udfPy.py','f69e2fc3-c64a-4ff3-ba3c-ab49f5b3651d','v000001',0,NULL,'string test(name)','','2022-09-08 14:54:30','0774ebbaef1efae6e7554ad569235d2f'),
+	 (4,'file:///home/hadoop/logs/linkis/hadoop/hadoops/udf/scalaUdf.scala','fe124e5e-4fdd-4509-aa93-10c3748ba34a','v000001',0,NULL,'String scalaUdf1(Name)','','2022-09-08 14:55:57','0774ebbaef1efae6e7554ad569235d2f');
\ No newline at end of file
diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/linkis.properties b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/linkis.properties
new file mode 100644
index 000000000..1c575edc5
--- /dev/null
+++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/linkis.properties
@@ -0,0 +1,21 @@
+# 
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#wds.linkis.test.mode=true
+wds.linkis.server.version=v1
+
+#test
+wds.linkis.test.mode=true
+wds.linkis.test.user=hadoop
\ No newline at end of file
diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/scala/org/apache/linkis/udf/utils/UdfConfigurationTest.scala b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/scala/org/apache/linkis/udf/utils/UdfConfigurationTest.scala
new file mode 100644
index 000000000..78d7bc90d
--- /dev/null
+++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/scala/org/apache/linkis/udf/utils/UdfConfigurationTest.scala
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.udf.utils
+
+import org.junit.jupiter.api.{Assertions, Test}
+
+class UdfConfigurationTest {
+
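+  // Each configuration constant is expected to resolve to a non-null value.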
+  @Test
+  def constantTest(): Unit = {
+    val hivePath = UdfConfiguration.UDF_HIVE_EXEC_PATH.getValue
+    val tmpPath = UdfConfiguration.UDF_TMP_PATH.getValue
+    val sharePath = UdfConfiguration.UDF_SHARE_PATH.getValue
+    val proxyUser = UdfConfiguration.UDF_SHARE_PROXY_USER.getValue
+
+    Assertions.assertNotNull(hivePath)
+    Assertions.assertNotNull(tmpPath)
+    Assertions.assertNotNull(sharePath)
+    Assertions.assertNotNull(proxyUser)
+
+  }
+
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org