You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@linkis.apache.org by pe...@apache.org on 2023/04/25 13:25:22 UTC

[linkis] branch dev-1.4.0 updated: feat: do not kill ec when ecm restart part-1 (#4184) (#4452)

This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.4.0
in repository https://gitbox.apache.org/repos/asf/linkis.git


The following commit(s) were added to refs/heads/dev-1.4.0 by this push:
     new d12a32624 feat: do not kill ec when ecm restart  part-1 (#4184) (#4452)
d12a32624 is described below

commit d12a32624d39b13ce81ea486d06844accdce2659
Author: CharlieYan <16...@users.noreply.github.com>
AuthorDate: Tue Apr 25 21:25:12 2023 +0800

    feat: do not kill ec when ecm restart  part-1 (#4184) (#4452)
    
    * feat: do not kill ec when ecm restart(#4192)
---
 .../protocol/callback/LogCallbackProtocol.scala    |    3 -
 .../common/conf/GovernanceCommonConf.scala         |    8 +-
 .../governance/common/utils/ECPathUtils.scala      |   18 +
 .../governance/common/utils/GovernanceUtils.scala  |   20 +
 .../linkis/ecm/core/report/NodeHealthReport.scala  |    4 +-
 .../linkis/ecm/server/listener/ECMReadyEvent.scala |   14 +-
 .../server/operator/EngineConnLogOperator.scala    |   58 +-
 .../ecm/server/report/DefaultECMHealthReport.scala |    4 +-
 .../ecm/server/service/EngineConnListService.scala |   47 -
 .../service/EngineConnStatusCallbackService.scala  |   26 -
 .../ecm/server/service/LogCallbackService.scala    |   26 -
 .../impl/AbstractEngineConnLaunchService.scala     |    8 +-
 .../service/impl/DefaultECMHealthService.scala     |    9 +-
 .../service/impl/DefaultEngineConnKillService.java |   99 +-
 .../impl/DefaultEngineConnListService.scala        |  192 ---
 .../impl/DefaultEngineConnPidCallbackService.scala |   39 -
 .../DefaultEngineConnStatusCallbackService.scala   |   67 -
 .../service/impl/DefaultYarnCallbackService.scala  |   40 -
 .../impl/ProcessEngineConnLaunchService.scala      |    6 +-
 .../ecm/server/spring/ECMSpringConfiguration.scala |   28 -
 .../access/ECTaskEntranceInfoAccessHelper.scala    |   96 --
 .../engineconn/launch/EngineConnServer.scala       |    1 +
 .../callback/hook/CallbackEngineConnHook.scala     |   10 +-
 .../service/EngineConnAfterStartCallback.scala     |    5 +-
 .../callback/service/EngineConnCallback.scala      |   20 +-
 .../callback/service/EngineConnPidCallback.scala   |    4 +-
 .../manager/{am => }/LinkisManagerApplication.java |    2 +-
 .../event/message/EngineConnPidCallbackEvent.java} |   18 +-
 .../linkis/manager/am/restful/EMRestfulApi.java    |   31 +-
 .../am/service/EngineConnPidCallbackService.java}  |   11 +-
 .../service/EngineConnStatusCallbackService.java}  |    9 +-
 .../impl/DefaultEngineConnPidCallbackService.java  |   61 +
 .../DefaultEngineConnStatusCallbackService.java    |  146 ++
 .../am/manager/DefaultEngineNodeManager.scala      |   12 +-
 .../am/service/em/DefaultEMEngineService.scala     |   63 +
 .../DefaultEngineConnStatusCallbackService.scala   |  104 --
 .../service/engine/DefaultEngineStopService.scala  |    6 +-
 .../am/service/engine/EngineStopService.scala      |    5 -
 .../linkis/manager/rm/restful/RMMonitorRest.scala  |  448 ++++---
 .../rm/service/impl/ResourceLogService.scala       |   26 +-
 .../manager/common/entity/node/AMEMNode.java       |   11 +
 .../manager/common/entity/node/AMEngineNode.java   |   11 +
 .../manager/common/entity/node/InfoRMNode.java     |   11 +
 .../linkis/manager/common/entity/node/Node.java    |    4 +
 .../common/entity/persistence/PersistenceNode.java |   10 +
 .../entity/persistence/PersistenceNodeEntity.java  |   11 +
 .../entity/persistence/PersistenceNodeMetrics.java |    5 +-
 .../common/protocol/engine/EngineStopRequest.java  |   51 +-
 .../linkis/manager/dao/NodeManagerMapper.java      |    2 +-
 .../persistence/NodeMetricManagerPersistence.java  |    2 +-
 .../impl/DefaultNodeManagerPersistence.java        |   12 +-
 .../impl/DefaultNodeMetricManagerPersistence.java  |    2 +-
 .../resources/mapper/common/NodeManagerMapper.xml  |   22 +-
 .../linkis/manager/dao/NodeManagerMapperTest.java  |    2 +-
 .../linkis/templates/configmap-init-sql.yaml       | 1407 ++++++++++++++++++++
 linkis-dist/package/db/linkis_ddl.sql              |    1 +
 linkis-dist/package/db/module/linkis_manager.sql   |    1 +
 .../package/sbin/ext/linkis-cg-linkismanager       |    2 +-
 58 files changed, 2261 insertions(+), 1100 deletions(-)

diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/callback/LogCallbackProtocol.scala b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/callback/LogCallbackProtocol.scala
index 91b0e0172..0109472a9 100644
--- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/callback/LogCallbackProtocol.scala
+++ b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/callback/LogCallbackProtocol.scala
@@ -19,9 +19,6 @@ package org.apache.linkis.protocol.callback
 
 import org.apache.linkis.protocol.message.RequestProtocol
 
-// TODO: log type
-case class LogCallbackProtocol(nodeId: String, logs: Array[String]) extends RequestProtocol
-
 case class YarnAPPIdCallbackProtocol(nodeId: String, applicationId: String) extends RequestProtocol
 
 case class YarnInfoCallbackProtocol(nodeId: String, uri: String) extends RequestProtocol
diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala
index 7490f053b..7d34e9d40 100644
--- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala
+++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala
@@ -43,10 +43,16 @@ object GovernanceCommonConf {
   val ENGINE_CONN_MANAGER_SPRING_NAME =
     CommonVars("wds.linkis.engineconn.manager.name", "linkis-cg-engineconnmanager")
 
+  val ENGINE_APPLICATION_MANAGER_SPRING_NAME =
+    CommonVars("wds.linkis.application.manager.name", "linkis-cg-linkismanager")
+
   val ENGINE_CONN_PORT_RANGE = CommonVars("wds.linkis.engineconn.port.range", "-")
 
   val MANAGER_SERVICE_NAME =
-    CommonVars("wds.linkis.engineconn.manager.name", "linkis-cg-linkismanager")
+    CommonVars(
+      "wds.linkis.engineconn.manager.name",
+      GovernanceCommonConf.ENGINE_APPLICATION_MANAGER_SPRING_NAME.getValue
+    )
 
   val ENTRANCE_SERVICE_NAME = CommonVars("wds.linkis.entrance.name", "linkis-cg-entrance")
 
diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala
index a27de9726..747ef4e92 100644
--- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala
+++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala
@@ -17,6 +17,8 @@
 
 package org.apache.linkis.governance.common.utils
 
+import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel}
+
 import org.apache.commons.lang3.StringUtils
 import org.apache.commons.lang3.time.DateFormatUtils
 
@@ -37,4 +39,20 @@ object ECPathUtils {
 
   }
 
+  def getECLogDirSuffix(
+      engineTypeLabel: EngineTypeLabel,
+      userCreatorLabel: UserCreatorLabel,
+      ticketId: String
+  ): String = {
+    if (null == engineTypeLabel || null == userCreatorLabel) {
+      return ""
+    }
+    val suffix = ECPathUtils.getECWOrkDirPathSuffix(
+      userCreatorLabel.getUser,
+      ticketId,
+      engineTypeLabel.getEngineType
+    )
+    suffix + File.separator + "logs"
+  }
+
 }
diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala
index 43fd598f7..301e295ef 100644
--- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala
+++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala
@@ -25,6 +25,7 @@ import org.apache.commons.lang3.StringUtils
 
 import java.io.File
 import java.util
+import java.util.{ArrayList, List}
 
 object GovernanceUtils extends Logging {
 
@@ -72,4 +73,23 @@ object GovernanceUtils extends Logging {
     }
   }
 
+  /**
+   * find process id by port number
+   * @param processPort
+   * @return
+   */
+  def findProcessIdentifier(processPort: String) = {
+    val findCmd = "sudo lsof -t -i:" + processPort
+    val cmdList = new util.ArrayList[String]
+    cmdList.add("bash")
+    cmdList.add("-c")
+    cmdList.add(findCmd)
+    try Utils.exec(cmdList.toArray(new Array[String](0)), 5000L)
+    catch {
+      case e: Exception =>
+        logger.warn("Method findPid failed, " + e.getMessage)
+        null
+    }
+  }
+
 }
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/report/NodeHealthReport.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/report/NodeHealthReport.scala
index 160025ed5..395c9258b 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/report/NodeHealthReport.scala
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/report/NodeHealthReport.scala
@@ -34,9 +34,9 @@ trait NodeHealthReport {
 
   def getNodeMsg: String
 
-  def getUsedResource: Resource
+//  def getUsedResource: Resource
 
-  def setUsedResource(resource: Resource): Unit
+//  def setUsedResource(resource: Resource): Unit
 
   def getTotalResource: Resource
 
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/listener/ECMReadyEvent.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/listener/ECMReadyEvent.scala
index 6bcb6c3b8..97243d3cc 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/listener/ECMReadyEvent.scala
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/listener/ECMReadyEvent.scala
@@ -17,7 +17,6 @@
 
 package org.apache.linkis.ecm.server.listener
 
-import org.apache.linkis.ecm.core.engineconn.EngineConn
 import org.apache.linkis.ecm.core.listener.ECMEvent
 import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid
 import org.apache.linkis.manager.common.entity.enumeration.NodeStatus
@@ -27,14 +26,5 @@ case class ECMReadyEvent(params: Array[String]) extends ECMEvent
 
 case class ECMClosedEvent() extends ECMEvent
 
-case class EngineConnStatusChageEvent(from: NodeStatus, to: NodeStatus) extends ECMEvent
-
-case class YarnAppIdCallbackEvent(protocol: YarnAPPIdCallbackProtocol) extends ECMEvent
-
-case class YarnInfoCallbackEvent(protocol: YarnInfoCallbackProtocol) extends ECMEvent
-
-case class EngineConnPidCallbackEvent(protocol: ResponseEngineConnPid) extends ECMEvent
-
-case class EngineConnAddEvent(conn: EngineConn) extends ECMEvent
-
-case class EngineConnStatusChangeEvent(tickedId: String, updateStatus: NodeStatus) extends ECMEvent
+case class EngineConnLaunchStatusChangeEvent(tickedId: String, updateStatus: NodeStatus)
+    extends ECMEvent
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala
index bc856ba68..40de16849 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala
@@ -17,15 +17,13 @@
 
 package org.apache.linkis.ecm.server.operator
 
-import org.apache.linkis.DataWorkCloudApplication
 import org.apache.linkis.common.conf.CommonVars
 import org.apache.linkis.common.utils.{Logging, Utils}
 import org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary._
 import org.apache.linkis.ecm.server.conf.ECMConfiguration
 import org.apache.linkis.ecm.server.exception.ECMErrorException
-import org.apache.linkis.ecm.server.service.{EngineConnListService, LocalDirsHandleService}
+import org.apache.linkis.ecm.server.service.LocalDirsHandleService
 import org.apache.linkis.manager.common.operator.Operator
-import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest
 
 import org.apache.commons.io.IOUtils
 import org.apache.commons.io.input.ReversedLinesFileReader
@@ -37,12 +35,10 @@ import java.text.MessageFormat
 import java.util
 import java.util.Collections
 
-import scala.collection.JavaConverters.asScalaBufferConverter
 import scala.util.matching.Regex
 
 class EngineConnLogOperator extends Operator with Logging {
 
-  private var engineConnListService: EngineConnListService = _
   private var localDirsHandleService: LocalDirsHandleService = _
 
   override def getNames: Array[String] = Array(EngineConnLogOperator.OPERATOR_NAME)
@@ -158,56 +154,10 @@ class EngineConnLogOperator extends Operator with Logging {
   protected def getEngineConnInfo(implicit
       parameters: Map[String, Any]
   ): (String, String, String) = {
-    if (engineConnListService == null) {
-      engineConnListService =
-        DataWorkCloudApplication.getApplicationContext.getBean(classOf[EngineConnListService])
-      localDirsHandleService =
-        DataWorkCloudApplication.getApplicationContext.getBean(classOf[LocalDirsHandleService])
-    }
     val logDIrSuffix = getAs("logDirSuffix", "")
-    val (engineConnLogDir, engineConnInstance, ticketId) =
-      if (StringUtils.isNotBlank(logDIrSuffix)) {
-        val ecLogPath = ECMConfiguration.ENGINECONN_ROOT_DIR + File.separator + logDIrSuffix
-        val ticketId = getAs("ticketId", "")
-        (ecLogPath, "", ticketId)
-      } else {
-        val engineConnInstance = getAs(
-          ECMOperateRequest.ENGINE_CONN_INSTANCE_KEY,
-          getAs[String]("engineConnInstance", null)
-        )
-        Option(engineConnInstance)
-          .flatMap { instance =>
-            engineConnListService.getEngineConns.asScala.find(
-              _.getServiceInstance.getInstance == instance
-            )
-          }
-          .map(engineConn =>
-            (
-              engineConn.getEngineConnManagerEnv.engineConnLogDirs,
-              engineConnInstance,
-              engineConn.getTickedId
-            )
-          )
-          .getOrElse {
-            val ticketId = getAs("ticketId", "")
-            if (StringUtils.isBlank(ticketId)) {
-              throw new ECMErrorException(
-                BOTH_NOT_EXISTS.getErrorCode,
-                s"the parameters of ${ECMOperateRequest.ENGINE_CONN_INSTANCE_KEY}, engineConnInstance and ticketId are both not exists."
-              )
-            }
-            val logDir = engineConnListService
-              .getEngineConn(ticketId)
-              .map(_.getEngineConnManagerEnv.engineConnLogDirs)
-              .getOrElse {
-                val creator = getAsThrow[String]("creator")
-                val engineConnType = getAsThrow[String]("engineConnType")
-                localDirsHandleService.getEngineConnLogDir(creator, ticketId, engineConnType)
-              }
-            (logDir, engineConnInstance, ticketId)
-          }
-      }
-    (ticketId, engineConnInstance, engineConnLogDir)
+    val ecLogPath = ECMConfiguration.ENGINECONN_ROOT_DIR + File.separator + logDIrSuffix
+    val ticketId = getAs("ticketId", "")
+    (ticketId, "", ecLogPath)
   }
 
   private def includeLine(
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/report/DefaultECMHealthReport.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/report/DefaultECMHealthReport.scala
index 6fdf8b348..150d0be6b 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/report/DefaultECMHealthReport.scala
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/report/DefaultECMHealthReport.scala
@@ -69,9 +69,9 @@ class DefaultECMHealthReport extends ECMHealthReport {
 
   override def getNodeMsg: String = nodeMsg
 
-  override def getUsedResource: Resource = usedResource
+//  override def getUsedResource: Resource = usedResource
 
-  override def setUsedResource(usedResource: Resource): Unit = this.usedResource = usedResource
+//  override def setUsedResource(usedResource: Resource): Unit = this.usedResource = usedResource
 
   override def getTotalResource: Resource = totalResource
 
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnListService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnListService.scala
deleted file mode 100644
index d36d35cce..000000000
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnListService.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.ecm.server.service
-
-import org.apache.linkis.ecm.core.engineconn.EngineConn
-import org.apache.linkis.ecm.core.launch.EngineConnLaunchRunner
-import org.apache.linkis.manager.common.entity.resource.Resource
-
-import java.util
-
-/**
- * The enginelistservice interface manages the interface started by the engine The most important
- * submit method is to put the thread that starts the engine into the thread pool to start
- * EngineListService接口管理引擎启动的接口 最重要的submit方法是将启动引擎的线程放入到线程池中进行启动
- */
-trait EngineConnListService {
-
-  def init(): Unit
-
-  def getEngineConn(engineConnId: String): Option[EngineConn]
-
-  def getEngineConns: util.List[EngineConn]
-
-  def addEngineConn(engineConn: EngineConn): Unit
-
-  def killEngineConn(engineConnId: String): Unit
-
-  def getUsedResources: Resource
-
-  def submit(runner: EngineConnLaunchRunner): Option[EngineConn]
-
-}
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnStatusCallbackService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnStatusCallbackService.scala
deleted file mode 100644
index 12ccc088b..000000000
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnStatusCallbackService.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.ecm.server.service
-
-import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallback
-
-trait EngineConnStatusCallbackService {
-
-  def dealEngineConnStatusCallback(protocol: EngineConnStatusCallback): Unit
-
-}
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/LogCallbackService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/LogCallbackService.scala
deleted file mode 100644
index 873b09819..000000000
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/LogCallbackService.scala
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.ecm.server.service
-
-import org.apache.linkis.protocol.callback.LogCallbackProtocol
-
-trait LogCallbackService {
-
-  def dealLog(protocol: LogCallbackProtocol): Unit
-
-}
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala
index 6ac10d1e1..c32467917 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala
@@ -25,7 +25,7 @@ import org.apache.linkis.ecm.server.LinkisECMApplication
 import org.apache.linkis.ecm.server.conf.ECMConfiguration._
 import org.apache.linkis.ecm.server.engineConn.DefaultEngineConn
 import org.apache.linkis.ecm.server.hook.ECMHook
-import org.apache.linkis.ecm.server.listener.{EngineConnAddEvent, EngineConnStatusChangeEvent}
+import org.apache.linkis.ecm.server.listener.EngineConnLaunchStatusChangeEvent
 import org.apache.linkis.ecm.server.service.{EngineConnLaunchService, ResourceLocalizationService}
 import org.apache.linkis.ecm.server.util.ECMUtils
 import org.apache.linkis.governance.common.conf.GovernanceCommonConf
@@ -79,8 +79,6 @@ abstract class AbstractEngineConnLaunchService extends EngineConnLaunchService w
     conn.setEngineConnManagerEnv(launch.getEngineConnManagerEnv())
     // 2.资源本地化,并且设置ecm的env环境信息
     getResourceLocalizationServie.handleInitEngineConnResources(request, conn)
-    // 3.添加到list
-    LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll(EngineConnAddEvent(conn))
     // 4.run
     Utils.tryCatch {
       beforeLaunch(request, conn, duration)
@@ -118,7 +116,7 @@ abstract class AbstractEngineConnLaunchService extends EngineConnLaunchService w
             ): _*
           )
           LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll(
-            EngineConnStatusChangeEvent(conn.getTickedId, Failed)
+            EngineConnLaunchStatusChangeEvent(conn.getTickedId, Failed)
           )
         case Success(_) =>
           logger.info(
@@ -158,7 +156,7 @@ abstract class AbstractEngineConnLaunchService extends EngineConnLaunchService w
           )
         )
       LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll(
-        EngineConnStatusChangeEvent(conn.getTickedId, Failed)
+        EngineConnLaunchStatusChangeEvent(conn.getTickedId, Failed)
       )
       throw t
     }
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMHealthService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMHealthService.scala
index 132749cbe..be879f687 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMHealthService.scala
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMHealthService.scala
@@ -25,7 +25,7 @@ import org.apache.linkis.ecm.server.conf.ECMConfiguration
 import org.apache.linkis.ecm.server.conf.ECMConfiguration._
 import org.apache.linkis.ecm.server.listener.{ECMClosedEvent, ECMReadyEvent}
 import org.apache.linkis.ecm.server.report.DefaultECMHealthReport
-import org.apache.linkis.ecm.server.service.{ECMHealthService, EngineConnListService}
+import org.apache.linkis.ecm.server.service.ECMHealthService
 import org.apache.linkis.ecm.server.util.ECMUtils
 import org.apache.linkis.manager.common.entity.enumeration.{NodeHealthy, NodeStatus}
 import org.apache.linkis.manager.common.entity.metrics.{NodeHealthyInfo, NodeOverLoadInfo}
@@ -38,8 +38,6 @@ import org.apache.linkis.manager.common.protocol.node.{
 import org.apache.linkis.rpc.Sender
 import org.apache.linkis.rpc.message.annotation.Receiver
 
-import org.springframework.beans.factory.annotation.Autowired
-
 import java.util.Date
 import java.util.concurrent.TimeUnit
 
@@ -79,9 +77,6 @@ class DefaultECMHealthService extends ECMHealthService with ECMEventListener {
     TimeUnit.SECONDS
   )
 
-  @Autowired
-  private var engineConnListService: EngineConnListService = _
-
   override def getLastEMHealthReport: ECMHealthReport = {
     val report = new DefaultECMHealthReport
     report.setNodeId(LinkisECMApplication.getECMServiceInstance.toString)
@@ -89,7 +84,6 @@ class DefaultECMHealthService extends ECMHealthService with ECMEventListener {
     // todo report right metrics
     report.setTotalResource(maxResource)
     report.setProtectedResource(minResource)
-    report.setUsedResource(engineConnListService.getUsedResources)
     report.setReportTime(new Date().getTime)
     report.setRunningEngineConns(
       LinkisECMApplication.getContext.getECMMetrics.getRunningEngineConns
@@ -117,7 +111,6 @@ class DefaultECMHealthService extends ECMHealthService with ECMEventListener {
     // todo report latest engineconn metrics
     resource.setMaxResource(maxResource)
     resource.setMinResource(minResource)
-    resource.setUsedResource(report.getUsedResource)
     heartbeat.setNodeResource(resource)
     heartbeat.setHeartBeatMsg("")
     val nodeHealthyInfo = new NodeHealthyInfo
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java
index 3c335abf4..bcfe36f4c 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java
@@ -20,17 +20,13 @@ package org.apache.linkis.ecm.server.service.impl;
 import org.apache.commons.io.IOUtils;
 import org.apache.linkis.common.ServiceInstance;
 import org.apache.linkis.common.utils.Utils;
-import org.apache.linkis.ecm.core.engineconn.EngineConn;
 import org.apache.linkis.ecm.server.conf.ECMConfiguration;
 import org.apache.linkis.ecm.server.service.EngineConnKillService;
-import org.apache.linkis.ecm.server.service.EngineConnListService;
 import org.apache.linkis.engineconn.common.conf.EngineConnConf;
 import org.apache.linkis.governance.common.utils.GovernanceUtils;
 import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest;
 import org.apache.linkis.manager.common.protocol.engine.EngineStopResponse;
 import org.apache.linkis.manager.common.protocol.engine.EngineSuicideRequest;
-import org.apache.linkis.manager.label.entity.Label;
-import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel;
 import org.apache.linkis.rpc.message.annotation.Receiver;
 import org.apache.linkis.rpc.Sender;
 import org.apache.commons.lang3.StringUtils;
@@ -44,7 +40,6 @@ import java.io.FileReader;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Optional;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -53,34 +48,37 @@ public class DefaultEngineConnKillService implements EngineConnKillService {
 
     private static final Logger logger = LoggerFactory.getLogger(DefaultEngineConnKillService.class);
 
-    private EngineConnListService engineConnListService;
-
-    public void setEngineConnListService(EngineConnListService engineConnListService) {
-        this.engineConnListService = engineConnListService;
-    }
-
     private static final ThreadPoolExecutor ecYarnAppKillService = Utils.newCachedThreadPool(10, "ECM-Kill-EC-Yarn-App", true);
 
     @Override
     @Receiver
     public EngineStopResponse dealEngineConnStop(EngineStopRequest engineStopRequest) {
         logger.info("received EngineStopRequest " +  engineStopRequest);
-        EngineConn engineConn = getEngineConnByServiceInstance(engineStopRequest.getServiceInstance());
+        String pid = null;
+        if("process".equals(engineStopRequest.getIdentifierType()) && StringUtils.isNotBlank(engineStopRequest.getIdentifier())){
+            pid = engineStopRequest.getIdentifier();
+        }else {
+            String processPort = engineStopRequest.getServiceInstance().getInstance().split(":")[1];
+            pid = GovernanceUtils.findProcessIdentifier(processPort);
+        }
+
+        logger.info("dealEngineConnStop return pid: {}", pid);
         EngineStopResponse response = new EngineStopResponse();
-        if (null != engineConn) {
-            if(!killEngineConnByPid(engineConn)) {
+        if (StringUtils.isNotBlank(pid)) {
+            if(!killEngineConnByPid(pid, engineStopRequest.getServiceInstance())) {
                 response.setStopStatus(false);
-                response.setMsg("Kill engine " + engineConn.getServiceInstance().toString() + " failed.");
+                response.setMsg("Kill engine " + engineStopRequest.getServiceInstance().toString() + " failed.");
             } else {
                 response.setStopStatus(true);
-                response.setMsg("Kill engine " + engineConn.getServiceInstance().toString() + " succeed.");
+                response.setMsg("Kill engine " + engineStopRequest.getServiceInstance().toString() + " succeed.");
             }
-            killYarnAppIdOfOneEc(engineConn);
+            killYarnAppIdOfOneEc(engineStopRequest.getLogDirSuffix(), engineStopRequest.getServiceInstance(),
+                    engineStopRequest.getEngineType());
         } else {
-            logger.warn("Cannot find engineconn : " + engineStopRequest.getServiceInstance().toString() + " in this engineConnManager engineConn list, cannot kill.");
-            response.setStopStatus(true);
-            response.setMsg("EngineConn " + engineStopRequest.getServiceInstance().toString() + " was not found in this engineConnManager.");
+            logger.warn("Cannot find engineConn pid, try kill with rpc");
+            response.setStopStatus(false);
         }
+
         if (!response.getStopStatus()) {
             EngineSuicideRequest request = new EngineSuicideRequest(engineStopRequest.getServiceInstance(), engineStopRequest.getUser());
             try {
@@ -94,21 +92,21 @@ public class DefaultEngineConnKillService implements EngineConnKillService {
         return response;
     }
 
-    public void killYarnAppIdOfOneEc(EngineConn engineConn) {
-        String engineConnInstance = engineConn.getServiceInstance().toString();
-        logger.info("try to kill yarn app ids in the engine of ({}).", engineConnInstance);
-        String engineLogDir = engineConn.getEngineConnManagerEnv().engineConnLogDirs();
+    public void killYarnAppIdOfOneEc(String logDirSuffix, ServiceInstance serviceInstance, String engineType) {
+        String engineConnInstance = serviceInstance.toString();
+        String engineLogDir = ECMConfiguration.ENGINECONN_ROOT_DIR() + File.separator + logDirSuffix;
+        logger.info("try to kill yarn app ids in the engine of: [{}] engineLogDir: [{}]", engineConnInstance, engineLogDir);
+
         final String errEngineLogPath = engineLogDir.concat(File.separator).concat("yarnApp");
-        logger.info("try to parse the yarn app id from the engine err log file path: {}", errEngineLogPath);
+        logger.info("try to parse the yarn app id from the engine err log file path: [{}]", errEngineLogPath);
         File file = new File(errEngineLogPath);
-        if (file.exists())
-        {
+        if (file.exists()) {
             ecYarnAppKillService.execute(() -> {
                 BufferedReader in = null;
                 try {
                     in = new BufferedReader(new FileReader(errEngineLogPath));
                     String line;
-                    String regex = getYarnAppRegexByEngineType(engineConn);
+                    String regex = getYarnAppRegexByEngineType(engineType);
                     if (StringUtils.isBlank(regex)) {
                         return;
                     }
@@ -137,20 +135,10 @@ public class DefaultEngineConnKillService implements EngineConnKillService {
                     IOUtils.closeQuietly(in);
                 }
             });
-    }
+        }
     }
 
-    private String getYarnAppRegexByEngineType(EngineConn engineConn) {
-        List<Label<?>> labels = engineConn.getLabels();
-        String engineType = "";
-        if (labels != null && !labels.isEmpty()) {
-            Optional<EngineTypeLabel> labelOptional = labels.stream().filter(label -> label instanceof EngineTypeLabel)
-                    .map(label -> (EngineTypeLabel) label).findFirst();
-            if (labelOptional.isPresent()) {
-                EngineTypeLabel engineTypeLabel = labelOptional.get();
-                engineType = engineTypeLabel.getEngineType();
-            }
-        }
+    private String getYarnAppRegexByEngineType(String engineType) {
         if (StringUtils.isBlank(engineType)) {
             return "";
         }
@@ -172,34 +160,17 @@ public class DefaultEngineConnKillService implements EngineConnKillService {
         return regex;
     }
 
-    private EngineConn getEngineConnByServiceInstance(ServiceInstance serviceInstance) {
-        if (null == serviceInstance) {
-            return null;
-        }
-        List<EngineConn> engineConnList = engineConnListService.getEngineConns();
-        for (EngineConn engineConn : engineConnList) {
-            if (null != engineConn && serviceInstance.equals(engineConn.getServiceInstance())) {
-                return engineConn;
-            }
-        }
-        return null;
-    }
-
-    private boolean killEngineConnByPid(EngineConn engineConn) {
-        logger.info("try to kill {} toString with pid({}).", engineConn.getServiceInstance().toString(), engineConn.getPid());
-        if (StringUtils.isNotBlank(engineConn.getPid())) {
+    private boolean killEngineConnByPid(String processId, ServiceInstance serviceInstance) {
+        logger.info("try to kill {} toString with pid({}).", serviceInstance.toString(), processId);
+        if (StringUtils.isNotBlank(processId)) {
             if (ECMConfiguration.ECM_PROCESS_SCRIPT_KILL()) {
-                GovernanceUtils.killProcess(engineConn.getPid(), engineConn.getServiceInstance().toString(), true);
-            } else {
-                killProcessByKillCmd(engineConn.getPid(), engineConn.getServiceInstance().toString());
-            }
-            if (isProcessAlive(engineConn.getPid())) {
-                return false;
+                GovernanceUtils.killProcess(processId, serviceInstance.toString(), true);
             } else {
-                return true;
+                killProcessByKillCmd(processId, serviceInstance.toString());
             }
+            return !isProcessAlive(processId);
         } else {
-            logger.warn("cannot kill {} with empty pid.", engineConn.getServiceInstance().toString());
+            logger.warn("cannot kill {} with empty pid.", serviceInstance.toString());
             return false;
         }
     }
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnListService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnListService.scala
deleted file mode 100644
index 4b9a59b4d..000000000
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnListService.scala
+++ /dev/null
@@ -1,192 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.ecm.server.service.impl
-
-import org.apache.linkis.DataWorkCloudApplication
-import org.apache.linkis.common.utils.{Logging, Utils}
-import org.apache.linkis.ecm.core.engineconn.{EngineConn, YarnEngineConn}
-import org.apache.linkis.ecm.core.launch.EngineConnLaunchRunner
-import org.apache.linkis.ecm.core.listener.{ECMEvent, ECMEventListener}
-import org.apache.linkis.ecm.server.LinkisECMApplication
-import org.apache.linkis.ecm.server.converter.ECMEngineConverter
-import org.apache.linkis.ecm.server.listener._
-import org.apache.linkis.ecm.server.service.EngineConnListService
-import org.apache.linkis.manager.common.entity.enumeration.NodeStatus
-import org.apache.linkis.manager.common.entity.resource.{Resource, ResourceType}
-import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest
-
-import org.apache.commons.lang3.StringUtils
-
-import java.util
-import java.util.concurrent.ConcurrentHashMap
-
-import scala.collection.JavaConverters._
-
-import com.google.common.collect.Interners
-
-class DefaultEngineConnListService
-    extends EngineConnListService
-    with ECMEventListener
-    with Logging {
-
-  /**
-   * key:tickedId,value :engineConn
-   */
-  private val engineConnMap = new ConcurrentHashMap[String, EngineConn]
-
-  private var engineConnKillService: DefaultEngineConnKillService = _
-
-  val lock = Interners.newWeakInterner[String]
-
-  override def init(): Unit = {}
-
-  override def getEngineConn(engineConnId: String): Option[EngineConn] = Option(
-    engineConnMap.get(engineConnId)
-  )
-
-  override def getEngineConns: util.List[EngineConn] =
-    new util.ArrayList[EngineConn](engineConnMap.values())
-
-  override def addEngineConn(engineConn: EngineConn): Unit = {
-    logger.info(s"add engineConn ${engineConn.getServiceInstance} to engineConnMap")
-    if (LinkisECMApplication.isReady) {
-      engineConnMap.put(engineConn.getTickedId, engineConn)
-    }
-  }
-
-  override def killEngineConn(engineConnId: String): Unit = {
-    var conn = engineConnMap.get(engineConnId)
-    if (conn != null) engineConnId.intern().synchronized {
-      conn = engineConnMap.get(engineConnId)
-      if (conn != null) {
-        Utils.tryAndWarn {
-          if (NodeStatus.Failed == conn.getStatus && StringUtils.isNotBlank(conn.getPid)) {
-            killECByEngineConnKillService(conn)
-          } else {
-            getEngineConnKillService().killYarnAppIdOfOneEc(conn)
-          }
-          conn.close()
-        }
-        engineConnMap.remove(engineConnId)
-        logger.info(s"engineconn ${conn.getServiceInstance} was closed.")
-      }
-    }
-  }
-
-  override def getUsedResources: Resource = engineConnMap
-    .values()
-    .asScala
-    .map(_.getResource.getMinResource)
-    .fold(Resource.initResource(ResourceType.Default))(_ + _)
-
-  override def submit(runner: EngineConnLaunchRunner): Option[EngineConn] = {
-    None
-  }
-
-  def updateYarnAppId(event: YarnAppIdCallbackEvent): Unit = {
-    updateYarnEngineConn(
-      x => x.setApplicationId(event.protocol.applicationId),
-      event.protocol.nodeId
-    )
-  }
-
-  def updateYarnEngineConn(implicit
-      updateFunction: YarnEngineConn => Unit,
-      nodeId: String
-  ): Unit = {
-    lock.intern(nodeId) synchronized {
-      engineConnMap.get(nodeId) match {
-        case e: YarnEngineConn => updateFunction(e)
-        case e: EngineConn =>
-          engineConnMap.put(nodeId, ECMEngineConverter.engineConn2YarnEngineConn(e))
-      }
-    }
-  }
-
-  def updateEngineConn(updateFunction: EngineConn => Unit, nodeId: String): Unit = {
-    lock.intern(nodeId) synchronized {
-      engineConnMap.get(nodeId) match {
-        case e: EngineConn => updateFunction(e)
-        case _ =>
-      }
-    }
-  }
-
-  def updateYarnInfo(event: YarnInfoCallbackEvent): Unit = {
-    updateYarnEngineConn(x => x.setApplicationURL(event.protocol.uri), event.protocol.nodeId)
-  }
-
-  def updatePid(event: EngineConnPidCallbackEvent): Unit = {
-    updateEngineConn(
-      x => {
-        x.setPid(event.protocol.pid)
-        x.setServiceInstance(event.protocol.serviceInstance)
-      },
-      event.protocol.ticketId
-    )
-  }
-
-  def updateEngineConnStatus(tickedId: String, updateStatus: NodeStatus): Unit = {
-    updateEngineConn(x => x.setStatus(updateStatus), tickedId)
-    if (NodeStatus.isCompleted(updateStatus)) {
-      logger.info(s" from engineConnMap to remove engineconn ticketId ${tickedId}")
-      killEngineConn(tickedId)
-    }
-  }
-
-  override def onEvent(event: ECMEvent): Unit = {
-    logger.info(s"Deal event $event")
-    event match {
-      case event: ECMClosedEvent => shutdownEngineConns(event)
-      case event: YarnAppIdCallbackEvent => updateYarnAppId(event)
-      case event: YarnInfoCallbackEvent => updateYarnInfo(event)
-      case event: EngineConnPidCallbackEvent => updatePid(event)
-      case EngineConnAddEvent(engineConn) => addEngineConn(engineConn)
-      case EngineConnStatusChangeEvent(tickedId, updateStatus) =>
-        updateEngineConnStatus(tickedId, updateStatus)
-      case _ =>
-    }
-  }
-
-  private def getEngineConnKillService(): DefaultEngineConnKillService = {
-    if (engineConnKillService == null) {
-      val applicationContext = DataWorkCloudApplication.getApplicationContext
-      engineConnKillService = applicationContext.getBean(classOf[DefaultEngineConnKillService])
-    }
-    engineConnKillService
-  }
-
-  private def shutdownEngineConns(event: ECMClosedEvent): Unit = {
-    logger.info("start to kill all engines belonging the ecm")
-    engineConnMap
-      .values()
-      .asScala
-      .foreach(engineconn => {
-        killECByEngineConnKillService(engineconn)
-      })
-    logger.info("Done! success to kill all engines belonging the ecm")
-  }
-
-  private def killECByEngineConnKillService(engineconn: EngineConn): Unit = {
-    logger.info(s"start to kill ec by engineConnKillService ${engineconn.getServiceInstance}")
-    val engineStopRequest = new EngineStopRequest()
-    engineStopRequest.setServiceInstance(engineconn.getServiceInstance)
-    getEngineConnKillService().dealEngineConnStop(engineStopRequest)
-  }
-
-}
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnPidCallbackService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnPidCallbackService.scala
deleted file mode 100644
index 6fb2d4700..000000000
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnPidCallbackService.scala
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.ecm.server.service.impl
-
-import org.apache.linkis.common.utils.Logging
-import org.apache.linkis.ecm.server.LinkisECMApplication
-import org.apache.linkis.ecm.server.listener.EngineConnPidCallbackEvent
-import org.apache.linkis.ecm.server.service.EngineConnPidCallbackService
-import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid
-import org.apache.linkis.rpc.message.annotation.Receiver
-
-class DefaultEngineConnPidCallbackService extends EngineConnPidCallbackService with Logging {
-
-  @Receiver
-  override def dealPid(protocol: ResponseEngineConnPid): Unit = {
-    // 1.设置pid
-    // 2.设置serviceInstance
-    // 3.状态为running
-    LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll(
-      EngineConnPidCallbackEvent(protocol)
-    )
-  }
-
-}
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnStatusCallbackService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnStatusCallbackService.scala
deleted file mode 100644
index af627afec..000000000
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnStatusCallbackService.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.ecm.server.service.impl
-
-import org.apache.linkis.common.utils.Logging
-import org.apache.linkis.ecm.server.LinkisECMApplication
-import org.apache.linkis.ecm.server.conf.ECMConfiguration.MANAGER_SERVICE_NAME
-import org.apache.linkis.ecm.server.listener.EngineConnStatusChangeEvent
-import org.apache.linkis.ecm.server.service.EngineConnStatusCallbackService
-import org.apache.linkis.manager.common.entity.enumeration.NodeStatus
-import org.apache.linkis.manager.common.entity.enumeration.NodeStatus.{Failed, Running}
-import org.apache.linkis.manager.common.protocol.engine.{
-  EngineConnStatusCallback,
-  EngineConnStatusCallbackToAM
-}
-import org.apache.linkis.rpc.Sender
-import org.apache.linkis.rpc.message.annotation.Receiver
-
-import org.springframework.stereotype.Service
-
-@Service
-class DefaultEngineConnStatusCallbackService extends EngineConnStatusCallbackService with Logging {
-
-  @Receiver
-  override def dealEngineConnStatusCallback(protocol: EngineConnStatusCallback): Unit = {
-    logger.info(s"Start to deal EngineConnStatusCallback $protocol")
-
-    if (NodeStatus.isAvailable(protocol.status)) {
-
-      LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll(
-        EngineConnStatusChangeEvent(protocol.ticketId, Running)
-      )
-    } else {
-
-      Sender
-        .getSender(MANAGER_SERVICE_NAME)
-        .send(
-          EngineConnStatusCallbackToAM(
-            protocol.serviceInstance,
-            protocol.status,
-            protocol.initErrorMsg
-          )
-        )
-      LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll(
-        EngineConnStatusChangeEvent(protocol.ticketId, Failed)
-      )
-    }
-
-    logger.info(s"Finished to deal EngineConnStatusCallback $protocol")
-  }
-
-}
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultYarnCallbackService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultYarnCallbackService.scala
deleted file mode 100644
index 0bb2e1366..000000000
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultYarnCallbackService.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.ecm.server.service.impl
-
-import org.apache.linkis.ecm.server.LinkisECMApplication
-import org.apache.linkis.ecm.server.listener.{YarnAppIdCallbackEvent, YarnInfoCallbackEvent}
-import org.apache.linkis.ecm.server.service.YarnCallbackService
-import org.apache.linkis.protocol.callback.{YarnAPPIdCallbackProtocol, YarnInfoCallbackProtocol}
-import org.apache.linkis.rpc.message.annotation.Receiver
-
-class DefaultYarnCallbackService extends YarnCallbackService {
-
-  @Receiver
-  override def dealApplicationId(protocol: YarnAPPIdCallbackProtocol): Unit = {
-    LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll(
-      YarnAppIdCallbackEvent(protocol)
-    )
-  }
-
-  @Receiver
-  override def dealApplicationURI(protocol: YarnInfoCallbackProtocol): Unit = {
-    LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll(YarnInfoCallbackEvent(protocol))
-  }
-
-}
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala
index ad58ba723..e6ab513f1 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala
@@ -27,7 +27,7 @@ import org.apache.linkis.ecm.server.LinkisECMApplication
 import org.apache.linkis.ecm.server.conf.ECMConfiguration
 import org.apache.linkis.ecm.server.conf.ECMConfiguration.MANAGER_SERVICE_NAME
 import org.apache.linkis.ecm.server.exception.ECMErrorException
-import org.apache.linkis.ecm.server.listener.EngineConnStatusChangeEvent
+import org.apache.linkis.ecm.server.listener.EngineConnLaunchStatusChangeEvent
 import org.apache.linkis.ecm.server.service.LocalDirsHandleService
 import org.apache.linkis.manager.common.entity.enumeration.NodeStatus
 import org.apache.linkis.manager.common.entity.enumeration.NodeStatus._
@@ -128,11 +128,11 @@ abstract class ProcessEngineConnLaunchService extends AbstractEngineConnLaunchSe
       if (exitCode.exists(_ != 0)) {
         logger.info(s"engine ${tickedId} process exit ")
         LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll(
-          EngineConnStatusChangeEvent(tickedId, ShuttingDown)
+          EngineConnLaunchStatusChangeEvent(tickedId, ShuttingDown)
         )
       } else {
         LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll(
-          EngineConnStatusChangeEvent(tickedId, Success)
+          EngineConnLaunchStatusChangeEvent(tickedId, Success)
         )
       }
     }
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/spring/ECMSpringConfiguration.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/spring/ECMSpringConfiguration.scala
index 4a684bbec..9084d829f 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/spring/ECMSpringConfiguration.scala
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/spring/ECMSpringConfiguration.scala
@@ -39,12 +39,6 @@ class ECMSpringConfiguration {
     new DefaultECMContext
   }
 
-  @Bean
-  @ConditionalOnMissingBean
-  def getDefaultYarnCallbackService: YarnCallbackService = {
-    new DefaultYarnCallbackService
-  }
-
   @Bean
   @ConditionalOnMissingBean
   def getBmlResourceLocalizationService(
@@ -57,32 +51,12 @@ class ECMSpringConfiguration {
     service
   }
 
-  @Bean
-  @ConditionalOnMissingBean
-  def getDefaultLogCallbackService: LogCallbackService = {
-    null
-  }
-
   @Bean
   @ConditionalOnMissingBean
   def getDefaultlocalDirsHandleService: LocalDirsHandleService = {
     new DefaultLocalDirsHandleService
   }
 
-  @Bean
-  @ConditionalOnMissingBean
-  def getDefaultEngineConnPidCallbackService: EngineConnPidCallbackService = {
-    new DefaultEngineConnPidCallbackService
-  }
-
-  @Bean
-  @ConditionalOnMissingBean
-  def getDefaultEngineConnListService(context: ECMContext): EngineConnListService = {
-    implicit val service: DefaultEngineConnListService = new DefaultEngineConnListService
-    registerSyncListener(context)
-    service
-  }
-
   @Bean
   @ConditionalOnMissingBean
   def getLinuxProcessEngineConnLaunchService(
@@ -114,10 +88,8 @@ class ECMSpringConfiguration {
   @Bean
   @ConditionalOnMissingBean
   def getDefaultEngineConnKillService(
-      engineConnListService: EngineConnListService
   ): EngineConnKillService = {
     val service = new DefaultEngineConnKillService
-    service.setEngineConnListService(engineConnListService)
     service
   }
 
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessHelper.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessHelper.scala
deleted file mode 100644
index 42e79c52c..000000000
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessHelper.scala
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.engineconn.computation.executor.upstream.access
-
-import org.apache.linkis.DataWorkCloudApplication
-import org.apache.linkis.common.ServiceInstance
-import org.apache.linkis.common.conf.{CommonVars, DWCArgumentsParser}
-import org.apache.linkis.common.utils.Utils
-import org.apache.linkis.engineconn.common.creation.DefaultEngineCreationContext
-import org.apache.linkis.engineconn.core.util.EngineConnUtils
-import org.apache.linkis.governance.common.conf.GovernanceCommonConf
-import org.apache.linkis.governance.common.utils.EngineConnArgumentsParser
-import org.apache.linkis.manager.engineplugin.common.launch.process.Environment
-import org.apache.linkis.manager.label.builder.factory.{
-  LabelBuilderFactory,
-  LabelBuilderFactoryContext
-}
-import org.apache.linkis.manager.label.entity.Label
-import org.apache.linkis.server.conf.ServerConfiguration
-
-import org.apache.commons.lang3.StringUtils
-
-import java.util
-
-import org.slf4j.{Logger, LoggerFactory}
-
-object ECTaskEntranceInfoAccessHelper {
-  val logger: Logger = LoggerFactory.getLogger(ECTaskEntranceInfoAccessHelper.getClass)
-
-  val engineCreationContext = new DefaultEngineCreationContext
-  val labelBuilderFactory: LabelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory
-
-  def initApp(args: Array[String]): Unit = {
-    val arguments = EngineConnArgumentsParser.getEngineConnArgumentsParser.parseToObj(args)
-    val engineConf = arguments.getEngineConnConfMap
-    engineCreationContext.setUser(engineConf.getOrElse("user", Utils.getJvmUser))
-    engineCreationContext.setTicketId(engineConf.getOrElse("ticketId", ""))
-    val host = CommonVars(Environment.ECM_HOST.toString, "127.0.0.1").getValue
-    val port = CommonVars(Environment.ECM_PORT.toString, "80").getValue
-    engineCreationContext.setEMInstance(
-      ServiceInstance(GovernanceCommonConf.ENGINE_CONN_MANAGER_SPRING_NAME.getValue, s"$host:$port")
-    )
-    val labels = new util.ArrayList[Label[_]]
-    val labelArgs = engineConf.filter(_._1.startsWith(EngineConnArgumentsParser.LABEL_PREFIX))
-    if (labelArgs.nonEmpty) {
-      labelArgs.foreach { case (key, value) =>
-        labels.add(
-          labelBuilderFactory
-            .createLabel[Label[_]](key.replace(EngineConnArgumentsParser.LABEL_PREFIX, ""), value)
-        )
-      }
-      engineCreationContext.setLabels(labels)
-    }
-    val jMap = new java.util.HashMap[String, String](engineConf.size)
-    engineConf.foreach(kv => jMap.put(kv._1, kv._2))
-    engineCreationContext.setOptions(jMap)
-    engineCreationContext.setArgs(args)
-    //    EngineConnObject.setEngineCreationContext(engineCreationContext)
-    logger.info(
-      "Finished to init engineCreationContext: " + EngineConnUtils.GSON
-        .toJson(engineCreationContext)
-    )
-
-    logger.info("Spring is enabled, now try to start SpringBoot.")
-    logger.info("<--------------------Start SpringBoot App-------------------->")
-    val parser = DWCArgumentsParser.parse(engineCreationContext.getArgs)
-    DWCArgumentsParser.setDWCOptionMap(parser.getDWCConfMap)
-    val existsExcludePackages = ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES.getValue
-    if (!StringUtils.isEmpty(existsExcludePackages)) {
-      DataWorkCloudApplication.setProperty(
-        ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES.key,
-        existsExcludePackages
-      )
-    }
-    // 加载spring类
-    DataWorkCloudApplication.main(DWCArgumentsParser.formatSpringOptions(parser.getSpringConfMap))
-
-    logger.info("<--------------------SpringBoot App init succeed-------------------->")
-  }
-
-}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/launch/EngineConnServer.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/launch/EngineConnServer.scala
index 180798a77..14a0701d9 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/launch/EngineConnServer.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/launch/EngineConnServer.scala
@@ -127,6 +127,7 @@ object EngineConnServer extends Logging {
     this.engineCreationContext.setEMInstance(
       ServiceInstance(GovernanceCommonConf.ENGINE_CONN_MANAGER_SPRING_NAME.getValue, s"$host:$port")
     )
+
     val labels = new ArrayBuffer[Label[_]]
     val labelArgs = engineConf.filter(_._1.startsWith(EngineConnArgumentsParser.LABEL_PREFIX))
     if (labelArgs.nonEmpty) {
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala
index d7ad2c797..fa7d9ff01 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala
@@ -59,7 +59,7 @@ class CallbackEngineConnHook extends EngineConnHook with Logging {
     newMap.put("spring.mvc.servlet.path", ServerConfiguration.BDP_SERVER_RESTFUL_URI.getValue)
     DataWorkCloudApplication.main(DWCArgumentsParser.formatSpringOptions(newMap.toMap))
 
-    val engineConnPidCallBack = new EngineConnPidCallback(engineCreationContext.getEMInstance)
+    val engineConnPidCallBack = new EngineConnPidCallback()
     Utils.tryAndError(engineConnPidCallBack.callback())
     logger.info("<--------------------SpringBoot App init succeed-------------------->")
   }
@@ -78,9 +78,7 @@ class CallbackEngineConnHook extends EngineConnHook with Logging {
       engineCreationContext: EngineCreationContext,
       throwable: Throwable
   ): Unit = {
-    val engineConnAfterStartCallback = new EngineConnAfterStartCallback(
-      engineCreationContext.getEMInstance
-    )
+    val engineConnAfterStartCallback = new EngineConnAfterStartCallback
     val prefixMsg = Sender.getThisServiceInstance + s": log dir: ${EngineConnConf.getLogDir},"
     Utils.tryAndError(
       engineConnAfterStartCallback.callback(
@@ -105,9 +103,7 @@ class CallbackEngineConnHook extends EngineConnHook with Logging {
       engineCreationContext: EngineCreationContext,
       engineConn: EngineConn
   ): Unit = {
-    val engineConnAfterStartCallback = new EngineConnAfterStartCallback(
-      engineCreationContext.getEMInstance
-    )
+    val engineConnAfterStartCallback = new EngineConnAfterStartCallback
     Utils.tryAndError(
       engineConnAfterStartCallback.callback(
         EngineConnStatusCallback(
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnAfterStartCallback.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnAfterStartCallback.scala
index fe6275ce6..d61e711f5 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnAfterStartCallback.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnAfterStartCallback.scala
@@ -17,10 +17,7 @@
 
 package org.apache.linkis.engineconn.callback.service
 
-import org.apache.linkis.common.ServiceInstance
-
-class EngineConnAfterStartCallback(emInstance: ServiceInstance)
-    extends AbstractEngineConnStartUpCallback(emInstance) {
+class EngineConnAfterStartCallback extends AbstractEngineConnStartUpCallback {
 
   override def callback(): Unit = {}
 }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala
index 8a028d0a9..1b629cf30 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala
@@ -17,8 +17,8 @@
 
 package org.apache.linkis.engineconn.callback.service
 
-import org.apache.linkis.common.ServiceInstance
 import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.governance.common.conf.GovernanceCommonConf
 import org.apache.linkis.manager.common.entity.enumeration.NodeStatus
 import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallback
 import org.apache.linkis.protocol.message.RequestProtocol
@@ -26,31 +26,25 @@ import org.apache.linkis.rpc.Sender
 
 trait EngineConnCallback {
 
-  protected def getEMSender: Sender
-
   def callback(): Unit
 
 }
 
-abstract class AbstractEngineConnStartUpCallback(emInstance: ServiceInstance)
-    extends EngineConnCallback
-    with Logging {
-
-  override protected def getEMSender: Sender = {
-    Sender.getSender(emInstance)
-  }
+abstract class AbstractEngineConnStartUpCallback() extends EngineConnCallback with Logging {
 
   def callback(protocol: RequestProtocol): Unit = {
     protocol match {
       case protocol: EngineConnStatusCallback =>
         if (protocol.status.equals(NodeStatus.Failed)) {
-          logger.error(s"protocol will send to em: ${protocol}")
+          logger.error(s"protocol will send to lm: ${protocol}")
         } else {
-          logger.info(s"protocol will send to em: ${protocol}")
+          logger.info(s"protocol will send to lm: ${protocol}")
         }
       case _ =>
     }
-    getEMSender.send(protocol)
+    Sender
+      .getSender(GovernanceCommonConf.ENGINE_APPLICATION_MANAGER_SPRING_NAME.getValue)
+      .ask(protocol)
   }
 
 }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnPidCallback.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnPidCallback.scala
index 23a3f90a2..f0995c0b9 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnPidCallback.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnPidCallback.scala
@@ -17,15 +17,13 @@
 
 package org.apache.linkis.engineconn.callback.service
 
-import org.apache.linkis.common.ServiceInstance
 import org.apache.linkis.engineconn.core.EngineConnObject
 import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid
 import org.apache.linkis.rpc.Sender
 
 import java.lang.management.ManagementFactory
 
-class EngineConnPidCallback(emInstance: ServiceInstance)
-    extends AbstractEngineConnStartUpCallback(emInstance) {
+class EngineConnPidCallback extends AbstractEngineConnStartUpCallback {
 
   override def callback(): Unit = {
     val pid = ManagementFactory.getRuntimeMXBean.getName.split("@")(0)
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/LinkisManagerApplication.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/LinkisManagerApplication.java
similarity index 96%
rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/LinkisManagerApplication.java
rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/LinkisManagerApplication.java
index cfbefdbaa..064d61a6f 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/LinkisManagerApplication.java
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/LinkisManagerApplication.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.manager.am;
+package org.apache.linkis.manager;
 
 import org.apache.linkis.LinkisBaseServerApp;
 
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/YarnCallbackService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/event/message/EngineConnPidCallbackEvent.java
similarity index 62%
rename from linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/YarnCallbackService.scala
rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/event/message/EngineConnPidCallbackEvent.java
index 06a9c787c..1202433fb 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/YarnCallbackService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/event/message/EngineConnPidCallbackEvent.java
@@ -15,14 +15,22 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.ecm.server.service
+package org.apache.linkis.manager.am.event.message;
 
-import org.apache.linkis.protocol.callback.{YarnAPPIdCallbackProtocol, YarnInfoCallbackProtocol}
+import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid;
 
-trait YarnCallbackService {
+import org.springframework.context.ApplicationEvent;
 
-  def dealApplicationId(protocol: YarnAPPIdCallbackProtocol): Unit
+public class EngineConnPidCallbackEvent extends ApplicationEvent {
 
-  def dealApplicationURI(protocol: YarnInfoCallbackProtocol): Unit
+  private ResponseEngineConnPid protocol;
 
+  public EngineConnPidCallbackEvent(ResponseEngineConnPid protocol) {
+    super(protocol);
+    this.protocol = protocol;
+  }
+
+  public ResponseEngineConnPid getProtocol() {
+    return protocol;
+  }
 }
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java
index 4477d60ef..05cda79c5 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java
@@ -25,6 +25,7 @@ import org.apache.linkis.manager.am.converter.DefaultMetricsConverter;
 import org.apache.linkis.manager.am.exception.AMErrorCode;
 import org.apache.linkis.manager.am.exception.AMErrorException;
 import org.apache.linkis.manager.am.manager.EngineNodeManager;
+import org.apache.linkis.manager.am.service.ECResourceInfoService;
 import org.apache.linkis.manager.am.service.em.ECMOperateService;
 import org.apache.linkis.manager.am.service.em.EMInfoService;
 import org.apache.linkis.manager.am.utils.AMUtils;
@@ -33,6 +34,7 @@ import org.apache.linkis.manager.common.entity.enumeration.NodeHealthy;
 import org.apache.linkis.manager.common.entity.metrics.NodeHealthyInfo;
 import org.apache.linkis.manager.common.entity.node.EMNode;
 import org.apache.linkis.manager.common.entity.node.EngineNode;
+import org.apache.linkis.manager.common.entity.persistence.ECResourceInfoRecord;
 import org.apache.linkis.manager.common.protocol.OperateRequest$;
 import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest;
 import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest$;
@@ -91,6 +93,7 @@ public class EMRestfulApi {
 
   @Autowired private ECMOperateService ecmOperateService;
 
+  @Autowired private ECResourceInfoService ecResourceInfoService;
   private LabelBuilderFactory stdLabelBuilderFactory =
       LabelBuilderFactoryContext.getLabelBuilderFactory();
 
@@ -323,7 +326,10 @@ public class EMRestfulApi {
       return Message.error(
           "You have no permission to execute ECM Operation by this EngineConn " + serviceInstance);
     }
-    return executeECMOperation(engineNode.getEMNode(), new ECMOperateRequest(userName, parameters));
+    return executeECMOperation(
+        engineNode.getEMNode(),
+        engineNode.getServiceInstance().getInstance(),
+        new ECMOperateRequest(userName, parameters));
   }
 
   @ApiOperation(
@@ -354,7 +360,7 @@ public class EMRestfulApi {
           "Fail to process the operation parameters, cased by "
               + ExceptionUtils.getRootCauseMessage(e));
     }
-    return executeECMOperation(ecmNode, new ECMOperateRequest(userName, parameters));
+    return executeECMOperation(ecmNode, "", new ECMOperateRequest(userName, parameters));
   }
 
   @ApiOperation(value = "openEngineLog", notes = "open Engine log", response = Message.class)
@@ -377,9 +383,10 @@ public class EMRestfulApi {
     String userName = ModuleUserUtils.getOperationUser(req, "openEngineLog");
     EMNode ecmNode;
     Map<String, Object> parameters;
+    String engineInstance;
     try {
       String emInstance = jsonNode.get("emInstance").asText();
-      String engineInstance = jsonNode.get("instance").asText();
+      engineInstance = jsonNode.get("instance").asText();
       ServiceInstance serviceInstance = EngineRestfulApi.getServiceInstance(jsonNode);
       logger.info("User {} try to open engine: {} log.", userName, serviceInstance);
       ecmNode =
@@ -416,10 +423,12 @@ public class EMRestfulApi {
       logger.error("Failed to open engine log, error:", e);
       return Message.error(e.getMessage());
     }
-    return executeECMOperation(ecmNode, new ECMOperateRequest(userName, parameters));
+    return executeECMOperation(
+        ecmNode, engineInstance, new ECMOperateRequest(userName, parameters));
   }
 
-  private Message executeECMOperation(EMNode ecmNode, ECMOperateRequest ecmOperateRequest) {
+  private Message executeECMOperation(
+      EMNode ecmNode, String engineInstance, ECMOperateRequest ecmOperateRequest) {
     String operationName = OperateRequest$.MODULE$.getOperationName(ecmOperateRequest.parameters());
     if (ArrayUtils.contains(adminOperations, operationName)
         && Configuration.isNotAdmin(ecmOperateRequest.user())) {
@@ -434,6 +443,18 @@ public class EMRestfulApi {
               + " admin Operation in ECM "
               + ecmNode.getServiceInstance());
     }
+
+    // fill in logDirSuffix
+    if (StringUtils.isNotBlank(engineInstance)
+        && Objects.isNull(ecmOperateRequest.parameters().get("logDirSuffix"))) {
+      ECResourceInfoRecord ecResourceInfoRecord =
+          ecResourceInfoService.getECResourceInfoRecordByInstance(engineInstance);
+      if (Objects.isNull(ecResourceInfoRecord)) {
+        return Message.error("ECM instance: " + ecmNode.getServiceInstance() + " not exist ");
+      }
+      ecmOperateRequest.parameters().put("logDirSuffix", ecResourceInfoRecord.getLogDirSuffix());
+    }
+
     ECMOperateResponse engineOperateResponse =
         ecmOperateService.executeOperation(ecmNode, ecmOperateRequest);
 
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnPidCallbackService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnPidCallbackService.java
similarity index 82%
rename from linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnPidCallbackService.scala
rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnPidCallbackService.java
index 8e8339473..194eea590 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnPidCallbackService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnPidCallbackService.java
@@ -15,12 +15,11 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.ecm.server.service
+package org.apache.linkis.manager.am.service;
 
-import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid
-
-trait EngineConnPidCallbackService {
-
-  def dealPid(protocol: ResponseEngineConnPid): Unit
+import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid;
 
+@FunctionalInterface
+public interface EngineConnPidCallbackService {
+  void dealPid(ResponseEngineConnPid protocol);
 }
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineConnStatusCallbackService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnStatusCallbackService.java
similarity index 79%
rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineConnStatusCallbackService.scala
rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnStatusCallbackService.java
index b4e498155..477d49aa1 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineConnStatusCallbackService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnStatusCallbackService.java
@@ -15,12 +15,11 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.manager.am.service.engine
+package org.apache.linkis.manager.am.service;
 
-import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallbackToAM
+import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallbackToAM;
 
-trait EngineConnStatusCallbackService {
-
-  def dealEngineConnStatusCallback(engineConnStatusCallbackToAM: EngineConnStatusCallbackToAM): Unit
+public interface EngineConnStatusCallbackService {
 
+  void dealEngineConnStatusCallbackToAM(EngineConnStatusCallbackToAM engineConnStatusCallbackToAM);
 }
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnPidCallbackService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnPidCallbackService.java
new file mode 100644
index 000000000..e9a8290b0
--- /dev/null
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnPidCallbackService.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.manager.am.service.impl;
+
+import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid;
+import org.apache.linkis.manager.am.manager.DefaultEngineNodeManager;
+import org.apache.linkis.manager.am.service.EngineConnPidCallbackService;
+import org.apache.linkis.manager.common.entity.node.EngineNode;
+import org.apache.linkis.rpc.message.annotation.Receiver;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@Service
+public class DefaultEngineConnPidCallbackService implements EngineConnPidCallbackService {
+  private static final Logger logger =
+      LoggerFactory.getLogger(DefaultEngineConnPidCallbackService.class);
+
+  @Autowired private DefaultEngineNodeManager defaultEngineNodeManager;
+
+  @Receiver
+  @Override
+  public void dealPid(ResponseEngineConnPid protocol) {
+    // 设置pid
+    logger.info(
+        "DefaultEngineConnPidCallbackService dealPid serviceInstance: [{}] pid: [{}]"
+            + " ticketId: [{}]",
+        protocol.serviceInstance(),
+        protocol.pid(),
+        protocol.ticketId());
+
+    EngineNode engineNode = defaultEngineNodeManager.getEngineNode(protocol.serviceInstance());
+    if (engineNode == null) {
+      logger.error(
+          "DefaultEngineConnPidCallbackService dealPid failed, engineNode is null, serviceInstance:{}",
+          protocol.serviceInstance());
+      return;
+    }
+
+    engineNode.setIdentifier(protocol.pid());
+    defaultEngineNodeManager.updateEngine(engineNode);
+  }
+}
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnStatusCallbackService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnStatusCallbackService.java
new file mode 100644
index 000000000..12c45e231
--- /dev/null
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnStatusCallbackService.java
@@ -0,0 +1,146 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.manager.am.service.impl;
+
+import org.apache.linkis.manager.am.conf.AMConfiguration;
+import org.apache.linkis.manager.am.service.EngineConnStatusCallbackService;
+import org.apache.linkis.manager.am.service.engine.EngineStopService;
+import org.apache.linkis.manager.common.constant.AMConstant;
+import org.apache.linkis.manager.common.entity.enumeration.NodeStatus;
+import org.apache.linkis.manager.common.entity.metrics.AMNodeMetrics;
+import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallback;
+import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallbackToAM;
+import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest;
+import org.apache.linkis.manager.persistence.NodeMetricManagerPersistence;
+import org.apache.linkis.manager.service.common.metrics.MetricsConverter;
+import org.apache.linkis.rpc.Sender$;
+import org.apache.linkis.rpc.message.annotation.Receiver;
+import org.apache.linkis.server.BDPJettyServerHelper;
+
+import org.apache.commons.lang3.StringUtils;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import java.io.UnsupportedEncodingException;
+import java.util.HashMap;
+import java.util.Locale;
+import java.util.Map;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+
+@Service
+public class DefaultEngineConnStatusCallbackService implements EngineConnStatusCallbackService {
+  private org.slf4j.Logger logger =
+      org.slf4j.LoggerFactory.getLogger(DefaultEngineConnStatusCallbackService.class);
+
+  @Autowired private NodeMetricManagerPersistence nodeMetricManagerPersistence;
+
+  @Autowired private MetricsConverter metricsConverter;
+
+  @Autowired private EngineStopService engineStopService;
+
+  private static final String[] canRetryLogs =
+      AMConfiguration.AM_CAN_RETRY_LOGS().getValue().split(";");
+
+  @Receiver
+  public void dealEngineConnStatusCallback(EngineConnStatusCallback protocol) {
+    logger.info(
+        "EngineConnStatusCallbackServiceImpl handle engineConnStatus callback serviceInstance: [{}] status: [{}]",
+        protocol.serviceInstance(),
+        protocol.status());
+    if (!NodeStatus.isAvailable(protocol.status())) {
+      EngineStopRequest engineStopRequest = new EngineStopRequest();
+      engineStopRequest.setServiceInstance(protocol.serviceInstance());
+      engineStopRequest.setUser("hadoop");
+      try {
+        engineStopService.stopEngine(
+            engineStopRequest, Sender$.MODULE$.getSender(Sender$.MODULE$.getThisServiceInstance()));
+      } catch (Exception e) {
+        logger.warn(
+            "DefaultEngineConnStatusCallbackService stopEngine failed, serviceInstance:{}",
+            engineStopRequest.getServiceInstance(),
+            e);
+      }
+
+      dealEngineConnStatusCallbackToAM(
+          new EngineConnStatusCallbackToAM(
+              protocol.serviceInstance(), protocol.status(), protocol.initErrorMsg(), false));
+    }
+    logger.info("Finished to deal EngineConnStatusCallback: {}", protocol);
+  }
+
+  @Receiver
+  public void dealEngineConnStatusCallbackToAM(
+      EngineConnStatusCallbackToAM engineConnStatusCallbackToAM) {
+    if (engineConnStatusCallbackToAM.serviceInstance() == null) {
+      logger.warn("call back service instance is null");
+    }
+    logger.info(
+        "EngineConnStatusCallbackServiceImpl start to deal engineConnStatusCallbackToAM {}",
+        engineConnStatusCallbackToAM);
+
+    AMNodeMetrics nodeMetrics = new AMNodeMetrics();
+    Map<String, Object> heartBeatMsg = new HashMap<>();
+    int initErrorMsgMaxByteNum = 60000;
+
+    String initErrorMsg = engineConnStatusCallbackToAM.initErrorMsg();
+    try {
+      if (StringUtils.isNotBlank(initErrorMsg)
+          && initErrorMsg.getBytes("utf-8").length >= initErrorMsgMaxByteNum) {
+        initErrorMsg = initErrorMsg.substring(0, initErrorMsgMaxByteNum);
+      }
+    } catch (UnsupportedEncodingException e) {
+      logger.warn("dealEngineConnStatusCallbackToAM getBytes failed", e);
+    }
+    heartBeatMsg.put(AMConstant.START_REASON, initErrorMsg);
+
+    if (engineConnStatusCallbackToAM.canRetry()) {
+      heartBeatMsg.put(AMConstant.EC_CAN_RETRY, engineConnStatusCallbackToAM.canRetry());
+    } else if (matchRetryLog(engineConnStatusCallbackToAM.initErrorMsg())) {
+      logger.info("match canRetry log {}", engineConnStatusCallbackToAM.serviceInstance());
+      heartBeatMsg.put(AMConstant.EC_CAN_RETRY, engineConnStatusCallbackToAM.canRetry());
+    }
+
+    try {
+      nodeMetrics.setHeartBeatMsg(
+          BDPJettyServerHelper.jacksonJson().writeValueAsString(heartBeatMsg));
+    } catch (JsonProcessingException e) {
+      logger.warn("dealEngineConnStatusCallbackToAM writeValueAsString failed", e);
+    }
+    nodeMetrics.setServiceInstance(engineConnStatusCallbackToAM.serviceInstance());
+    nodeMetrics.setStatus(metricsConverter.convertStatus(engineConnStatusCallbackToAM.status()));
+
+    nodeMetricManagerPersistence.addOrupdateNodeMetrics(nodeMetrics);
+    logger.info("Finished to deal engineConnStatusCallbackToAM {}", engineConnStatusCallbackToAM);
+  }
+
+  private boolean matchRetryLog(String errorMsg) {
+    boolean flag = false;
+    if (StringUtils.isNotBlank(errorMsg)) {
+      String errorMsgLowCase = errorMsg.toLowerCase(Locale.getDefault());
+      for (String canRetry : canRetryLogs) {
+        if (errorMsgLowCase.contains(canRetry)) {
+          logger.info("match engineConn log fatal logs, is {}", canRetry);
+          flag = true;
+        }
+      }
+    }
+    return flag;
+  }
+}
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.scala
index 86cf986d2..098d6e5c7 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.scala
@@ -25,7 +25,12 @@ import org.apache.linkis.manager.am.exception.{AMErrorCode, AMErrorException}
 import org.apache.linkis.manager.am.locker.EngineNodeLocker
 import org.apache.linkis.manager.common.constant.AMConstant
 import org.apache.linkis.manager.common.entity.enumeration.NodeStatus
-import org.apache.linkis.manager.common.entity.node.{AMEngineNode, EngineNode, ScoreServiceInstance}
+import org.apache.linkis.manager.common.entity.node.{
+  AMEngineNode,
+  EngineNode,
+  RMNode,
+  ScoreServiceInstance
+}
 import org.apache.linkis.manager.common.entity.persistence.PersistenceLabel
 import org.apache.linkis.manager.common.protocol.engine.{
   EngineOperateRequest,
@@ -105,7 +110,6 @@ class DefaultEngineNodeManager extends EngineNodeManager with Logging {
     val heartMsg = engine.getNodeHeartbeatMsg()
     engineNode.setNodeHealthyInfo(heartMsg.getHealthyInfo)
     engineNode.setNodeOverLoadInfo(heartMsg.getOverLoadInfo)
-    engineNode.setNodeResource(heartMsg.getNodeResource)
     engineNode.setNodeStatus(heartMsg.getStatus)
     engineNode
   }
@@ -125,7 +129,9 @@ class DefaultEngineNodeManager extends EngineNodeManager with Logging {
       toState: NodeStatus
   ): Unit = {}
 
-  override def updateEngine(engineNode: EngineNode): Unit = {}
+  override def updateEngine(engineNode: EngineNode): Unit = {
+    nodeManagerPersistence.updateNodeInstance(engineNode)
+  }
 
   override def switchEngine(engineNode: EngineNode): EngineNode = {
     null
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMEngineService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMEngineService.scala
index 918faf912..f63a39504 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMEngineService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMEngineService.scala
@@ -19,18 +19,31 @@ package org.apache.linkis.manager.am.service.em
 
 import org.apache.linkis.common.utils.Logging
 import org.apache.linkis.engineplugin.server.service.EngineConnLaunchService
+import org.apache.linkis.governance.common.utils.ECPathUtils
 import org.apache.linkis.manager.am.exception.AMErrorException
 import org.apache.linkis.manager.am.manager.{EMNodeManager, EngineNodeManager}
+import org.apache.linkis.manager.am.service.ECResourceInfoService
 import org.apache.linkis.manager.am.service.EMEngineService
 import org.apache.linkis.manager.common.constant.AMConstant
 import org.apache.linkis.manager.common.entity.node._
+import org.apache.linkis.manager.common.entity.persistence.{
+  ECResourceInfoRecord,
+  PersistenceResource
+}
 import org.apache.linkis.manager.common.protocol.em._
 import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest
 import org.apache.linkis.manager.common.utils.ManagerUtils
 import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnBuildRequest
 import org.apache.linkis.manager.label.entity.{EngineNodeLabel, Label}
 import org.apache.linkis.manager.label.entity.em.EMInstanceLabel
+import org.apache.linkis.manager.label.entity.engine.{
+  EngineInstanceLabel,
+  EngineTypeLabel,
+  UserCreatorLabel
+}
 import org.apache.linkis.manager.label.service.NodeLabelService
+import org.apache.linkis.manager.rm.domain.RMLabelContainer
+import org.apache.linkis.manager.rm.service.LabelResourceService
 import org.apache.linkis.manager.service.common.label.LabelFilter
 
 import org.apache.commons.collections.MapUtils
@@ -57,9 +70,15 @@ class DefaultEMEngineService extends EMEngineService with Logging {
   @Autowired
   private var engineConnLaunchService: EngineConnLaunchService = _
 
+  @Autowired
+  private var ecResourceInfoService: ECResourceInfoService = _
+
   @Autowired
   private var labelFilter: LabelFilter = _
 
+  @Autowired
+  private var labelResourceService: LabelResourceService = _
+
   override def listEngines(getEMEnginesRequest: GetEMEnginesRequest): util.List[EngineNode] = {
     val emNode = new AMEMNode()
     emNode.setServiceInstance(getEMEnginesRequest.getEm)
@@ -95,6 +114,50 @@ class DefaultEMEngineService extends EMEngineService with Logging {
     )
     val engineStopRequest = new EngineStopRequest
     engineStopRequest.setServiceInstance(engineNode.getServiceInstance)
+    engineStopRequest.setIdentifierType(engineNode.getMark)
+    engineStopRequest.setIdentifier(engineNode.getIdentifier)
+
+    val ecResourceInfo: ECResourceInfoRecord =
+      ecResourceInfoService.getECResourceInfoRecordByInstance(
+        engineNode.getServiceInstance.getInstance
+      )
+
+    if (ecResourceInfo != null) {
+      engineStopRequest.setEngineType(ecResourceInfo.getLabelValue.split(",")(1).split("-")(0))
+      engineStopRequest.setLogDirSuffix(ecResourceInfo.getLogDirSuffix)
+    } else {
+      if (engineNode.getLabels.isEmpty) {
+        // node labels is empty, engine already been stopped
+        logger.info(
+          s"DefaultEMEngineService stopEngine node labels is empty, engine: ${engineStopRequest.getServiceInstance} have already been stopped."
+        )
+        return
+      }
+
+      val rMLabelContainer: RMLabelContainer =
+        labelResourceService.enrichLabels(engineNode.getLabels)
+
+      val persistenceResource: PersistenceResource =
+        labelResourceService.getPersistenceResource(rMLabelContainer.getEngineInstanceLabel)
+      if (persistenceResource == null) {
+        // persistenceResource is null, engine already been stopped
+        logger.info(
+          s"DefaultEMEngineService stopEngine persistenceResource is null, engine: ${engineStopRequest.getServiceInstance} have already been stopped."
+        )
+        return
+      }
+
+      engineStopRequest.setEngineType(rMLabelContainer.getEngineTypeLabel.getEngineType)
+      engineStopRequest.setLogDirSuffix(
+        ECPathUtils
+          .getECLogDirSuffix(
+            rMLabelContainer.getEngineTypeLabel,
+            rMLabelContainer.getUserCreatorLabel,
+            persistenceResource.getTicketId
+          )
+      )
+    }
+
     emNodeManager.stopEngine(engineStopRequest, emNode)
     // engineNodeManager.deleteEngineNode(engineNode)
     logger.info(
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineConnStatusCallbackService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineConnStatusCallbackService.scala
deleted file mode 100644
index ccfcb7aa2..000000000
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineConnStatusCallbackService.scala
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.manager.am.service.engine
-
-import org.apache.linkis.common.utils.Logging
-import org.apache.linkis.manager.am.conf.AMConfiguration
-import org.apache.linkis.manager.common.constant.AMConstant
-import org.apache.linkis.manager.common.entity.metrics.AMNodeMetrics
-import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallbackToAM
-import org.apache.linkis.manager.persistence.NodeMetricManagerPersistence
-import org.apache.linkis.manager.service.common.metrics.MetricsConverter
-import org.apache.linkis.rpc.message.annotation.Receiver
-import org.apache.linkis.server.BDPJettyServerHelper
-
-import org.apache.commons.lang3.StringUtils
-
-import org.springframework.beans.factory.annotation.Autowired
-import org.springframework.stereotype.Service
-
-import java.util
-import java.util.Locale
-
-@Service
-class DefaultEngineConnStatusCallbackService extends EngineConnStatusCallbackService with Logging {
-
-  @Autowired
-  private var nodeMetricManagerPersistence: NodeMetricManagerPersistence = _
-
-  @Autowired
-  private var metricsConverter: MetricsConverter = _
-
-  private val canRetryLogs = AMConfiguration.AM_CAN_RETRY_LOGS.getValue.split(";")
-
-  // The heartBeatMsg field is of type text, mysql text max byte num is 65535
-  private val initErrorMsgMaxByteNum = 60000
-
-  @Receiver
-  override def dealEngineConnStatusCallback(
-      engineConnStatusCallbackToAM: EngineConnStatusCallbackToAM
-  ): Unit = {
-
-    if (null == engineConnStatusCallbackToAM.serviceInstance) {
-      logger.warn(s"call back service instance is null")
-    }
-    logger.info(s"Start to deal engineConnStatusCallbackToAM $engineConnStatusCallbackToAM")
-    val nodeMetrics = new AMNodeMetrics
-    val heartBeatMsg: java.util.Map[String, Any] = new util.HashMap[String, Any]()
-
-    var initErrorMsg = engineConnStatusCallbackToAM.initErrorMsg
-    if (
-        StringUtils.isNotBlank(initErrorMsg) && initErrorMsg
-          .getBytes("utf-8")
-          .length >= initErrorMsgMaxByteNum
-    ) {
-      initErrorMsg = initErrorMsg.substring(0, initErrorMsgMaxByteNum)
-    }
-    heartBeatMsg.put(AMConstant.START_REASON, initErrorMsg)
-
-    if (engineConnStatusCallbackToAM.canRetry) {
-      heartBeatMsg.put(AMConstant.EC_CAN_RETRY, engineConnStatusCallbackToAM.canRetry)
-    } else if (matchRetryLog(engineConnStatusCallbackToAM.initErrorMsg)) {
-      logger.info(s"match canRetry log ${engineConnStatusCallbackToAM.serviceInstance}")
-      heartBeatMsg.put(AMConstant.EC_CAN_RETRY, engineConnStatusCallbackToAM.canRetry)
-    }
-
-    nodeMetrics.setHeartBeatMsg(BDPJettyServerHelper.jacksonJson.writeValueAsString(heartBeatMsg))
-    nodeMetrics.setServiceInstance(engineConnStatusCallbackToAM.serviceInstance)
-    nodeMetrics.setStatus(metricsConverter.convertStatus(engineConnStatusCallbackToAM.status))
-
-    nodeMetricManagerPersistence.addOrupdateNodeMetrics(nodeMetrics)
-    logger.info(s"Finished to deal engineConnStatusCallbackToAM $engineConnStatusCallbackToAM")
-
-  }
-
-  private def matchRetryLog(errorMsg: String): Boolean = {
-    var flag = false
-    if (StringUtils.isNotBlank(errorMsg)) {
-      val errorMsgLowCase = errorMsg.toLowerCase(Locale.getDefault)
-      canRetryLogs.foreach(canRetry =>
-        if (errorMsgLowCase.contains(canRetry)) {
-          logger.info(s"match engineConn log fatal logs,is $canRetry")
-          flag = true
-        }
-      )
-    }
-    flag
-  }
-
-}
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineStopService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineStopService.scala
index 801114c4b..3267b698d 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineStopService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineStopService.scala
@@ -94,13 +94,17 @@ class DefaultEngineStopService extends AbstractEngineService with EngineStopServ
       logger.info(s" engineConn does not exist in db: $engineStopRequest ")
       return
     }
+
+    val labels = nodeLabelService.getNodeLabels(engineStopRequest.getServiceInstance)
+    node.setLabels(labels)
+
     // 1. request em to kill ec
     logger.info(s"Start to kill engine invoke enginePointer ${node.getServiceInstance}")
     Utils.tryAndErrorMsg {
       getEMService().stopEngine(node, node.getEMNode)
       logger.info(s"Finished to kill engine invoke enginePointer ${node.getServiceInstance}")
     }(s"Failed to stop engine ${node.getServiceInstance}")
-    node.setLabels(nodeLabelService.getNodeLabels(engineStopRequest.getServiceInstance))
+
     if (null == node.getNodeStatus) {
       node.setNodeStatus(NodeStatus.ShuttingDown)
     }
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineStopService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineStopService.scala
index 228f3e080..beca54790 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineStopService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineStopService.scala
@@ -17,7 +17,6 @@
 
 package org.apache.linkis.manager.am.service.engine
 
-import org.apache.linkis.manager.am.vo.ResourceVo
 import org.apache.linkis.manager.common.entity.node.EngineNode
 import org.apache.linkis.manager.common.protocol.engine.{
   EngineConnReleaseRequest,
@@ -26,10 +25,6 @@ import org.apache.linkis.manager.common.protocol.engine.{
 }
 import org.apache.linkis.rpc.Sender
 
-import java.util
-
-import scala.collection.mutable
-
 trait EngineStopService {
 
   def stopEngine(engineStopRequest: EngineStopRequest, sender: Sender): Unit
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/restful/RMMonitorRest.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/restful/RMMonitorRest.scala
index 193066995..4356d16a9 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/restful/RMMonitorRest.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/restful/RMMonitorRest.scala
@@ -69,6 +69,7 @@ import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
 
 import com.fasterxml.jackson.databind.ObjectMapper
+import com.fasterxml.jackson.module.scala.DefaultScalaModule
 import com.github.pagehelper.page.PageMethod
 import com.google.common.collect.Lists
 import io.swagger.annotations.{Api, ApiOperation}
@@ -79,6 +80,7 @@ import io.swagger.annotations.{Api, ApiOperation}
 class RMMonitorRest extends Logging {
 
   val mapper = new ObjectMapper()
+  mapper.registerModule(DefaultScalaModule)
 
   private val dateFormatLocal = new ThreadLocal[SimpleDateFormat]() {
     override protected def initialValue = new SimpleDateFormat("EEE MMM dd HH:mm:ss z yyyy")
@@ -121,8 +123,11 @@ class RMMonitorRest extends Logging {
 
   var COMBINED_USERCREATOR_ENGINETYPE: String = _
 
-  def appendMessageData(message: Message, key: String, value: AnyRef): Message =
-    message.data(key, mapper.readTree(gson.toJson(value)))
+  def appendMessageData(message: Message, key: String, value: AnyRef): Message = {
+    val result = mapper.writeValueAsString(value)
+    logger.info(s"appendMessageData result: $result")
+    message.data(key, mapper.readTree(result))
+  }
 
   @ApiOperation(value = "getApplicationList", notes = "get applicationList")
   @RequestMapping(path = Array("applicationlist"), method = Array(RequestMethod.POST))
@@ -138,92 +143,10 @@ class RMMonitorRest extends Logging {
     val engineType =
       if (param.get("engineType") == null) null else param.get("engineType").asInstanceOf[String]
     val nodes = getEngineNodes(userName, true)
-    val creatorToApplicationList = new mutable.HashMap[String, mutable.HashMap[String, Any]]
-    nodes.foreach { node =>
-      val userCreatorLabel = node.getLabels.asScala
-        .find(_.isInstanceOf[UserCreatorLabel])
-        .get
-        .asInstanceOf[UserCreatorLabel]
-      val engineTypeLabel = node.getLabels.asScala
-        .find(_.isInstanceOf[EngineTypeLabel])
-        .get
-        .asInstanceOf[EngineTypeLabel]
-      if (getUserCreator(userCreatorLabel).equals(userCreator)) {
-        if (engineType == null || getEngineType(engineTypeLabel).equals(engineType)) {
-          if (!creatorToApplicationList.contains(userCreatorLabel.getCreator)) {
-            val applicationList = new mutable.HashMap[String, Any]
-            applicationList.put("engineInstances", new mutable.ArrayBuffer[Any])
-            applicationList.put("usedResource", Resource.initResource(ResourceType.LoadInstance))
-            applicationList.put("maxResource", Resource.initResource(ResourceType.LoadInstance))
-            applicationList.put("minResource", Resource.initResource(ResourceType.LoadInstance))
-            applicationList.put("lockedResource", Resource.initResource(ResourceType.LoadInstance))
-            creatorToApplicationList.put(userCreatorLabel.getCreator, applicationList)
-          }
-          val applicationList = creatorToApplicationList(userCreatorLabel.getCreator)
-          applicationList.put(
-            "usedResource",
-            (if (applicationList("usedResource") == null) {
-               Resource.initResource(ResourceType.LoadInstance)
-             } else {
-               applicationList("usedResource")
-                 .asInstanceOf[Resource]
-             }) + node.getNodeResource.getUsedResource
-          )
-          applicationList.put(
-            "maxResource",
-            (if (applicationList("maxResource") == null) {
-               Resource.initResource(ResourceType.LoadInstance)
-             } else {
-               applicationList("maxResource")
-                 .asInstanceOf[Resource]
-             }) + node.getNodeResource.getMaxResource
-          )
-          applicationList.put(
-            "minResource",
-            (if (applicationList("minResource") == null) {
-               Resource.initResource(ResourceType.LoadInstance)
-             } else {
-               applicationList("minResource")
-                 .asInstanceOf[Resource]
-             }) + node.getNodeResource.getMinResource
-          )
-          applicationList.put(
-            "lockedResource",
-            (if (applicationList("lockedResource") == null) {
-               Resource.initResource(ResourceType.LoadInstance)
-             } else {
-               applicationList("lockedResource")
-                 .asInstanceOf[Resource]
-             }) + node.getNodeResource.getLockedResource
-          )
-          val engineInstance = new mutable.HashMap[String, Any]
-          engineInstance.put("creator", userCreatorLabel.getCreator)
-          engineInstance.put("engineType", engineTypeLabel.getEngineType)
-          engineInstance.put("instance", node.getServiceInstance.getInstance)
-          engineInstance.put("label", engineTypeLabel.getStringValue)
-          node.setNodeResource(
-            ResourceUtils.convertTo(node.getNodeResource, ResourceType.LoadInstance)
-          )
-          engineInstance.put("resource", node.getNodeResource)
-          if (node.getNodeStatus == null) {
-            engineInstance.put("status", "Busy")
-          } else {
-            engineInstance.put("status", node.getNodeStatus.toString)
-          }
-          engineInstance.put("startTime", dateFormatLocal.get().format(node.getStartTime))
-          engineInstance.put("owner", node.getOwner)
-          applicationList("engineInstances")
-            .asInstanceOf[mutable.ArrayBuffer[Any]]
-            .append(engineInstance)
-        }
-      }
-    }
-    val applications = creatorToApplicationList.map { creatorEntry =>
-      val application = new mutable.HashMap[String, Any]
-      application.put("creator", creatorEntry._1)
-      application.put("applicationList", creatorEntry._2)
-      application
-    }
+
+    val creatorToApplicationList = getCreatorToApplicationList(userCreator, engineType, nodes)
+
+    val applications = getApplications(creatorToApplicationList)
     appendMessageData(message, "applications", applications)
     message
   }
@@ -333,112 +256,12 @@ class RMMonitorRest extends Logging {
         node.getLabels.asScala.find(_.isInstanceOf[EngineTypeLabel]).get != null
       })
     }
+
     val userCreatorEngineTypeResourceMap =
-      new mutable.HashMap[String, mutable.HashMap[String, NodeResource]]
-    nodes.foreach { node =>
-      val userCreatorLabel = node.getLabels.asScala
-        .find(_.isInstanceOf[UserCreatorLabel])
-        .get
-        .asInstanceOf[UserCreatorLabel]
-      val engineTypeLabel = node.getLabels.asScala
-        .find(_.isInstanceOf[EngineTypeLabel])
-        .get
-        .asInstanceOf[EngineTypeLabel]
-      val userCreator = getUserCreator(userCreatorLabel)
-      if (!userCreatorEngineTypeResourceMap.contains(userCreator)) {
-        userCreatorEngineTypeResourceMap.put(userCreator, new mutable.HashMap[String, NodeResource])
-      }
-      val engineTypeResourceMap = userCreatorEngineTypeResourceMap.get(userCreator).get
-      val engineType = getEngineType(engineTypeLabel)
-      if (!engineTypeResourceMap.contains(engineType)) {
-        val nodeResource = CommonNodeResource.initNodeResource(ResourceType.LoadInstance)
-        engineTypeResourceMap.put(engineType, nodeResource)
-      }
-      val resource = engineTypeResourceMap.get(engineType).get
-      resource.setUsedResource(node.getNodeResource.getUsedResource + resource.getUsedResource)
-      // combined label
-      val combinedLabel =
-        combinedLabelBuilder.build("", Lists.newArrayList(userCreatorLabel, engineTypeLabel));
-      var labelResource = labelResourceService.getLabelResource(combinedLabel)
-      if (labelResource == null) {
-        resource.setLeftResource(node.getNodeResource.getMaxResource - resource.getUsedResource)
-      } else {
-        labelResource = ResourceUtils.convertTo(labelResource, ResourceType.LoadInstance)
-        resource.setUsedResource(labelResource.getUsedResource)
-        resource.setLockedResource(labelResource.getLockedResource)
-        resource.setLeftResource(labelResource.getLeftResource)
-        resource.setMaxResource(labelResource.getMaxResource)
-      }
-      resource.getLeftResource match {
-        case dResource: DriverAndYarnResource =>
-          resource.setLeftResource(dResource.loadInstanceResource)
-        case _ =>
-      }
-    }
-    val userCreatorEngineTypeResources = userCreatorEngineTypeResourceMap.map { userCreatorEntry =>
-      val userCreatorEngineTypeResource = new mutable.HashMap[String, Any]
-      userCreatorEngineTypeResource.put("userCreator", userCreatorEntry._1)
-      var totalUsedMemory: Long = 0L
-      var totalUsedCores: Int = 0
-      var totalUsedInstances = 0
-      var totalLockedMemory: Long = 0L
-      var totalLockedCores: Int = 0
-      var totalLockedInstances: Int = 0
-      var totalMaxMemory: Long = 0L
-      var totalMaxCores: Int = 0
-      var totalMaxInstances: Int = 0
-      val engineTypeResources = userCreatorEntry._2.map { engineTypeEntry =>
-        val engineTypeResource = new mutable.HashMap[String, Any]
-        engineTypeResource.put("engineType", engineTypeEntry._1)
-        val engineResource = engineTypeEntry._2
-        val usedResource = engineResource.getUsedResource.asInstanceOf[LoadInstanceResource]
-        val lockedResource = engineResource.getLockedResource.asInstanceOf[LoadInstanceResource]
-        val maxResource = engineResource.getMaxResource.asInstanceOf[LoadInstanceResource]
-        val usedMemory = usedResource.memory
-        val usedCores = usedResource.cores
-        val usedInstances = usedResource.instances
-        totalUsedMemory += usedMemory
-        totalUsedCores += usedCores
-        totalUsedInstances += usedInstances
-        val lockedMemory = lockedResource.memory
-        val lockedCores = lockedResource.cores
-        val lockedInstances = lockedResource.instances
-        totalLockedMemory += lockedMemory
-        totalLockedCores += lockedCores
-        totalLockedInstances += lockedInstances
-        val maxMemory = maxResource.memory
-        val maxCores = maxResource.cores
-        val maxInstances = maxResource.instances
-        totalMaxMemory += maxMemory
-        totalMaxCores += maxCores
-        totalMaxInstances += maxInstances
+      getUserCreatorEngineTypeResourceMap(nodes)
+
+    val userCreatorEngineTypeResources = getUserResources(userCreatorEngineTypeResourceMap)
 
-        val memoryPercent =
-          if (maxMemory > 0) (usedMemory + lockedMemory) / maxMemory.toDouble else 0
-        val coresPercent =
-          if (maxCores > 0) (usedCores + lockedCores) / maxCores.toDouble else 0
-        val instancePercent =
-          if (maxInstances > 0) (usedInstances + lockedInstances) / maxInstances.toDouble else 0
-        val maxPercent = Math.max(Math.max(memoryPercent, coresPercent), instancePercent)
-        engineTypeResource.put("percent", maxPercent.formatted("%.2f"))
-        engineTypeResource
-      }
-      val totalMemoryPercent =
-        if (totalMaxMemory > 0) (totalUsedMemory + totalLockedMemory) / totalMaxMemory.toDouble
-        else 0
-      val totalCoresPercent =
-        if (totalMaxCores > 0) (totalUsedCores + totalLockedCores) / totalMaxCores.toDouble
-        else 0
-      val totalInstancePercent =
-        if (totalMaxInstances > 0) {
-          (totalUsedInstances + totalLockedInstances) / totalMaxInstances.toDouble
-        } else 0
-      val totalPercent =
-        Math.max(Math.max(totalMemoryPercent, totalCoresPercent), totalInstancePercent)
-      userCreatorEngineTypeResource.put("engineTypes", engineTypeResources)
-      userCreatorEngineTypeResource.put("percent", totalPercent.formatted("%.2f"))
-      userCreatorEngineTypeResource
-    }
     appendMessageData(message, "userResources", userCreatorEngineTypeResources)
     message
   }
@@ -775,4 +598,245 @@ class RMMonitorRest extends Logging {
       .groupBy(_.getOwner)
   }
 
+  private def getUserResources(
+      userCreatorEngineTypeResourceMap: mutable.HashMap[
+        String,
+        mutable.HashMap[String, NodeResource]
+      ]
+  ) = {
+
+    val userCreatorEngineTypeResource = new util.HashMap[String, Any]
+
+    val userResources = new util.ArrayList[Any]()
+    userCreatorEngineTypeResourceMap.map { userCreatorEntry =>
+      userCreatorEngineTypeResource.put("userCreator", userCreatorEntry._1)
+      var totalUsedMemory: Long = 0L
+      var totalUsedCores: Int = 0
+      var totalUsedInstances = 0
+      var totalLockedMemory: Long = 0L
+      var totalLockedCores: Int = 0
+      var totalLockedInstances: Int = 0
+      var totalMaxMemory: Long = 0L
+      var totalMaxCores: Int = 0
+      var totalMaxInstances: Int = 0
+
+      val engineTypeResources = new util.ArrayList[Any]()
+      for (engineTypeEntry <- userCreatorEntry._2) {
+        val engineTypeResource = new util.HashMap[String, Any]
+        engineTypeResource.put("engineType", engineTypeEntry._1)
+        val engineResource = engineTypeEntry._2
+        val usedResource = engineResource.getUsedResource.asInstanceOf[LoadInstanceResource]
+        val lockedResource = engineResource.getLockedResource.asInstanceOf[LoadInstanceResource]
+        val maxResource = engineResource.getMaxResource.asInstanceOf[LoadInstanceResource]
+        val usedMemory = usedResource.memory
+        val usedCores = usedResource.cores
+        val usedInstances = usedResource.instances
+        totalUsedMemory += usedMemory
+        totalUsedCores += usedCores
+        totalUsedInstances += usedInstances
+        val lockedMemory = lockedResource.memory
+        val lockedCores = lockedResource.cores
+        val lockedInstances = lockedResource.instances
+        totalLockedMemory += lockedMemory
+        totalLockedCores += lockedCores
+        totalLockedInstances += lockedInstances
+        val maxMemory = maxResource.memory
+        val maxCores = maxResource.cores
+        val maxInstances = maxResource.instances
+        totalMaxMemory += maxMemory
+        totalMaxCores += maxCores
+        totalMaxInstances += maxInstances
+
+        val memoryPercent =
+          if (maxMemory > 0) (usedMemory + lockedMemory) / maxMemory.toDouble else 0
+        val coresPercent =
+          if (maxCores > 0) (usedCores + lockedCores) / maxCores.toDouble else 0
+        val instancePercent =
+          if (maxInstances > 0) (usedInstances + lockedInstances) / maxInstances.toDouble else 0
+        val maxPercent = Math.max(Math.max(memoryPercent, coresPercent), instancePercent)
+        engineTypeResource.put("percent", maxPercent.formatted("%.2f"))
+        engineTypeResources.add(engineTypeResource)
+      }
+
+      val totalMemoryPercent =
+        if (totalMaxMemory > 0) (totalUsedMemory + totalLockedMemory) / totalMaxMemory.toDouble
+        else 0
+      val totalCoresPercent =
+        if (totalMaxCores > 0) (totalUsedCores + totalLockedCores) / totalMaxCores.toDouble
+        else 0
+      val totalInstancePercent =
+        if (totalMaxInstances > 0) {
+          (totalUsedInstances + totalLockedInstances) / totalMaxInstances.toDouble
+        } else 0
+      val totalPercent =
+        Math.max(Math.max(totalMemoryPercent, totalCoresPercent), totalInstancePercent)
+      userCreatorEngineTypeResource.put("engineTypes", engineTypeResources)
+      userCreatorEngineTypeResource.put("percent", totalPercent.formatted("%.2f"))
+      userResources.add(userCreatorEngineTypeResource)
+    }
+
+    userResources
+  }
+
+  private def getUserCreatorEngineTypeResourceMap(nodes: Array[EngineNode]) = {
+    val userCreatorEngineTypeResourceMap =
+      new mutable.HashMap[String, mutable.HashMap[String, NodeResource]]
+
+    for (node <- nodes) {
+      val userCreatorLabel = node.getLabels.asScala
+        .find(_.isInstanceOf[UserCreatorLabel])
+        .get
+        .asInstanceOf[UserCreatorLabel]
+      val engineTypeLabel = node.getLabels.asScala
+        .find(_.isInstanceOf[EngineTypeLabel])
+        .get
+        .asInstanceOf[EngineTypeLabel]
+      val userCreator = getUserCreator(userCreatorLabel)
+
+      if (!userCreatorEngineTypeResourceMap.contains(userCreator)) {
+        userCreatorEngineTypeResourceMap.put(userCreator, new mutable.HashMap[String, NodeResource])
+      }
+      val engineTypeResourceMap = userCreatorEngineTypeResourceMap.get(userCreator).get
+      val engineType = getEngineType(engineTypeLabel)
+      if (!engineTypeResourceMap.contains(engineType)) {
+        val nodeResource = CommonNodeResource.initNodeResource(ResourceType.LoadInstance)
+        engineTypeResourceMap.put(engineType, nodeResource)
+      }
+      val resource = engineTypeResourceMap.get(engineType).get
+      resource.setUsedResource(node.getNodeResource.getUsedResource + resource.getUsedResource)
+      // combined label
+      val combinedLabel =
+        combinedLabelBuilder.build("", Lists.newArrayList(userCreatorLabel, engineTypeLabel));
+      var labelResource = labelResourceService.getLabelResource(combinedLabel)
+      if (labelResource == null) {
+        resource.setLeftResource(node.getNodeResource.getMaxResource - resource.getUsedResource)
+      } else {
+        labelResource = ResourceUtils.convertTo(labelResource, ResourceType.LoadInstance)
+        resource.setUsedResource(labelResource.getUsedResource)
+        resource.setLockedResource(labelResource.getLockedResource)
+        resource.setLeftResource(labelResource.getLeftResource)
+        resource.setMaxResource(labelResource.getMaxResource)
+      }
+      resource.getLeftResource match {
+        case dResource: DriverAndYarnResource =>
+          resource.setLeftResource(dResource.loadInstanceResource)
+        case _ =>
+      }
+    }
+
+    userCreatorEngineTypeResourceMap
+  }
+
+  private def getCreatorToApplicationList(
+      userCreator: String,
+      engineType: String,
+      nodes: Array[EngineNode]
+  ) = {
+    val creatorToApplicationList = new util.HashMap[String, util.HashMap[String, Any]]
+    nodes.foreach { node =>
+      val userCreatorLabel = node.getLabels.asScala
+        .find(_.isInstanceOf[UserCreatorLabel])
+        .get
+        .asInstanceOf[UserCreatorLabel]
+      val engineTypeLabel = node.getLabels.asScala
+        .find(_.isInstanceOf[EngineTypeLabel])
+        .get
+        .asInstanceOf[EngineTypeLabel]
+      if (getUserCreator(userCreatorLabel).equals(userCreator)) {
+        if (engineType == null || getEngineType(engineTypeLabel).equals(engineType)) {
+          if (!creatorToApplicationList.containsKey(userCreatorLabel.getCreator)) {
+            val applicationList = new util.HashMap[String, Any]
+            applicationList.put("engineInstances", new util.ArrayList[Any])
+            applicationList.put("usedResource", Resource.initResource(ResourceType.LoadInstance))
+            applicationList.put("maxResource", Resource.initResource(ResourceType.LoadInstance))
+            applicationList.put("minResource", Resource.initResource(ResourceType.LoadInstance))
+            applicationList.put("lockedResource", Resource.initResource(ResourceType.LoadInstance))
+            creatorToApplicationList.put(userCreatorLabel.getCreator, applicationList)
+          }
+          val applicationList = creatorToApplicationList.get(userCreatorLabel.getCreator)
+          applicationList.put(
+            "usedResource",
+            (if (applicationList.get("usedResource") == null) {
+               Resource.initResource(ResourceType.LoadInstance)
+             } else {
+               applicationList
+                 .get("usedResource")
+                 .asInstanceOf[Resource]
+             }) + node.getNodeResource.getUsedResource
+          )
+          applicationList.put(
+            "maxResource",
+            (if (applicationList.get("maxResource") == null) {
+               Resource.initResource(ResourceType.LoadInstance)
+             } else {
+               applicationList
+                 .get("maxResource")
+                 .asInstanceOf[Resource]
+             }) + node.getNodeResource.getMaxResource
+          )
+          applicationList.put(
+            "minResource",
+            (if (applicationList.get("minResource") == null) {
+               Resource.initResource(ResourceType.LoadInstance)
+             } else {
+               applicationList
+                 .get("minResource")
+                 .asInstanceOf[Resource]
+             }) + node.getNodeResource.getMinResource
+          )
+          applicationList.put(
+            "lockedResource",
+            (if (applicationList.get("lockedResource") == null) {
+               Resource.initResource(ResourceType.LoadInstance)
+             } else {
+               applicationList
+                 .get("lockedResource")
+                 .asInstanceOf[Resource]
+             }) + node.getNodeResource.getLockedResource
+          )
+          val engineInstance = new mutable.HashMap[String, Any]
+          engineInstance.put("creator", userCreatorLabel.getCreator)
+          engineInstance.put("engineType", engineTypeLabel.getEngineType)
+          engineInstance.put("instance", node.getServiceInstance.getInstance)
+          engineInstance.put("label", engineTypeLabel.getStringValue)
+          node.setNodeResource(
+            ResourceUtils.convertTo(node.getNodeResource, ResourceType.LoadInstance)
+          )
+          engineInstance.put("resource", node.getNodeResource)
+          if (node.getNodeStatus == null) {
+            engineInstance.put("status", "Busy")
+          } else {
+            engineInstance.put("status", node.getNodeStatus.toString)
+          }
+          engineInstance.put(
+            "st" +
+              "artTime",
+            dateFormatLocal.get().format(node.getStartTime)
+          )
+          engineInstance.put("owner", node.getOwner)
+          applicationList
+            .get("engineInstances")
+            .asInstanceOf[util.ArrayList[Any]]
+            .add(engineInstance)
+        }
+      }
+    }
+    creatorToApplicationList
+  }
+
+  private def getApplications(
+      creatorToApplicationList: util.HashMap[String, util.HashMap[String, Any]]
+  ) = {
+    val applications = new util.ArrayList[util.HashMap[String, Any]]()
+    val iterator = creatorToApplicationList.entrySet().iterator();
+    while (iterator.hasNext) {
+      val entry = iterator.next()
+      val application = new util.HashMap[String, Any]
+      application.put("creator", entry.getKey)
+      application.put("applicationList", entry.getValue)
+      applications.add(application)
+    }
+    applications
+  }
+
 }
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/ResourceLogService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/ResourceLogService.scala
index e332854bc..2d67edb9a 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/ResourceLogService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/ResourceLogService.scala
@@ -25,7 +25,11 @@ import org.apache.linkis.manager.common.entity.resource.Resource
 import org.apache.linkis.manager.dao.ECResourceRecordMapper
 import org.apache.linkis.manager.label.entity.CombinedLabel
 import org.apache.linkis.manager.label.entity.em.EMInstanceLabel
-import org.apache.linkis.manager.label.entity.engine.EngineInstanceLabel
+import org.apache.linkis.manager.label.entity.engine.{
+  EngineInstanceLabel,
+  EngineTypeLabel,
+  UserCreatorLabel
+}
 import org.apache.linkis.manager.rm.domain.RMLabelContainer
 import org.apache.linkis.manager.rm.utils.RMUtils
 
@@ -150,7 +154,11 @@ class ResourceLogService extends Logging {
     if (null == userCreatorEngineType) return
     var ecResourceInfoRecord = ecResourceRecordMapper.getECResourceInfoRecord(ticketId)
     if (ecResourceInfoRecord == null) {
-      val logDirSuffix = getECLogDirSuffix(labelContainer, ticketId)
+      val logDirSuffix = ECPathUtils.getECLogDirSuffix(
+        labelContainer.getEngineTypeLabel,
+        labelContainer.getUserCreatorLabel,
+        ticketId
+      )
       val user =
         if (null != labelContainer.getUserCreatorLabel) labelContainer.getUserCreatorLabel.getUser
         else ""
@@ -200,20 +208,6 @@ class ResourceLogService extends Logging {
     ecResourceRecordMapper.updateECResourceInfoRecord(ecResourceInfoRecord)
   }
 
-  def getECLogDirSuffix(labelContainer: RMLabelContainer, ticketId: String): String = {
-    val engineTypeLabel = labelContainer.getEngineTypeLabel
-    val userCreatorLabel = labelContainer.getUserCreatorLabel
-    if (null == engineTypeLabel || null == userCreatorLabel) {
-      return ""
-    }
-    val suffix = ECPathUtils.getECWOrkDirPathSuffix(
-      userCreatorLabel.getUser,
-      ticketId,
-      engineTypeLabel.getEngineType
-    )
-    suffix + File.separator + "logs"
-  }
-
 }
 
 object ChangeType {
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/AMEMNode.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/AMEMNode.java
index 132bc32bb..c234bc00a 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/AMEMNode.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/AMEMNode.java
@@ -43,6 +43,7 @@ public class AMEMNode implements EMNode, ScoreServiceInstance {
   private String owner;
 
   private String mark;
+  private String identifier;
 
   private NodeTaskInfo nodeTaskInfo;
 
@@ -139,6 +140,16 @@ public class AMEMNode implements EMNode, ScoreServiceInstance {
     this.mark = mark;
   }
 
+  @Override
+  public String getIdentifier() {
+    return identifier;
+  }
+
+  @Override
+  public void setIdentifier(String identifier) {
+    this.identifier = identifier;
+  }
+
   @Override
   public NodeResource getNodeResource() {
     return nodeResource;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/AMEngineNode.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/AMEngineNode.java
index 40107aaba..243927f13 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/AMEngineNode.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/AMEngineNode.java
@@ -48,6 +48,7 @@ public class AMEngineNode implements EngineNode, ScoreServiceInstance {
   private String owner;
 
   private String mark;
+  private String identifier;
 
   private NodeTaskInfo nodeTaskInfo;
 
@@ -129,6 +130,16 @@ public class AMEngineNode implements EngineNode, ScoreServiceInstance {
     this.mark = mark;
   }
 
+  @Override
+  public String getIdentifier() {
+    return identifier;
+  }
+
+  @Override
+  public void setIdentifier(String identifier) {
+    this.identifier = identifier;
+  }
+
   @Override
   public EMNode getEMNode() {
     return this.emNode;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/InfoRMNode.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/InfoRMNode.java
index c6a329e95..660ff0cf4 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/InfoRMNode.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/InfoRMNode.java
@@ -32,6 +32,7 @@ public class InfoRMNode implements RMNode {
   private String owner;
 
   private String mark;
+  private String identifier;
 
   private NodeStatus nodeStatus;
 
@@ -79,6 +80,16 @@ public class InfoRMNode implements RMNode {
     return mark;
   }
 
+  @Override
+  public String getIdentifier() {
+    return identifier;
+  }
+
+  @Override
+  public void setIdentifier(String identifier) {
+    this.identifier = identifier;
+  }
+
   @Override
   public Date getUpdateTime() {
     return updateTime;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/Node.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/Node.java
index 135ff76db..1ff76a931 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/Node.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/Node.java
@@ -44,4 +44,8 @@ public interface Node extends RequestProtocol {
   Date getStartTime();
 
   void setStartTime(Date startTime);
+
+  String getIdentifier();
+
+  void setIdentifier(String identifier);
 }
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNode.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNode.java
index 006bcf06f..7302ffd63 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNode.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNode.java
@@ -26,6 +26,8 @@ public class PersistenceNode {
   private String owner;
 
   private String mark;
+  /** identifier if mark equals "process", then identifier equals pid */
+  private String identifier;
 
   private Date updateTime;
   private Date createTime;
@@ -40,6 +42,14 @@ public class PersistenceNode {
     this.mark = mark;
   }
 
+  public String getIdentifier() {
+    return identifier;
+  }
+
+  public void setIdentifier(String identifier) {
+    this.identifier = identifier;
+  }
+
   public Integer getId() {
     return id;
   }
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNodeEntity.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNodeEntity.java
index 3eefbc75c..e119d5d82 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNodeEntity.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNodeEntity.java
@@ -28,6 +28,7 @@ public class PersistenceNodeEntity implements Node {
   private ServiceInstance serviceInstance;
   private String owner;
   private String mark;
+  private String identifier;
   private NodeStatus nodeStatus;
 
   private Date startTime;
@@ -88,6 +89,16 @@ public class PersistenceNodeEntity implements Node {
     return this.mark;
   }
 
+  @Override
+  public String getIdentifier() {
+    return identifier;
+  }
+
+  @Override
+  public void setIdentifier(String identifier) {
+    this.identifier = identifier;
+  }
+
   public void setOwner(String owner) {
     this.owner = owner;
   }
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNodeMetrics.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNodeMetrics.java
index 01ecc24fc..12ddf17b2 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNodeMetrics.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNodeMetrics.java
@@ -21,6 +21,7 @@ import org.apache.linkis.common.ServiceInstance;
 import org.apache.linkis.manager.common.entity.metrics.NodeMetrics;
 
 import java.util.Date;
+import java.util.Objects;
 
 public class PersistenceNodeMetrics implements NodeMetrics {
 
@@ -59,7 +60,9 @@ public class PersistenceNodeMetrics implements NodeMetrics {
   }
 
   public void setStatus(Integer status) {
-    this.status = status;
+    if (Objects.nonNull(status)) {
+      this.status = status;
+    }
   }
 
   @Override
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineStopRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineStopRequest.java
index 90515c48b..ce14d09c4 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineStopRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineStopRequest.java
@@ -23,8 +23,13 @@ import org.apache.linkis.protocol.message.RequestMethod;
 public class EngineStopRequest implements EngineRequest, RequestMethod {
 
   private ServiceInstance serviceInstance;
-
+  private String logDirSuffix;
+  private String engineType;
   private String user;
+  /** identifierType, Reserved for ec containerized startup scenarios */
+  private String identifierType;
+  /** identifier */
+  private String identifier;
 
   public EngineStopRequest() {}
 
@@ -41,6 +46,38 @@ public class EngineStopRequest implements EngineRequest, RequestMethod {
     this.serviceInstance = serviceInstance;
   }
 
+  public String getLogDirSuffix() {
+    return logDirSuffix;
+  }
+
+  public void setLogDirSuffix(String logDirSuffix) {
+    this.logDirSuffix = logDirSuffix;
+  }
+
+  public String getEngineType() {
+    return engineType;
+  }
+
+  public void setEngineType(String engineType) {
+    this.engineType = engineType;
+  }
+
+  public String getIdentifierType() {
+    return identifierType;
+  }
+
+  public void setIdentifierType(String identifierType) {
+    this.identifierType = identifierType;
+  }
+
+  public String getIdentifier() {
+    return identifier;
+  }
+
+  public void setIdentifier(String identifier) {
+    this.identifier = identifier;
+  }
+
   public void setUser(String user) {
     this.user = user;
   }
@@ -60,9 +97,21 @@ public class EngineStopRequest implements EngineRequest, RequestMethod {
     return "EngineStopRequest{"
         + "serviceInstance="
         + serviceInstance
+        + ", logDirSuffix='"
+        + logDirSuffix
+        + '\''
+        + ", engineType='"
+        + engineType
+        + '\''
         + ", user='"
         + user
         + '\''
+        + ", identifierType='"
+        + identifierType
+        + '\''
+        + ", identifier='"
+        + identifier
+        + '\''
         + '}';
   }
 }
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/NodeManagerMapper.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/NodeManagerMapper.java
index 7275561a8..15ef5616d 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/NodeManagerMapper.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/NodeManagerMapper.java
@@ -41,7 +41,7 @@ public interface NodeManagerMapper {
 
   List<PersistenceNode> getAllNodes();
 
-  void updateNodeInstanceOverload(@Param("persistenceNode") PersistenceNode persistenceNode);
+  void updateNodeInstanceByInstance(@Param("persistenceNode") PersistenceNode persistenceNode);
 
   Integer getNodeInstanceId(@Param("instance") String instance);
 
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/NodeMetricManagerPersistence.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/NodeMetricManagerPersistence.java
index afbf48b9f..1a0887df0 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/NodeMetricManagerPersistence.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/NodeMetricManagerPersistence.java
@@ -39,7 +39,7 @@ public interface NodeMetricManagerPersistence {
    * @param nodeMetrics
    * @throws PersistenceErrorException
    */
-  void addOrupdateNodeMetrics(NodeMetrics nodeMetrics) throws PersistenceErrorException;
+  void addOrupdateNodeMetrics(NodeMetrics nodeMetrics);
 
   /**
    * 获取多个节点的 metrics列表
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeManagerPersistence.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeManagerPersistence.java
index 86f202ace..4c6df1e16 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeManagerPersistence.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeManagerPersistence.java
@@ -40,6 +40,7 @@ import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
+import java.util.Objects;
 import java.util.stream.Collectors;
 
 import com.google.common.collect.Lists;
@@ -183,24 +184,23 @@ public class DefaultNodeManagerPersistence implements NodeManagerPersistence {
   }
 
   @Override
-  public void updateNodeInstance(Node node) throws PersistenceErrorException {
+  public void updateNodeInstance(Node node) {
 
-    if (null != node) {
+    if (Objects.nonNull(node)) {
       PersistenceNode persistenceNode = new PersistenceNode();
       persistenceNode.setInstance(node.getServiceInstance().getInstance());
       persistenceNode.setName(node.getServiceInstance().getApplicationName());
-      persistenceNode.setOwner(node.getOwner());
       persistenceNode.setMark(node.getMark());
-      persistenceNode.setCreateTime(new Date());
       persistenceNode.setUpdateTime(new Date());
       persistenceNode.setCreator(node.getOwner());
       persistenceNode.setUpdator(node.getOwner());
-      nodeManagerMapper.updateNodeInstanceOverload(persistenceNode);
+      persistenceNode.setIdentifier(node.getIdentifier());
+      nodeManagerMapper.updateNodeInstanceByInstance(persistenceNode);
     }
   }
 
   @Override
-  public Node getNode(ServiceInstance serviceInstance) throws PersistenceErrorException {
+  public Node getNode(ServiceInstance serviceInstance) {
     String instance = serviceInstance.getInstance();
     PersistenceNode nodeInstances = nodeManagerMapper.getNodeInstance(instance);
     if (null == nodeInstances) {
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeMetricManagerPersistence.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeMetricManagerPersistence.java
index dc677da19..d20310ec6 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeMetricManagerPersistence.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeMetricManagerPersistence.java
@@ -77,7 +77,7 @@ public class DefaultNodeMetricManagerPersistence implements NodeMetricManagerPer
   }
 
   @Override
-  public void addOrupdateNodeMetrics(NodeMetrics nodeMetrics) throws PersistenceErrorException {
+  public void addOrupdateNodeMetrics(NodeMetrics nodeMetrics) {
     if (null == nodeMetrics.getServiceInstance()) {
       logger.warn(
           "The request of update node metrics was ignored, because the node metrics service instance is null");
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/NodeManagerMapper.xml b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/NodeManagerMapper.xml
index 6eb38c52b..eaf778f59 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/NodeManagerMapper.xml
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/NodeManagerMapper.xml
@@ -61,9 +61,27 @@
         FROM linkis_cg_manager_service_instance
     </select>
 
-    <update id="updateNodeInstanceOverload">
+    <update id="updateNodeInstanceByInstance">
         UPDATE linkis_cg_manager_service_instance
-        SET owner = #{persistenceNode.owner}, mark = #{persistenceNode.mark}, name = #{persistenceNode.name}, update_time = #{persistenceNode.updateTime}, create_time = #{persistenceNode.createTime}, updator = #{persistenceNode.updator}, creator = #{persistenceNode.creator}
+        SET
+        <if test="persistenceNode.mark != null">
+            mark = #{persistenceNode.mark},
+        </if>
+        <if test="persistenceNode.name != null">
+            name = #{persistenceNode.name},
+        </if>
+        <if test="persistenceNode.updateTime != null">
+            update_time = #{persistenceNode.updateTime},
+        </if>
+        <if test="persistenceNode.updator != null">
+            updator = #{persistenceNode.updator},
+        </if>
+        <if test="persistenceNode.creator != null">
+            creator = #{persistenceNode.creator},
+        </if>
+        <if test="persistenceNode.identifier != null">
+            identifier = #{persistenceNode.identifier}
+        </if>
         WHERE instance = #{persistenceNode.instance}
     </update>
 
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/NodeManagerMapperTest.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/NodeManagerMapperTest.java
index 67b4bc354..861b557fb 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/NodeManagerMapperTest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/NodeManagerMapperTest.java
@@ -93,7 +93,7 @@ class NodeManagerMapperTest extends BaseDaoTest {
     persistenceNode.setMark("testmark3");
     persistenceNode.setUpdator("testupdator3");
     persistenceNode.setCreator("testcreator3");
-    nodeManagerMapper.updateNodeInstanceOverload(persistenceNode);
+    nodeManagerMapper.updateNodeInstanceByInstance(persistenceNode);
     PersistenceNode persistenceNodes = nodeManagerMapper.getNodeInstance("instance2");
     assertTrue(persistenceNode.getName().equals(persistenceNodes.getName()));
   }
diff --git a/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml b/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml
new file mode 100644
index 000000000..6d26ae863
--- /dev/null
+++ b/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml
@@ -0,0 +1,1407 @@
+---
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+kind: ConfigMap
+apiVersion: v1
+metadata:
+  name: {{ include "linkis.fullname" . }}-init-sql
+data:
+  linkis_ddl.sql: |
+    SET FOREIGN_KEY_CHECKS=0;
+  {{- if eq .Values.linkis.datasource.initSchema "Reset" }}
+    DROP TABLE IF EXISTS `linkis_ps_configuration_config_key`;
+    DROP TABLE IF EXISTS `linkis_ps_configuration_key_engine_relation`;
+    DROP TABLE IF EXISTS `linkis_ps_configuration_config_value`;
+    DROP TABLE IF EXISTS `linkis_ps_configuration_category`;
+    DROP TABLE IF EXISTS `linkis_ps_job_history_group_history`;
+    DROP TABLE IF EXISTS `linkis_ps_job_history_detail`;
+    DROP TABLE IF EXISTS `linkis_ps_common_lock`;
+    DROP TABLE IF EXISTS `linkis_ps_udf_manager`;
+    DROP TABLE IF EXISTS `linkis_ps_udf_shared_group`;
+    DROP TABLE IF EXISTS `linkis_ps_udf_shared_info`;
+    DROP TABLE IF EXISTS `linkis_ps_udf_tree`;
+    DROP TABLE IF EXISTS `linkis_ps_udf_user_load`;
+    DROP TABLE IF EXISTS `linkis_ps_udf_baseinfo`;
+    DROP TABLE IF EXISTS `linkis_ps_udf_version`;
+    DROP TABLE IF EXISTS `linkis_ps_variable_key_user`;
+    DROP TABLE IF EXISTS `linkis_ps_variable_key`;
+    DROP TABLE IF EXISTS `linkis_ps_datasource_access`;
+    DROP TABLE IF EXISTS `linkis_ps_datasource_field`;
+    DROP TABLE IF EXISTS `linkis_ps_datasource_import`;
+    DROP TABLE IF EXISTS `linkis_ps_datasource_lineage`;
+    DROP TABLE IF EXISTS `linkis_ps_datasource_table`;
+    DROP TABLE IF EXISTS `linkis_ps_datasource_table_info`;
+    DROP TABLE IF EXISTS `linkis_ps_cs_context_map`;
+    DROP TABLE IF EXISTS `linkis_ps_cs_context_map_listener`;
+    DROP TABLE IF EXISTS `linkis_ps_cs_context_history`;
+    DROP TABLE IF EXISTS `linkis_ps_cs_context_id`;
+    DROP TABLE IF EXISTS `linkis_ps_cs_context_listener`;
+    DROP TABLE IF EXISTS `linkis_ps_bml_resources`;
+    DROP TABLE IF EXISTS `linkis_ps_bml_resources_version`;
+    DROP TABLE IF EXISTS `linkis_ps_bml_resources_permission`;
+    DROP TABLE IF EXISTS `linkis_ps_resources_download_history`;
+    DROP TABLE IF EXISTS `linkis_ps_bml_resources_task`;
+    DROP TABLE IF EXISTS `linkis_ps_bml_project`;
+    DROP TABLE IF EXISTS `linkis_ps_bml_project_user`;
+    DROP TABLE IF EXISTS `linkis_ps_bml_project_resource`;
+    DROP TABLE IF EXISTS `linkis_ps_instance_label`;
+    DROP TABLE IF EXISTS `linkis_ps_instance_label_value_relation`;
+    DROP TABLE IF EXISTS `linkis_ps_instance_label_relation`;
+    DROP TABLE IF EXISTS `linkis_ps_instance_info`;
+    DROP TABLE IF EXISTS `linkis_ps_error_code`;
+    DROP TABLE IF EXISTS `linkis_cg_manager_service_instance`;
+    DROP TABLE IF EXISTS `linkis_cg_manager_linkis_resources`;
+    DROP TABLE IF EXISTS `linkis_cg_manager_lock`;
+    DROP TABLE IF EXISTS `linkis_cg_rm_external_resource_provider`;
+    DROP TABLE IF EXISTS `linkis_cg_manager_engine_em`;
+    DROP TABLE IF EXISTS `linkis_cg_manager_label`;
+    DROP TABLE IF EXISTS `linkis_cg_manager_label_value_relation`;
+    DROP TABLE IF EXISTS `linkis_cg_manager_label_resource`;
+    DROP TABLE IF EXISTS `linkis_cg_ec_resource_info_record`;
+    DROP TABLE IF EXISTS `linkis_cg_manager_label_service_instance`;
+    DROP TABLE IF EXISTS `linkis_cg_manager_label_user`;
+    DROP TABLE IF EXISTS `linkis_cg_manager_metrics_history`;
+    DROP TABLE IF EXISTS `linkis_cg_manager_service_instance_metrics`;
+    DROP TABLE IF EXISTS `linkis_cg_engine_conn_plugin_bml_resources`;
+    DROP TABLE IF EXISTS `linkis_ps_dm_datasource`;
+    DROP TABLE IF EXISTS `linkis_ps_dm_datasource_env`;
+    DROP TABLE IF EXISTS `linkis_ps_dm_datasource_type`;
+    DROP TABLE IF EXISTS `linkis_ps_dm_datasource_type_key`;
+    DROP TABLE IF EXISTS `linkis_ps_dm_datasource_version`;
+    DROP TABLE IF EXISTS `linkis_mg_gateway_auth_token`;
+  {{- end }}
+
+    CREATE TABLE IF NOT EXISTS `linkis_ps_configuration_config_key`(
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `key` varchar(50) DEFAULT NULL COMMENT 'Set key, e.g. spark.executor.instances',
+    `description` varchar(200) DEFAULT NULL,
+    `name` varchar(50) DEFAULT NULL,
+    `default_value` varchar(200) DEFAULT NULL COMMENT 'Adopted when user does not set key',
+    `validate_type` varchar(50) DEFAULT NULL COMMENT 'Validate type, one of the following: None, NumInterval, FloatInterval, Include, Regex, OPF, Custom Rules',
+    `validate_range` varchar(50) DEFAULT NULL COMMENT 'Validate range',
+    `engine_conn_type` varchar(50) DEFAULT NULL COMMENT 'engine type,such as spark,hive etc',
+    `is_hidden` tinyint(1) DEFAULT NULL COMMENT 'Whether it is hidden from user. If set to 1(true), then user cannot modify, however, it could still be used in back-end',
+    `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so',
+    `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets',
+    `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType',
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+
+    CREATE TABLE IF NOT EXISTS `linkis_ps_configuration_key_engine_relation`(
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `config_key_id` bigint(20) NOT NULL COMMENT 'config key id',
+    `engine_type_label_id` bigint(20) NOT NULL COMMENT 'engine label id',
+    PRIMARY KEY (`id`),
+    UNIQUE INDEX(`config_key_id`, `engine_type_label_id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+
+    -- Stores the concrete value a user/label sets for a config key.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_configuration_config_value`(
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `config_key_id` bigint(20),
+    `config_value` varchar(200),
+    -- Fixed missing space between the column name and its type.
+    -- NOTE(review): int(20) here vs bigint(20) for config_key_id and for the label ids elsewhere —
+    -- widening to bigint would be an ALTER for existing deployments, so the type is left as-is.
+    `config_label_id` int(20),
+    `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    PRIMARY KEY (`id`),
+    UNIQUE INDEX(`config_key_id`, `config_label_id`)
+    )ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- Tree of configuration categories, one row per label at a given level.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_configuration_category` (
+    `id` int(20) NOT NULL AUTO_INCREMENT,
+    `label_id` int(20) NOT NULL,
+    `level` int(20) NOT NULL,
+    `description` varchar(200),
+    `tag` varchar(200),
+    `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    PRIMARY KEY (`id`),
+    UNIQUE INDEX(`label_id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    --
+    -- New linkis job
+    --
+
+    -- One row per submitted job (group level); detail rows live in linkis_ps_job_history_detail.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_job_history_group_history` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'Primary Key, auto increment',
+    `job_req_id` varchar(64) DEFAULT NULL COMMENT 'job execId',
+    `submit_user` varchar(50) DEFAULT NULL COMMENT 'who submitted this Job',
+    `execute_user` varchar(50) DEFAULT NULL COMMENT 'who actually executed this Job',
+    `source` text DEFAULT NULL COMMENT 'job source',
+    `labels` text DEFAULT NULL COMMENT 'job labels',
+    `params` text DEFAULT NULL COMMENT 'job params',
+    `progress` varchar(32) DEFAULT NULL COMMENT 'Job execution progress',
+    `status` varchar(50) DEFAULT NULL COMMENT 'Script execution status, must be one of the following: Inited, WaitForRetry, Scheduled, Running, Succeed, Failed, Cancelled, Timeout',
+    `log_path` varchar(200) DEFAULT NULL COMMENT 'File path of the job log',
+    `error_code` int DEFAULT NULL COMMENT 'Error code. Generated when the execution of the script fails',
+    `error_desc` varchar(1000) DEFAULT NULL COMMENT 'Execution description. Generated when the execution of script fails',
+    `created_time` datetime(3) DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'Creation time',
+    `updated_time` datetime(3) DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'Update time',
+    `instances` varchar(250) DEFAULT NULL COMMENT 'Entrance instances',
+    `metrics` text DEFAULT NULL COMMENT   'Job Metrics',
+    `engine_type` varchar(32) DEFAULT NULL COMMENT 'Engine type',
+    `execution_code` text DEFAULT NULL COMMENT 'Job origin code or code path',
+    `result_location` varchar(500) DEFAULT NULL COMMENT 'File path of the resultsets',
+    PRIMARY KEY (`id`),
+    KEY `created_time` (`created_time`),
+    KEY `submit_user` (`submit_user`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+
+
+    -- Sub-job detail rows; job_history_id references linkis_ps_job_history_group_history.id (no FK enforced).
+    CREATE TABLE IF NOT EXISTS `linkis_ps_job_history_detail` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'Primary Key, auto increment',
+    `job_history_id` bigint(20) NOT NULL COMMENT 'ID of JobHistory',
+    `result_location` varchar(500) DEFAULT NULL COMMENT 'File path of the resultsets',
+    `execution_content` text DEFAULT NULL COMMENT 'The script code or other execution content executed by this Job',
+    `result_array_size` int(4) DEFAULT 0 COMMENT 'size of result array',
+    `job_group_info` text DEFAULT NULL COMMENT 'Job group info/path',
+    `created_time` datetime(3) DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'Creation time',
+    `updated_time` datetime(3) DEFAULT CURRENT_TIMESTAMP(3) COMMENT 'Update time',
+    `status` varchar(32) DEFAULT NULL COMMENT 'status',
+    `priority` int(4) DEFAULT 0 COMMENT 'order of subjob',
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+
+    -- Generic distributed-lock table for public services; uniqueness on lock_object provides mutual exclusion.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_common_lock` (
+    `id` int(11) NOT NULL AUTO_INCREMENT,
+    `lock_object` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `time_out` longtext COLLATE utf8_bin,
+    `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    PRIMARY KEY (`id`),
+    UNIQUE KEY `lock_object` (`lock_object`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    SET FOREIGN_KEY_CHECKS=0;
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_udf_manager
+    -- Users allowed to administer shared UDFs
+    -- ----------------------------
+    CREATE TABLE IF NOT EXISTS `linkis_ps_udf_manager` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `user_name` varchar(20) DEFAULT NULL,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB  DEFAULT CHARSET=utf8;
+
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_udf_shared_group
+    -- An entry would be added when a user share a function to other user group
+    -- ----------------------------
+    CREATE TABLE IF NOT EXISTS `linkis_ps_udf_shared_group` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `udf_id` bigint(20) NOT NULL,
+    `shared_group` varchar(50) NOT NULL,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+    -- An entry would be added when a user shares a UDF with another individual user.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_udf_shared_info`
+    (
+    `id` bigint(20) PRIMARY KEY NOT NULL AUTO_INCREMENT,
+    `udf_id` bigint(20) NOT NULL,
+    `user_name` varchar(50) NOT NULL
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_udf_tree
+    -- ----------------------------
+    CREATE TABLE IF NOT EXISTS `linkis_ps_udf_tree` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `parent` bigint(20) NOT NULL,
+    `name` varchar(100) DEFAULT NULL COMMENT 'Category name of the function. It would be displayed in the front-end',
+    `user_name` varchar(50) NOT NULL,
+    `description` varchar(255) DEFAULT NULL,
+    -- Fixed: ON UPDATE CURRENT_TIMESTAMP was attached to create_time instead of update_time,
+    -- which silently overwrote the creation timestamp on every row update.
+    `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+    `category` varchar(50) DEFAULT NULL COMMENT 'Used to distinguish between udf and function',
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB  DEFAULT CHARSET=utf8;
+
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_udf_user_load
+    -- Used to store the function a user selects in the front-end
+    -- ----------------------------
+    CREATE TABLE IF NOT EXISTS `linkis_ps_udf_user_load` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `udf_id` bigint(20) NOT NULL,
+    `user_name` varchar(50) NOT NULL,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+    -- Base metadata for a UDF; versions are stored in linkis_ps_udf_version.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_udf_baseinfo` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `create_user` varchar(50) NOT NULL,
+    `udf_name` varchar(255) NOT NULL,
+    `udf_type` int(11) DEFAULT '0',
+    `tree_id` bigint(20) NOT NULL,
+    -- Fixed: ON UPDATE CURRENT_TIMESTAMP was attached to create_time instead of update_time,
+    -- which silently overwrote the creation timestamp on every row update.
+    `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+    `sys` varchar(255) NOT NULL DEFAULT 'ide' COMMENT 'source system',
+    `cluster_name` varchar(255) NOT NULL,
+    `is_expire` bit(1) DEFAULT NULL,
+    `is_shared` bit(1) DEFAULT NULL,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+    -- bdp_easy_ide.linkis_ps_udf_version definition
+    -- One immutable row per published version of a UDF (BML resource id + version).
+    CREATE TABLE IF NOT EXISTS `linkis_ps_udf_version` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `udf_id` bigint(20) NOT NULL,
+    `path` varchar(255) NOT NULL COMMENT 'Source path for uploading files',
+    `bml_resource_id` varchar(50) NOT NULL,
+    `bml_resource_version` varchar(20) NOT NULL,
+    `is_published` bit(1) DEFAULT NULL COMMENT 'is published',
+    `register_format` varchar(255) DEFAULT NULL,
+    `use_format` varchar(255) DEFAULT NULL,
+    `description` varchar(255) NOT NULL COMMENT 'version desc',
+    -- Fixed: removed ON UPDATE CURRENT_TIMESTAMP — a version row's creation time must not be
+    -- rewritten whenever the row is updated (e.g. when is_published is toggled).
+    `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    `md5` varchar(100) DEFAULT NULL,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+    SET FOREIGN_KEY_CHECKS=0;
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_variable_key_user
+    -- Per-user value of a global variable key
+    -- ----------------------------
+    CREATE TABLE IF NOT EXISTS `linkis_ps_variable_key_user` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `application_id` bigint(20) DEFAULT NULL COMMENT 'Reserved word',
+    `key_id` bigint(20) DEFAULT NULL,
+    `user_name` varchar(50) DEFAULT NULL,
+    `value` varchar(200) DEFAULT NULL COMMENT 'Value of the global variable',
+    PRIMARY KEY (`id`),
+    UNIQUE KEY `application_id_2` (`application_id`,`key_id`,`user_name`),
+    KEY `key_id` (`key_id`),
+    KEY `application_id` (`application_id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_variable_key
+    -- Definition of a global variable key; values live in linkis_ps_variable_key_user
+    -- ----------------------------
+    CREATE TABLE IF NOT EXISTS `linkis_ps_variable_key` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `key` varchar(50) DEFAULT NULL COMMENT 'Key of the global variable',
+    `description` varchar(200) DEFAULT NULL COMMENT 'Reserved word',
+    `name` varchar(50) DEFAULT NULL COMMENT 'Reserved word',
+    `application_id` bigint(20) DEFAULT NULL COMMENT 'Reserved word',
+    `default_value` varchar(200) DEFAULT NULL COMMENT 'Reserved word',
+    `value_type` varchar(50) DEFAULT NULL COMMENT 'Reserved word',
+    `value_regex` varchar(100) DEFAULT NULL COMMENT 'Reserved word',
+    PRIMARY KEY (`id`),
+    KEY `application_id` (`application_id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_datasource_access
+    -- Audit log of table accesses
+    -- ----------------------------
+    CREATE TABLE IF NOT EXISTS `linkis_ps_datasource_access` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `table_id` bigint(20) NOT NULL,
+    `visitor` varchar(16) COLLATE utf8_bin NOT NULL,
+    `fields` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `application_id` int(4) NOT NULL,
+    `access_time` datetime NOT NULL,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_datasource_field
+    -- Column metadata for a datasource table
+    -- ----------------------------
+    CREATE TABLE IF NOT EXISTS `linkis_ps_datasource_field` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `table_id` bigint(20) NOT NULL,
+    `name` varchar(64) COLLATE utf8_bin NOT NULL,
+    `alias` varchar(64) COLLATE utf8_bin DEFAULT NULL,
+    `type` varchar(64) COLLATE utf8_bin NOT NULL,
+    `comment` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `express` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `rule` varchar(128) COLLATE utf8_bin DEFAULT NULL,
+    `is_partition_field` tinyint(1) NOT NULL,
+    `is_primary` tinyint(1) NOT NULL,
+    `length` int(11) DEFAULT NULL,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_datasource_import
+    -- ----------------------------
+    CREATE TABLE IF NOT EXISTS `linkis_ps_datasource_import` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `table_id` bigint(20) NOT NULL,
+    `import_type` int(4) NOT NULL,
+    `args` varchar(255) COLLATE utf8_bin NOT NULL,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_datasource_lineage
+    -- ----------------------------
+    CREATE TABLE IF NOT EXISTS `linkis_ps_datasource_lineage` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `table_id` bigint(20) DEFAULT NULL,
+    `source_table` varchar(64) COLLATE utf8_bin DEFAULT NULL,
+    `update_time` datetime DEFAULT NULL,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_datasource_table
+    -- ----------------------------
+    CREATE TABLE IF NOT EXISTS `linkis_ps_datasource_table` (
+    -- NOTE(review): bigint(255) is an unusual display width; bigint(20) is used everywhere else.
+    -- Harmless (display width only), left unchanged to avoid an ALTER on existing deployments.
+    `id` bigint(255) NOT NULL AUTO_INCREMENT,
+    `database` varchar(64) COLLATE utf8_bin NOT NULL,
+    `name` varchar(64) COLLATE utf8_bin NOT NULL,
+    `alias` varchar(64) COLLATE utf8_bin DEFAULT NULL,
+    `creator` varchar(16) COLLATE utf8_bin NOT NULL,
+    `comment` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `create_time` datetime NOT NULL,
+    `product_name` varchar(64) COLLATE utf8_bin DEFAULT NULL,
+    `project_name` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `usage` varchar(128) COLLATE utf8_bin DEFAULT NULL,
+    `lifecycle` int(4) NOT NULL,
+    `use_way` int(4) NOT NULL,
+    `is_import` tinyint(1) NOT NULL,
+    `model_level` int(4) NOT NULL,
+    `is_external_use` tinyint(1) NOT NULL,
+    `is_partition_table` tinyint(1) NOT NULL,
+    `is_available` tinyint(1) NOT NULL,
+    PRIMARY KEY (`id`),
+    UNIQUE KEY `database` (`database`,`name`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_datasource_table_info
+    -- Statistics snapshot for a datasource table
+    -- ----------------------------
+    CREATE TABLE IF NOT EXISTS `linkis_ps_datasource_table_info` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `table_id` bigint(20) NOT NULL,
+    `table_last_update_time` datetime NOT NULL,
+    `row_num` bigint(20) NOT NULL,
+    `file_num` int(11) NOT NULL,
+    `table_size` varchar(32) COLLATE utf8_bin NOT NULL,
+    `partitions_num` int(11) NOT NULL,
+    `update_time` datetime NOT NULL,
+    `field_num` int(11) NOT NULL,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_cs_context_map
+    -- Key/value entries of a context; context_id references linkis_ps_cs_context_id.id
+    -- ----------------------------
+    CREATE TABLE IF NOT EXISTS `linkis_ps_cs_context_map` (
+    `id` int(11) NOT NULL AUTO_INCREMENT,
+    `key` varchar(128) DEFAULT NULL,
+    `context_scope` varchar(32) DEFAULT NULL,
+    `context_type` varchar(32) DEFAULT NULL,
+    `props` text,
+    `value` mediumtext,
+    `context_id` int(11) DEFAULT NULL,
+    `keywords` varchar(255) DEFAULT NULL,
+    `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp',
+    `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time',
+    `access_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'last access time',
+    PRIMARY KEY (`id`),
+    UNIQUE KEY `key` (`key`,`context_id`,`context_type`),
+    -- 191-char prefix keeps the index within the 767-byte limit for utf8mb4 columns
+    KEY `keywords` (`keywords`(191))
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_cs_context_map_listener
+    -- Listener registrations on individual context-map entries (key_id)
+    -- ----------------------------
+    CREATE TABLE IF NOT EXISTS `linkis_ps_cs_context_map_listener` (
+    `id` int(11) NOT NULL AUTO_INCREMENT,
+    `listener_source` varchar(255) DEFAULT NULL,
+    `key_id` int(11) DEFAULT NULL,
+    `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp',
+    `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time',
+    `access_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'last access time',
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_cs_context_history
+    -- ----------------------------
+    -- Fixed: PRIMARY KEY was declared in the middle of the column list (before the three
+    -- timestamp columns). MySQL tolerates that ordering, but other parsers/tools do not and
+    -- every other table in this script declares keys after all columns. Column order is unchanged.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_cs_context_history` (
+    `id` int(11) NOT NULL AUTO_INCREMENT,
+    `context_id` int(11) DEFAULT NULL,
+    `source` text,
+    `context_type` varchar(32) DEFAULT NULL,
+    `history_json` text,
+    `keyword` varchar(255) DEFAULT NULL,
+    `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp',
+    `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time',
+    `access_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'last access time',
+    PRIMARY KEY (`id`),
+    KEY `keyword` (`keyword`(191))
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_cs_context_id
+    -- One row per context; owning service instance plus optional backup instances
+    -- ----------------------------
+    CREATE TABLE IF NOT EXISTS `linkis_ps_cs_context_id` (
+    `id` int(11) NOT NULL AUTO_INCREMENT,
+    `user` varchar(32) DEFAULT NULL,
+    `application` varchar(32) DEFAULT NULL,
+    `source` varchar(255) DEFAULT NULL,
+    `expire_type` varchar(32) DEFAULT NULL,
+    `expire_time` datetime DEFAULT NULL,
+    `instance` varchar(128) DEFAULT NULL,
+    `backup_instance` varchar(255) DEFAULT NULL,
+    `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp',
+    `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time',
+    `access_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'last access time',
+    PRIMARY KEY (`id`),
+    -- prefix indexes keep utf8mb4 keys within the 767-byte InnoDB limit
+    KEY `instance` (`instance`(128)),
+    KEY `backup_instance` (`backup_instance`(191)),
+    KEY `instance_2` (`instance`(128),`backup_instance`(128))
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_cs_context_listener
+    -- Listener registrations at whole-context granularity
+    -- ----------------------------
+    CREATE TABLE IF NOT EXISTS `linkis_ps_cs_context_listener` (
+    `id` int(11) NOT NULL AUTO_INCREMENT,
+    `listener_source` varchar(255) DEFAULT NULL,
+    `context_id` int(11) DEFAULT NULL,
+    `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp',
+    `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time',
+    `access_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'last access time',
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+
+
+    -- BML (material library) resource registry; one row per resource, versions in linkis_ps_bml_resources_version.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_bml_resources` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'Primary key',
+    `resource_id` varchar(50) NOT NULL COMMENT 'resource uuid',
+    -- NOTE(review): the COMMENT text ('0 means private, 1 means public') contradicts the column
+    -- name; confirm intended polarity against application code before relying on it.
+    `is_private` TINYINT(1) DEFAULT 0 COMMENT 'Whether the resource is private, 0 means private, 1 means public',
+    `resource_header` TINYINT(1) DEFAULT 0 COMMENT 'Classification, 0 means unclassified, 1 means classified',
+    `downloaded_file_name` varchar(200) DEFAULT NULL COMMENT 'File name when downloading',
+    `sys` varchar(100) NOT NULL COMMENT 'Owning system',
+    `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Created time',
+    `owner` varchar(200) NOT NULL COMMENT 'Resource owner',
+    `is_expire` TINYINT(1) DEFAULT 0 COMMENT 'Whether expired, 0 means not expired, 1 means expired',
+    `expire_type` varchar(50) DEFAULT null COMMENT 'Expiration type, date refers to the expiration on the specified date, TIME refers to the time',
+    `expire_time` varchar(50) DEFAULT null COMMENT 'Expiration time, one day by default',
+    `max_version` int(20) DEFAULT 10 COMMENT 'The default is 10, which means to keep the latest 10 versions',
+    `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Updated time',
+    `updator` varchar(50) DEFAULT NULL COMMENT 'updator',
+    `enable_flag` tinyint(1) NOT NULL DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen',
+       PRIMARY KEY (`id`)
+    -- NOTE(review): AUTO_INCREMENT=9 looks like residue from a dump of an existing instance
+    ) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8mb4;
+
+
+    -- One row per uploaded version of a BML resource (content range + storage location).
+    CREATE TABLE IF NOT EXISTS `linkis_ps_bml_resources_version` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'Primary key',
+    `resource_id` varchar(50) NOT NULL COMMENT 'Resource uuid',
+    `file_md5` varchar(32) NOT NULL COMMENT 'Md5 summary of the file',
+    `version` varchar(20) NOT NULL COMMENT 'Resource version (v plus five digits)',
+    `size` int(10) NOT NULL COMMENT 'File size',
+    `start_byte` BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
+    `end_byte` BIGINT(20) UNSIGNED NOT NULL DEFAULT 0,
+    `resource` varchar(2000) NOT NULL COMMENT 'Resource content (file information including path and file name)',
+    `description` varchar(2000) DEFAULT NULL COMMENT 'description',
+    `start_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Started time',
+    `end_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Stoped time',
+    `client_ip` varchar(200) NOT NULL COMMENT 'Client ip',
+    `updator` varchar(50) DEFAULT NULL COMMENT 'updator',
+    `enable_flag` tinyint(1) NOT NULL DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen',
+    unique key `resource_id_version`(`resource_id`, `version`),
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+
+
+
+    -- Per-resource permission entries for BML resources.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_bml_resources_permission` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'Primary key',
+    `resource_id` varchar(50) NOT NULL COMMENT 'Resource uuid',
+    `permission` varchar(10) NOT NULL COMMENT 'permission',
+    `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
+    -- Fixed: string default used double quotes, which breaks when sql_mode includes ANSI_QUOTES
+    -- (double quotes then denote identifiers); single quotes match the rest of this script.
+    `system` varchar(50) default 'dss' COMMENT 'creator',
+    `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'updated time',
+    `updator` varchar(50) NOT NULL COMMENT 'updator',
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+
+
+
+    -- Audit log of BML resource downloads.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_resources_download_history` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'primary key',
+    `start_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'start time',
+    `end_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'stop time',
+    `client_ip` varchar(200) NOT NULL COMMENT 'client ip',
+    `state` TINYINT(1) NOT NULL COMMENT 'Download status, 0 download successful, 1 download failed',
+    `resource_id` varchar(50) not null,
+    `version` varchar(20) not null,
+    `downloader` varchar(50) NOT NULL COMMENT 'Downloader',
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+
+
+
+
+    -- Resource task table, covering upload, update and download operations.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_bml_resources_task` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `resource_id` varchar(50) DEFAULT NULL COMMENT 'resource uuid',
+    `version` varchar(20) DEFAULT NULL COMMENT 'Resource version number of the current operation',
+    `operation` varchar(20) NOT NULL COMMENT 'Operation type. upload = 0, update = 1',
+    -- NOTE(review): 'Schduled' is a typo for 'Scheduled', but application code likely compares
+    -- against this exact literal, so the default value must not be corrected in the DDL alone.
+    `state` varchar(20) NOT NULL DEFAULT 'Schduled' COMMENT 'Current status of the task:Schduled, Running, Succeed, Failed,Cancelled',
+    `submit_user` varchar(20) NOT NULL DEFAULT '' COMMENT 'Job submission user name',
+    `system` varchar(20) DEFAULT 'dss' COMMENT 'Subsystem name: wtss',
+    `instance` varchar(128) NOT NULL COMMENT 'Material library example',
+    `client_ip` varchar(50) DEFAULT NULL COMMENT 'Request IP',
+    `extra_params` text COMMENT 'Additional key information. Such as the resource IDs and versions that are deleted in batches, and all versions under the resource are deleted',
+    `err_msg` varchar(2000) DEFAULT NULL COMMENT 'Task failure information.e.getMessage',
+    `start_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Starting time',
+    `end_time` datetime DEFAULT NULL COMMENT 'End Time',
+    `last_update_time` datetime NOT NULL COMMENT 'Last update time',
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
+
+
+
+    -- BML project registry; users and resources attach via linkis_ps_bml_project_user / _resource.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_bml_project` (
+    `id` int(10) NOT NULL AUTO_INCREMENT,
+    `name` varchar(128) DEFAULT NULL,
+    -- Fixed: string default used double quotes, which breaks when sql_mode includes ANSI_QUOTES;
+    -- single quotes match the rest of this script.
+    `system` varchar(64) not null default 'dss',
+    `source` varchar(1024) default null,
+    `description` varchar(1024) default null,
+    `creator` varchar(128) not null,
+    `enabled` tinyint default 1,
+    `create_time` datetime DEFAULT now(),
+    unique key(`name`),
+    PRIMARY KEY (`id`)
+    )ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT;
+
+
+
+    -- Membership and privilege of a user within a BML project.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_bml_project_user` (
+    `id` int(10) NOT NULL AUTO_INCREMENT,
+    `project_id` int(10) NOT NULL,
+    `username` varchar(64) DEFAULT NULL,
+    `priv` int(10) not null default 7, -- rwx 421 The permission value is 7. 8 is the administrator, which can authorize other users
+    `creator` varchar(128) not null,
+    `create_time` datetime DEFAULT now(),
+    `expire_time` datetime default null,
+    unique key user_project(`username`, `project_id`),
+    PRIMARY KEY (`id`)
+    )ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT;
+
+
+    -- Association between a BML project and the resources it owns.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_bml_project_resource` (
+    `id` int(10) NOT NULL AUTO_INCREMENT,
+    `project_id` int(10) NOT NULL,
+    `resource_id` varchar(128) DEFAULT NULL,
+    PRIMARY KEY (`id`)
+    )ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT;
+
+
+    -- Label definitions attached to service instances (public-service side).
+    CREATE TABLE IF NOT EXISTS `linkis_ps_instance_label` (
+    `id` int(20) NOT NULL AUTO_INCREMENT,
+    `label_key` varchar(32) COLLATE utf8_bin NOT NULL COMMENT 'string key',
+    `label_value` varchar(255) COLLATE utf8_bin NOT NULL COMMENT 'string value',
+    `label_feature` varchar(16) COLLATE utf8_bin NOT NULL COMMENT 'store the feature of label, but it may be redundant',
+    `label_value_size` int(20) NOT NULL COMMENT 'size of key -> value map',
+    `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp',
+    -- Fixed copy-paste COMMENT: create_time previously said 'update unix timestamp';
+    -- the sibling table linkis_ps_instance_label_value_relation uses 'create unix timestamp'.
+    `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create unix timestamp',
+    PRIMARY KEY (`id`),
+    UNIQUE KEY `label_key_value` (`label_key`,`label_value`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+
+    -- Decomposed value entries of a label's key -> value map.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_instance_label_value_relation` (
+    `id` int(20) NOT NULL AUTO_INCREMENT,
+    `label_value_key` varchar(255) COLLATE utf8_bin NOT NULL COMMENT 'value key',
+    `label_value_content` varchar(255) COLLATE utf8_bin DEFAULT NULL COMMENT 'value content',
+    `label_id` int(20) DEFAULT NULL COMMENT 'id reference linkis_ps_instance_label -> id',
+    `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp',
+    `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create unix timestamp',
+    PRIMARY KEY (`id`),
+    UNIQUE KEY `label_value_key_label_id` (`label_value_key`,`label_id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- Binds a label to a concrete service instance (host:port).
+    CREATE TABLE IF NOT EXISTS `linkis_ps_instance_label_relation` (
+    `id` int(20) NOT NULL AUTO_INCREMENT,
+    `label_id` int(20) DEFAULT NULL COMMENT 'id reference linkis_ps_instance_label -> id',
+    -- Fixed attribute order for consistency: every other column in this script writes
+    -- COLLATE before NOT NULL ("COLLATE utf8_bin NOT NULL"); semantics are identical.
+    `service_instance` varchar(128) COLLATE utf8_bin NOT NULL COMMENT 'structure like ${host|machine}:${port}',
+    `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp',
+    `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create unix timestamp',
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+
+    -- Registered service instances known to the public service.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_instance_info` (
+    `id` int(11) NOT NULL AUTO_INCREMENT,
+    `instance` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT 'structure like ${host|machine}:${port}',
+    `name` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT 'equal application name in registry',
+    `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp',
+    `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create unix timestamp',
+    PRIMARY KEY (`id`),
+    UNIQUE KEY `instance` (`instance`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- Error-code dictionary: regex patterns used to map raw errors to codes/descriptions.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_error_code` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT,
+    `error_code` varchar(50) NOT NULL,
+    `error_desc` varchar(1024) NOT NULL,
+    `error_regex` varchar(1024) DEFAULT NULL,
+    `error_type` int(3) DEFAULT 0,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB  DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- Service instances managed by the CG manager (ECM/EC registry).
+    CREATE TABLE IF NOT EXISTS `linkis_cg_manager_service_instance` (
+    `id` int(11) NOT NULL AUTO_INCREMENT,
+    `instance` varchar(128) COLLATE utf8_bin DEFAULT NULL,
+    `name` varchar(32) COLLATE utf8_bin DEFAULT NULL,
+    `owner` varchar(32) COLLATE utf8_bin DEFAULT NULL,
+    `mark` varchar(32) COLLATE utf8_bin DEFAULT NULL,
+    `identifier` varchar(32) COLLATE utf8_bin DEFAULT NULL,
+    `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    `updator` varchar(32) COLLATE utf8_bin DEFAULT NULL,
+    `creator` varchar(32) COLLATE utf8_bin DEFAULT NULL,
+    PRIMARY KEY (`id`),
+    UNIQUE KEY `instance` (`instance`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- Resource accounting for the resource manager; resource columns hold serialized resource objects.
+    CREATE TABLE IF NOT EXISTS `linkis_cg_manager_linkis_resources` (
+    `id` int(11) NOT NULL AUTO_INCREMENT,
+    `max_resource` varchar(1020) COLLATE utf8_bin DEFAULT NULL,
+    `min_resource` varchar(1020) COLLATE utf8_bin DEFAULT NULL,
+    `used_resource` varchar(1020) COLLATE utf8_bin DEFAULT NULL,
+    `left_resource` varchar(1020) COLLATE utf8_bin DEFAULT NULL,
+    `expected_resource` varchar(1020) COLLATE utf8_bin DEFAULT NULL,
+    `locked_resource` varchar(1020) COLLATE utf8_bin DEFAULT NULL,
+    -- NOTE(review): camelCase column names (resourceType, ticketId) break the snake_case
+    -- convention of this schema; renaming would break existing mappers, so left as-is.
+    `resourceType` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `ticketId` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    `updator` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `creator` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- Distributed-lock table for the CG manager; mirrors linkis_ps_common_lock.
+    CREATE TABLE IF NOT EXISTS `linkis_cg_manager_lock` (
+    `id` int(11) NOT NULL AUTO_INCREMENT,
+    `lock_object` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `time_out` longtext COLLATE utf8_bin,
+    `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    PRIMARY KEY (`id`),
+    UNIQUE KEY `lock_object` (`lock_object`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- External resource providers (e.g. Yarn) consumed by the resource manager; config is provider JSON.
+    CREATE TABLE IF NOT EXISTS `linkis_cg_rm_external_resource_provider` (
+    `id` int(10) NOT NULL AUTO_INCREMENT,
+    `resource_type` varchar(32) NOT NULL,
+    `name` varchar(32) NOT NULL,
+    `labels` varchar(32) DEFAULT NULL,
+    `config` text NOT NULL,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+    -- Mapping of engine instances to the ECM instance that launched them.
+    CREATE TABLE IF NOT EXISTS `linkis_cg_manager_engine_em` (
+    `id` int(20) NOT NULL AUTO_INCREMENT,
+    `engine_instance` varchar(128) COLLATE utf8_bin DEFAULT NULL,
+    `em_instance` varchar(128) COLLATE utf8_bin DEFAULT NULL,
+    `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- Label definitions on the CG manager side; mirrors linkis_ps_instance_label.
+    CREATE TABLE IF NOT EXISTS `linkis_cg_manager_label` (
+    `id` int(20) NOT NULL AUTO_INCREMENT,
+    `label_key` varchar(32) COLLATE utf8_bin NOT NULL,
+    `label_value` varchar(255) COLLATE utf8_bin NOT NULL,
+    `label_feature` varchar(16) COLLATE utf8_bin NOT NULL,
+    `label_value_size` int(20) NOT NULL,
+    `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    PRIMARY KEY (`id`),
+    UNIQUE KEY `label_key_value` (`label_key`,`label_value`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- Decomposed value entries of a CG label's key -> value map; mirrors linkis_ps_instance_label_value_relation.
+    CREATE TABLE IF NOT EXISTS `linkis_cg_manager_label_value_relation` (
+    `id` int(20) NOT NULL AUTO_INCREMENT,
+    `label_value_key` varchar(255) COLLATE utf8_bin NOT NULL,
+    `label_value_content` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `label_id` int(20) DEFAULT NULL,
+    `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    PRIMARY KEY (`id`),
+    UNIQUE KEY `label_value_key_label_id` (`label_value_key`,`label_id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- One-to-one binding between a label and a resource record (unique on label_id).
+    CREATE TABLE IF NOT EXISTS `linkis_cg_manager_label_resource` (
+    `id` int(20) NOT NULL AUTO_INCREMENT,
+    `label_id` int(20) DEFAULT NULL,
+    `resource_id` int(20) DEFAULT NULL,
+    `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    PRIMARY KEY (`id`),
+    UNIQUE KEY `label_id` (`label_id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- Lifecycle record of an EngineConn's resource usage (request/use/release), keyed by ticket id.
+    CREATE TABLE IF NOT EXISTS `linkis_cg_ec_resource_info_record` (
+    `id` INT(20) NOT NULL AUTO_INCREMENT,
+    `label_value` VARCHAR(255) NOT NULL COMMENT 'ec labels stringValue',
+    `create_user` VARCHAR(128) NOT NULL COMMENT 'ec create user',
+    `service_instance` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT 'ec instance info',
+    `ecm_instance` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT 'ecm instance info ',
+    `ticket_id` VARCHAR(100) NOT NULL COMMENT 'ec ticket id',
+    `log_dir_suffix` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT 'log path',
+    `request_times` INT(8) COMMENT 'resource request times',
+    `request_resource` VARCHAR(1020) COMMENT 'request resource',
+    `used_times` INT(8) COMMENT 'resource used times',
+    `used_resource` VARCHAR(1020) COMMENT 'used resource',
+    `release_times` INT(8) COMMENT 'resource released times',
+    `released_resource` VARCHAR(1020)  COMMENT 'released resource',
+    `release_time` datetime DEFAULT NULL COMMENT 'released time',
+    `used_time` datetime DEFAULT NULL COMMENT 'used time',
+    `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create time',
+    PRIMARY KEY (`id`),
+    -- NOTE(review): KEY (ticket_id) is redundant — the unique key below already has ticket_id
+    -- as its leftmost column. Also the unique key's name lists the columns in the opposite
+    -- order from its definition. Both are harmless; left unchanged for upgrade compatibility.
+    KEY (`ticket_id`),
+    UNIQUE KEY `label_value_ticket_id` (`ticket_id`,`label_value`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- Associates labels with service instances; the composite (non-unique) index
+    -- supports lookups by label_id and by (label_id, service_instance).
+    CREATE TABLE IF NOT EXISTS `linkis_cg_manager_label_service_instance` (
+    `id` int(20) NOT NULL AUTO_INCREMENT,
+    `label_id` int(20) DEFAULT NULL,
+    `service_instance` varchar(128) COLLATE utf8_bin DEFAULT NULL,
+    `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    PRIMARY KEY (`id`),
+    KEY label_serviceinstance(label_id,service_instance)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+
+    -- Associates labels with usernames.
+    -- NOTE(review): no unique key on (username, label_id), so duplicate
+    -- associations are possible — confirm this is intended.
+    CREATE TABLE IF NOT EXISTS `linkis_cg_manager_label_user` (
+    `id` int(20) NOT NULL AUTO_INCREMENT,
+    `username` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `label_id` int(20) DEFAULT NULL,
+    `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+
+    -- Append-only history of manager instance metrics (status, overload,
+    -- heartbeat, health) per service instance.
+    -- NOTE(review): `ticketID`/`serviceName` break the file's snake_case
+    -- column convention — kept as-is for compatibility with existing readers.
+    CREATE TABLE IF NOT EXISTS `linkis_cg_manager_metrics_history` (
+    `id` int(20) NOT NULL AUTO_INCREMENT,
+    `instance_status` int(20) DEFAULT NULL,
+    `overload` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `heartbeat_msg` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `healthy_status` int(20) DEFAULT NULL,
+    `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    `creator` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `ticketID` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `serviceName` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `instance` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- Latest metrics snapshot per service instance (one row per instance;
+    -- the instance name is the primary key).
+    CREATE TABLE IF NOT EXISTS `linkis_cg_manager_service_instance_metrics` (
+    `instance` varchar(128) COLLATE utf8_bin NOT NULL,
+    `instance_status` int(11) DEFAULT NULL,
+    `overload` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    -- BLOB/TEXT columns cannot carry a DEFAULT clause in MySQL (< 8.0.13 /
+    -- strict mode rejects it); the column is nullable, so NULL is the implicit
+    -- default anyway — the invalid "DEFAULT NULL" was removed.
+    `heartbeat_msg` text COLLATE utf8_bin,
+    `healthy_status` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+    PRIMARY KEY (`instance`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- Tracks engine-conn plugin artifacts uploaded to BML (file metadata plus
+    -- the BML resource id/version they were stored under).
+    CREATE TABLE IF NOT EXISTS `linkis_cg_engine_conn_plugin_bml_resources` (
+    `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'Primary key',
+    `engine_conn_type` varchar(100) NOT NULL COMMENT 'Engine type',
+    `version` varchar(100) COMMENT 'version',
+    `file_name` varchar(255) COMMENT 'file name',
+    `file_size` bigint(20)  DEFAULT 0 NOT NULL COMMENT 'file size',
+    `last_modified` bigint(20)  COMMENT 'File update time',
+    -- The two COMMENTs below previously said 'Owning system' / 'Resource owner',
+    -- which described neither column (copy-paste error); corrected to match.
+    `bml_resource_id` varchar(100) NOT NULL COMMENT 'BML resource id',
+    `bml_resource_version` varchar(200) NOT NULL COMMENT 'BML resource version',
+    `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'created time',
+    `last_update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'updated time',
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_dm_datasource
+    -- ----------------------------
+    -- Data source definitions. version_id / published_version_id presumably
+    -- reference linkis_ps_dm_datasource_version — TODO confirm against the DAO.
+    -- NOTE(review): no unique key on datasource_name; duplicates are possible.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_dm_datasource`
+    (
+    `id`                   int(11)                       NOT NULL AUTO_INCREMENT,
+    `datasource_name`      varchar(255) COLLATE utf8_bin NOT NULL,
+    `datasource_desc`      varchar(255) COLLATE utf8_bin      DEFAULT NULL,
+    `datasource_type_id`   int(11)                       NOT NULL,
+    `create_identify`      varchar(255) COLLATE utf8_bin      DEFAULT NULL,
+    `create_system`        varchar(255) COLLATE utf8_bin      DEFAULT NULL,
+    `parameter`            varchar(255) COLLATE utf8_bin NULL DEFAULT NULL,
+    `create_time`          datetime                      NULL DEFAULT CURRENT_TIMESTAMP,
+    `modify_time`          datetime                      NULL DEFAULT CURRENT_TIMESTAMP,
+    `create_user`          varchar(255) COLLATE utf8_bin      DEFAULT NULL,
+    `modify_user`          varchar(255) COLLATE utf8_bin      DEFAULT NULL,
+    `labels`               varchar(255) COLLATE utf8_bin      DEFAULT NULL,
+    `version_id`           int(11)                            DEFAULT NULL COMMENT 'current version id',
+    `expire`               tinyint(1)                         DEFAULT 0,
+    `published_version_id` int(11)                            DEFAULT NULL,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_dm_datasource_env
+    -- ----------------------------
+    -- Named environments for a data source type (e.g. per-cluster connection
+    -- parameter sets), keyed to linkis_ps_dm_datasource_type via datasource_type_id.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_dm_datasource_env`
+    (
+    `id`                 int(11)                       NOT NULL AUTO_INCREMENT,
+    `env_name`           varchar(32) COLLATE utf8_bin  NOT NULL,
+    `env_desc`           varchar(255) COLLATE utf8_bin          DEFAULT NULL,
+    `datasource_type_id` int(11)                       NOT NULL,
+    `parameter`          varchar(255) COLLATE utf8_bin          DEFAULT NULL,
+    `create_time`        datetime                      NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    `create_user`        varchar(255) COLLATE utf8_bin NULL     DEFAULT NULL,
+    `modify_time`        datetime                      NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    `modify_user`        varchar(255) COLLATE utf8_bin NULL     DEFAULT NULL,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_dm_datasource_type
+    -- ----------------------------
+    -- Catalog of supported data source types (name, classifier, icon, etc.).
+    CREATE TABLE IF NOT EXISTS `linkis_ps_dm_datasource_type`
+    (
+    `id`          int(11)                      NOT NULL AUTO_INCREMENT,
+    `name`        varchar(32) COLLATE utf8_bin NOT NULL,
+    `description` varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `option`      varchar(32) COLLATE utf8_bin  DEFAULT NULL,
+    `classifier`  varchar(32) COLLATE utf8_bin NOT NULL,
+    `icon`        varchar(255) COLLATE utf8_bin DEFAULT NULL,
+    `layers`      int(3)                       NOT NULL,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- ----------------------------
+    -- Table structure for linkis_ps_dm_datasource_type_key
+    -- ----------------------------
+    -- Parameter key definitions for each data source type: validation
+    -- (value_type, value_regex, require), i18n names (name / name_en), and
+    -- optional cross-references (ref_id / ref_value).
+    CREATE TABLE IF NOT EXISTS `linkis_ps_dm_datasource_type_key`
+    (
+    `id`                  int(11)                       NOT NULL AUTO_INCREMENT,
+    `data_source_type_id` int(11)                       NOT NULL,
+    `key`                 varchar(32) COLLATE utf8_bin  NOT NULL,
+    `name`                varchar(32) COLLATE utf8_bin  NOT NULL,
+    `name_en`             varchar(32) COLLATE utf8_bin  NOT NULL,
+    `default_value`       varchar(50) COLLATE utf8_bin  NULL     DEFAULT NULL,
+    `value_type`          varchar(50) COLLATE utf8_bin  NOT NULL,
+    `scope`               varchar(50) COLLATE utf8_bin  NULL     DEFAULT NULL,
+    `require`             tinyint(1)                    NULL     DEFAULT 0,
+    `description`         varchar(200) COLLATE utf8_bin NULL     DEFAULT NULL,
+    `description_en`      varchar(200) COLLATE utf8_bin NULL     DEFAULT NULL,
+    `value_regex`         varchar(200) COLLATE utf8_bin NULL     DEFAULT NULL,
+    `ref_id`              bigint(20)                    NULL     DEFAULT NULL,
+    `ref_value`           varchar(50) COLLATE utf8_bin  NULL     DEFAULT NULL,
+    `data_source`         varchar(200) COLLATE utf8_bin NULL     DEFAULT NULL,
+    `update_time`         datetime                      NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    `create_time`         datetime                      NOT NULL DEFAULT CURRENT_TIMESTAMP,
+    PRIMARY KEY (`id`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+    -- ----------------------------
+    -- Table structure for linkis_ps_dm_datasource_version
+    -- ----------------------------
+    -- Versioned parameter snapshots per data source; composite primary key
+    -- (version_id, datasource_id) with version_id auto-incrementing.
+    -- Note `parameter` here is varchar(2048), wider than the 255 on the
+    -- datasource table's own `parameter` column.
+    CREATE TABLE IF NOT EXISTS `linkis_ps_dm_datasource_version`
+    (
+    `version_id`    int(11)                        NOT NULL AUTO_INCREMENT,
+    `datasource_id` int(11)                        NOT NULL,
+    `parameter`     varchar(2048) COLLATE utf8_bin NULL DEFAULT NULL,
+    `comment`       varchar(255) COLLATE utf8_bin  NULL DEFAULT NULL,
+    `create_time`   datetime(0)                    NULL DEFAULT CURRENT_TIMESTAMP,
+    `create_user`   varchar(255) COLLATE utf8_bin  NULL DEFAULT NULL,
+    PRIMARY KEY (`version_id`, `datasource_id`) USING BTREE
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
+
+    -- ----------------------------
+    -- Table structure for linkis_mg_gateway_auth_token
+    -- ----------------------------
+    -- Gateway auth tokens: which users/hosts may use a named token, and its
+    -- lifetime in days (elapse_day).
+    -- NOTE(review): create_time/update_time are DATE (day precision) while the
+    -- rest of the schema uses datetime — confirm this is intentional.
+    CREATE TABLE IF NOT EXISTS `linkis_mg_gateway_auth_token` (
+    `id` int(11) NOT NULL AUTO_INCREMENT,
+    `token_name` varchar(128) NOT NULL,
+    `legal_users` text,
+    `legal_hosts` text,
+    `business_owner` varchar(32),
+    `create_time` DATE DEFAULT NULL,
+    `update_time` DATE DEFAULT NULL,
+    `elapse_day` BIGINT DEFAULT NULL,
+    `update_by` varchar(32),
+    PRIMARY KEY (`id`),
+    UNIQUE KEY `token_name` (`token_name`)
+    ) ENGINE=InnoDB DEFAULT CHARSET=utf8;
+
+  linkis_dml.sql: |
+    -- Variables:
+    SET @SPARK_LABEL="spark-{{ .Values.linkis.deps.spark.version }}";
+    SET @HIVE_LABEL="hive-{{ .Values.linkis.deps.hive.version }}";
+    SET @PYTHON_LABEL="python-{{ .Values.linkis.deps.python.version }}";
+    SET @PIPELINE_LABEL="pipeline-1";
+    SET @JDBC_LABEL="jdbc-4";
+    SET @PRESTO_LABEL="presto-0.234";
+    SET @IO_FILE_LABEL="io_file-1.0";
+    SET @OPENLOOKENG_LABEL="openlookeng-1.5.0";
+    -- Derived variables:
+    SET @SPARK_ALL=CONCAT('*-*,',@SPARK_LABEL);
+    SET @SPARK_IDE=CONCAT('*-IDE,',@SPARK_LABEL);
+    SET @SPARK_NODE=CONCAT('*-nodeexecution,',@SPARK_LABEL);
+    SET @SPARK_VISUALIS=CONCAT('*-Visualis,',@SPARK_LABEL);
+
+    SET @HIVE_ALL=CONCAT('*-*,',@HIVE_LABEL);
+    SET @HIVE_IDE=CONCAT('*-IDE,',@HIVE_LABEL);
+    SET @HIVE_NODE=CONCAT('*-nodeexecution,',@HIVE_LABEL);
+
+    SET @PYTHON_ALL=CONCAT('*-*,',@PYTHON_LABEL);
+    SET @PYTHON_IDE=CONCAT('*-IDE,',@PYTHON_LABEL);
+    SET @PYTHON_NODE=CONCAT('*-nodeexecution,',@PYTHON_LABEL);
+
+    SET @PIPELINE_ALL=CONCAT('*-*,',@PIPELINE_LABEL);
+    SET @PIPELINE_IDE=CONCAT('*-IDE,',@PIPELINE_LABEL);
+
+    SET @JDBC_ALL=CONCAT('*-*,',@JDBC_LABEL);
+    SET @JDBC_IDE=CONCAT('*-IDE,',@JDBC_LABEL);
+
+    SET @PRESTO_ALL=CONCAT('*-*,',@PRESTO_LABEL);
+    SET @PRESTO_IDE=CONCAT('*-IDE,',@PRESTO_LABEL);
+
+    SET @IO_FILE_ALL=CONCAT('*-*,',@IO_FILE_LABEL);
+    SET @IO_FILE_IDE=CONCAT('*-IDE,',@IO_FILE_LABEL);
+
+    SET @OPENLOOKENG_ALL=CONCAT('*-*,',@OPENLOOKENG_LABEL);
+    SET @OPENLOOKENG_IDE=CONCAT('*-IDE,',@OPENLOOKENG_LABEL);
+
+    -- Global Settings
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue', 'yarn队列名', 'yarn队列名', 'default', 'None', NULL, '0', '0', '1', '队列资源');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', '队列实例最大个数', '30', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', '150', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1', '队列资源');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', '300G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1', '队列资源');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '全局各个引擎内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '全局各个引擎核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1][0-2][0-8])$', '0', '0', '1', '队列资源');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '全局各个引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源');
+    -- spark
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'spark引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'spark');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.instances', '取值范围:1-40,单位:个', 'spark执行器实例最大并发数', '1', 'NumInterval', '[1,40]', '0', '0', '2', 'spark资源设置', 'spark');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.cores', '取值范围:1-8,单位:个', 'spark执行器核心个数',  '1', 'NumInterval', '[1,8]', '0', '0', '1','spark资源设置', 'spark');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.memory', '取值范围:1-15,单位:G', 'spark执行器内存大小', '1g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '3', 'spark资源设置', 'spark');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.cores', '取值范围:只能取1,单位:个', 'spark驱动器核心个数', '1', 'NumInterval', '[1,1]', '0', '1', '1', 'spark资源设置','spark');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', 'spark驱动器内存大小','1g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '1', 'spark资源设置', 'spark');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.max.free.time', '取值范围:3m,15m,30m,1h,2h', '引擎空闲退出时间','1h', 'OFT', '[\"1h\",\"2h\",\"30m\",\"15m\",\"3m\"]', '0', '0', '1', 'spark引擎设置', 'spark');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.pd.addresses', NULL, NULL, 'pd0:2379', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.addr', NULL, NULL, 'tidb', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.password', NULL, NULL, NULL, 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.port', NULL, NULL, '4000', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.user', NULL, NULL, 'root', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark');
+    -- hive
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'hive引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'hive');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.java.driver.memory', '取值范围:1-10,单位:G', 'hive引擎初始化内存大小','1g', 'Regex', '^([1-9]|10)(G|g)$', '0', '0', '1', 'hive引擎设置', 'hive');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.client.java.opts', 'hive客户端进程参数', 'hive引擎启动时jvm参数','', 'None', NULL, '1', '1', '1', 'hive引擎设置', 'hive');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('mapred.reduce.tasks', '范围:1-20,单位:个', 'reduce数', '10', 'NumInterval', '[1,20]', '0', '1', '1', 'hive资源设置', 'hive');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('dfs.block.size', '取值范围:2-10,单位:G', 'map数据块大小', '10', 'NumInterval', '[2,10]', '0', '1', '1', 'hive资源设置', 'hive');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.exec.reduce.bytes.per.reducer', '取值范围:2-10,单位:G', 'reduce处理的数据量', '10', 'NumInterval', '[2,10]', '0', '1', '1', 'hive资源设置', 'hive');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.max.free.time', '取值范围:3m,15m,30m,1h,2h', '引擎空闲退出时间','1h', 'OFT', '[\"1h\",\"2h\",\"30m\",\"15m\",\"3m\"]', '0', '0', '1', 'hive引擎设置', 'hive');
+
+    -- python
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', 'python驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'python');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', 'python驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'python');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'python引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'python');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.java.driver.memory', '取值范围:1-2,单位:G', 'python引擎初始化内存大小', '1g', 'Regex', '^([1-2])(G|g)$', '0', '0', '1', 'python引擎设置', 'python');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'python引擎设置', 'python');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.max.free.time', '取值范围:3m,15m,30m,1h,2h', '引擎空闲退出时间','1h', 'OFT', '[\"1h\",\"2h\",\"30m\",\"15m\",\"3m\"]', '0', '0', '1', 'python引擎设置', 'python');
+
+    -- pipeline
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.mold', '取值范围:csv或excel', '结果集导出类型','csv', 'OFT', '[\"csv\",\"excel\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.field.split', '取值范围:,或\\t', 'csv分隔符',',', 'OFT', '[\",\",\"\\\\t\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.charset', '取值范围:utf-8或gbk', '结果集导出字符集','gbk', 'OFT', '[\"utf-8\",\"gbk\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.isoverwrite', '取值范围:true或false', '是否覆写','true', 'OFT', '[\"true\",\"false\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-3,单位:个', 'pipeline引擎最大并发数','3', 'NumInterval', '[1,3]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.java.driver.memory', '取值范围:1-10,单位:G', 'pipeline引擎初始化内存大小','2g', 'Regex', '^([1-9]|10)(G|g)$', '0', '0', '1', 'pipeline资源设置', 'pipeline');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.shuffle.null.type', '取值范围:NULL或者BLANK', '空值替换','NULL', 'OFT', '[\"NULL\",\"BLANK\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+    -- jdbc
+    insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.connect.url', '例如:jdbc:hive2://127.0.0.1:10000', 'jdbc连接地址', 'jdbc:hive2://127.0.0.1:10000', 'Regex', '^\\s*jdbc:\\w+://([^:]+)(:\\d+)(/[^\\?]+)?(\\?\\S*)?$', '0', '0', '1', '数据源配置', 'jdbc');
+    insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.driver', '例如:org.apache.hive.jdbc.HiveDriver', 'jdbc连接驱动', '', 'None', '', '0', '0', '1', '用户配置', 'jdbc');
+    insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.version', '取值范围:jdbc3,jdbc4', 'jdbc版本','jdbc4', 'OFT', '[\"jdbc3\",\"jdbc4\"]', '0', '0', '1', '用户配置', 'jdbc');
+    insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.username', 'username', '数据库连接用户名', '', 'None', '', '0', '0', '1', '用户配置', 'jdbc');
+    insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.password', 'password', '数据库连接密码', '', 'None', '', '0', '0', '1', '用户配置', 'jdbc');
+    insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.connect.max', '范围:1-20,单位:个', 'jdbc引擎最大连接数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '数据源配置', 'jdbc');
+
+    -- io_file
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'io_file引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', 'io_file引擎资源上限', 'io_file');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-50,单位:G', 'io_file引擎最大内存', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', 'io_file引擎资源上限', 'io_file');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-100,单位:个', 'io_file引擎最大核心数', '40', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', 'io_file引擎资源上限', 'io_file');
+
+    -- openlookeng
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.openlookeng.url', '例如:http://127.0.0.1:8080', '连接地址', 'http://127.0.0.1:8080', 'Regex', '^\\s*http://([^:]+)(:\\d+)(/[^\\?]+)?(\\?\\S*)?$', 'openlookeng', 0, 0, 1, '数据源配置');
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.openlookeng.catalog', 'catalog', 'catalog', 'system', 'None', '', 'openlookeng', 0, 0, 1, '数据源配置');
+    -- openLooKeng datasource config key (treeName is the console category, shown in Chinese).
+    INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.openlookeng.source', 'source', 'source', 'global', 'None', '', 'openlookeng', 0, 0, 1, '数据源配置');
+
+
+    -- Configuration first level directory
+    -- NOTE(review): REPLACE is only idempotent if linkis_cg_manager_label has a unique key
+    -- covering (label_key, label_value) — confirm against the DDL.
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-全局设置,*-*', 'OPTIONAL', 2, now(), now());
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-IDE,*-*', 'OPTIONAL', 2, now(), now());
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-Visualis,*-*', 'OPTIONAL', 2, now(), now());
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-nodeexecution,*-*', 'OPTIONAL', 2, now(), now());
+
+
+    -- Engine level default configuration
+    -- The @*_ALL values are MySQL session variables — presumably set earlier in this
+    -- script to per-engine label strings; verify they are defined before this point.
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-*,*-*', 'OPTIONAL', 2, now(), now());
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@SPARK_ALL, 'OPTIONAL', 2, now(), now());
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@HIVE_ALL, 'OPTIONAL', 2, now(), now());
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@PYTHON_ALL, 'OPTIONAL', 2, now(), now());
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@PIPELINE_ALL, 'OPTIONAL', 2, now(), now());
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@JDBC_ALL, 'OPTIONAL', 2, now(), now());
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@OPENLOOKENG_ALL, 'OPTIONAL', 2, now(), now());
+
+    -- Custom correlation engine (e.g. spark-{{ .Values.linkis.deps.spark.version }}) and configKey value
+    -- Each insert-select below links every config key of one engine type to that
+    -- engine's label row, populating linkis_ps_configuration_key_engine_relation.
+    -- Global Settings
+    insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
+    (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config
+    -- NOTE(review): the double-quoted "*-*,*-*" relies on sql_mode without ANSI_QUOTES;
+    -- later statements use single quotes / @variables — confirm the deployment's sql_mode.
+    INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type is null and label.label_value = "*-*,*-*");
+
+    -- spark-{{ .Values.linkis.deps.spark.version }} (Here choose to associate all spark type Key values with spark-{{ .Values.linkis.deps.spark.version }})
+    insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
+    (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config
+    INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'spark' and label.label_value = @SPARK_ALL);
+
+    -- hive-{{ .Values.linkis.deps.hive.version }}
+    insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
+    (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config
+    INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'hive' and label_value = @HIVE_ALL);
+
+    -- python-{{ .Values.linkis.deps.python.version }}
+    insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
+    (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config
+    INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'python' and label_value = @PYTHON_ALL);
+
+    -- pipeline-*
+    insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
+    (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config
+    INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'pipeline' and label_value = @PIPELINE_ALL);
+
+    -- jdbc-4
+    insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
+    (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config
+    INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'jdbc' and label_value = @JDBC_ALL);
+
+    -- io_file-1.0
+    INSERT INTO `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
+    (SELECT config.id AS `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config
+    INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'io_file' and label_value = @IO_FILE_ALL);
+
+    -- openlookeng-*
+    insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`)
+    (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config
+    INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'openlookeng' and label_value = @OPENLOOKENG_ALL);
+
+    -- If you need to customize the parameters of the new engine, the following configuration does not need to write SQL initialization
+    -- Just write the SQL above, and then add applications and engines to the management console to automatically initialize the configuration
+
+
+    -- Configuration secondary directory (creator level default configuration)
+    -- One label per (creator, engine) combination; the @*_IDE/@*_VISUALIS/@*_NODE
+    -- session variables are presumably set earlier in this script — verify.
+    -- IDE
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@SPARK_IDE, 'OPTIONAL', 2, now(), now());
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@HIVE_IDE, 'OPTIONAL', 2, now(), now());
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@PYTHON_IDE, 'OPTIONAL', 2, now(), now());
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@PIPELINE_IDE, 'OPTIONAL', 2, now(), now());
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@JDBC_IDE, 'OPTIONAL', 2, now(), now());
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@OPENLOOKENG_IDE, 'OPTIONAL', 2, now(), now());
+
+    -- Visualis
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@SPARK_VISUALIS, 'OPTIONAL', 2, now(), now());
+    -- nodeexecution
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@SPARK_NODE, 'OPTIONAL', 2, now(), now());
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@HIVE_NODE, 'OPTIONAL', 2, now(), now());
+    replace into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType',@PYTHON_NODE, 'OPTIONAL', 2, now(), now());
+
+
+    -- Associate first-level and second-level directories
+    -- Each pair: look up the label id into @label_id, then insert a category row
+    -- (level 1 = first-level directory, level 2 = creator/engine directory).
+    -- NOTE(review): `SELECT @var := col` assumes exactly one matching row and is a
+    -- deprecated pattern in MySQL 8 — confirm the target server version tolerates it.
+    select @label_id := id from linkis_cg_manager_label where `label_value` = '*-全局设置,*-*';
+    insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (@label_id, 1);
+
+    select @label_id := id from linkis_cg_manager_label where `label_value` = '*-IDE,*-*';
+    insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (@label_id, 1);
+
+    select @label_id := id from linkis_cg_manager_label where `label_value` = '*-Visualis,*-*';
+    insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (@label_id, 1);
+
+    select @label_id := id from linkis_cg_manager_label where `label_value` = '*-nodeexecution,*-*';
+    insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (@label_id, 1);
+
+    select @label_id := id from linkis_cg_manager_label where `label_value` = @SPARK_IDE;
+    insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (@label_id, 2);
+
+    select @label_id := id from linkis_cg_manager_label where `label_value` = @HIVE_IDE;
+    insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (@label_id, 2);
+
+    select @label_id := id from linkis_cg_manager_label where `label_value` = @PYTHON_IDE;
+    insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (@label_id, 2);
+
+    select @label_id := id from linkis_cg_manager_label where `label_value` = @PIPELINE_IDE;
+    insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (@label_id, 2);
+
+    select @label_id := id from linkis_cg_manager_label where `label_value` = @JDBC_IDE;
+    insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (@label_id, 2);
+
+    select @label_id := id from linkis_cg_manager_label where `label_value` = @OPENLOOKENG_IDE;
+    insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (@label_id, 2);
+
+    select @label_id := id from linkis_cg_manager_label where `label_value` =  @SPARK_VISUALIS;
+    insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (@label_id, 2);
+
+    select @label_id := id from linkis_cg_manager_label where `label_value` = @SPARK_NODE;
+    insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (@label_id, 2);
+
+    select @label_id := id from linkis_cg_manager_label where `label_value` = @HIVE_NODE;
+    insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (@label_id, 2);
+
+    select @label_id := id from linkis_cg_manager_label where `label_value` = @PYTHON_NODE;
+    insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (@label_id, 2);
+
+    -- Associate label and default configuration
+    -- Seed an empty config_value row for every (config key, engine label) relation,
+    -- so the console has a value record to overwrite per engine type.
+    insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
+    (select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
+    INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = '*-*,*-*');
+
+    -- spark default configuration
+    insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
+    (select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
+    INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @SPARK_ALL);
+
+    -- hive default configuration
+    insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
+    (select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
+    INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @HIVE_ALL);
+
+    -- python default configuration
+    insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
+    (select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
+    INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @PYTHON_ALL);
+
+    -- pipeline default configuration
+    insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
+    (select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
+    INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @PIPELINE_ALL);
+
+    -- jdbc default configuration
+    insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
+    (select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
+    INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @JDBC_ALL);
+
+    -- openlookeng default configuration
+    insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
+    (select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
+    INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @OPENLOOKENG_ALL);
+
+    insert  into `linkis_cg_rm_external_resource_provider`(`id`,`resource_type`,`name`,`labels`,`config`) values
+      (1,'Yarn','default',NULL,'{\r\n\"rmWebAddress\": \"{{ .Values.linkis.deps.yarn.restfulUrl }}\",\r\n\"hadoopVersion\": \"{{ .Values.linkis.deps.hadoop.version }}\",\r\n\"authorEnable\":{{ .Values.linkis.deps.yarn.authEnable }},\r\n\"user\":\"{{ .Values.linkis.deps.yarn.authUser }}\",\r\n\"pwd\":\"{{ .Values.linkis.deps.yarn.authPassword }}\",\r\n\"kerberosEnable\":{{ .Values.linkis.deps.yarn.kerberosEnable }},\r\n\"principalName\":\"{{ .Values.linkis.deps.yarn.principal }}\",\r\n\"k [...]
+     ON DUPLICATE KEY UPDATE resource_type='Yarn', config='{\r\n\"rmWebAddress\": \"{{ .Values.linkis.deps.yarn.restfulUrl }}\",\r\n\"hadoopVersion\": \"{{ .Values.linkis.deps.hadoop.version }}\",\r\n\"authorEnable\":{{ .Values.linkis.deps.yarn.authEnable }},\r\n\"user\":\"{{ .Values.linkis.deps.yarn.authUser }}\",\r\n\"pwd\":\"{{ .Values.linkis.deps.yarn.authPassword }}\",\r\n\"kerberosEnable\":{{ .Values.linkis.deps.yarn.kerberosEnable }},\r\n\"principalName\":\"{{ .Values.linkis.deps. [...]
+
+    -- errorcode
+    -- Seed rows for linkis_ps_error_code: error_regex is matched against task logs to
+    -- map failures to a code and a user-facing (Chinese) description.
+    -- 01 linkis server
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01001','您的任务没有路由到后台ECM,请联系管理员','The em of labels',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01002','Linkis服务负载过高,请联系管理员扩容','Unexpected end of file from server',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01003','Linkis服务负载过高,请联系管理员扩容','failed to ask linkis Manager Can be retried SocketTimeoutException',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01004','引擎在启动时被Kill,请联系管理员',' [0-9]+ Killed',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01005','请求Yarn获取队列信息重试2次仍失败,请联系管理员','Failed to request external resourceClassCastException',0);
+
+    -- 11 linkis resource 12 user resource 13 user task resource
+    -- ECM resource-exhaustion errors (011xx series).
+    -- Fixed: the last two rows previously reused codes '01004'/'01005', which the
+    -- "01 linkis server" section above already assigns to different errors; they are
+    -- renumbered '01104'/'01105' to continue this section's 01101-01103 sequence.
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01101','ECM资源不足,请联系管理员扩容','ECM resources are insufficient',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01102','ECM 内存资源不足,请联系管理员扩容','ECM memory resources are insufficient',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01103','ECM CPU资源不足,请联系管理员扩容','ECM CPU resources are insufficient',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01104','ECM 实例资源不足,请联系管理员扩容','ECM Insufficient number of instances',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01105','机器内存不足,请联系管理员扩容','Cannot allocate memory',0);
+
+    -- 12xxx: queue/cluster resource errors surfaced to users.
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12001','队列CPU资源不足,可以调整Spark执行器个数','Queue CPU resources are insufficient',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12002','队列内存资源不足,可以调整Spark执行器个数','Insufficient queue memory',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12003','队列实例数超过限制','Insufficient number of queue instances',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12004','全局驱动器内存使用上限,可以设置更低的驱动内存','Drive memory resources are insufficient',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12005','超出全局驱动器CPU个数上限,可以清理空闲引擎','Drive core resources are insufficient',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12006','超出引擎最大并发数上限,可以清理空闲引擎','Insufficient number of instances',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12008','获取Yarn队列信息异常,可能是您设置的yarn队列不存在','获取Yarn队列信息异常',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12009','会话创建失败,%s队列不存在,请检查队列设置是否正确','queue (\\S+) does not exist in YARN',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12010','集群队列内存资源不足,可以联系组内人员释放资源','Insufficient cluster queue memory',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12011','集群队列CPU资源不足,可以联系组内人员释放资源','Insufficient cluster queue cpu',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12012','集群队列实例数超过限制','Insufficient cluster queue instance',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12013','资源不足导致启动引擎超时,您可以进行任务重试','wait for DefaultEngineConn',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12014','请求引擎超时,可能是因为队列资源不足导致,请重试','wait for engineConn initial timeout',0);
+
+
+    -- 13xxx: user task resource errors (OOM, oversized jobs, engine exits).
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13001','Java进程内存溢出,建议优化脚本内容','OutOfMemoryError',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13002','使用资源过大,请调优sql或者加大资源','Container killed by YARN for exceeding memory limits',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13003','使用资源过大,请调优sql或者加大资源','read record exception',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13004','引擎意外退出,可能是使用资源过大导致','failed because the engine quitted unexpectedly',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13005','Spark app应用退出,可能是复杂任务导致','Spark application has already stopped',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13006','Spark context退出,可能是复杂任务导致','Spark application sc has already stopped',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13007','Pyspark子进程意外退出,可能是复杂任务导致','Pyspark process  has stopped',0);
+    -- 21 cluster Authority  22 db Authority
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21001','会话创建失败,用户%s不能提交应用到队列:%s,请联系提供队列给您的人员','User (\\S+) cannot submit applications to queue (\\S+)',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21002','创建Python解释器失败,请联系管理员','initialize python executor failed',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21003','创建单机Python解释器失败,请联系管理员','PythonSession process cannot be initialized',0);
+
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22001','%s无权限访问,请申请开通数据表权限,请联系您的数据管理人员','Permission denied:\\s*user=[a-zA-Z0-9_]+,\\s*access=[A-Z]+\\s*,\\s*inode="([a-zA-Z0-9/_\\.]+)"',0);
+    -- INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22002','您可能没有相关权限','Permission denied',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22003','所查库表无权限','Authorization failed:No privilege',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22004','用户%s在机器不存在,请确认是否申请了相关权限','user (\\S+) does not exist',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22005','用户在机器不存在,请确认是否申请了相关权限','engineConnExec.sh: Permission denied',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22006','用户在机器不存在,请确认是否申请了相关权限','at com.sun.security.auth.UnixPrincipal',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22007','用户在机器不存在,请确认是否申请了相关权限','LoginException: java.lang.NullPointerException: invalid null input: name',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22008','用户在机器不存在,请确认是否申请了相关权限','User not known to the underlying authentication module',0);
+
+    -- 30 Space exceeded 31 user operation
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('30001','库超过限制','is exceeded',0);
+
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('31001','用户主动kill任务','is killed by user',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('31002','您提交的EngineTypeLabel没有对应的引擎版本','EngineConnPluginNotFoundException',0);
+
+    -- 41 not exist 42 sql 43 python 44 shell 45 scala 46 importExport
+    -- A code may appear multiple times on purpose: several regexes map to one message.
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('41001','数据库%s不存在,请检查引用的数据库是否有误','Database ''([a-zA-Z_0-9]+)'' not found',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('41001','数据库%s不存在,请检查引用的数据库是否有误','Database does not exist: ([a-zA-Z_0-9]+)',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('41002','表%s不存在,请检查引用的表是否有误','Table or view not found: ([`\\.a-zA-Z_0-9]+)',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('41002','表%s不存在,请检查引用的表是否有误','Table not found ''([a-zA-Z_0-9]+)''',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('41002','表%s不存在,请检查引用的表是否有误','Table ([a-zA-Z_0-9]+) not found',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('41003','字段%s不存在,请检查引用的字段是否有误','cannot resolve ''`(.+)`'' given input columns',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('41003','字段%s不存在,请检查引用的字段是否有误',' Invalid table alias or column reference ''(.+)'':',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('41003','字段%s不存在,请检查引用的字段是否有误','Column ''(.+)'' cannot be resolved',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('41004','分区字段%s不存在,请检查引用的表%s是否为分区表或分区字段有误','([a-zA-Z_0-9]+) is not a valid partition column in table ([`\\.a-zA-Z_0-9]+)',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('41004','分区字段%s不存在,请检查引用的表是否为分区表或分区字段有误','Partition spec \\{(\\S+)\\} contains non-partition columns',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('41004','分区字段%s不存在,请检查引用的表是否为分区表或分区字段有误','table is not partitioned but partition spec exists:\\{(.+)\\}',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('41004','表对应的路径不存在,请联系您的数据管理人员','Path does not exist: viewfs',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('41005','文件%s不存在','Caused by:\\s*java.io.FileNotFoundException',0);
+
+    -- 42 sql
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42001','括号不匹配,请检查代码中括号是否前后匹配','extraneous input ''\\)''',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42002','非聚合函数%s必须写在group by中,请检查代码的group by语法','expression ''(\\S+)'' is neither present in the group by',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42002','非聚合函数%s必须写在group by中,请检查代码的group by语法','grouping expressions sequence is empty,\\s?and ''(\\S+)'' is not an aggregate function',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42002','非聚合函数%s必须写在group by中,请检查代码的group by语法','Expression not in GROUP BY key ''(\\S+)''',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42003','未知函数%s,请检查代码中引用的函数是否有误','Undefined function: ''(\\S+)''',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42003','未知函数%s,请检查代码中引用的函数是否有误','Invalid function ''(\\S+)''',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42004','字段%s存在名字冲突,请检查子查询内是否有同名字段','Reference ''(\\S+)'' is ambiguous',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42004','字段%s存在名字冲突,请检查子查询内是否有同名字段','Ambiguous column Reference ''(\\S+)'' in subquery',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42005','字段%s必须指定表或者子查询别名,请检查该字段来源','Column ''(\\S+)'' Found in more than One Tables/Subqueries',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42006','表%s在数据库%s中已经存在,请删除相应表后重试','Table or view ''(\\S+)'' already exists in database ''(\\S+)''',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42006','表%s在数据库中已经存在,请删除相应表后重试','Table (\\S+) already exists',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42006','表%s在数据库中已经存在,请删除相应表后重试','Table already exists',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42006','表%s在数据库中已经存在,请删除相应表后重试','AnalysisException: (S+) already exists',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42007','插入目标表字段数量不匹配,请检查代码!','requires that the data to be inserted have the same number of columns as the target table',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42008','数据类型不匹配,请检查代码!','due to data type mismatch: differing types in',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42009','字段%s引用有误,请检查字段是否存在!','Invalid column reference (S+)',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42010','字段%s提取数据失败','Can''t extract value from (S+): need',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42011','括号或者关键字不匹配,请检查代码!','mismatched input ''(\\S+)'' expecting',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42012','group by 位置2不在select列表中,请检查代码!','GROUP BY position (S+) is not in select list',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42013','字段提取数据失败请检查字段类型','Can''t extract value from (S+): need struct type but got string',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42014','插入数据未指定目标表字段%s,请检查代码!','Cannot insert into target table because column number/types are different ''(S+)''',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42015','表别名%s错误,请检查代码!','Invalid table alias ''(\\S+)''',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42016','UDF函数未指定参数,请检查代码!','UDFArgumentException Argument expected',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42017','聚合函数%s不能写在group by 中,请检查代码!','aggregate functions are not allowed in GROUP BY',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42018','您的代码有语法错误,请您修改代码之后执行','SemanticException Error in parsing',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42019','表不存在,请检查引用的表是否有误','table not found',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42020','函数使用错误,请检查您使用的函数方式','No matching method',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42021','您的sql代码可能有语法错误,请检查sql代码','FAILED: ParseException',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42022','您的sql代码可能有语法错误,请检查sql代码','org.apache.spark.sql.catalyst.parser.ParseException',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42022','您的sql代码可能有语法错误,请检查sql代码','org.apache.hadoop.hive.ql.parse.ParseException',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42023','聚合函数不能嵌套','aggregate function in the argument of another aggregate function',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42024','聚合函数不能嵌套','aggregate function parameters overlap with the aggregation',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42025','union 的左右查询字段不一致','Union can only be performed on tables',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42025','hql报错,union 的左右查询字段不一致','both sides of union should match',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42025','union左表和右表类型不一致','on first table and type',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42026','您的建表sql不能推断出列信息','Unable to infer the schema',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42027','动态分区的严格模式需要指定列,您可用通过设置set hive.exec.dynamic.partition.mode=nostrict','requires at least one static partition',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42028','函数输入参数有误','Invalid number of arguments for function',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42029','sql语法报错,select * 与group by无法一起使用','not allowed in select list when GROUP BY  ordinal',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42030','where/having子句之外不支持引用外部查询的表达式','the outer query are not supported outside of WHERE',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42031','sql语法报错,group by 后面不能跟一个表','show up in the GROUP BY list',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42032','hql报错,窗口函数中的字段重复','check for circular dependencies',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42033','sql中出现了相同的字段','Found duplicate column',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42034','sql语法不支持','not supported in current context',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42035','hql语法报错,嵌套子查询语法问题','Unsupported SubQuery Expression',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42036','hql报错,子查询中in 用法有误','in definition of SubQuery',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43037','表字段类型修改导致的转型失败,请联系修改人员','cannot be cast to',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43038','select 的表可能有误','Invalid call to toAttribute on unresolved object',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43039','语法问题,请检查脚本','Distinct window functions are not supported',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43040','Presto查询一定要指定数据源和库信息','Schema must be specified when session schema is not set',0);
+
+    -- 43 python
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43001','代码中存在NoneType空类型变量,请检查代码','''NoneType'' object',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43002','数组越界','IndexError:List index out of range',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43003','您的代码有语法错误,请您修改代码之后执行','SyntaxError',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43004','python代码变量%s未定义','name ''(S+)'' is not defined',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43005','python udf %s 未定义','Undefined function:s+''(S+)''',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43006','python执行不能将%s和%s两种类型进行连接','cannot concatenate ''(S+)'' and ''(S+)''',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43007','pyspark执行失败,可能是语法错误或stage失败','Py4JJavaError: An error occurred',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43008','python代码缩进对齐有误','unexpected indent',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43009','python代码缩进有误','unexpected indent',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43010','python代码反斜杠后面必须换行','unexpected character after line',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43011','导出Excel表超过最大限制1048575','Invalid row number',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43012','python save as table未指定格式,默认用parquet保存,hive查询报错','parquet.io.ParquetDecodingException',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43013','索引使用错误','IndexError',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43014','sql语法有问题','raise ParseException',0);
+
+    -- 46 importExport
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46001','找不到导入文件地址:%s','java.io.FileNotFoundException: (\\S+) \\(No such file or directory\\)',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46002','导出为excel时临时文件目录权限异常','java.io.IOException: Permission denied(.+)at org.apache.poi.xssf.streaming.SXSSFWorkbook.createAndRegisterSXSSFSheet',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46003','导出文件时无法创建目录:%s','java.io.IOException: Mkdirs failed to create (\\S+) (.+)',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46004','导入模块错误,系统没有%s模块,请联系运维人员安装','ImportError: No module named (S+)',0);
+
+    -- 91 wtss
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('91001','找不到变量值,请确认您是否设置相关变量','not find variable substitution for',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('91002','不存在的代理用户,请检查你是否申请过平台层(bdp或者bdap)用户','failed to change current working directory ownership',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('91003','请检查提交用户在WTSS内是否有该代理用户的权限,代理用户中是否存在特殊字符,是否用错了代理用户,OS层面是否有该用户,系统设置里面是否设置了该用户为代理用户','没有权限执行当前任务',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('91004','平台层不存在您的执行用户,请在ITSM申请平台层(bdp或者bdap)用户','使用chown命令修改',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('91005','未配置代理用户,请在ITSM走WTSS用户变更单,为你的用户授权改代理用户','请联系系统管理员为您的用户添加该代理用户',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('91006','您的用户初始化有问题,请联系管理员','java: No such file or directory',0);
+    INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('91007','JobServer中不存在您的脚本文件,请将你的脚本文件放入对应的JobServer路径中', 'Could not open input file for reading%does not exist',0);
+
+    -- ----------------------------
+    -- Default Tokens
+    -- ----------------------------
+    REPLACE INTO `linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`) VALUES ('QML-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+    REPLACE INTO `linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`) VALUES ('BML-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+    REPLACE INTO `linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`) VALUES ('WS-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+    REPLACE INTO `linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`) VALUES ('dss-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+    REPLACE INTO `linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`) VALUES ('QUALITIS-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+    REPLACE INTO `linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`) VALUES ('VALIDATOR-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+    REPLACE INTO `linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`) VALUES ('LINKISCLI-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+    REPLACE INTO `linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`) VALUES ('DSM-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+    REPLACE INTO `linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`) VALUES ('LINKIS_CLI_TEST','*','*','BDP',curdate(),curdate(),-1,'LINKIS');
+
+    INSERT INTO `linkis_ps_dm_datasource_type` (`name`, `description`, `option`, `classifier`, `icon`, `layers`) VALUES ('mysql', 'mysql数据库', 'mysql数据库', '关系型数据库', '', 3);
+    INSERT INTO `linkis_ps_dm_datasource_type` (`name`, `description`, `option`, `classifier`, `icon`, `layers`) VALUES ('kafka', 'kafka', 'kafka', '消息队列', '', 2);
+    INSERT INTO `linkis_ps_dm_datasource_type` (`name`, `description`, `option`, `classifier`, `icon`, `layers`) VALUES ('presto', 'presto SQL', 'presto', '大数据存储', '', 3);
+    INSERT INTO `linkis_ps_dm_datasource_type` (`name`, `description`, `option`, `classifier`, `icon`, `layers`) VALUES ('hive', 'hive数据库', 'hive', '大数据存储', '', 3);
+    INSERT INTO `linkis_ps_dm_datasource_type` (`name`, `description`, `option`, `classifier`, `icon`, `layers`) VALUES ('mongodb', 'default', 'default', 'DEFAULT', NULL, 3);
+
+    INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (1, 'host', '主机名(Host)', 'Host', NULL, 'TEXT', NULL, 1, '主机名(Host)', 'Host', NULL, NULL, NULL, NULL,  now(), now());
+    INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (1, 'port', '端口号(Port)','Port', NULL, 'TEXT', NULL, 1, '端口号(Port)','Port', NULL, NULL, NULL, NULL,  now(), now());
+    INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (1, 'driverClassName', '驱动类名(Driver class name)', 'Driver class name', 'com.mysql.jdbc.Driver', 'TEXT', NULL, 1, '驱动类名(Driver class name)', 'Driver class name', NULL, NULL, NULL, NULL,  now(), now());
+    INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (1, 'params', '连接参数(Connection params)', 'Connection params', NULL, 'TEXT', NULL, 0, '输入JSON格式(Input JSON format): {"param":"value"}', 'Input JSON format: {"param":"value"}', NULL, NULL, NULL, NULL,  now(), now());
+    INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (1, 'username', '用户名(Username)', 'Username', NULL, 'TEXT', NULL, 1, '用户名(Username)', 'Username', '^[0-9A-Za-z_-]+$', NULL, NULL, NULL,  now(), now());
+    INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (1, 'password', '密码(Password)', 'Password', NULL, 'PASSWORD', NULL, 1, '密码(Password)', 'Password', '', NULL, NULL, NULL,  now(), now());
+    INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (1, 'databaseName', '数据库名(Database name)', 'Database name', NULL, 'TEXT', NULL, 0, '数据库名(Database name)', 'Database name', NULL, NULL, NULL, NULL,  now(), now());
+    INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (4, 'envId', '集群环境(Cluster env)', 'Cluster env', NULL, 'SELECT', NULL, 1, '集群环境(Cluster env)', 'Cluster env', NULL, NULL, NULL, '/data-source-manager/env-list/all/type/4', now(), now());
+
+    INSERT INTO `linkis_ps_dm_datasource_env` (`env_name`, `env_desc`, `datasource_type_id`, `parameter`, `create_time`, `create_user`, `modify_time`, `modify_user`) VALUES ('测试环境SIT', '测试环境SIT', 4, '{"uris":"thrift://localhost:9083", "hadoopConf":{"hive.metastore.execute.setugi":"true"}}',  now(), NULL,  now(), NULL);
+    INSERT INTO `linkis_ps_dm_datasource_env` (`env_name`, `env_desc`, `datasource_type_id`, `parameter`, `create_time`, `create_user`, `modify_time`, `modify_user`) VALUES ('测试环境UAT', '测试环境UAT', 4, '{"uris":"thrift://localhost:9083", "hadoopConf":{"hive.metastore.execute.setugi":"true"}}',  now(), NULL,  now(), NULL);
+
diff --git a/linkis-dist/package/db/linkis_ddl.sql b/linkis-dist/package/db/linkis_ddl.sql
index 8fe51ceef..568b055ff 100644
--- a/linkis-dist/package/db/linkis_ddl.sql
+++ b/linkis-dist/package/db/linkis_ddl.sql
@@ -670,6 +670,7 @@ CREATE TABLE `linkis_cg_manager_service_instance` (
   `name` varchar(32) COLLATE utf8_bin DEFAULT NULL,
   `owner` varchar(32) COLLATE utf8_bin DEFAULT NULL,
   `mark` varchar(32) COLLATE utf8_bin DEFAULT NULL,
+  `identifier` varchar(32) COLLATE utf8_bin DEFAULT NULL,
   `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
   `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
   `updator` varchar(32) COLLATE utf8_bin DEFAULT NULL,
diff --git a/linkis-dist/package/db/module/linkis_manager.sql b/linkis-dist/package/db/module/linkis_manager.sql
index 1a404af9b..c3128633e 100644
--- a/linkis-dist/package/db/module/linkis_manager.sql
+++ b/linkis-dist/package/db/module/linkis_manager.sql
@@ -23,6 +23,7 @@ CREATE TABLE `linkis_cg_manager_service_instance` (
   `name` varchar(32) COLLATE utf8_bin DEFAULT NULL,
   `owner` varchar(32) COLLATE utf8_bin DEFAULT NULL,
   `mark` varchar(32) COLLATE utf8_bin DEFAULT NULL,
+  `identifier` varchar(32) COLLATE utf8_bin DEFAULT NULL,
   `update_time` datetime DEFAULT CURRENT_TIMESTAMP,
   `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
   `updator` varchar(32) COLLATE utf8_bin DEFAULT NULL,
diff --git a/linkis-dist/package/sbin/ext/linkis-cg-linkismanager b/linkis-dist/package/sbin/ext/linkis-cg-linkismanager
index 7a3849534..9a3cd3317 100644
--- a/linkis-dist/package/sbin/ext/linkis-cg-linkismanager
+++ b/linkis-dist/package/sbin/ext/linkis-cg-linkismanager
@@ -22,7 +22,7 @@ export SERVER_SUFFIX="linkis-computation-governance/linkis-cg-linkismanager"
 
 export SERVER_HEAP_SIZE="1G"
 
-export SERVER_CLASS=org.apache.linkis.manager.am.LinkisManagerApplication
+export SERVER_CLASS=org.apache.linkis.manager.LinkisManagerApplication
 
 if test -z "$MANAGER_HEAP_SIZE"
   then


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org