You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@linkis.apache.org by pe...@apache.org on 2022/03/05 09:04:13 UTC

[incubator-linkis] branch dev-1.1.1 updated (8011773 -> e311d0d)

This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a change to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git.


    from 8011773  remove message XmlRootElement annotation #1607
     new b9ed5b8  1. linkis-resource-manager - Refactor YarnResourceRequester to support high-available address of resourcemanager 2. linkis-manager-common - Change default yarn name from 'sit' to 'default'
     new 97d1c7b  1. linkis-computation-engineconn - Add support for computation-engineconn to query running task status from entrance. 2. linkis-computation-engineconn - Add some tests.
     new e1f2ebf  1. linkis-accessible-executor - change default engine free time from '30m' to '1h'
     new 7fb89c3  1. linkis-computation-client - add some method of datasource module in sdk api
     new a55e877  1. linkis-engineconn-manager - add core dump save for engineconn
     new 294da24  1. linkis-manager-common - refactor equal method in PersistenceLabel
     new 29025eb  1. linkis-module - refactor USER_TICKET_ID_STRING in cookie 2. linkis-scheduler - add interface getJobListener 3. linkis-storage - refactor method toValue to fix decimal bug 4. linkis-httpclient - refactor method execute to handle parse exception for response
     new 0aadd69  1. linkis-entrance - add heart-beat support for  linkis-cli task
     new f42346a  1. linkis-engineplugin-spark - fix kill task bug
     new 7eddfb6  1. linkis-instance-label-server - fix equal method 2. linkis-jobhistory - fix job time
     new 9a27159  1. linkis-install-package - refactor install scripts 2. linkis-package - refactor properties
     new 1cb0fb6  1. linkis-computation-engineconn - fix dependency for junit
     new 8124f4a  Add missing license header.
     new a18497d  fix some import.
     new f409fc8  1. reformat some code. 2. fix some config.
     new a9a9767  Support to remove the limit #1573
     new 92a8536  fix:HttpMessageNotWritableException about x-msdownload
     new e311d0d  optimize license

The 18 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 .../assembly-combined/bin/linkis-cli               |  12 +-
 .../assembly-combined/bin/linkis-cli-hive          |  76 ++--
 .../assembly-combined/bin/linkis-cli-pre           | 118 ++++++
 .../assembly-combined/bin/linkis-cli-spark-sql     |  40 +-
 .../assembly-combined/bin/linkis-cli-spark-submit  |  38 +-
 .../assembly-combined/bin/linkis-cli-sqoop         | 433 +++++++++++++++++++++
 .../assembly-combined/conf/application-eureka.yml  |   9 +-
 .../conf/{log4j2.xml => log4j2-console.xml}        |  26 +-
 .../deploy-config/linkis-env.sh                    |   3 +-
 .../linkis/httpclient/AbstractHttpClient.scala     |  13 +-
 .../linkis/server/conf/ServerConfiguration.scala   |   2 +-
 .../org/apache/linkis/scheduler/queue/Job.scala    |   2 +
 .../linkis/storage/csv/StorageCSVWriter.scala      |   4 +-
 .../apache/linkis/storage/domain/DataType.scala    |  14 +-
 .../linkis/storage/excel/StorageExcelWriter.scala  |   8 +-
 .../apache/linkis/storage/source/FileSplit.scala   |   4 +-
 .../client/once/LinkisManagerClient.scala          |   2 +-
 .../org/apache/linkis/ujes/client/UJESClient.scala |   8 +
 ...scala => GetPartitionStatisticInfoAction.scala} |  37 +-
 ...nsAction.scala => GetTableBaseInfoAction.scala} |  40 +-
 ...scala => GetPartitionStatisticInfoResult.scala} |  19 +-
 ...nsResult.scala => GetTableBaseInfoResult.scala} |  15 +-
 .../apache/linkis/ecm/core/conf/ECPCoreConf.scala  |  14 +-
 .../core/launch/ProcessEngineCommandBuilder.scala  |   6 +-
 .../server/operator/EngineConnLogOperator.scala    |  14 +-
 .../impl/ProcessEngineConnLaunchService.scala      |   6 +-
 .../linkis-computation-engineconn/pom.xml          |   6 -
 .../executor/conf/ComputationExecutorConf.scala    |   5 +
 .../executor/execute/ComputationExecutor.scala     |   2 +
 .../service/TaskExecutionServiceImpl.scala         |  15 +-
 .../executor/upstream/ECTaskEntranceMonitor.scala  |  57 +++
 .../SingleThreadUpstreamConnectionMonitor.scala    | 149 +++++++
 .../upstream/UpstreamConnectionMonitor.scala       |  14 +-
 .../upstream/access/ConnectionInfoAccess.scala     |  12 +-
 .../access/ConnectionInfoAccessRequest.scala       |   5 +-
 .../upstream/access/ECTaskEntranceInfoAccess.scala | 119 ++++++
 .../access/ECTaskEntranceInfoAccessRequest.scala}  |   9 +-
 .../upstream/entity/ECTaskEntranceConnection.scala |  56 +++
 .../upstream/entity/UpstreamConnection.scala       |  21 +-
 ...TaskStatusChangedForUpstreamMonitorEvent.scala} |  12 +-
 .../upstream/handler/ECTaskKillHandler.scala       |  68 ++++
 .../handler/ECTaskKillHandlerRequest.scala}        |  10 +-
 .../executor/upstream/handler/MonitorHandler.scala |  17 +-
 .../upstream/handler/MonitorHandlerRequest.scala   |   6 +-
 ...kStatusChangedForUpstreamMonitorListener.scala} |  17 +-
 .../service/ECTaskEntranceMonitorService.scala     |  68 ++++
 .../upstream/wrapper/ConnectionInfoWrapper.scala}  |  19 +-
 .../wrapper/ECTaskEntranceConnectionWrapper.scala  |  64 +++
 .../executor/utlis/ComputationErrorCode.scala      |   7 +
 .../access/ECTaskEntranceInfoAccessHelper.scala    |  81 ++++
 ...ion.java => CliHeartBeatMonitorAnnotation.java} |  10 +-
 .../entrance/conf/EntranceSpringConfiguration.java |  19 +-
 .../entrance/exception/EntranceErrorCode.java      |   4 +-
 .../linkis/entrance/job/EntranceExecutionJob.java  |   4 +
 .../persistence/QueryPersistenceManager.java       |  13 +
 .../entrance/restful/EntranceRestfulApi.java       |   3 +
 .../apache/linkis/entrance/EntranceServer.scala    |   6 +
 .../cli/heartbeat/CliHeartbeatMonitor.scala        | 136 +++++++
 .../cli/heartbeat/HeartbeatLossHandler.scala       |   7 +-
 .../entrance/cli/heartbeat/KillHandler.scala}      |  30 +-
 .../entrance/conf/EntranceConfiguration.scala      |  12 +-
 .../linkis/entrance/execute/EntranceJob.scala      |  17 +-
 .../impl/SQLLimitEntranceInterceptor.scala         |   9 +
 .../linkis/entrance/log/CacheLogWriter.scala       |   2 +-
 .../entity/persistence/PersistenceLabel.java       |   7 +-
 .../manager/common/conf/RMConfiguration.scala      |   5 +-
 .../request/ExternalResourceRequester.java         |   2 +
 .../external/service/ExternalResourceService.java  |   4 +
 .../service/impl/ExternalResourceServiceImpl.java  |  29 +-
 .../external/yarn/YarnResourceRequester.scala      |  76 +++-
 .../service/impl/DefaultResourceManager.scala      |   1 -
 .../engineplugin/spark/executor/SQLSession.scala   |  32 +-
 .../spark/executor/SparkEngineConnExecutor.scala   |   8 +-
 .../execution/datasources/csv/DolphinToSpark.scala |  15 +-
 .../linkis/bml/client/impl/HttpBmlClient.scala     |  61 ++-
 .../instance/label/entity/InsPersistenceLabel.java |   2 +-
 .../jobhistory/conversions/TaskConversions.scala   |  16 +-
 pom.xml                                            |   1 +
 78 files changed, 1956 insertions(+), 377 deletions(-)
 create mode 100755 assembly-combined-package/assembly-combined/bin/linkis-cli-pre
 create mode 100755 assembly-combined-package/assembly-combined/bin/linkis-cli-sqoop
 copy assembly-combined-package/assembly-combined/conf/{log4j2.xml => log4j2-console.xml} (67%)
 copy linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/{GetTableStatisticInfoAction.scala => GetPartitionStatisticInfoAction.scala} (63%)
 copy linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/{GetColumnsAction.scala => GetTableBaseInfoAction.scala} (58%)
 copy linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/{GetColumnsResult.scala => GetPartitionStatisticInfoResult.scala} (78%)
 copy linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/{GetColumnsResult.scala => GetTableBaseInfoResult.scala} (80%)
 copy linkis-public-enhancements/linkis-bml/linkis-bml-engine-hook/src/main/scala/org/apache/linkis/bml/conf/BmlHookConf.scala => linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/conf/ECPCoreConf.scala (84%)
 create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/ECTaskEntranceMonitor.scala
 create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/SingleThreadUpstreamConnectionMonitor.scala
 copy linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/listener/task/ResourceReportListener.scala => linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/UpstreamConnectionMonitor.scala (61%)
 copy linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/listener/task/ResourceReportListener.scala => linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ConnectionInfoAccess.scala (69%)
 copy linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/main/scala/org/apache/linkis/gateway/authentication/bo/User.scala => linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ConnectionInfoAccessRequest.scala (87%)
 create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccess.scala
 copy linkis-computation-governance/linkis-engineconn/{linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/entity/ResourceFetchExecutor.scala => linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessRequest.scala} (70%)
 create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/entity/ECTaskEntranceConnection.scala
 copy linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/listener/task/ResourceReportListener.scala => linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/entity/UpstreamConnection.scala (63%)
 copy linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/{async/AsyncExecuteRequest.scala => upstream/event/TaskStatusChangedForUpstreamMonitorEvent.scala} (67%)
 create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala
 copy linkis-computation-governance/linkis-engineconn/{linkis-engineconn-executor/executor-core/src/main/scala/org/apache/linkis/engineconn/executor/entity/ResourceFetchExecutor.scala => linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandlerRequest.scala} (70%)
 copy linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/computation/catalyst/validator/DefaultLabelRegularCheckRuler.scala => linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/MonitorHandler.scala (80%)
 copy linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/main/scala/org/apache/linkis/gateway/authentication/bo/User.scala => linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/MonitorHandlerRequest.scala (88%)
 copy linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/{listener/TaskProgressListener.scala => upstream/listener/TaskStatusChangedForUpstreamMonitorListener.scala} (69%)
 create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/service/ECTaskEntranceMonitorService.scala
 copy linkis-computation-governance/{linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/execution/jobexec/JobExec.java => linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/wrapper/ConnectionInfoWrapper.scala} (67%)
 create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/wrapper/ECTaskEntranceConnectionWrapper.scala
 create mode 100644 linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessHelper.scala
 copy linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/annotation/{ConsumerManagerBeanAnnotation.java => CliHeartBeatMonitorAnnotation.java} (87%)
 create mode 100644 linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cli/heartbeat/CliHeartbeatMonitor.scala
 copy linkis-commons/linkis-module/src/main/java/org/apache/linkis/proxy/ProxyUserService.java => linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cli/heartbeat/HeartbeatLossHandler.scala (82%)
 copy linkis-computation-governance/{linkis-manager/label-common/src/main/java/org/apache/linkis/manager/label/entity/CloneableLabel.java => linkis-entrance/src/main/scala/org/apache/linkis/entrance/cli/heartbeat/KillHandler.scala} (59%)

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 03/18: 1. linkis-accessible-executor - change default engine free time from '30m' to '1h'

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit e1f2ebfbe33b26f0a77e2c368f24af96be80ec8c
Author: alexkun <xu...@qq.com>
AuthorDate: Fri Mar 4 14:43:38 2022 +0800

    1. linkis-accessible-executor - change default engine free time from '30m' to '1h'
---
 .../acessible/executor/conf/AccessibleExecutorConfiguration.scala       | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala
index dff9b79..9380f13 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala
@@ -35,7 +35,7 @@ object AccessibleExecutorConfiguration {
   val ENGINECONN_LOG_SEND_SIZE = CommonVars[Int]("wds.linkis.engineconn.log.send.cache.size", 300)
 
 
-  val ENGINECONN_MAX_FREE_TIME = CommonVars("wds.linkis.engineconn.max.free.time", new TimeType("30m"))
+  val ENGINECONN_MAX_FREE_TIME = CommonVars("wds.linkis.engineconn.max.free.time", new TimeType("1h"))
 
   val ENGINECONN_LOCK_CHECK_INTERVAL = CommonVars("wds.linkis.engineconn.lock.free.interval", new TimeType("3m"))
 

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 11/18: 1. linkis-install-package - refactor install scripts 2. linkis-package - refactor properties

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit 9a27159b7ec506d23d7c66a1eb0365b4352f9a59
Author: alexkun <xu...@qq.com>
AuthorDate: Fri Mar 4 19:56:40 2022 +0800

    1. linkis-install-package - refactor install scripts
    2. linkis-package - refactor properties
---
 .../assembly-combined/bin/linkis-cli               |  96 ++---
 .../assembly-combined/bin/linkis-cli-hive          |  76 ++--
 .../assembly-combined/bin/linkis-cli-pre           | 119 ++++++
 .../assembly-combined/bin/linkis-cli-spark-sql     |  40 +-
 .../assembly-combined/bin/linkis-cli-spark-submit  |  38 +-
 .../assembly-combined/bin/linkis-cli-sqoop         | 433 +++++++++++++++++++++
 .../assembly-combined/conf/application-eureka.yml  |   9 +-
 .../conf/linkis-cg-engineconnmanager.properties    |  12 +-
 .../conf/linkis-cg-engineplugin.properties         |   2 +
 .../conf/linkis-cg-entrance.properties             |  16 +-
 .../conf/linkis-cg-linkismanager.properties        |   7 +-
 .../conf/linkis-mg-gateway.properties              |   6 +-
 .../assembly-combined/conf/linkis-ps-cs.properties |   1 +
 .../conf/linkis-ps-data-source-manager.properties  |   6 +-
 .../conf/linkis-ps-publicservice.properties        |  17 +
 .../assembly-combined/conf/linkis.properties       |  24 +-
 .../conf/{log4j2.xml => log4j2-console.xml}        |  39 +-
 .../assembly-combined/conf/log4j2.xml              |   6 +
 .../assembly-combined/conf/token.properties        |   4 +-
 .../src/main/assembly/distribution.xml             |   1 +
 .../assembly-combined/sbin/common.sh               |   3 +-
 .../assembly-combined/sbin/ext/linkis-common-start |   4 +-
 .../assembly-combined/sbin/ext/linkis-mg-eureka    |  12 +-
 .../assembly-combined/sbin/ext/linkis-mg-gateway   |   6 +-
 .../sbin/ext/linkis-ps-metadatamanager             |   9 +-
 .../assembly-combined/sbin/linkis-daemon.sh        |   2 +-
 .../assembly-combined/sbin/linkis-start-all.sh     |   2 +-
 .../assembly-combined/sbin/linkis-stop-all.sh      |  33 +-
 .../src/main/assembly/assembly.xml                 |  65 +++-
 assembly-combined-package/bin/checkEnv.sh          |   1 +
 assembly-combined-package/bin/install.sh           | 101 +++--
 .../deploy-config/linkis-env.sh                    |   9 +-
 32 files changed, 976 insertions(+), 223 deletions(-)

diff --git a/assembly-combined-package/assembly-combined/bin/linkis-cli b/assembly-combined-package/assembly-combined/bin/linkis-cli
index 7f6d20c..dbf3392 100644
--- a/assembly-combined-package/assembly-combined/bin/linkis-cli
+++ b/assembly-combined-package/assembly-combined/bin/linkis-cli
@@ -13,57 +13,43 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-
-#set -x
 export LANG=en_US.utf-8
-LINKIS_CLIENT='org.apache.linkis.cli.application.LinkisClientApplication'
-
-
-## color
-RED='\033[0;31m'
-NC='\033[0m' # No Color
-GREEN='\033[0;32m'
-#used as: echo -e "Apache ${RED}Linkis ${NC} Test \n"
+#set -x
 
+LINKIS_CLIENT='com.webank.wedatasphere.linkis.cli.application.LinkisClientApplication'
 
 i=0
 for arg in "$@"
 do
-        args[i]=${arg}
-        input_args[i]=${arg}
+        ARG[i]=${arg}
         ((i++))
 done
 
 
 #===============================================
-# finf java_home
+# find java_home
 #===============================================
 locate_java_home() {
+  local JAVA8_HOME_CANDIDATES='\
+    /usr/java/jdk1.8* \
+    /nemo/jdk1.8*'
+
+  JAVA_HOME_CANDIDATES="$JAVA8_HOME_CANDIDATES"
+
+  # attempt to find java 8
   flag=""
-  if [[ -e "$JAVA_HOME" && -e $JAVA_HOME/bin/java ]]; then
-    flag="true"
-  else
-    local JAVA8_HOME_CANDIDATES='\
-      /usr/java/jdk1.8* \
-      /nemo/jdk1.8*'
-
-    JAVA_HOME_CANDIDATES="$JAVA8_HOME_CANDIDATES"
-
-    # attempt to find java 8
-
-    for candidate_regex in $JAVA_HOME_CANDIDATES ; do
-        for candidate in `ls -rd $candidate_regex 2>/dev/null`; do
-          if [ -e $candidate/bin/java ]; then
-            export JAVA_HOME=$candidate
-            flag="true"
-            break 2
-          fi
-        done
-    done
-  fi
+  for candidate_regex in $JAVA_HOME_CANDIDATES ; do
+      for candidate in `ls -rd $candidate_regex 2>/dev/null`; do
+        if [ -e $candidate/bin/java ]; then
+          export JAVA_HOME=$candidate
+          flag="true"
+          break 2
+        fi
+      done
+  done
 
   if [ -z "$flag" ]; then
-    echo -e "${RED}No JDK 8 found. linkis-client requires Java 1.8${NC}" 1>&2
+    echo -e "\033[0;31;40mNo JDK 8 found. linkis-client requires Java 1.8\033[0m" 1>&2
     exit 1
   fi
 
@@ -85,19 +71,19 @@ EOF
 
 
 function call_linkis_client() {
-        current_dir=`pwd`
-        workdir=`dirname "$0"`/../
-        workdir=`cd ${workdir};pwd`
-        cd ${current_dir}
-
-        LINKIS_DEPLOY_LIB_DIR='lib/linkis-computation-governance/linkis-client/linkis-cli/'
-        LINKIS_COMMON_LIB_DIR='lib/linkis-commons/public-module/'
-        LINKIS_DEPLOY_CONF_DIR='conf/linkis-cli'
-        LINKIS_DEPLOY_LOG_DIR='logs/linkis-cli'
-        LINKIS_CLIENT_CLASSPATH=${workdir}/${LINKIS_DEPLOY_CONF_DIR}:${workdir}/${LINKIS_DEPLOY_LIB_DIR}*:${workdir}/${LINKIS_COMMON_LIB_DIR}*:${CLASSPATH}
-        LINKIS_CLIENT_LOG_DIR=${LINKIS_CLIENT_LOG_DIR:-"${workdir}/${LINKIS_DEPLOY_LOG_DIR}"}
-        LINKIS_CLIENT_CONF_DIR=${LINKIS_CLIENT_CONF_DIR:-"${workdir}/${LINKIS_DEPLOY_CONF_DIR}"}
-        LINKIS_CLIENT_CONF_FILE=${LINKIS_CLIENT_CONF_FILE:-"linkis-cli.properties"}
+
+        LINKIS_DEPLOY_SUB_DIR='/linkis-computation-governance/linkis-client/linkis-cli/'
+        LINKIS_CLIENT_LOG_DIR="/appcom/logs/linkis-cli"
+        if [ ! -d $LINKIS_CLIENT_LOG_DIR ];then
+            LINKIS_CLIENT_LOG_DIR="${WORK_DIR}/logs"
+        fi
+        LINKIS_CLIENT_CONF_DIR="/appcom/config/linkisCli-config"
+        if [ ! -d $LINKIS_CLIENT_CONF_DIR ];then
+            LINKIS_CLIENT_CONF_DIR="${WORK_DIR}/conf/${LINKIS_DEPLOY_SUB_DIR}"
+        fi
+        LINKIS_CLIENT_CONF_FILES=${LINKIS_CLIENT_CONF_FILES:-"linkis-cli.properties"}
+
+        LINKIS_CLIENT_CLASSPATH="${LINKIS_CLIENT_CONF_DIR}:${WORK_DIR}/lib/${LINKIS_DEPLOY_SUB_DIR}*"
 
         LINKIS_CLIENT_HEAP_OPTS="-server -Xms32m -Xmx2048m -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=${LINKIS_CLIENT_LOG_DIR} -XX:ErrorFile=${LINKIS_CLIENT_LOG_DIR}/ps_err_pid%p.log"
         LINKIS_CLIENT_GC_OPTS="-XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=80 -XX:+DisableExplicitGC"
@@ -105,13 +91,17 @@ function call_linkis_client() {
         #DEBUG_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=5005"
         LINKIS_CLIENT_OPTS=${LINKIS_CLIENT_OPTS:-" ${DEBUG_OPTS} "}
 
-
-        echo  "=====Java Start Command====="
-        echo "exec ${JAVA} ${LINKIS_CLIENT_HEAP_OPTS} ${LINKIS_CLIENT_GC_OPTS} ${LINKIS_CLIENT_OPTS} -classpath ${LINKIS_CLIENT_CLASSPATH} -Dconf.root=${LINKIS_CLIENT_CONF_DIR} -Dconf.file=${LINKIS_CLIENT_CONF_FILE} ${LINKIS_CLIENT_LOG_OPTS}  ${LINKIS_CLIENT} '${input_args[@]}'"
-
-        exec ${JAVA} ${LINKIS_CLIENT_HEAP_OPTS} ${LINKIS_CLIENT_GC_OPTS} ${LINKIS_CLIENT_OPTS} -classpath ${LINKIS_CLIENT_CLASSPATH} -Dconf.root=${LINKIS_CLIENT_CONF_DIR} -Dconf.file=${LINKIS_CLIENT_CONF_FILE} ${LINKIS_CLIENT_LOG_OPTS}  ${LINKIS_CLIENT} "${input_args[@]}"
+        exec ${JAVA} ${LINKIS_CLIENT_HEAP_OPTS} ${LINKIS_CLIENT_GC_OPTS} ${LINKIS_CLIENT_OPTS} -classpath ${LINKIS_CLIENT_CLASSPATH} -Dconf.root=${LINKIS_CLIENT_CONF_DIR} ${LINKIS_CLIENT_LOG_OPTS}  ${LINKIS_CLIENT} "${ARG[@]}"
 }
 
+if [ -z $LINKIS_HOME ]; then
+  current_dir=`pwd`
+  work_dir=`dirname "$0"`/../
+  export WORK_DIR=`cd ${work_dir};pwd`
+  cd ${current_dir}/
+else
+  export WORK_DIR=$LINKIS_HOME
+fi
 
 locate_java_home
 JAVA=${JAVA_HOME}/bin/java
diff --git a/assembly-combined-package/assembly-combined/bin/linkis-cli-hive b/assembly-combined-package/assembly-combined/bin/linkis-cli-hive
index 5ea76ce..9bfc416 100644
--- a/assembly-combined-package/assembly-combined/bin/linkis-cli-hive
+++ b/assembly-combined-package/assembly-combined/bin/linkis-cli-hive
@@ -25,7 +25,7 @@ declare -A ord_opts=(\
         # ['--silent']=true \ #not supported
         # ['-v']=true \ #not supported
         # ['--verbose']=true \ #not supported
-        ['--hiveconf']='-confMap' \
+        # ['--hiveconf']='-confMap' \
         ['-d']='-varMap' \
         ['--hivevar']='-varMap' \
         #linkis opts
@@ -37,17 +37,14 @@ declare -A ord_opts=(\
 declare -A confMap_opts=(\
         #cli options
         # ['--database']='k_db' \
-)
-
-#options that should be put in varMap(for variable substitution)
-declare -A kv_var_opts=(\
-        ['-d']=true \
-        ['--hivevar']=true \
+        ['-v']='wds.linkis.hive.verbose' \
+        ['--verbose']='wds.linkis.hive.verbose' \
 )
 
 #k-v pairs style options
-declare -A kv_conf_opts=(\
-        ['--hiveconf']=true \
+declare -A confMap_kv_opts=(\
+        #cli options
+        ['--hiveconf']='-confMap' \
 )
 
 # for help
@@ -68,6 +65,8 @@ declare -A help_msg=(\
         ['--status']="--status" \
 )
 
+# LINKIS_HIVECONF_PREFIX='linkis.hiveconf.'
+
 function print_help() {
     printf "Usage:\n"
     for key in $(echo ${!help_msg[*]})
@@ -83,9 +82,9 @@ function print_help() {
                 else
                     printf " %-30s%-30s\n" "" "${msg: i}"
                 fi
-                
+
             done
-            
+
         fi
     done
 }
@@ -100,10 +99,6 @@ do
 done
 NUM_ARGS=$i
 
-CONFMAP_OPTS=""
-VARMAP_OPTS=""
-LABELMAP_OPTS=""
-REMAINS_STR=""
 declare -a PARSED_CMD
 j=0
 
@@ -123,28 +118,23 @@ function parse() {
                         PARSED_CMD[$j]=$lks_opt
                         PARSED_CMD[$j+1]=$val
                         ((j=j+2))
-                elif [ -n "${kv_conf_opts[${arg}]}" ]; then
-                        kv_str=$val
-                        if [ -n "${CONFMAP_OPT}" ]; then
-                            CONFMAP_OPT=$CONFMAP_OPT","$kv_str
-                        else
-                            CONFMAP_OPT=$kv_str
-                        fi
-                elif [ -n "${kv_var_opts[${arg}]}" ]; then
+                elif [ -n "${confMap_kv_opts[${arg}]}" ]; then
+                        key=${confMap_kv_opts[${arg}]}
+                        # if [ "${arg}"x == "--hiveconf"x ]; then
+                        #     kv_str=$LINKIS_HIVECONF_PREFIX$val
+                        # else
+                        #     kv_str=$val
+                        # fi
                         kv_str=$val
-                        if [ -n "${VARMAP_OPTS}" ]; then
-                            VARMAP_OPTS=$VARMAP_OPTS","$kv_str
-                        else
-                            VARMAP_OPTS=$kv_str
-                        fi
+                        PARSED_CMD[$j]='-confMap'
+                        PARSED_CMD[$j+1]=$kv_str
+                        ((j=j+2))
                 elif [ -n "${confMap_opts[${arg}]}" ]; then
                         key=${confMap_opts[${arg}]}
                         kv_str=$key"="$val
-                        if [ -n "${CONFMAP_OPT}" ]; then
-                            CONFMAP_OPT=$CONFMAP_OPT","$kv_str
-                        else
-                            CONFMAP_OPT=$kv_str   
-                        fi
+                        PARSED_CMD[$j]='-confMap'
+                        PARSED_CMD[$j+1]=$kv_str
+                        ((j=j+2))
                 else
                         PARSED_CMD[$j]=$arg
                         PARSED_CMD[$j+1]=$val
@@ -156,31 +146,21 @@ function parse() {
                 ((j++))
         fi
     done
-    if [ -n "${CONFMAP_OPT}" ]; then
-        PARSED_CMD[$j]="-confMap"
-        PARSED_CMD[$j+1]=$CONFMAP_OPT
-        ((j=j+2))
-    fi
-    if [ -n "${VARMAP_OPTS}" ]; then
-        PARSED_CMD[$j]="-varMap"
-        PARSED_CMD[$j+1]=$VARMAP_OPTS
-        ((j=j+2))
-    fi
 }
 
 
 current_dir=`pwd`
-if [ -n $WORK_DIR ]; then
-    work_dir=`dirname "$0"`/../
-    export WORK_DIR=`cd ${work_dir};pwd`
-fi
+work_dir=`dirname "$0"`/../
+export WORK_DIR=`cd ${work_dir};pwd`
 cd ${current_dir}/
+
 if (( NUM_ARGS == 0 )); then
     PARSED_CMD[$j]='--help'
     print_help
 else
     parse
 fi
-exec ${WORK_DIR}/bin/linkis-cli -engineType hive-1.2.1 -codeType hql  "${PARSED_CMD[@]}"
+
+exec ${WORK_DIR}/bin/linkis-cli-pre -engineType hive-2.3.3 -codeType hql  "${PARSED_CMD[@]}"
 
 
diff --git a/assembly-combined-package/assembly-combined/bin/linkis-cli-pre b/assembly-combined-package/assembly-combined/bin/linkis-cli-pre
new file mode 100755
index 0000000..647b919
--- /dev/null
+++ b/assembly-combined-package/assembly-combined/bin/linkis-cli-pre
@@ -0,0 +1,119 @@
+#!/bin/bash
+#
+# Copyright 2019 WeBank
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+#  you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+export LANG=en_US.utf-8
+# set -x
+
+declare -A map_opts=(\
+    ['-confMap']="true" \
+    ['-runtimeMap']="true" \
+    ['-varMap']="true" \
+    ['-labelMap']="true" \
+    ['-execMap']="true" \
+    ['-sourceMap']="true" \
+    ['-jobContentMap']="true" \
+)
+
+declare -A non_map_opts=(\
+    ['--gatewayUrl']="1st" \
+    ['--authStg']="1st" \
+    ['--authKey']="1st" \
+    ['--authVal']="1st" \
+    ['--userConf']="1st" \
+    ['--kill']="1st" \
+    ['--status']="1st" \
+    ['--help']="1st" \
+    ['--mode']="1st" \
+    ['-engineType']="1st" \
+    ['-codeType']="1st" \
+    ['-code']="1st" \
+    ['-codePath']="1st" \
+    ['-scriptPath']="1st" \
+    ['-submitUser']="1st" \
+    ['-proxyUser']="1st" \
+    ['-creator']="1st" \
+    ['-outPath']="1st" \
+)
+
+declare -a ARGS
+declare -a PARSED_CMD
+i=0
+for arg in "$@"
+do
+    ARGS[i]=${arg}
+    ((i++))
+done
+NUM_ARGS=$i
+
+function preparse() {
+    ((j=0))
+    for((i=0;i<NUM_ARGS;i++));
+    do
+    if [ $((${i}+1)) -lt ${NUM_ARGS} ]; then
+        opt=${ARGS[$i]}
+        val=${ARGS[$i+1]}
+        if [ -n "${non_map_opts[$opt]}" ]; then
+            if [ "${non_map_opts[$opt]}"x == "1st"x  ]; then
+                non_map_opts[$opt]=$((j+1))
+                PARSED_CMD[$j]=$opt
+                PARSED_CMD[$j+1]=$val
+                ((j=j+2))
+                ((i++))
+            elif [ $opt == "--help" ];then
+                PARSED_CMD[$j]=$opt
+                ((j++))
+            else
+                idx=non_map_opts[$opt]
+                PARSED_CMD[$idx]=$val
+                ((i++))
+            fi
+        elif [ -n "${map_opts[$opt]}" ]; then
+            PARSED_CMD[$j]=$opt
+            PARSED_CMD[$j+1]=$val
+            ((j=j+2))
+            ((i++))
+        else
+            for((k=i;k<NUM_ARGS;k++));
+            do
+                PARSED_CMD[$j]=${ARGS[$k]}
+                ((i++))
+                ((j++))
+            done
+        fi
+    else
+        PARSED_CMD[$j]=${ARGS[$i]}
+        ((j++))
+    fi
+    done
+}
+
+
+current_dir=`pwd`
+work_dir=`dirname "$0"`/../
+export WORK_DIR=`cd ${work_dir};pwd`
+cd ${current_dir}/
+
+if (( NUM_ARGS == 0 )); then
+    PARSED_CMD[$j]='--help'
+else
+    preparse
+fi
+
+exec ${WORK_DIR}/bin/linkis-cli "${PARSED_CMD[@]}"
+
+
+
diff --git a/assembly-combined-package/assembly-combined/bin/linkis-cli-spark-sql b/assembly-combined-package/assembly-combined/bin/linkis-cli-spark-sql
index c1600b5..4694dc4 100644
--- a/assembly-combined-package/assembly-combined/bin/linkis-cli-spark-sql
+++ b/assembly-combined-package/assembly-combined/bin/linkis-cli-spark-sql
@@ -25,7 +25,7 @@ declare -A ord_opts=(\
         # ['--silent']=true \ #not supported
         # ['-v']=true \ #not supported
         # ['--verbose']=true \ #not supported
-        ['--hiveconf']='-confMap' \
+        # ['--hiveconf']='-confMap' \ # treated in another way
         ['-d']='-varMap' \
         ['--hivevar']='-varMap' \
         #linkis opts
@@ -36,6 +36,14 @@ declare -A ord_opts=(\
 declare -A confMap_opts=(\
         #cli options
         # ['--database']='k_db' \
+        ['-v']='wds.linkis.hive.verbose' \
+        ['--verbose']='wds.linkis.hive.verbose' \
+)
+
+#options that should be put in confMap(startupMap) but in the form of kv-pairs
+declare -A confMap_kv_opts=(\
+        #cli options
+        ['--hiveconf']='-confMap' \
 )
 
 # for help
@@ -54,6 +62,8 @@ declare -A help_msg=(\
+        ['--hivevar']="Variable substitution to apply to hive commands. e.g. --hivevar A=B" \
 )
 
+SPARK_HADOOP_PREFIX='spark.hadoop.'
+
 function print_help() {
     printf "Usage:\n"
     for key in $(echo ${!help_msg[*]})
@@ -69,9 +79,9 @@ function print_help() {
                 else
                     printf " %-30s%-30s\n" "" "${msg: i}"
                 fi
-                
+
             done
-            
+
         fi
     done
 }
@@ -85,10 +95,6 @@ do
 done
 NUM_ARGS=$i
 
-CONFMAP_OPTS=""
-VARMAP_OPTS=""
-LABELMAP_OPTS=""
-REMAINS_STR=""
 declare -a PARSED_CMD
 j=0
 
@@ -108,6 +114,16 @@ function parse() {
                         PARSED_CMD[$j]=$lks_opt
                         PARSED_CMD[$j+1]=$val
                         ((j=j+2))
+                elif [ -n "${confMap_kv_opts[${arg}]}" ]; then
+                        key=${confMap_kv_opts[${arg}]}
+                        if [ "${arg}"x == "--hiveconf"x ]; then
+                            kv_str=$SPARK_HADOOP_PREFIX$val
+                        else
+                            kv_str=$val
+                        fi
+                        PARSED_CMD[$j]='-confMap'
+                        PARSED_CMD[$j+1]=$kv_str
+                        ((j=j+2))
                 elif [ -n "${confMap_opts[${arg}]}" ]; then
                         key=${confMap_opts[${arg}]}
                         kv_str=$key"="$val
@@ -128,15 +144,15 @@ function parse() {
 }
 
 current_dir=`pwd`
-if [ -n $WORK_DIR ]; then
-    work_dir=`dirname "$0"`/../
-    export WORK_DIR=`cd ${work_dir};pwd`
-fi
+work_dir=`dirname "$0"`/../
+export WORK_DIR=`cd ${work_dir};pwd`
 cd ${current_dir}/
+
 if (( NUM_ARGS == 0 )); then
     PARSED_CMD[$j]='--help'
     print_help
 else
     parse
 fi
-exec ${WORK_DIR}/bin/linkis-cli-spark-submit "${PARSED_CMD[@]}"
+
+exec ${WORK_DIR}/bin/linkis-cli-spark-submit  -codeType sql "${PARSED_CMD[@]}"
diff --git a/assembly-combined-package/assembly-combined/bin/linkis-cli-spark-submit b/assembly-combined-package/assembly-combined/bin/linkis-cli-spark-submit
index 5742566..5831890 100644
--- a/assembly-combined-package/assembly-combined/bin/linkis-cli-spark-submit
+++ b/assembly-combined-package/assembly-combined/bin/linkis-cli-spark-submit
@@ -14,8 +14,8 @@
 # limitations under the License.
 #
 
-# set -x
 export LANG=en_US.utf-8
+#set -x
 #ordinary options
 declare -A ord_opts=(\
         # ['--master']=true \ #not supported
@@ -39,10 +39,10 @@ declare -A ord_opts=(\
 #options that should be put in confMap(startupMap)
 declare -A confMap_opts=(\
         ['--name']='appName' \
-        ['--jars']='jars' \
-        ['--py-files']='spark.yarn.dist.files' \
+        ['--jars']='spark.jars' \
+        ['--py-files']='spark.submit.pyFiles' \
         ['--driver-memory']='spark.driver.memory' \
-        ['--driver-java-options']='spark.driver.extraJavaOptions' \
+        # ['--driver-java-options']='spark.driver.extraJavaOptions' \
         # ['--driver-library-path']='k3' \
         ['--driver-class-path']='wds.linkis.spark.driver.extra.class.path' \
         ['--executor-memory']='spark.executor.memory' \
@@ -57,6 +57,7 @@ declare -A confMap_opts=(\
         # ['--archives']='k10' \
         # ['--principal']=true \ #not supported
         # ['--keytab']=true \ #not supported
+        ['--verbose']='wds.linkis.spark.verbose' \
 )
 
 #options that should be put in varMap(for variable substitution)
@@ -85,8 +86,8 @@ declare -A help_msg=(\
         ['--kill']="If given, kills the linkis job specified" \
         ['--status']="If given, requests the status of the linkis job" \
         ['--driver-memory']="Memory for driver, unit: G (e.g. 2) " \
-        ['--driver-java-options']="Extra Java options to pass to the driver." \
-        #['--driver-library-path']="Extra library path entries to pass to the driver." \
+        # ['--driver-java-options']="Extra Java options to pass to the driver." \
+        # ['--driver-library-path']="Extra library path entries to pass to the driver." \
         ['--driver-class-path']="Extra class path entries to pass to the driver." \
         ['--conf']="Arbitrary Spark configuration property"
         ['--executor-memory']="Memory per executor, unit: G (e.g. 2)" \
@@ -135,17 +136,18 @@ do
 done
 NUM_ARGS=$i
 
-CONFMAP_OPTS=""
-VARMAP_OPTS=""
-LABELMAP_OPTS=""
-REMAINS_STR=""
 declare -a PARSED_CMD
 j=0
+IS_PYSPARK="false"
+IS_SCALA="false"
 
 function parse() {
     for((i=0;i<NUM_ARGS;i++));
     do
         arg=${ARGS[${i}]}
+        if [[ "$arg" =~ .*\.py$ ]] || [[ "$arg" =~ .*\.py3$ ]]; then
+            IS_PYSPARK="true"
+        fi
         if [ -n "${help_opts[${arg}]}" ]; then
             print_help
             PARSED_CMD[$j]=${help_opts[${arg}]}
@@ -178,15 +180,21 @@ function parse() {
 }
 
 current_dir=`pwd`
-if [ -n $WORK_DIR ]; then
-    work_dir=`dirname "$0"`/../
-    export WORK_DIR=`cd ${work_dir};pwd`
-fi
+work_dir=`dirname "$0"`/../
+export WORK_DIR=`cd ${work_dir};pwd`
 cd ${current_dir}/
+
 if (( NUM_ARGS == 0 )); then
     PARSED_CMD[$j]='--help'
     print_help
 else
     parse
 fi
-exec ${WORK_DIR}/bin/linkis-cli -engineType spark-2.4.3 -codeType sql "${PARSED_CMD[@]}"
\ No newline at end of file
+
+if [ "$IS_PYSPARK"x == "true"x ]; then
+    exec ${WORK_DIR}/bin/linkis-cli-pre -engineType spark-2.4.3 -codeType py "${PARSED_CMD[@]}"
+elif [ "$IS_SCALA"x == "true"x ]; then
+    exec ${WORK_DIR}/bin/linkis-cli-pre -engineType spark-2.4.3 -codeType scala "${PARSED_CMD[@]}"
+else
+    exec ${WORK_DIR}/bin/linkis-cli-pre -engineType spark-2.4.3 "${PARSED_CMD[@]}"
+fi
\ No newline at end of file
diff --git a/assembly-combined-package/assembly-combined/bin/linkis-cli-sqoop b/assembly-combined-package/assembly-combined/bin/linkis-cli-sqoop
new file mode 100755
index 0000000..27fca1a
--- /dev/null
+++ b/assembly-combined-package/assembly-combined/bin/linkis-cli-sqoop
@@ -0,0 +1,433 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# import
+declare -A sqoop_mode_opts_key_only=(\
+    # linkis-arguements
+    ['import']='import' \
+    ['export']='export' \
+)
+SQOOP_MODE="sqoop.mode"
+
+# -D k=v
+declare -A jvm_opts_kv=(\
+    ['-D']='sqoop.env' \
+)
+SQOOP_ENV="sqoop.env"
+
+# --query,-e 'xxxx'
+declare -A job_content_query_opts_kv=(\
+    ['-e']='sqoop.args.query' \
+    ['--query']='sqoop.args.query' \
+)
+# --option value
+declare -A job_content_opts_kv=(\
+# linkis-arguements
+    ['--datasource-name']='sqoop.args.datasource.name' \
+# common-arguements
+    ['--connect']='sqoop.args.connect' \
+    ['--connection-manager']='sqoop.args.connection.manager' \
+    ['--connection-param-file']='sqoop.args.connection.param.file' \
+    ['--driver']='sqoop.args.driver' \
+    ['--hadoop-home']='sqoop.args.hadoop.home' \
+    ['--hadoop-mapred-home']='sqoop.args.hadoop.mapred.home' \
+    # ['--help']='sqoop.args.help' \
+    ['--password']='sqoop.args.password' \
+    # ['--password-alias']='sqoop.args.password.alias' \
+    # ['--password-file']='sqoop.args.password.file' \
+    ['--username']='sqoop.args.username' \
+# export-control-arguments
+    ['--call']='sqoop.args.call' \
+    ['--columns']='sqoop.args.columns' \
+    ['--export-dir']='sqoop.args.export.dir' \
+    ['-m']='sqoop.args.num.mappers' \
+    ['--num-mappers']='sqoop.args.num.mappers' \
+    ['--mapreduce-job-name']='sqoop.args.mapreduce.job.name' \
+    ['--staging-table']='sqoop.args.staging.table' \
+    ['--table']='sqoop.args.table' \
+    ['--update-key']='sqoop.args.update.key' \
+    ['--update-mode']='sqoop.args.update.mode' \
+    ['--validation-failurehandler']='sqoop.args.validation.failurehandler' \
+    ['--validation-threshold']='sqoop.args.validation.threshold' \
+    ['--validator']='sqoop.args.validator' \
+# import-control-arguments
+    ['--boundary-query']='sqoop.args.boundary.query' \
+    ['--columns']='sqoop.args.columns' \
+    ['--compression-codec']='sqoop.args.compression.codec' \
+    ['--direct-split-size']='sqoop.args.direct.split.size' \
+    ['--fetch-size']='sqoop.args.fetch.size' \
+    ['--inline-lob-limit']='sqoop.args.inline.lob.limit' \
+    ['-m']='sqoop.args.num.mappers' \
+    ['--num-mappers']='sqoop.args.num.mappers' \
+    ['--mapreduce-job-name']='sqoop.args.mapreduce.job.name' \
+    ['--merge-key']='sqoop.args.merge.key' \
+    ['--split-by']='sqoop.args.split.by' \
+    ['--table']='sqoop.args.table' \
+    ['--target-dir']='sqoop.args.target.dir' \
+    ['--validation-failurehandler']='sqoop.args.validation.failurehandler' \
+    ['--validation-threshold']='sqoop.args.validation.threshold' \
+    ['--validator']='sqoop.args.validator' \
+    ['--warehouse-dir']='sqoop.args.warehouse.dir' \
+    ['--where']='sqoop.args.where' \
+# incremental-import-arguments
+    ['--check-column']='sqoop.args.check.column' \
+    ['--incremental']='sqoop.args.incremental' \
+    ['--last-value']='sqoop.args.last.value' \
+# output-line-formatting-arguments
+    ['--enclosed-by']='sqoop.args.enclosed.by' \
+    ['--escaped-by']='sqoop.args.escaped.by' \
+    ['--fields-terminated-by']='sqoop.args.fields.terminated.by' \
+    ['--lines-terminated-by']='sqoop.args.lines.terminated.by' \
+    ['--optionally-enclosed-by']='sqoop.args.optionally.enclosed.by' \
+# input-parsing-arguments
+    ['--input-enclosed-by']='sqoop.args.input.enclosed.by' \
+    ['--input-escaped-by']='sqoop.args.input.escaped.by' \
+    ['--input-fields-terminated-by']='sqoop.args.input.fields.terminated.by' \
+    ['--input-lines-terminated-by']='sqoop.args.input.lines.terminated.by' \
+    ['--input-optionally-enclosed-by']='sqoop.args.input.optionally.enclosed.by' \
+# hive arguments:
+    ['--hive-database']='sqoop.args.hive.database' \
+    ['--hive-delims-replacement']='sqoop.args.hive.delims.replacement' \
+    ['--hive-home']='sqoop.args.hive.home' \
+    ['--hive-partition-key']='sqoop.args.hive.partition.key' \
+    ['--hive-partition-value']='sqoop.args.hive.partition.value' \
+    ['--hive-table']='sqoop.args.hive.table' \
+    ['--map-column-hive']='sqoop.args.map.column.hive' \
+# hBase-arguments
+    ['--column-family']='sqoop.args.column.family' \
+    ['--hbase-row-key']='sqoop.args.hbase.row.key' \
+    ['--hbase-table']='sqoop.args.hbase.table' \
+# HCatalog arguments:
+    ['--hcatalog-database']='sqoop.args.hcatalog.database' \
+    ['--hcatalog-home']='sqoop.args.hcatalog.home' \
+    ['--hcatalog-partition-keys']='sqoop.args.hcatalog.partition.keys' \
+    ['--hcatalog-partition-values']='sqoop.args.hcatalog.partition.values' \
+    ['--hcatalog-table']='sqoop.args.hcatalog.table' \
+    ['--hive-home']='sqoop.args.hive.home' \
+    ['--hive-partition-key']='sqoop.args.hive.partition.key' \
+    ['--hive-partition-value']='sqoop.args.hive.partition.value' \
+    ['--map-column-hive']='sqoop.args.map.column.hive' \
+    ['--hcatalog-storage-stanza']='sqoop.args.hcatalog.storage.stanza' \
+# Accumulo arguments:
+    ['--accumulo-batch-size']='sqoop.args.accumulo.batch.size' \
+    ['--accumulo-column-family']='sqoop.args.accumulo.column.family' \
+    ['--accumulo-instance']='sqoop.args.accumulo.instance' \
+    ['--accumulo-max-latency']='sqoop.args.accumulo.max.latency' \
+    ['--accumulo-password']='sqoop.args.accumulo.password' \
+    ['--accumulo-row-key']='sqoop.args.accumulo.row.key' \
+    ['--accumulo-table']='sqoop.args.accumulo.table' \
+    ['--accumulo-user']='sqoop.args.accumulo.user' \
+    ['--accumulo-visibility']='sqoop.args.accumulo.visibility' \
+    ['--accumulo-zookeepers']='sqoop.args.accumulo.zookeepers' \
+# Code-generation-arguments
+    ['--bindir']='sqoop.args.bindir' \
+    ['--class-name']='sqoop.args.class.name' \
+    ['--input-null-non-string']='sqoop.args.input.null.non.string' \
+    ['--input-null-string']='sqoop.args.input.null.string' \
+    ['--jar-file']='sqoop.args.jar.file' \
+    ['--map-column-java']='sqoop.args.map.column.java' \
+    ['--null-non-string']='sqoop.args.null.non.string' \
+    ['--null-string']='sqoop.args.null.string' \
+    ['--outdir']='sqoop.args.outdir' \
+    ['--package-name']='sqoop.args.package.name' \
+# Generic Hadoop command-line arguments
+
+)
+
+# --option
+declare -A job_content_opts_key_only=(\
+# common-arguements
+    ['--relaxed-isolation']='sqoop.args.relaxed.isolation' \
+    ['--skip-dist-cache']='sqoop.args.skip.dist.cache' \
+    ['--verbose']='sqoop.args.verbose' \
+# export-control-arguments
+    ['--batch']='sqoop.args.batch' \
+    ['--clear-staging-table']='sqoop.args.clear.staging.table' \
+    ['--direct']='sqoop.args.direct' \
+    ['--validate']='sqoop.args.validate' \
+# import-control-arguments
+    ['--append']='sqoop.args.append' \
+    ['--as-avrodatafile']='sqoop.args.as.avrodatafile' \
+    ['--as-parquetfile']='sqoop.args.as.parquetfile' \
+    ['--as-sequencefile']='sqoop.args.as.sequencefile' \
+    ['--as-textfile']='sqoop.args.as.textfile' \
+    ['--autoreset-to-one-mapper']='sqoop.args.autoreset.to.one.mapper' \
+    ['--case-insensitive']='sqoop.args.case.insensitive' \
+    ['--delete-target-dir']='sqoop.args.delete.target.dir' \
+    ['--direct']='sqoop.args.direct' \
+    ['--validate']='sqoop.args.validate' \
+    ['-z']='sqoop.args.compress' \
+    ['--compress']='sqoop.args.compress' \
+# incremental-import-arguments
+#   none
+# output line formatting arguments
+    ['--mysql-delimiters']='sqoop.args.mysql.delimiters' \
+# input-parsing-arguments
+#   none
+# hive arguments:
+    ['--create-hive-table']='sqoop.args.create.hive.table' \
+    ['--hive-drop-import-delims']='sqoop.args.hive.drop.import.delims' \
+    ['--hive-import']='sqoop.args.hive.import' \
+    ['--hive-overwrite']='sqoop.args.hive.overwrite' \
+# hBase arguments
+    ['--hbase-bulkload']='sqoop.args.hbase.bulkload' \
+    ['--hbase-create-table']='sqoop.args.hbase.create.table' \
+# HCatalog arguments:
+    ['--create-hcatalog-table']='sqoop.args.create.hcatalog.table' \
+# Accumulo arguments:
+    ['--accumulo-create-table']='sqoop.args.accumulo.create.table' \
+# Generic Hadoop command-line arguments
+    ['-conf']='sqoop.args.conf' \
+    ['-fs']='sqoop.args.fs' \
+    ['-jt']='sqoop.args.jt' \
+    ['-files']='sqoop.args.files' \
+    ['-libjars']='sqoop.args.libjars' \
+    ['-archives']='sqoop.args.archives' \
+)
+
+declare -A help=(\
+    ['-h']='--help'
+    ['--help']='--help'
+)
+
+declare -A help_msg=(\
+    ['Common_arguments']='--connect <jdbc-uri>
+        --connection-manager <class-name>
+        --connection-param-file <properties-file>
+        --driver <class-name>
+        --hadoop-home <hdir>
+        --hadoop-mapred-home <dir>
+        --help
+        --password <password>
+        --password-alias <password-alias>
+        --password-file <password-file>
+        --relaxed-isolation
+        --skip-dist-cache
+        --username <username>
+        --verbose' \
+    ['Import_control_arguments:']='--append
+        --as-avrodatafile
+        --as-parquetfile
+        --as-sequencefile
+        --as-textfile
+        --autoreset-to-one-mapper
+        --boundary-query <statement>
+        --case-insensitive
+        --columns <col,col,col...>
+        --compression-codec <codec>
+        --delete-target-dir
+        --direct
+        --direct-split-size <n>
+        -e,--query <statement>
+        --fetch-size <n>
+        --inline-lob-limit <n>
+        -m,--num-mappers <n>
+        --mapreduce-job-name <name>
+        --split-by <column-name>
+        --table <table-name>
+        --target-dir <dir>
+        --validate
+        --validation-failurehandler <validation-failurehandler>
+        --validation-threshold <validation-threshold>
+        --validator <validator>
+        --warehouse-dir <dir>
+        --where <where clause>
+        -z,--compress' \
+    ['Incremental_import_arguments']='--check-column <column>
+        --incremental <import-type>
+        --last-value <value>' \
+    ['Output_line_formatting_arguments']='--enclosed-by <char>
+        --escaped-by <char>
+        --fields-terminated-by <char>
+        --lines-terminated-by <char>
+        --mysql-delimiters' \
+    ['Input_parsing_arguments']='--input-enclosed-by <char>
+        --input-escaped-by <char>
+        --input-fields-terminated-by <char>
+        --input-lines-terminated-by <char>
+        --input-optionally-enclosed-by <char>' \
+    ['Hive_arguments']='--create-hive-table
+        --hive-database <database-name>
+        --hive-delims-replacement <arg>
+        --hive-drop-import-delims
+        --hive-home <dir>
+        --hive-import
+        --hive-overwrite
+        --hive-partition-key <partition-key>
+        --hive-partition-value <partition-value>
+        --hive-table <table-name>
+        --map-column-hive <arg>' \
+    ['HBase_arguments']='--column-family <family>
+        --hbase-bulkload
+        --hbase-create-table
+        --hbase-row-key <col>
+        --hbase-table <table>' \
+    ['HCatalog_arguments']='--hcatalog-database <arg>
+        --hcatalog-home <hdir>
+        --hcatalog-partition-keys <partition-key>
+        --hcatalog-partition-values <partition-value>
+        --hcatalog-table <arg>
+        --hive-home <dir>
+        --hive-partition-key <partition-key>
+        --hive-partition-value <partition-value>
+        --map-column-hive <arg>
+        --create-hcatalog-table
+        --hcatalog-storage-stanza <arg>' \
+    ['Accumulo_arguments']='--accumulo-batch-size <size>
+        --accumulo-column-family <family>
+        --accumulo-create-table
+        --accumulo-instance <instance>
+        --accumulo-max-latency <latency>
+        --accumulo-password <password>
+        --accumulo-row-key <col>
+        --accumulo-table <table>
+        --accumulo-user <user>
+        --accumulo-visibility <vis>
+        --accumulo-zookeepers <zookeepers>' \
+    ['Code_generation_arguments']='--bindir <dir>
+        --class-name <name>
+        --input-null-non-string <null-str>
+        --input-null-string <null-str>
+        --jar-file <file>
+        --map-column-java <arg>
+        --null-non-string <null-str>
+        --null-string <null-str>
+        --outdir <dir>
+        --package-name <name>' \
+    ['Generic_Hadoop_command-line_arguments']='-conf <configuration file>
+        -D <property=value>
+        -fs <local|namenode:port>
+        -jt <local|resourcemanager:port>
+        -files <comma separated list of files>
+        -libjars <comma separated list of jars>
+        -archives <comma separated list of archives>'
+
+)
+
+i=0
+for arg in "$@"
+do
+    ARGS[i]=${arg}
+    ((i++))
+done
+
+NUM_ARGS=${#ARGS[@]}
+
+declare -a PARSED_CMD
+
+function print_help() {
+    printf "Usage:\n"
+    for key in $(echo ${!help_msg[*]})
+    do
+        if [ -n "${help_msg[${key}]}" ]; then
+            msg=${help_msg[${key}]}
+            len=${#msg}
+            printf " %-30s%-30s\n" "$key" "${msg: 0:50}"
+            for ((i=50;i<len;i+=50))
+            do
+                if (( i+50<len )); then
+                    printf " %-30s%-30s\n" "" "${msg: i:50}"
+                else
+                    printf " %-30s%-30s\n" "" "${msg: i}"
+                fi
+
+            done
+
+        fi
+    done
+}
+
+j=0
+function parse() {
+    for((i=0;i<NUM_ARGS;i++));
+    do
+        arg=${ARGS[$i]}
+        if [ -n "${help[${arg}]}" ];then
+            print_help
+            PARSED_CMD[$j]=${help[${arg}]}
+            break
+        fi
+        if [ -n "${sqoop_mode_opts_key_only[$arg]}" ];then
+            key="-jobContentMap"
+            kv_str=$SQOOP_MODE"="${sqoop_mode_opts_key_only[$arg]}
+            PARSED_CMD[$j]=$key
+            PARSED_CMD[$j+1]=$kv_str
+            ((j=j+2))
+        elif [ -n "${jvm_opts_kv[$arg]}" ];then
+        #-D key=value
+            if [ $((${i}+1)) -lt ${NUM_ARGS} ]; then
+                key="-jobContentMap"
+                kv_str=$SQOOP_ENV"."${ARGS[$i+1]}
+                PARSED_CMD[$j]=$key
+                PARSED_CMD[$j+1]=$kv_str
+                ((i=i+1))
+                ((j=j+2))
+            else
+                PARSED_CMD[$j]=$arg
+                ((j++))
+            fi
+        elif [[ $arg =~ ^-D[^[:space:]]+ ]];then
+        #-Dkey=value
+            key="-jobContentMap"
+            kv_str=$SQOOP_ENV"."${arg:2}
+            PARSED_CMD[$j]=$key
+            PARSED_CMD[$j+1]=$kv_str
+            ((j=j+2))
+        elif [ -n "${job_content_opts_kv[$arg]}" ]; then
+            if [ $((${i}+1)) -lt ${NUM_ARGS} ]; then
+                key="-jobContentMap"
+                kv_str=${job_content_opts_kv[$arg]}"="${ARGS[$i+1]}
+                PARSED_CMD[$j]=$key
+                PARSED_CMD[$j+1]=$kv_str
+                ((i=i+1))
+                ((j=j+2))
+            else
+                PARSED_CMD[$j]=$arg
+                ((j++))
+            fi
+        elif [ -n "${job_content_query_opts_kv[$arg]}" ];then
+        #--query xxxx
+            if [ $((${i}+1)) -lt ${NUM_ARGS} ]; then
+                key="-jobContentMap"
+                val=${ARGS[$i+1]}
+                PARSED_CMD[$j]=$key
+                PARSED_CMD[$j+1]=${job_content_query_opts_kv[$arg]}"="$val
+                ((i=i+1))
+                ((j=j+2))
+            else
+                PARSED_CMD[$j]=$arg
+                ((j++))
+            fi
+        elif [ -n "${job_content_opts_key_only[$arg]}" ];then
+            key="-jobContentMap"
+            kv_str=${job_content_opts_key_only[$arg]}"="
+            PARSED_CMD[$j]=$key
+            PARSED_CMD[$j+1]=$kv_str
+            ((j=j+2))
+        else
+            PARSED_CMD[$j]=$arg
+            ((j++))
+        fi
+    done
+}
+
+current_dir=`pwd`
+work_dir=`dirname "$0"`/../
+export WORK_DIR=`cd ${work_dir};pwd`
+cd ${current_dir}/
+
+parse
+exec ${WORK_DIR}/bin/linkis-cli-pre --mode once -engineType sqoop-1.4.6 -codeType none "${PARSED_CMD[@]}"
\ No newline at end of file
diff --git a/assembly-combined-package/assembly-combined/conf/application-eureka.yml b/assembly-combined-package/assembly-combined/conf/application-eureka.yml
index ddc3ab7..d879708 100644
--- a/assembly-combined-package/assembly-combined/conf/application-eureka.yml
+++ b/assembly-combined-package/assembly-combined/conf/application-eureka.yml
@@ -20,6 +20,9 @@ spring:
 
 server:
   port: 20303
+  enable-self-preservation: false
+  eviction-interval-timer-in-ms: 3000
+  response-cache-update-interval-ms: 2000
 eureka:
   instance:
     hostname:
@@ -29,9 +32,3 @@ eureka:
 #    fetch-registry: false
     serviceUrl:
       defaultZone: http://127.0.0.1:20303/eureka/
-#  server:
-#    enableSelfPreservation: false
-    enable-self-preservation: false
-    eviction-interval-timer-in-ms: 3000
-  server:
-    response-cache-update-interval-ms: 2000
diff --git a/assembly-combined-package/assembly-combined/conf/linkis-cg-engineconnmanager.properties b/assembly-combined-package/assembly-combined/conf/linkis-cg-engineconnmanager.properties
index 8189cc8..639f467 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis-cg-engineconnmanager.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis-cg-engineconnmanager.properties
@@ -16,7 +16,17 @@
 #
 ##restful
 wds.linkis.server.restful.scan.packages=org.apache.linkis.em.restful
-wds.linkis.engineconn.root.dir=/appcom/tmp
+wds.linkis.engineconn.root.dir=/data/bdp/linkis
+#wds.linkis.ecm.engineconn.create.duration=600000
+
+#wds.linkis.ecm.health.report.period=30
+
+#wds.linkis.ecm.cores.max=24
+
+#wds.linkis.ecm.memory.max=64424509440
+
+#wds.linkis.ecm.engineconn.instances.max=24
+wds.linkis.ecm.protected.load.enabled=false
 ##Spring
 spring.server.port=9102
 ##set engine environment in econn start script, such as SPARK3_HOME,the value of env will read from ecm host by key.
diff --git a/assembly-combined-package/assembly-combined/conf/linkis-cg-engineplugin.properties b/assembly-combined-package/assembly-combined/conf/linkis-cg-engineplugin.properties
index 2919ccf..d25b690 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis-cg-engineplugin.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis-cg-engineplugin.properties
@@ -24,5 +24,7 @@ wds.linkis.engineConn.plugin.cache.expire-in-seconds=100000
 wds.linkis.engineConn.dist.load.enable=true
 #wds.linkis.engineconn.home=/appcom/Install/LinkisInstall/lib/linkis-engineconn-plugins
 #wds.linkis.engineconn.plugin.loader.store.path=/appcom/Install/LinkisInstall/lib/linkis-engineconn-plugins
+
+wds.linkis.ms.parallelism.consumer.max=200
 ##Spring
 spring.server.port=9103
\ No newline at end of file
diff --git a/assembly-combined-package/assembly-combined/conf/linkis-cg-entrance.properties b/assembly-combined-package/assembly-combined/conf/linkis-cg-entrance.properties
index 2122464..aee36d9 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis-cg-entrance.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis-cg-entrance.properties
@@ -17,7 +17,19 @@
 ##restful
 wds.linkis.server.restful.scan.packages=org.apache.linkis.entrance.restful
 wds.linkis.server.socket.mode=false
-#wds.linkis.entrance.config.log.path=hdfs:///tmp/linkis/
-wds.linkis.resultSet.store.path=hdfs:///tmp/linkis
+wds.linkis.entrance.config.log.path=hdfs:///appcom/logs/linkis
+#wds.linkis.resultSet.store.path=hdfs:///tmp/linkis
+wds.linkis.orchestrator.computation.operation.builder.class=com.webank.wedatasphere.linkis.orchestrator.operation.TuningOperationBuilder
+
+
+wds.linkis.entrance.shell.danger.check.enabled=false
+#group capacity
+wds.linkis.concurrent.group.factory.capacity=30000
+
+wds.linkis.entrance.max.capacity=30000
+#errorcode
+wds.linkis.errorcode.future.timeout=5000
+
+wds.linkis.ms.parallelism.consumer.max=200
 ##Spring
 spring.server.port=9104
\ No newline at end of file
diff --git a/assembly-combined-package/assembly-combined/conf/linkis-cg-linkismanager.properties b/assembly-combined-package/assembly-combined/conf/linkis-cg-linkismanager.properties
index 5653b8d..3bbb431 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis-cg-linkismanager.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis-cg-linkismanager.properties
@@ -21,4 +21,9 @@ wds.linkis.server.mybatis.mapperLocations=classpath:org/apache/linkis/manager/da
 wds.linkis.server.mybatis.typeAliasesPackage=
 wds.linkis.server.mybatis.BasePackage=org.apache.linkis.manager.dao,org.apache.linkis.resourcemanager.external.dao
 ##Spring
-spring.server.port=9101
\ No newline at end of file
+spring.server.port=9101
+
+wds.linkis.ms.parallelism.consumer.max=200
+
+#Resource action record for debug
+wds.linkis.manager.rm.resource.action.record=false
diff --git a/assembly-combined-package/assembly-combined/conf/linkis-mg-gateway.properties b/assembly-combined-package/assembly-combined/conf/linkis-mg-gateway.properties
index 0eb4092..625fd48 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis-mg-gateway.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis-mg-gateway.properties
@@ -32,4 +32,8 @@ wds.linkis.ldap.proxy.userNameFormat=
 wds.linkis.admin.user=hadoop
 #wds.linkis.admin.password=
 ##Spring
-spring.server.port=9001
\ No newline at end of file
+spring.server.port=9001
+
+
+wds.linkis.gateway.conf.publicservice.list=query,application,filesystem,udf,variable,microservice,errorcode
+
diff --git a/assembly-combined-package/assembly-combined/conf/linkis-ps-cs.properties b/assembly-combined-package/assembly-combined/conf/linkis-ps-cs.properties
index 26b9c97..7fc53ea 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis-ps-cs.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis-ps-cs.properties
@@ -22,5 +22,6 @@ wds.linkis.server.mybatis.typeAliasesPackage=org.apache.linkis.cs.persistence.en
 wds.linkis.server.mybatis.BasePackage=org.apache.linkis.cs.persistence.dao
 ##Spring
 spring.server.port=9108
+wds.linkis.ms.parallelism.consumer.max=200
 # ps-cs prefix must be started with 'cs_'
 spring.eureka.instance.metadata-map.route=cs_1_dev
\ No newline at end of file
diff --git a/assembly-combined-package/assembly-combined/conf/linkis-ps-data-source-manager.properties b/assembly-combined-package/assembly-combined/conf/linkis-ps-data-source-manager.properties
index 0d374b0..d73091f 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis-ps-data-source-manager.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis-ps-data-source-manager.properties
@@ -30,8 +30,12 @@ hive.meta.user=
 hive.meta.password=
 wds.linkis.metadata.hive.encode.enabled=false
 
+#dsm
+wds.linkis.server.dsm.auth.admin=
+wds.linkis.server.mdm.service.app.name=linkis-ps-metadatamanager
+
 ##Spring
-spring.server.port=9106
+spring.server.port=8196
 spring.spring.main.allow-bean-definition-overriding=true
 spring.spring.jackson.serialization.FAIL_ON_EMPTY_BEANS=false
 spring.jackson.serialization.FAIL_ON_EMPTY_BEANS=false
diff --git a/assembly-combined-package/assembly-combined/conf/linkis-ps-publicservice.properties b/assembly-combined-package/assembly-combined/conf/linkis-ps-publicservice.properties
index f6f7129..5f8ff60 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis-ps-publicservice.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis-ps-publicservice.properties
@@ -33,3 +33,20 @@ hive.meta.password=
 ##Spring
 spring.server.port=9105
 spring.spring.main.allow-bean-definition-overriding=true
+
+wds.linkis.io.extra.labels={tenant:"ioClient"}
+#jobhistory
+wds.linkis.query.code.store.length=2000
+
+wds.linkis.io.loadbalance.capacity=2
+
+#wds.linkis.workspace.resultset.download.maxsize.csv=300000
+
+#wds.linkis.workspace.resultset.download.maxsize.excel=300000
+
+wds.linkis.ms.parallelism.consumer.max=200
+
+#configuration
+wds.linkis.configuration.use.creator.default.value=false
+
+
diff --git a/assembly-combined-package/assembly-combined/conf/linkis.properties b/assembly-combined-package/assembly-combined/conf/linkis.properties
index 208a261..9956efc 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis.properties
@@ -35,22 +35,32 @@ wds.linkis.mysql.is.encrypt=false
 
 ##file path
 wds.linkis.filesystem.root.path=file:///tmp/linkis/
-wds.linkis.filesystem.hdfs.root.path=hdfs:///tmp/linkis/
+wds.linkis.filesystem.hdfs.root.path=hdfs:///apps-data
 ##bml path:default use hdfs
 wds.linkis.bml.is.hdfs=true
 wds.linkis.bml.hdfs.prefix=/apps-data
 #wds.linkis.bml.local.prefix=/data/dss/bml
 
 ##engine Version
-#wds.linkis.spark.engine.version=
-#wds.linkis.hive.engine.version=
-#wds.linkis.python.engine.version=
-
+#wds.linkis.spark.engine.version=2.4.3
+#wds.linkis.hive.engine.version=2.3.3
+#wds.linkis.python.engine.version=python2
+#wds.linkis.appconn.engine.version=1
 #LinkisHome
-wds.linkis.home=/appcom/Install/LinkisInstall
+wds.linkis.home=/appcom/Install/linkis
 #Linkis governance station administrators
 wds.linkis.governance.station.admin=hadoop
-wds.linkis.gateway.conf.publicservice.list=query,jobhistory,application,configuration,filesystem,udf,variable,microservice,errorcode,bml,datasource
+#wds.linkis.orchestrator.task.consumer.wait=30
+wds.linkis.server.mybatis.datasource.minIdle=5
+
+wds.linkis.server.mybatis.datasource.maxActive=40
+
+wds.linkis.storage.enable.io.proxy=true
+
+wds.linkis.hadoop.hdfs.cache.enable=true
+
+wds.linkis.orchestrator.execution.task.runner.max.size=200
+
 
 spring.spring.servlet.multipart.max-file-size=500MB
 spring.spring.servlet.multipart.max-request-size=500MB
diff --git a/assembly-combined-package/assembly-combined/conf/log4j2.xml b/assembly-combined-package/assembly-combined/conf/log4j2-console.xml
similarity index 51%
copy from assembly-combined-package/assembly-combined/conf/log4j2.xml
copy to assembly-combined-package/assembly-combined/conf/log4j2-console.xml
index dbb9b10..80641de 100644
--- a/assembly-combined-package/assembly-combined/conf/log4j2.xml
+++ b/assembly-combined-package/assembly-combined/conf/log4j2-console.xml
@@ -1,41 +1,50 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one or more
-  ~ contributor license agreements.  See the NOTICE file distributed with
-  ~ this work for additional information regarding copyright ownership.
-  ~ The ASF licenses this file to You under the Apache License, Version 2.0
-  ~ (the "License"); you may not use this file except in compliance with
-  ~ the License.  You may obtain a copy of the License at
-  ~ 
-  ~   http://www.apache.org/licenses/LICENSE-2.0
-  ~ 
+  ~ Copyright 2019 WeBank
+  ~
+  ~ Licensed under the Apache License, Version 2.0 (the "License");
+  ~ you may not use this file except in compliance with the License.
+  ~ You may obtain a copy of the License at
+  ~
+  ~ http://www.apache.org/licenses/LICENSE-2.0
+  ~
   ~ Unless required by applicable law or agreed to in writing, software
   ~ distributed under the License is distributed on an "AS IS" BASIS,
   ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   ~ See the License for the specific language governing permissions and
   ~ limitations under the License.
   -->
-  
-<configuration status="error" monitorInterval="30">
+
+<configuration status="error" monitorInterval="30" >
     <appenders>
         <RollingFile name="RollingFile" append="false" fileName="${env:LINKIS_LOG_DIR}/${sys:serviceName}.log"
                      filePattern="${env:LINKIS_LOG_DIR}/$${date:yyyy-MM}/${sys:serviceName}/linkis-log-%d{yyyy-MM-dd}-%i.log">
             <PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss.SSS} [%-5level] [%-40t] %c{1.} (%L) [%M] - %msg%xEx%n"/>
-            <Policies>
-                <TimeBasedTriggeringPolicy interval="1 hour" />
-                <SizeBasedTriggeringPolicy size="100MB"/>
-            </Policies>
+            <SizeBasedTriggeringPolicy size="100MB"/>
             <DefaultRolloverStrategy max="10"/>
         </RollingFile>
+        <console name="Console-Plain" target="SYSTEM_OUT">
+            <!-- Log output pattern -->
+            <PatternLayout pattern="%m%n"/>
+        </console>
     </appenders>
     <loggers>
         <root level="INFO">
             <appender-ref ref="RollingFile"/>
         </root>
+        <logger name="com.webank.wedatasphere.linkis.message.builder.DefaultMessageJob" level="warn" additivity="true">
+            <appender-ref ref="RollingFile"/>
+        </logger>
 
+        <logger name="com.webank.wedatasphere.linkis.message.scheduler.DefaultMessageExecutor" level="warn" additivity="true">
+            <appender-ref ref="RollingFile"/>
+        </logger>
         <logger name="com.netflix.loadbalancer.DynamicServerListLoadBalancer" level="warn" additivity="true">
             <appender-ref ref="RollingFile"/>
         </logger>
+        <logger name="PlaintTextConsoleLogger" level="INFO" additivity="false">
+            <appender-ref ref="Console-Plain"/>
+        </logger>
     </loggers>
 </configuration>
 
diff --git a/assembly-combined-package/assembly-combined/conf/log4j2.xml b/assembly-combined-package/assembly-combined/conf/log4j2.xml
index dbb9b10..2ba429e 100644
--- a/assembly-combined-package/assembly-combined/conf/log4j2.xml
+++ b/assembly-combined-package/assembly-combined/conf/log4j2.xml
@@ -32,7 +32,13 @@
         <root level="INFO">
             <appender-ref ref="RollingFile"/>
         </root>
+        <logger name="com.webank.wedatasphere.linkis.message.builder.DefaultMessageJob" level="warn" additivity="true">
+            <appender-ref ref="RollingFile"/>
+        </logger>
 
+        <logger name="com.webank.wedatasphere.linkis.message.scheduler.DefaultMessageExecutor" level="warn" additivity="true">
+            <appender-ref ref="RollingFile"/>
+        </logger>
         <logger name="com.netflix.loadbalancer.DynamicServerListLoadBalancer" level="warn" additivity="true">
             <appender-ref ref="RollingFile"/>
         </logger>
diff --git a/assembly-combined-package/assembly-combined/conf/token.properties b/assembly-combined-package/assembly-combined/conf/token.properties
index 7d9019f..c623747 100644
--- a/assembly-combined-package/assembly-combined/conf/token.properties
+++ b/assembly-combined-package/assembly-combined/conf/token.properties
@@ -17,4 +17,6 @@ QML-AUTH=*
 BML-AUTH=*
 WS-AUTH=*
 dss-AUTH=*
-QUALITIS-AUTH=*
\ No newline at end of file
+QUALITIS-AUTH=*
+VALIDATOR-AUTH=*
+DOPS-AUTH=*
\ No newline at end of file
diff --git a/assembly-combined-package/assembly-combined/public-module-combined/src/main/assembly/distribution.xml b/assembly-combined-package/assembly-combined/public-module-combined/src/main/assembly/distribution.xml
index d9cdcc6..590a961 100644
--- a/assembly-combined-package/assembly-combined/public-module-combined/src/main/assembly/distribution.xml
+++ b/assembly-combined-package/assembly-combined/public-module-combined/src/main/assembly/distribution.xml
@@ -23,6 +23,7 @@
     <id>module</id>
     <formats>
         <format>dir</format>
+        <format>zip</format>
     </formats>
     <includeBaseDirectory>false</includeBaseDirectory>
 <!--    <baseDirectory>lib</baseDirectory>-->
diff --git a/assembly-combined-package/assembly-combined/sbin/common.sh b/assembly-combined-package/assembly-combined/sbin/common.sh
index 6d79a25..59e8279 100644
--- a/assembly-combined-package/assembly-combined/sbin/common.sh
+++ b/assembly-combined-package/assembly-combined/sbin/common.sh
@@ -19,7 +19,8 @@ source ~/.bash_profile
 
 export local_host="`hostname --fqdn`"
 
-ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}')
+#ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}')
+ipaddr=`hostname -i`
 
 function isLocal(){
     if [ "$1" == "127.0.0.1" ];then
diff --git a/assembly-combined-package/assembly-combined/sbin/ext/linkis-common-start b/assembly-combined-package/assembly-combined/sbin/ext/linkis-common-start
index d2a15e2..8f26c6a 100644
--- a/assembly-combined-package/assembly-combined/sbin/ext/linkis-common-start
+++ b/assembly-combined-package/assembly-combined/sbin/ext/linkis-common-start
@@ -21,10 +21,10 @@
 source $LINKIS_CONF_DIR/linkis-env.sh
 
 if [ "$LINKIS_LOG_DIR" = "" ]; then
-  export LINKIS_LOG_DIR="$LINKIS_HOME/logs"
+  export LINKIS_LOG_DIR="/data/bdp/logs/linkis"
 fi
 if [ ! -w "$LINKIS_LOG_DIR" ] ; then
-  mkdir -p "$LINKIS_LOG_DIR"
+  sudo mkdir -p $LINKIS_LOG_DIR; sudo chown -R hadoop:hadoop $LINKIS_LOG_DIR
 fi
 
 if test -z "$SERVER_HEAP_SIZE"
diff --git a/assembly-combined-package/assembly-combined/sbin/ext/linkis-mg-eureka b/assembly-combined-package/assembly-combined/sbin/ext/linkis-mg-eureka
index 6dced4a..2bb35b9 100644
--- a/assembly-combined-package/assembly-combined/sbin/ext/linkis-mg-eureka
+++ b/assembly-combined-package/assembly-combined/sbin/ext/linkis-mg-eureka
@@ -25,10 +25,10 @@ SERVER_SUFFIX="linkis-spring-cloud-services/linkis-mg-eureka"
 #export DEBUG_PORT=
 
 if [ "$LINKIS_LOG_DIR" = "" ]; then
-  export LINKIS_LOG_DIR="$LINKIS_HOME/logs"
+  export LINKIS_LOG_DIR="/data/bdp/logs/linkis"
 fi
 if [ ! -w "$LINKIS_LOG_DIR" ] ; then
-  mkdir -p "$LINKIS_LOG_DIR"
+  sudo mkdir -p $LINKIS_LOG_DIR; sudo chown -R hadoop:hadoop $LINKIS_LOG_DIR
 fi
 
 if test -z "$SERVER_HEAP_SIZE"
@@ -55,12 +55,6 @@ export SERVER_CLASS=org.apache.linkis.eureka.SpringCloudEurekaApplication
 ## conf dir
 export SERVER_CONF_PATH=$LINKIS_CONF_DIR
 
-## commons lib
-export LINKIS_COMMONS_LIB=$LINKIS_HOME/$LINKIS_PUBLIC_MODULE
-if [ ! -r "$LINKIS_COMMONS_LIB" ] ; then
-    echo "linkis commons lib not exists $LINKIS_COMMONS_LIB"
-    exit 1
-fi
 
 ## server lib
 export SERVER_LIB=$LINKIS_HOME/lib/$SERVER_SUFFIX
@@ -70,7 +64,7 @@ if [ ! -r "$SERVER_LIB" ] ; then
 fi
 
 ## set class path
-export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$LINKIS_COMMONS_LIB/*:$SERVER_LIB/*
+export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$SERVER_LIB/*
 
 SERVER_IP="`hostname --fqdn`"
 
diff --git a/assembly-combined-package/assembly-combined/sbin/ext/linkis-mg-gateway b/assembly-combined-package/assembly-combined/sbin/ext/linkis-mg-gateway
index d65b935..6c26c7b 100644
--- a/assembly-combined-package/assembly-combined/sbin/ext/linkis-mg-gateway
+++ b/assembly-combined-package/assembly-combined/sbin/ext/linkis-mg-gateway
@@ -25,10 +25,10 @@ SERVER_SUFFIX="linkis-spring-cloud-services/linkis-mg-gateway"
 #export DEBUG_PORT=
 
 if [ "$LINKIS_LOG_DIR" = "" ]; then
-  export LINKIS_LOG_DIR="$LINKIS_HOME/logs"
+  export LINKIS_LOG_DIR="/data/bdp/logs/linkis"
 fi
 if [ ! -w "$LINKIS_LOG_DIR" ] ; then
-  mkdir -p "$LINKIS_LOG_DIR"
+  sudo mkdir -p $LINKIS_LOG_DIR; sudo chown -R hadoop:hadoop $LINKIS_LOG_DIR
 fi
 
 if test -z "$SERVER_HEAP_SIZE"
@@ -48,7 +48,7 @@ fi
 
 if test -z "$SERVER_JAVA_OPTS"
 then
-  export SERVER_JAVA_OPTS=" $JAVA_AGENT_OPTS -DserviceName=$SERVER_NAME -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$LINKIS_LOG_DIR/${SERVER_NAME}-gc.log $DEBUG_CMD"
+  export SERVER_JAVA_OPTS=" $JAVA_AGENT_OPTS -Dreactor.netty.ioWorkerCount=50 -DserviceName=$SERVER_NAME -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$LINKIS_LOG_DIR/${SERVER_NAME}-gc.log $DEBUG_CMD"
 fi
 
 
diff --git a/assembly-combined-package/assembly-combined/sbin/ext/linkis-ps-metadatamanager b/assembly-combined-package/assembly-combined/sbin/ext/linkis-ps-metadatamanager
index c6a666f..c6a467b 100644
--- a/assembly-combined-package/assembly-combined/sbin/ext/linkis-ps-metadatamanager
+++ b/assembly-combined-package/assembly-combined/sbin/ext/linkis-ps-metadatamanager
@@ -24,7 +24,14 @@ SERVER_SUFFIX="linkis-public-enhancements/linkis-ps-metadatamanager"
 
 
 export SERVER_CLASS=org.apache.linkis.metadatamanager.server.LinkisMetadataManagerApplication
-export LINKIS_LOG_DIR=$LINKIS_HOME/logs
+
+if [ "$LINKIS_LOG_DIR" = "" ]; then
+  export LINKIS_LOG_DIR="/data/bdp/logs/linkis"
+fi
+if [ ! -w "$LINKIS_LOG_DIR" ] ; then
+  sudo mkdir -p $LINKIS_LOG_DIR; sudo chown -R hadoop:hadoop $LINKIS_LOG_DIR
+fi
+
 if test -z "$SERVER_HEAP_SIZE"
 then
   export SERVER_HEAP_SIZE="512M"
diff --git a/assembly-combined-package/assembly-combined/sbin/linkis-daemon.sh b/assembly-combined-package/assembly-combined/sbin/linkis-daemon.sh
index ee23aaa..f0dfd68 100644
--- a/assembly-combined-package/assembly-combined/sbin/linkis-daemon.sh
+++ b/assembly-combined-package/assembly-combined/sbin/linkis-daemon.sh
@@ -114,7 +114,7 @@ function stop()
       if [[ -z "${pid}" ]]; then
         echo "server $SERVER_NAME is not running"
       else
-        wait_for_server_to_die $pid 40
+        wait_for_server_to_die $pid 300
         $(rm -f ${SERVER_PID})
         echo "server $SERVER_NAME is stopped."
       fi
diff --git a/assembly-combined-package/assembly-combined/sbin/linkis-start-all.sh b/assembly-combined-package/assembly-combined/sbin/linkis-start-all.sh
index bf7520f..cd253d0 100644
--- a/assembly-combined-package/assembly-combined/sbin/linkis-start-all.sh
+++ b/assembly-combined-package/assembly-combined/sbin/linkis-start-all.sh
@@ -69,7 +69,7 @@ SERVER_NAME="mg-gateway"
 SERVER_IP=$GATEWAY_INSTALL_IP
 startApp
 
-#publicenservice
+#publicservice
 SERVER_NAME="ps-publicservice"
 SERVER_IP=$PUBLICSERVICE_INSTALL_IP
 startApp
diff --git a/assembly-combined-package/assembly-combined/sbin/linkis-stop-all.sh b/assembly-combined-package/assembly-combined/sbin/linkis-stop-all.sh
index e17030e..fd43033 100644
--- a/assembly-combined-package/assembly-combined/sbin/linkis-stop-all.sh
+++ b/assembly-combined-package/assembly-combined/sbin/linkis-stop-all.sh
@@ -30,15 +30,17 @@ if [ "$LINKIS_HOME" = "" ]; then
   export LINKIS_HOME=$INSTALL_HOME
 fi
 
-info="We will stop all linkis applications, it will take some time, please wait"
-echo ${info}
-
+if [ -z "$LINKIS_CONF_DIR" ]
+then
+  LINKIS_CONF_DIR="$LINKIS_HOME/conf"
+fi
 
 
+info="We will stop all linkis applications, it will take some time, please wait"
+echo ${info}
 
 source ${LINKIS_HOME}/sbin/common.sh
 
-
 function stopApp(){
 echo "<-------------------------------->"
 echo "Begin to stop $SERVER_NAME"
@@ -53,6 +55,27 @@ executeCMD $SERVER_IP "$SERVER_STOP_CMD"
 echo "<-------------------------------->"
 }
 
+function clearResource(){
+echo "<-------------------------------->"
+echo "Begin to clear resource..."
+LINKIS_PROPERTIES_PATH="${LINKIS_CONF_DIR}/db.sh"
+source ${LINKIS_PROPERTIES_PATH}
+mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -D$MYSQL_DB -p$MYSQL_PASSWORD --default-character-set=utf8 -e "DELETE FROM linkis_cg_manager_label_resource"
+checkpoint1=$?
+mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -D$MYSQL_DB -p$MYSQL_PASSWORD --default-character-set=utf8 -e "DELETE FROM linkis_cg_manager_linkis_resources"
+checkpoint2=$?
+mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -D$MYSQL_DB -p$MYSQL_PASSWORD --default-character-set=utf8 -e "DELETE FROM linkis_cg_manager_service_instance_metrics"
+checkpoint3=$?
+mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -D$MYSQL_DB -p$MYSQL_PASSWORD --default-character-set=utf8 -e "DELETE FROM linkis_cg_manager_lock"
+if [ ${checkpoint1} -ne 0 -o ${checkpoint2} -ne 0 -o ${checkpoint3} -ne 0 -o $? -ne 0 ]
+then
+  echo "Failed to clear resource, please check your db.sh configuration."
+else
+  echo "Success to clear all resource!"
+fi
+echo "<-------------------------------->"
+}
+
 
 
 #gateway
@@ -96,4 +119,6 @@ export SERVER_NAME="mg-eureka"
 SERVER_IP=$EUREKA_INSTALL_IP
 stopApp
 
+clearResource
+
 echo "stop-all shell script executed completely"
diff --git a/assembly-combined-package/assembly-combined/src/main/assembly/assembly.xml b/assembly-combined-package/assembly-combined/src/main/assembly/assembly.xml
index 9f2d250..f1d20c5 100644
--- a/assembly-combined-package/assembly-combined/src/main/assembly/assembly.xml
+++ b/assembly-combined-package/assembly-combined/src/main/assembly/assembly.xml
@@ -19,6 +19,7 @@
 <assembly>
   <id>dist</id>
   <formats>
+    <format>tar.gz</format>
     <format>dir</format>
   </formats>
   <includeBaseDirectory>false</includeBaseDirectory>
@@ -26,17 +27,27 @@
   <fileSets>
 
     <fileSet>
-      <directory>
-        bin/
-      </directory>
-      <outputDirectory>bin</outputDirectory>
+      <directory>../..</directory>
+      <outputDirectory></outputDirectory>
       <includes>
-        <include>**/*</include>
+        <include>README*</include>
+        <include>LICENSE*</include>
+        <include>NOTICE*</include>
       </includes>
-      <fileMode>0755</fileMode>
-      <lineEnding>unix</lineEnding>
     </fileSet>
 
+      <fileSet>
+          <directory>
+              bin/
+          </directory>
+          <outputDirectory>bin</outputDirectory>
+          <includes>
+              <include>**/*</include>
+          </includes>
+          <fileMode>0755</fileMode>
+          <lineEnding>unix</lineEnding>
+      </fileSet>
+
     <fileSet>
       <directory>
         conf/
@@ -62,6 +73,19 @@
           <lineEnding>unix</lineEnding>
       </fileSet>
 
+      <fileSet>
+          <directory>
+              bin/
+          </directory>
+          <outputDirectory>bin</outputDirectory>
+          <includes>
+              <include>**/*</include>
+          </includes>
+          <fileMode>0777</fileMode>
+          <directoryMode>0755</directoryMode>
+          <lineEnding>unix</lineEnding>
+      </fileSet>
+
     <fileSet>
       <directory>
         ../../db/
@@ -234,7 +258,30 @@
               <include>*</include>
           </includes>
       </fileSet>
-
+        <!-- data source manager-->
+      <fileSet>
+          <directory>
+              ../../linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/target/out/lib/
+          </directory>
+          <outputDirectory>
+              lib/linkis-public-enhancements/linkis-ps-datasourcemanager
+          </outputDirectory>
+          <includes>
+              <include>*</include>
+          </includes>
+      </fileSet>
+      <!-- metadata manager -->
+      <fileSet>
+          <directory>
+              ../../linkis-public-enhancements/linkis-datasource/linkis-metadata-manager/server/target/out/lib/
+          </directory>
+          <outputDirectory>
+              lib/linkis-public-enhancements/linkis-ps-metadatamanager
+          </outputDirectory>
+          <includes>
+              <include>*</include>
+          </includes>
+      </fileSet>
             <!--application-manager-->
       <fileSet>
           <directory>
@@ -303,7 +350,7 @@
               ../../linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/target/out/conf
           </directory>
           <outputDirectory>
-              conf/linkis-cli
+              conf/linkis-computation-governance/linkis-client/linkis-cli
           </outputDirectory>
           <includes>
               <include>*</include>
diff --git a/assembly-combined-package/bin/checkEnv.sh b/assembly-combined-package/bin/checkEnv.sh
index 83ffdb4..dbce4cb 100644
--- a/assembly-combined-package/bin/checkEnv.sh
+++ b/assembly-combined-package/bin/checkEnv.sh
@@ -94,6 +94,7 @@ echo "check tar"
 need_cmd tar
 echo "check sed"
 need_cmd sed
+need_cmd dos2unix
 echo "<-----end to check used cmd---->"
 
 checkSpark
diff --git a/assembly-combined-package/bin/install.sh b/assembly-combined-package/bin/install.sh
index d10f3d6..46cadb0 100644
--- a/assembly-combined-package/bin/install.sh
+++ b/assembly-combined-package/bin/install.sh
@@ -46,30 +46,84 @@ fi
 ## import common.sh
 source ${workDir}/bin/common.sh
 
-##load config
-echo "======= Step 1: Load deploy-config/* =========="
-export LINKIS_CONFIG_PATH=${LINKIS_CONFIG_PATH:-"${workDir}/deploy-config/linkis-env.sh"}
-export LINKIS_DB_CONFIG_PATH=${LINKIS_DB_CONFIG_PATH:-"${workDir}/deploy-config/db.sh"}
 
-source ${LINKIS_CONFIG_PATH}
-source ${LINKIS_DB_CONFIG_PATH}
 
-isSuccess "load config"
+function checkPythonAndJava(){
+    python --version
+    isSuccess "execute python --version"
+    java -version
+    isSuccess "execute java --version"
+}
+
+function checkHadoopAndHive(){
+    hadoopVersion="`hdfs version`"
+    defaultHadoopVersion="2.7"
+    checkversion "$hadoopVersion" $defaultHadoopVersion hadoop
+    checkversion "$(whereis hive)" "1.2" hive
+}
+
+function checkversion(){
+versionStr=$1
+defaultVersion=$2
+module=$3
+
+result=$(echo $versionStr | grep "$defaultVersion")
+if [ -n "$result" ]; then
+    echo "$module version match"
+else
+   echo "WARN: Your $module version is not $defaultVersion, there may be compatibility issues:"
+   echo " 1: Continue installation, there may be compatibility issues"
+   echo " 2: Exit installation"
+   echo ""
+   read -p "Please input the choice:"  idx
+   if [[ '2' = "$idx" ]];then
+    echo "You chose  Exit installation"
+    exit 1
+   fi
+fi
+}
+
+function checkSpark(){
+ spark-submit --version
+ isSuccess "execute spark-submit --version"
+}
+
+say() {
+    printf 'check command fail \n %s\n' "$1"
+}
+
+err() {
+    say "$1" >&2
+    exit 1
+}
+
+check_cmd() {
+    command -v "$1" > /dev/null 2>&1
+}
+
+need_cmd() {
+    if ! check_cmd "$1"; then
+        err "need '$1' (command not found)"
+    fi
+}
+
 
 
-echo "======= Step 2: Check env =========="
-## check env
 sh ${workDir}/bin/checkEnv.sh
 isSuccess "check env"
 
-until mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD  -e ";" ; do
-     echo "try to connect to linkis mysql $MYSQL_HOST:$MYSQL_PORT/$MYSQL_DB failed, please check db configuration in:$LINKIS_DB_CONFIG_PATH"
-     exit 1
-done
+##load config
+echo "step1:load config "
+export LINKIS_CONFIG_PATH=${LINKIS_CONFIG_PATH:-"${workDir}/config/linkis-env.sh"}
+export LINKIS_DB_CONFIG_PATH=${LINKIS_DB_CONFIG_PATH:-"${workDir}/config/db.sh"}
+source ${LINKIS_CONFIG_PATH}
+source ${LINKIS_DB_CONFIG_PATH}
+
+isSuccess "load config"
+
 
-echo "======= Step 3: Create necessary directory =========="
 
-echo "[WORKSPACE_USER_ROOT_PATH] try to create directory"
+echo "create hdfs  directory and local directory"
 if [ "$WORKSPACE_USER_ROOT_PATH" != "" ]
 then
   localRootDir=$WORKSPACE_USER_ROOT_PATH
@@ -105,6 +159,7 @@ echo "[HDFS_USER_ROOT_PATH] try to create directory"
      localRootDir=${HDFS_USER_ROOT_PATH#hdfs://}
      echo "[HDFS_USER_ROOT_PATH] try to create hdfs dir,cmd is: hdfs dfs -mkdir -p $localRootDir/$deployUser"
      hdfs dfs -mkdir -p $localRootDir/$deployUser
+     hdfs dfs -chmod -R 775 $localRootDir/$deployUser
    else
      echo "[HDFS_USER_ROOT_PATH] does not support $HDFS_USER_ROOT_PATH filesystem types"
    fi
@@ -127,8 +182,8 @@ echo "[RESULT_SET_ROOT_PATH] try to create directory"
    elif [[ $RESULT_SET_ROOT_PATH == hdfs://* ]];then
      localRootDir=${RESULT_SET_ROOT_PATH#hdfs://}
      echo "[RESULT_SET_ROOT_PATH] try to create hdfs dir,cmd is: hdfs dfs -mkdir -p $localRootDir/$deployUser"
-     hdfs dfs -mkdir -p $localRootDir
-     hdfs dfs -chmod 775 $localRootDir
+     hdfs dfs -mkdir -p $localRootDir/$deployUser
+     hdfs dfs -chmod 775 $localRootDir/$deployUser
    else
      echo "[RESULT_SET_ROOT_PATH] does not support $RESULT_SET_ROOT_PATH filesystem types"
    fi
@@ -163,16 +218,13 @@ if ! test -d ${LINKIS_PACKAGE}; then
     echo "**********${RED}Error${NC}: please put ${LINKIS_PACKAGE} in $workDir! "
     exit 1
 else
-    echo "Start to cp ${LINKIS_PACKAGE} to $LINKIS_HOME."
-    cp -r $LINKIS_PACKAGE/* $LINKIS_HOME
-    isSuccess "cp ${LINKIS_PACKAGE} to $LINKIS_HOME"
+    echo "Start to unzip ${LINKIS_PACKAGE} ."
+    tar -xzf ${LINKIS_PACKAGE}  -C $LINKIS_HOME
+    isSuccess "Unzip ${LINKIS_PACKAGE} to $LINKIS_HOME"
 fi
 
 cp ${LINKIS_CONFIG_PATH} $LINKIS_HOME/conf
 
-
-
-echo "======= Step 4: Create linkis table =========="
 ## sql init
 if [ "$YARN_RESTFUL_URL" != "" ]
 then
@@ -249,8 +301,7 @@ fi
 
 
 #Deal common config
-echo ""
-echo "======= Step 5: Update config =========="
+echo "Update config..."
 
 if test -z "$EUREKA_INSTALL_IP"
 then
diff --git a/assembly-combined-package/deploy-config/linkis-env.sh b/assembly-combined-package/deploy-config/linkis-env.sh
index a5412db..2d54289 100644
--- a/assembly-combined-package/deploy-config/linkis-env.sh
+++ b/assembly-combined-package/deploy-config/linkis-env.sh
@@ -38,12 +38,12 @@ HDFS_USER_ROOT_PATH=hdfs:///tmp/linkis ##hdfs:// required
 
 
 ### Path to store started engines and engine logs, must be local
-ENGINECONN_ROOT_PATH=/appcom/tmp
+ENGINECONN_ROOT_PATH=/data/bdp/linkis
 
 #ENTRANCE_CONFIG_LOG_PATH=hdfs:///tmp/linkis/
 
 ### Path to store job ResultSet:file or hdfs path
-RESULT_SET_ROOT_PATH=hdfs:///tmp/linkis ##hdfs:// required
+#RESULT_SET_ROOT_PATH=hdfs:///tmp/linkis ##hdfs:// required
 
 ### Provide the DB information of Hive metadata database.
 ### Attention! If there are special characters like "&", they need to be enclosed in quotation marks.
@@ -52,7 +52,8 @@ HIVE_META_USER=""
 HIVE_META_PASSWORD=""
 
 ##YARN REST URL  spark engine required
-YARN_RESTFUL_URL=http://127.0.0.1:8088
+# Active resourcemanager address needed. Recommended to add all ha addresses.
+YARN_RESTFUL_URL="http://127.0.0.1:8088;http://127.0.0.1:8088"
 
 ## request spnego enabled Yarn resource restful interface When Yarn enable kerberos
 ## If your environment yarn interface can be accessed directly, ignore it
@@ -133,7 +134,7 @@ CS_PORT=9108
 export SERVER_HEAP_SIZE="512M"
 
 ##The decompression directory and the installation directory need to be inconsistent
-#LINKIS_HOME=/appcom/Install/LinkisInstall
+LINKIS_HOME=/appcom/Install/LinkisInstall
 
 LINKIS_VERSION=1.1.0
 

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 02/18: 1. linkis-computation-engineconn - Add support for computation-engineconn to query running task status from entrance. 2. linkis-computation-engineconn - Add some tests.

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit 97d1c7bc22cde9e232479ba120773e8a9ace650c
Author: alexkun <xu...@qq.com>
AuthorDate: Fri Mar 4 14:42:06 2022 +0800

    1. linkis-computation-engineconn - Add support for computation-engineconn to query running task status from entrance.
    2. linkis-computation-engineconn - Add some tests.
---
 .../executor/conf/ComputationExecutorConf.scala    |   5 +
 .../executor/execute/ComputationExecutor.scala     |   2 +
 .../service/TaskExecutionServiceImpl.scala         |  10 +-
 .../executor/upstream/ECTaskEntranceMonitor.scala  |  57 ++++++++
 .../SingleThreadUpstreamConnectionMonitor.scala    | 149 +++++++++++++++++++++
 .../UpstreamConnectionMonitor.scala}               |  17 ++-
 .../access/ConnectionInfoAccess.scala}             |  15 +--
 .../access/ConnectionInfoAccessRequest.scala}      |  10 +-
 .../upstream/access/ECTaskEntranceInfoAccess.scala | 119 ++++++++++++++++
 .../access/ECTaskEntranceInfoAccessRequest.scala}  |  11 +-
 .../upstream/entity/ECTaskEntranceConnection.scala |  56 ++++++++
 .../entity/UpstreamConnection.scala}               |  18 ++-
 ...TaskStatusChangedForUpstreamMonitorEvent.scala} |  14 +-
 .../upstream/handler/ECTaskKillHandler.scala       |  68 ++++++++++
 .../handler/ECTaskKillHandlerRequest.scala}        |  12 +-
 .../handler/MonitorHandler.scala}                  |  15 +--
 .../handler/MonitorHandlerRequest.scala}           |   9 +-
 ...kStatusChangedForUpstreamMonitorListener.scala} |  13 +-
 .../service/ECTaskEntranceMonitorService.scala     |  68 ++++++++++
 .../wrapper/ConnectionInfoWrapper.scala}           |  13 +-
 .../wrapper/ECTaskEntranceConnectionWrapper.scala  |  64 +++++++++
 .../executor/utlis/ComputationErrorCode.scala      |   7 +
 .../access/ECTaskEntranceInfoAccessTest.java       |  63 +++++++++
 .../src/test/resources/linkis.properties           |   9 ++
 .../src/test/resources/log4j2.xml                  |  46 +++++++
 .../access/ECTaskEntranceInfoAccessHelper.scala    |  63 +++++++++
 26 files changed, 852 insertions(+), 81 deletions(-)

diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala
index 70b08dc..0c9925c 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala
@@ -46,4 +46,9 @@ object ComputationExecutorConf {
 
   val DEFAULT_COMPUTATION_EXECUTORMANAGER_CLAZZ = CommonVars("wds.linkis.default.computation.executormanager.clazz", "org.apache.linkis.engineconn.computation.executor.creation.ComputationExecutorManagerImpl")
 
+  val UPSTREAM_MONITOR_ECTASK_SHOULD_START = CommonVars("linkis.upstream.monitor.ectask.should.start", true).getValue
+
+  val UPSTREAM_MONITOR_WRAPPER_ENTRIES_SURVIVE_THRESHOLD_SEC = CommonVars("linkis.upstream.monitor.wrapper.entries.survive.time.sec", 86400).getValue
+
+  val UPSTREAM_MONITOR_ECTASK_ENTRANCE_THRESHOLD_SEC =  CommonVars("linkis.upstream.monitor.ectask.entrance.threshold.sec", 15).getValue
 }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala
index afa0839..3491cb9 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala
@@ -29,6 +29,7 @@ import org.apache.linkis.engineconn.acessible.executor.listener.event.TaskStatus
 import org.apache.linkis.engineconn.common.conf.{EngineConnConf, EngineConnConstant}
 import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf
 import org.apache.linkis.engineconn.computation.executor.creation.ComputationExecutorManager
+import org.apache.linkis.engineconn.computation.executor.upstream.event.TaskStatusChangedForUpstreamMonitorEvent
 import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask
 import org.apache.linkis.engineconn.computation.executor.hook.ComputationExecutorHook
 import org.apache.linkis.engineconn.core.EngineConnObject
@@ -309,6 +310,7 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) extends Acc
     }
     if (oriStatus != newStatus && !isInternalExecute) {
       listenerBusContext.getEngineConnSyncListenerBus.postToAll(TaskStatusChangedEvent(task.getTaskId, oriStatus, newStatus))
+      listenerBusContext.getEngineConnSyncListenerBus.postToAll(TaskStatusChangedForUpstreamMonitorEvent(task.getTaskId, oriStatus, newStatus, task, this))
     }
   }
 
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala
index 672a895..d9bcf32 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala
@@ -112,15 +112,15 @@ class TaskExecutionServiceImpl extends TaskExecutionService with Logging with Re
     info("Received a new task, task content is " + requestTask)
     if (StringUtils.isBlank(requestTask.getLock)) {
       error(s"Invalid lock : ${requestTask.getLock} , requestTask : " + requestTask)
-      return ErrorExecuteResponse(s"Invalid lock : ${requestTask.getLock}.", new EngineConnExecutorErrorException(EngineConnExecutorErrorCode.INVALID_PARAMS, "Invalid lock or code"))
+      return ErrorExecuteResponse(s"Invalid lock : ${requestTask.getLock}.", new EngineConnExecutorErrorException(EngineConnExecutorErrorCode.INVALID_PARAMS, "Invalid lock or code(请获取到锁后再提交任务.)"))
     }
     if (!lockService.isLockExist(requestTask.getLock)) {
       error(s"Lock ${requestTask.getLock} not exist, cannot execute.")
-      return ErrorExecuteResponse("Lock not exixt", new EngineConnExecutorErrorException(EngineConnExecutorErrorCode.INVALID_LOCK, "Lock : " + requestTask.getLock + " not exist."))
+      return ErrorExecuteResponse("Lock not exist", new EngineConnExecutorErrorException(EngineConnExecutorErrorCode.INVALID_LOCK, "Lock : " + requestTask.getLock + " not exist(您的锁无效,请重新获取后再提交)."))
     }
 
     if (StringUtils.isBlank(requestTask.getCode)) {
-      return IncompleteExecuteResponse("Your code is incomplete, it may be that only comments are selected for execution")
+      return IncompleteExecuteResponse("Your code is incomplete, it may be that only comments are selected for execution(您的代码不完整,可能是仅仅选中了注释进行执行)")
     }
 
     val taskId: Int = taskExecutedNum.incrementAndGet()
@@ -173,8 +173,7 @@ class TaskExecutionServiceImpl extends TaskExecutionService with Logging with Re
         val labelsStr = if (labels != null) labels.filter(_ != null).map(_.getStringValue).mkString(",") else ""
         val msg = "Invalid computationExecutor : " + o.getClass.getName + ", labels : " + labelsStr + ", requestTask : " + task.getTaskId
         error(msg)
-        ErrorExecuteResponse("Invalid computationExecutor.",
-          new EngineConnExecutorErrorException(EngineConnExecutorErrorCode.INVALID_ENGINE_TYPE, msg))
+        ErrorExecuteResponse("Invalid computationExecutor(生成无效的计算引擎,请联系管理员).", new EngineConnExecutorErrorException(EngineConnExecutorErrorCode.INVALID_ENGINE_TYPE, msg))
     }
   }
 
@@ -432,6 +431,7 @@ class TaskExecutionServiceImpl extends TaskExecutionService with Logging with Re
     case taskResultCreateEvent: TaskResultCreateEvent => onResultSetCreated(taskResultCreateEvent)
     case taskResultSizeCreatedEvent: TaskResultSizeCreatedEvent => onResultSizeCreated(taskResultSizeCreatedEvent)
     case taskResponseErrorEvent: TaskResponseErrorEvent => onTaskResponseErrorEvent(taskResponseErrorEvent)
+    case taskStatusChangedEvent2: TaskStatusChangedForUpstreamMonitorEvent => info("Ignored TaskStatusChangedForUpstreamMonitorEvent for entrance monitoring, task: " + taskStatusChangedEvent2.taskId)
     case _ =>
       warn("Unknown event : " + BDPJettyServerHelper.gson.toJson(event))
   }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/ECTaskEntranceMonitor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/ECTaskEntranceMonitor.scala
new file mode 100644
index 0000000..4695fa5
--- /dev/null
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/ECTaskEntranceMonitor.scala
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconn.computation.executor.upstream
+
+import java.util
+
+import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask
+import org.apache.linkis.engineconn.computation.executor.execute.ComputationExecutor
+import org.apache.linkis.engineconn.computation.executor.upstream.access.{ConnectionInfoAccessRequest, ECTaskEntranceInfoAccess, ECTaskEntranceInfoAccessRequest}
+import org.apache.linkis.engineconn.computation.executor.upstream.handler.{ECTaskKillHandler, ECTaskKillHandlerRequest, MonitorHandlerRequest}
+import org.apache.linkis.engineconn.computation.executor.upstream.wrapper.{ConnectionInfoWrapper, ECTaskEntranceConnectionWrapper}
+
+class ECTaskEntranceMonitor extends SingleThreadUpstreamConnectionMonitor(name = "ECTask-upstream-connection-monitor", infoAccess = new ECTaskEntranceInfoAccess, handler = new ECTaskKillHandler) with Logging {
+
+  def register(task: EngineConnTask, executor: ComputationExecutor): Unit = {
+    panicIfNull(task, "engineConnTask should not be null")
+    panicIfNull(executor, "executor should not be null")
+    val taskID = task.getTaskId
+    if (wrapperMap.containsKey(taskID)) {
+      error("registered duplicate EngineConnTask!! task-id: " + taskID)
+    }
+    wrapperMap.putIfAbsent(taskID, new ECTaskEntranceConnectionWrapper(taskID, task, executor))
+  }
+
+  def unregister(taskID: String): Unit = {
+    if (!wrapperMap.containsKey(taskID)) {
+      error("attempted to unregister non-existing EngineConnTask!! task-id: " + taskID)
+    }
+    wrapperMap.remove(taskID)
+  }
+
+  override def generateInfoAccessRequest(wrapperList: util.List[ConnectionInfoWrapper]): ConnectionInfoAccessRequest = {
+    panicIfNull(wrapperList, "wrapperList cannot be null")
+    new ECTaskEntranceInfoAccessRequest(wrapperList)
+  }
+
+  override def generateHandlerRequest(wrapperList: util.List[ConnectionInfoWrapper]): MonitorHandlerRequest = {
+    panicIfNull(wrapperList, "wrapperList cannot be null")
+    new ECTaskKillHandlerRequest(wrapperList)
+  }
+}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/SingleThreadUpstreamConnectionMonitor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/SingleThreadUpstreamConnectionMonitor.scala
new file mode 100644
index 0000000..273dbe8
--- /dev/null
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/SingleThreadUpstreamConnectionMonitor.scala
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconn.computation.executor.upstream
+
+import java.util
+import java.util.Collections
+import java.util.concurrent.{ConcurrentHashMap, ScheduledThreadPoolExecutor, TimeUnit}
+
+import org.apache.linkis.common.utils.{Logging, Utils}
+import org.apache.linkis.engineconn.common.exception.EngineConnException
+import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf
+import org.apache.linkis.engineconn.computation.executor.upstream.access.{ConnectionInfoAccess, ConnectionInfoAccessRequest}
+import org.apache.linkis.engineconn.computation.executor.upstream.handler.{MonitorHandler, MonitorHandlerRequest}
+import org.apache.linkis.engineconn.computation.executor.upstream.wrapper.ConnectionInfoWrapper
+import org.apache.linkis.engineconn.computation.executor.utlis.ComputationErrorCode
+import org.apache.commons.lang3.concurrent.BasicThreadFactory
+
+abstract class SingleThreadUpstreamConnectionMonitor(name: String, infoAccess: ConnectionInfoAccess, handler: MonitorHandler) extends UpstreamConnectionMonitor with Logging {
+
+  protected val wrapperMap = new ConcurrentHashMap[String, ConnectionInfoWrapper]
+
+  private val monitorDaemon = new ScheduledThreadPoolExecutor(3, new BasicThreadFactory.Builder().namingPattern(name + "-%d").daemon(true).build)
+
+  private val shouldStart = ComputationExecutorConf.UPSTREAM_MONITOR_ECTASK_SHOULD_START
+
+  private var started = false
+
+  override def getUpstreamNodeInfoAccess(): ConnectionInfoAccess = infoAccess
+
+  override def getHandler(): MonitorHandler = handler
+
+  def generateInfoAccessRequest(wrapperList: util.List[ConnectionInfoWrapper]): ConnectionInfoAccessRequest
+
+  def generateHandlerRequest(wrapperList: util.List[ConnectionInfoWrapper]): MonitorHandlerRequest
+
+  def start(): Unit = this.synchronized {
+    if (!shouldStart) {
+      info("Upstream-monitor is configured to be off. Will not monitor upstream connection for engine-task")
+      return
+    }
+    if (!started) {
+      panicIfNull(infoAccess, "infoAccess should not be null")
+      panicIfNull(handler, "handler should not be null")
+      info("started upstream monitor")
+      monitorDaemon.scheduleAtFixedRate(new Runnable {
+        override def run(): Unit = Utils.tryCatch(scanOneIteration) {
+          t => error("upstream-connection-monitor failed to scan for one iteration", t)
+        }
+      }, 0, 5, TimeUnit.SECONDS)
+      monitorDaemon.scheduleAtFixedRate(new Runnable {
+        override def run(): Unit = Utils.tryCatch(clearOneIteration) {
+          t => error("clearWrapperMap has failed for one iteration", t)
+        }
+      }, 0, 1, TimeUnit.HOURS)
+      Utils.addShutdownHook(() -> this.shutdown())
+    } else {
+      throw new EngineConnException(ComputationErrorCode.START_UPSTREAM_MONITOR_TWICE, "cannot start upstream-monitor twice!")
+    }
+    started = true
+  }
+
+  def scanOneIteration(): Unit = {
+    panicIfNull(infoAccess, "connectionInfoAccess should not be null")
+    panicIfNull(handler, "handler should not be null")
+
+    val toBeRequested = new util.ArrayList[ConnectionInfoWrapper]
+    toBeRequested.addAll(wrapperMap.values())
+
+    if (toBeRequested.size() == 0) {
+      debug("nothing to monitor")
+      return
+    }
+    info("requesting connection info: " + util.Arrays.toString(Collections.list(wrapperMap.keys).toArray()))
+    val infoAccessRequest = generateInfoAccessRequest(toBeRequested)
+    val connectionInfoList = infoAccess.getUpstreamInfo(infoAccessRequest)
+    if (connectionInfoList == null || connectionInfoList.size == 0) {
+      info("Found none upstream-info")
+      return
+    }
+    info("connection-info result: " + connectionInfoList(0).getUpstreamServiceInstanceName() + " : " + connectionInfoList(0).isAlive())
+
+    val toBeHandled: util.List[ConnectionInfoWrapper] = new util.ArrayList[ConnectionInfoWrapper]
+    val connectionInfoMap = connectionInfoList.map(x => (x.getKey(), x)).toMap
+    val entries = wrapperMap.entrySet.iterator()
+    while (entries.hasNext) {
+      val entry = entries.next
+      val key = entry.getKey
+      val value = entry.getValue
+      if (connectionInfoMap.contains(key)) {
+        value.updateConnectionInfo(connectionInfoMap.get(key).get)
+      }
+      if (!value.getUpstreamConnection().isAlive()) {
+        info("Found upstream connection problem: " + entry.getValue.toString)
+        toBeHandled.add(value)
+      }
+    }
+
+    val iterator = toBeHandled.iterator
+    while (iterator.hasNext) {
+      wrapperMap.remove(iterator.next.getKey)
+    }
+
+    if (toBeHandled.size > 0) {
+      val handlerRequest = generateHandlerRequest(toBeHandled)
+      Utils.tryCatch(handler.handle(handlerRequest)) {
+        t => error("failed to handle upstream connection-loss", t)
+      }
+    }
+  }
+
+  private def clearOneIteration(): Unit = {
+    val entries = wrapperMap.entrySet.iterator
+    while (entries.hasNext) {
+      val entry = entries.next
+      if (entry.getValue.shouldClear) {
+        entries.remove()
+      }
+    }
+  }
+
+  protected def panicIfNull(obj: Any, msg: String): Unit = {
+    if (obj == null) {
+      throw new EngineConnException(ComputationErrorCode.VARIABLE_NULL_ERROR_CODE, msg)
+    }
+  }
+
+  def shutdown(): Unit = this.synchronized {
+    if (started) {
+      info("stopping upstream monitor")
+      monitorDaemon.shutdownNow
+    }
+  }
+
+}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/UpstreamConnectionMonitor.scala
similarity index 61%
copy from linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
copy to linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/UpstreamConnectionMonitor.scala
index 08f9ddc..4358c5a 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/UpstreamConnectionMonitor.scala
@@ -15,14 +15,17 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineconn.computation.executor.utlis
+package org.apache.linkis.engineconn.computation.executor.upstream
 
-object ComputationErrorCode {
-
-
-  val ASYNC_EXECUTOR_ERROR_CODE = 11301
-
-  val UDF_LOAD_ERROR_CODE = 11302
+import org.apache.linkis.engineconn.computation.executor.upstream.access.ConnectionInfoAccess
+import org.apache.linkis.engineconn.computation.executor.upstream.handler.MonitorHandler
 
+/**
+  * query upstream node info by UpstreamNodeInfoAccess
+  * call UpstreamMonitorHandler to handle undesirable upstream states
+  */
+trait UpstreamConnectionMonitor {
+  def getUpstreamNodeInfoAccess(): ConnectionInfoAccess
 
+  def getHandler(): MonitorHandler
 }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ConnectionInfoAccess.scala
similarity index 69%
copy from linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
copy to linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ConnectionInfoAccess.scala
index 08f9ddc..c697653 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ConnectionInfoAccess.scala
@@ -15,14 +15,13 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineconn.computation.executor.utlis
-
-object ComputationErrorCode {
-
-
-  val ASYNC_EXECUTOR_ERROR_CODE = 11301
-
-  val UDF_LOAD_ERROR_CODE = 11302
+package org.apache.linkis.engineconn.computation.executor.upstream.access
 
+import org.apache.linkis.engineconn.computation.executor.upstream.entity.UpstreamConnection
 
+/**
+  * encapsulates client/driver/method for querying upstream node info
+  */
+trait ConnectionInfoAccess {
+  def getUpstreamInfo(request: ConnectionInfoAccessRequest): List[UpstreamConnection]
 }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ConnectionInfoAccessRequest.scala
similarity index 82%
copy from linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
copy to linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ConnectionInfoAccessRequest.scala
index 08f9ddc..9a05fb0 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ConnectionInfoAccessRequest.scala
@@ -15,14 +15,8 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineconn.computation.executor.utlis
-
-object ComputationErrorCode {
-
-
-  val ASYNC_EXECUTOR_ERROR_CODE = 11301
-
-  val UDF_LOAD_ERROR_CODE = 11302
+package org.apache.linkis.engineconn.computation.executor.upstream.access
 
+trait ConnectionInfoAccessRequest {
 
 }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccess.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccess.scala
new file mode 100644
index 0000000..1b4fb63
--- /dev/null
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccess.scala
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconn.computation.executor.upstream.access
+
+import java.util
+
+import org.apache.linkis.common.ServiceInstance
+import org.apache.linkis.common.utils.{Logging, Utils}
+import org.apache.linkis.engineconn.common.exception.EngineConnException
+import org.apache.linkis.engineconn.computation.executor.upstream.entity.ECTaskEntranceConnection
+import org.apache.linkis.engineconn.computation.executor.upstream.wrapper.ECTaskEntranceConnectionWrapper
+import org.apache.linkis.engineconn.computation.executor.utlis.ComputationErrorCode
+import org.apache.linkis.rpc.sender.SpringCloudFeignConfigurationCache
+import org.springframework.cloud.client.{ServiceInstance => SpringCloudServiceInstance}
+import org.springframework.cloud.netflix.eureka.EurekaDiscoveryClient.EurekaServiceInstance
+
+import scala.collection.JavaConversions
+import scala.collection.JavaConverters._
+
+
+/**
+  * check entrance in eureka by DiscoveryClient
+  */
+class ECTaskEntranceInfoAccess extends ConnectionInfoAccess with Logging {
+  val discoveryClient = SpringCloudFeignConfigurationCache.getDiscoveryClient
+
+  //queryUpstreamInfo
+  override def getUpstreamInfo(request: ConnectionInfoAccessRequest): List[ECTaskEntranceConnection] = {
+    panicIfNull(request, "ConnectionInfoAccessRequest should not be null")
+    panicIfNull(discoveryClient, "discoveryClient should not be null")
+
+    val ret: util.List[ECTaskEntranceConnection] = new util.ArrayList[ECTaskEntranceConnection]
+
+    request match {
+      case eCTaskEntranceInfoAccessRequest: ECTaskEntranceInfoAccessRequest => {
+        //        val instances = Sender.getInstances(GovernanceCommonConf.ENTRANCE_SPRING_NAME.getValue) //use discoveryClient
+        val instanceMap = new util.HashMap[String, ServiceInstance]
+        Utils.tryCatch(discoveryClient.getServices.asScala.map(s => {
+          discoveryClient.getInstances(s).asScala.map {
+            s1 => {
+              val s3 = getDWCServiceInstance(s1)
+              instanceMap.put(s3.getInstance, s3) // instance should be unique
+            }
+          }
+        })) {
+          t => throw new EngineConnException(ComputationErrorCode.UPSTREAM_MONITOR_EXCEPTION, "Failed to get services from eureka").initCause(t)
+        }
+        if (instanceMap.size() == 0) {
+          throw new EngineConnException(ComputationErrorCode.UPSTREAM_MONITOR_EXCEPTION, "Got none serviceInstances from eureka")
+        }
+
+        val currentTime = System.currentTimeMillis
+        val wrappers = eCTaskEntranceInfoAccessRequest.getData
+        panicIfNull(wrappers, "wrappers should not be null")
+        val elements = wrappers.iterator
+        while (elements.hasNext) {
+          val wrapper = elements.next
+          if (wrapper == null) {
+            warn("wrapper should not be null")
+          } else {
+            wrapper match {
+              case ecWrapper: ECTaskEntranceConnectionWrapper => {
+                val engineConnTask = ecWrapper.getEngineConnTask
+                val instance = engineConnTask.getCallbackServiceInstance
+                val eCTaskEntranceConnection = new ECTaskEntranceConnection(engineConnTask.getTaskId, "", instance.getInstance)
+                if (isConnectionAlive(instance, instanceMap)) {
+                  eCTaskEntranceConnection.updatePrevAliveTimeStamp(currentTime)
+                }
+                ret.add(eCTaskEntranceConnection)
+              }
+              case _ => warn("invalid data-type: " + wrapper.getClass.getCanonicalName + " for data in ECTaskEntranceInfoAccessRequest")
+            }
+          }
+        }
+      }
+      case _ => throw new EngineConnException(ComputationErrorCode.INVALID_DATA_TYPE_ERROR_CODE, "invalid data-type: " + request.getClass.getCanonicalName)
+    }
+    JavaConversions.asScalaIterator(ret.iterator()).toList
+  }
+
+  private def getDWCServiceInstance(serviceInstance: SpringCloudServiceInstance): ServiceInstance = serviceInstance match {
+    case instance: EurekaServiceInstance =>
+      val applicationName = instance.getInstanceInfo.getAppName
+      val instanceId = instance.getInstanceInfo.getInstanceId
+      ServiceInstance(applicationName.toLowerCase, getInstance(applicationName, instanceId))
+  }
+
+  private def getInstance(applicationName: String, instanceId: String): String =
+    if (instanceId.toLowerCase.indexOf(applicationName.toLowerCase) > 0) {
+      val instanceInfos = instanceId.split(":")
+      instanceInfos(0) + ":" + instanceInfos(2)
+    } else instanceId
+
+  private def isConnectionAlive(instance: ServiceInstance, instanceMap: util.HashMap[String, ServiceInstance]): Boolean = {
+    instanceMap.containsKey(instance.getInstance) && instanceMap.get(instance.getInstance).equals(instance)
+  }
+
+  protected def panicIfNull(obj: Any, msg: String): Unit = {
+    if (obj == null) {
+      throw new EngineConnException(ComputationErrorCode.VARIABLE_NULL_ERROR_CODE, msg)
+    }
+  }
+
+}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessRequest.scala
similarity index 68%
copy from linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
copy to linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessRequest.scala
index 08f9ddc..d856b33 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessRequest.scala
@@ -15,14 +15,13 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineconn.computation.executor.utlis
+package org.apache.linkis.engineconn.computation.executor.upstream.access
 
-object ComputationErrorCode {
+import java.util
 
-
-  val ASYNC_EXECUTOR_ERROR_CODE = 11301
-
-  val UDF_LOAD_ERROR_CODE = 11302
+import org.apache.linkis.engineconn.computation.executor.upstream.wrapper.ConnectionInfoWrapper
 
 
+class ECTaskEntranceInfoAccessRequest(wrappers: util.List[ConnectionInfoWrapper]) extends ConnectionInfoAccessRequest {
+  def getData(): util.List[ConnectionInfoWrapper] = wrappers
 }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/entity/ECTaskEntranceConnection.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/entity/ECTaskEntranceConnection.scala
new file mode 100644
index 0000000..0f86196
--- /dev/null
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/entity/ECTaskEntranceConnection.scala
@@ -0,0 +1,56 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconn.computation.executor.upstream.entity
+
+import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf
+import org.apache.commons.lang3.StringUtils
+
+
+class ECTaskEntranceConnection(taskID: String, currentServiceInstanceName: String, upstreamServiceInstanceName: String) extends UpstreamConnection {
+
+  @volatile
+  private var prevUpdatedAliveTimestamp = -1L
+
+  override def updatePrevAliveTimeStamp(target: Long): Unit = {
+    prevUpdatedAliveTimestamp = target
+  }
+
+  override def getPrevUpdatedAliveTimestamp(): Long = prevUpdatedAliveTimestamp
+
+  override def isAlive(): Boolean = {
+    System.currentTimeMillis() - prevUpdatedAliveTimestamp <= ComputationExecutorConf.UPSTREAM_MONITOR_ECTASK_ENTRANCE_THRESHOLD_SEC * 1000
+  }
+
+  override def isSameConnectionAs(upstreamConnection: UpstreamConnection): Boolean = upstreamConnection match {
+    case upstreamConnection2: ECTaskEntranceConnection => {
+      StringUtils.equals(upstreamConnection2.getKey, this.getKey) &&
+        StringUtils.equals(upstreamConnection2.getTaskID, this.getTaskID) &&
+        StringUtils.equals(upstreamConnection2.getCurrentServiceInstanceName, this.getCurrentServiceInstanceName) &&
+        StringUtils.equals(upstreamConnection2.getUpstreamServiceInstanceName, this.getUpstreamServiceInstanceName)
+    }
+    case _ => false
+  }
+
+  override def getKey(): String = taskID
+
+  def getTaskID(): String = taskID
+
+  override def getCurrentServiceInstanceName(): String = currentServiceInstanceName
+
+  override def getUpstreamServiceInstanceName(): String = upstreamServiceInstanceName
+}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/entity/UpstreamConnection.scala
similarity index 63%
copy from linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
copy to linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/entity/UpstreamConnection.scala
index 08f9ddc..5ff3edf 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/entity/UpstreamConnection.scala
@@ -15,14 +15,24 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineconn.computation.executor.utlis
+package org.apache.linkis.engineconn.computation.executor.upstream.entity
 
-object ComputationErrorCode {
 
+/**
+  * stores data representing upstream node state
+  */
+trait UpstreamConnection {
+  def getKey(): String
 
-  val ASYNC_EXECUTOR_ERROR_CODE = 11301
+  def getCurrentServiceInstanceName(): String
 
-  val UDF_LOAD_ERROR_CODE = 11302
+  def getUpstreamServiceInstanceName(): String
 
+  def updatePrevAliveTimeStamp(target: Long): Unit
 
+  def getPrevUpdatedAliveTimestamp(): Long
+
+  def isAlive(): Boolean
+
+  def isSameConnectionAs(upstreamConnection: UpstreamConnection): Boolean
 }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/event/TaskStatusChangedForUpstreamMonitorEvent.scala
similarity index 57%
copy from linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
copy to linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/event/TaskStatusChangedForUpstreamMonitorEvent.scala
index 08f9ddc..41e9edf 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/event/TaskStatusChangedForUpstreamMonitorEvent.scala
@@ -15,14 +15,12 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineconn.computation.executor.utlis
+package org.apache.linkis.engineconn.computation.executor.upstream.event
 
-object ComputationErrorCode {
+import org.apache.linkis.engineconn.acessible.executor.listener.event.TaskEvent
+import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask
+import org.apache.linkis.engineconn.computation.executor.execute.ComputationExecutor
+import org.apache.linkis.governance.common.entity.ExecutionNodeStatus
 
 
-  val ASYNC_EXECUTOR_ERROR_CODE = 11301
-
-  val UDF_LOAD_ERROR_CODE = 11302
-
-
-}
+case class TaskStatusChangedForUpstreamMonitorEvent(taskId: String, fromStatus: ExecutionNodeStatus, toStatus: ExecutionNodeStatus, task: EngineConnTask, executor: ComputationExecutor) extends TaskEvent
\ No newline at end of file
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala
new file mode 100644
index 0000000..210eb04
--- /dev/null
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconn.computation.executor.upstream.handler
+
+import org.apache.linkis.common.utils.{Logging, Utils}
+import org.apache.linkis.engineconn.computation.executor.upstream.wrapper.{ConnectionInfoWrapper, ECTaskEntranceConnectionWrapper}
+import org.apache.linkis.engineconn.core.executor.ExecutorManager
+import org.apache.linkis.manager.label.entity.entrance.ExecuteOnceLabel
+
+class ECTaskKillHandler extends MonitorHandler with Logging {
+  override def handle(request: MonitorHandlerRequest): Unit = {
+    if (request == null) {
+      error("illegal input for handler: null")
+    } else {
+      request match {
+        case _: ECTaskKillHandlerRequest => {
+          val toBeKilled = request.asInstanceOf[ECTaskKillHandlerRequest].getData
+          if (toBeKilled != null && toBeKilled.size() != 0) {
+            val elements = toBeKilled.iterator
+            while (elements.hasNext) {
+              val element = elements.next
+              Utils.tryCatch(doKill(element)) {
+                t => error("Failed to kill job: " + element.getKey, t)
+              }
+            }
+          }
+        }
+        case _ => error("illegal input for handler: " + request.getClass.getCanonicalName)
+      }
+    }
+  }
+
+  private def doKill(wrapper: ConnectionInfoWrapper): Unit = {
+    if (wrapper != null) {
+      wrapper match {
+        case eCTaskEntranceConnectionWrapper: ECTaskEntranceConnectionWrapper => {
+          if (eCTaskEntranceConnectionWrapper.getExecutor == null || eCTaskEntranceConnectionWrapper.getEngineConnTask == null) {
+            error("Failed to kill job, executor or engineConnTask in wrapper is null")
+          } else {
+            eCTaskEntranceConnectionWrapper.getExecutor.killTask(eCTaskEntranceConnectionWrapper.getEngineConnTask.getTaskId)
+            if (eCTaskEntranceConnectionWrapper.getEngineConnTask.getLables.exists(_.isInstanceOf[ExecuteOnceLabel])) {
+              warn("upstream monitor tries to shutdown engineConn because executeOnce-label was found")
+              ExecutorManager.getInstance.getReportExecutor.tryShutdown()
+            }
+          }
+        }
+        case _ => error("invalid data-type: " + wrapper.getClass.getCanonicalName)
+      }
+    } else {
+      error("wrapper is null")
+    }
+  }
+}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandlerRequest.scala
similarity index 69%
copy from linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
copy to linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandlerRequest.scala
index 08f9ddc..abdd24f 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandlerRequest.scala
@@ -15,14 +15,12 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineconn.computation.executor.utlis
+package org.apache.linkis.engineconn.computation.executor.upstream.handler
 
-object ComputationErrorCode {
-
-
-  val ASYNC_EXECUTOR_ERROR_CODE = 11301
-
-  val UDF_LOAD_ERROR_CODE = 11302
+import java.util
 
+import org.apache.linkis.engineconn.computation.executor.upstream.wrapper.ConnectionInfoWrapper
 
+class ECTaskKillHandlerRequest(wrappers: util.List[ConnectionInfoWrapper]) extends MonitorHandlerRequest {
+  def getData(): util.List[ConnectionInfoWrapper] = wrappers
 }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/MonitorHandler.scala
similarity index 80%
copy from linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
copy to linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/MonitorHandler.scala
index 08f9ddc..06a2afe 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/MonitorHandler.scala
@@ -15,14 +15,11 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineconn.computation.executor.utlis
-
-object ComputationErrorCode {
-
-
-  val ASYNC_EXECUTOR_ERROR_CODE = 11301
-
-  val UDF_LOAD_ERROR_CODE = 11302
-
+package org.apache.linkis.engineconn.computation.executor.upstream.handler
 
+/**
+  * handle undesirable upstream states
+  */
+trait MonitorHandler {
+  def handle(request: MonitorHandlerRequest): Unit
 }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/MonitorHandlerRequest.scala
similarity index 82%
copy from linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
copy to linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/MonitorHandlerRequest.scala
index 08f9ddc..482c4a8 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/MonitorHandlerRequest.scala
@@ -15,14 +15,9 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineconn.computation.executor.utlis
+package org.apache.linkis.engineconn.computation.executor.upstream.handler
 
-object ComputationErrorCode {
-
-
-  val ASYNC_EXECUTOR_ERROR_CODE = 11301
-
-  val UDF_LOAD_ERROR_CODE = 11302
 
+trait MonitorHandlerRequest {
 
 }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/listener/TaskStatusChangedForUpstreamMonitorListener.scala
similarity index 63%
copy from linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
copy to linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/listener/TaskStatusChangedForUpstreamMonitorListener.scala
index 08f9ddc..19cec21 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/listener/TaskStatusChangedForUpstreamMonitorListener.scala
@@ -15,14 +15,11 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineconn.computation.executor.utlis
-
-object ComputationErrorCode {
-
-
-  val ASYNC_EXECUTOR_ERROR_CODE = 11301
-
-  val UDF_LOAD_ERROR_CODE = 11302
+package org.apache.linkis.engineconn.computation.executor.upstream.listener
 
+import org.apache.linkis.engineconn.computation.executor.upstream.event.TaskStatusChangedForUpstreamMonitorEvent
+import org.apache.linkis.engineconn.executor.listener.EngineConnSyncListener
 
+trait TaskStatusChangedForUpstreamMonitorListener extends EngineConnSyncListener {
+  def onTaskStatusChanged(taskStatusChangedForUpstreamMonitorEvent: TaskStatusChangedForUpstreamMonitorEvent)
 }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/service/ECTaskEntranceMonitorService.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/service/ECTaskEntranceMonitorService.scala
new file mode 100644
index 0000000..c871574
--- /dev/null
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/service/ECTaskEntranceMonitorService.scala
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconn.computation.executor.upstream.service
+
+import javax.annotation.PostConstruct
+
+import org.apache.linkis.common.listener.Event
+import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.engineconn.computation.executor.upstream.ECTaskEntranceMonitor
+import org.apache.linkis.engineconn.computation.executor.upstream.event.TaskStatusChangedForUpstreamMonitorEvent
+import org.apache.linkis.engineconn.computation.executor.upstream.listener.TaskStatusChangedForUpstreamMonitorListener
+import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext
+import org.apache.linkis.engineconn.executor.listener.event.EngineConnSyncEvent
+import org.apache.linkis.governance.common.entity.ExecutionNodeStatus
+import org.springframework.stereotype.Component
+
+@Component
+class ECTaskEntranceMonitorService extends TaskStatusChangedForUpstreamMonitorListener with Logging {
+  // service-layer wrapper around ECTaskEntranceMonitor, registered as a Spring component
+  private val eCTaskEntranceMonitor = new ECTaskEntranceMonitor
+  private val syncListenerBus = ExecutorListenerBusContext.getExecutorListenerBusContext.getEngineConnSyncListenerBus
+  //TODO: configuration for start or not
+
+  @PostConstruct
+  def init(): Unit = {
+    syncListenerBus.addListener(this)
+    eCTaskEntranceMonitor.start
+    //TODO: shutdown
+  }
+
+  override def onEvent(event: EngineConnSyncEvent): Unit = event match {
+    case taskStatusChangedForUpstreamMonitorEvent: TaskStatusChangedForUpstreamMonitorEvent => onTaskStatusChanged(taskStatusChangedForUpstreamMonitorEvent)
+    case _ => info("ignored EngineConnSyncEvent " + event.getClass.getCanonicalName)
+  }
+
+  override def onTaskStatusChanged(event: TaskStatusChangedForUpstreamMonitorEvent): Unit = {
+    val fromStatus = event.fromStatus
+    val toStatus = event.toStatus
+    if ((fromStatus == ExecutionNodeStatus.Inited || fromStatus == ExecutionNodeStatus.Scheduled) &&
+      (toStatus == ExecutionNodeStatus.Running)) {
+      info("registering new task: " + event.taskId)
+      eCTaskEntranceMonitor.register(event.task, event.executor)
+    } else if (fromStatus == ExecutionNodeStatus.Running &&
+      (toStatus == ExecutionNodeStatus.Succeed || toStatus == ExecutionNodeStatus.Failed || toStatus == ExecutionNodeStatus.Cancelled || toStatus == ExecutionNodeStatus.Timeout)) {
+      info("unRegistering task: " + event.taskId)
+      eCTaskEntranceMonitor.unregister(event.task.getTaskId)
+    }
+  }
+
+  override def onEventError(event: Event, t: Throwable): Unit = {
+
+  }
+}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/wrapper/ConnectionInfoWrapper.scala
similarity index 67%
copy from linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
copy to linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/wrapper/ConnectionInfoWrapper.scala
index 08f9ddc..e0241e8 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/wrapper/ConnectionInfoWrapper.scala
@@ -15,14 +15,19 @@
  * limitations under the License.
  */
 
-package org.apache.linkis.engineconn.computation.executor.utlis
+package org.apache.linkis.engineconn.computation.executor.upstream.wrapper
 
-object ComputationErrorCode {
+import org.apache.linkis.engineconn.computation.executor.upstream.entity.UpstreamConnection
 
+trait ConnectionInfoWrapper {
 
-  val ASYNC_EXECUTOR_ERROR_CODE = 11301
+  def getKey(): String
 
-  val UDF_LOAD_ERROR_CODE = 11302
+  def getLastUpdateTime(): Long
 
+  def updateConnectionInfo(newInfo: UpstreamConnection): Unit
 
+  def getUpstreamConnection(): UpstreamConnection
+
+  def shouldClear(): Boolean
 }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/wrapper/ECTaskEntranceConnectionWrapper.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/wrapper/ECTaskEntranceConnectionWrapper.scala
new file mode 100644
index 0000000..96adaa1
--- /dev/null
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/wrapper/ECTaskEntranceConnectionWrapper.scala
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineconn.computation.executor.upstream.wrapper
+
+import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf
+import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask
+import org.apache.linkis.engineconn.computation.executor.execute.ComputationExecutor
+import org.apache.linkis.engineconn.computation.executor.upstream.entity.{ECTaskEntranceConnection, UpstreamConnection}
+import org.apache.commons.lang3.StringUtils
+
+class ECTaskEntranceConnectionWrapper(taskID: String, engineConnTask: EngineConnTask, executor: ComputationExecutor) extends ConnectionInfoWrapper with Logging {
+  /* an entry is cleared once its time in the map exceeds this threshold */
+  private val wrapperEntriesSurviveThresholdSec = ComputationExecutorConf.UPSTREAM_MONITOR_WRAPPER_ENTRIES_SURVIVE_THRESHOLD_SEC
+  @volatile
+  private var connectionInfo: ECTaskEntranceConnection = _
+  @volatile
+  private var lastUpdateTime: Long = System.currentTimeMillis()
+
+  override def getKey(): String = taskID
+
+  def getEngineConnTask(): EngineConnTask = engineConnTask
+
+  def getExecutor(): ComputationExecutor = executor
+
+  override def updateConnectionInfo(newInfo: UpstreamConnection): Unit = newInfo match {
+    case newInfo2: ECTaskEntranceConnection => {
+      if (connectionInfo == null || StringUtils.isBlank(connectionInfo.getKey)) {
+        connectionInfo = newInfo2
+      } else if (!connectionInfo.isSameConnectionAs(newInfo2)) {
+        error("Failed to update connection-info: target connection-info is not same as current." +
+          "current: " + connectionInfo.getKey + ", " + connectionInfo.getUpstreamServiceInstanceName +
+          "target: " + newInfo2.getKey + ", " + newInfo2.getUpstreamServiceInstanceName)
+      } else if (newInfo2.getPrevUpdatedAliveTimestamp() != -1l) {
+        connectionInfo = newInfo2
+      }
+      lastUpdateTime = System.currentTimeMillis
+    }
+    case _ => error("wrong data-type for UpstreamConnection:" + newInfo.getClass.getCanonicalName)
+  }
+
+  override def getUpstreamConnection(): ECTaskEntranceConnection = connectionInfo
+
+  override def getLastUpdateTime(): Long = lastUpdateTime // timestamp of the last connection-info update (initialized at creation)
+
+  override def shouldClear(): Boolean = {
+    System.currentTimeMillis - lastUpdateTime >= wrapperEntriesSurviveThresholdSec * 1000
+  }
+}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
index 08f9ddc..0e38562 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/utlis/ComputationErrorCode.scala
@@ -24,5 +24,12 @@ object ComputationErrorCode {
 
   val UDF_LOAD_ERROR_CODE = 11302
 
+  val VARIABLE_NULL_ERROR_CODE = 21304 //TODO
+
+  val START_UPSTREAM_MONITOR_TWICE = 21304 //TODO
+
+  val INVALID_DATA_TYPE_ERROR_CODE = 21304 //TODO
+
+  val UPSTREAM_MONITOR_EXCEPTION = 21304 //TODO
 
 }
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/java/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessTest.java b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/java/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessTest.java
new file mode 100644
index 0000000..1123a7e
--- /dev/null
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/java/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessTest.java
@@ -0,0 +1,63 @@
+package org.apache.linkis.engineconn.computation.executor.upstream.access;
+
+import org.apache.linkis.DataWorkCloudApplication;
+import org.apache.linkis.common.ServiceInstance;
+import org.apache.linkis.common.conf.DWCArgumentsParser;
+import org.apache.linkis.engineconn.common.creation.DefaultEngineCreationContext;
+import org.apache.linkis.engineconn.common.creation.EngineCreationContext;
+import org.apache.linkis.governance.common.conf.GovernanceCommonConf;
+import org.apache.linkis.governance.common.utils.EngineConnArgumentsParser;
+import org.apache.linkis.rpc.Sender;
+import org.apache.linkis.server.conf.ServerConfiguration;
+import org.apache.linkis.server.utils.LinkisMainHelper;
+import org.apache.commons.lang3.StringUtils;
+import org.junit.Before;
+import org.junit.Test;
+import org.springframework.cloud.client.discovery.DiscoveryClient;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.*;
+
+/**
+ * Created by shangda on 2022/2/9.
+ */
+public class ECTaskEntranceInfoAccessTest {
+
+    @Before
+    public void before() {
+//        System.getProperties().setProperty("wds.linkis.server.conf", "linkis-et-jobhistory-scan.properties");
+        System.out.println("Spring is enabled, now try to start SpringBoot.");
+        System.out.println("<--------------------Start SpringBoot App-------------------->");
+        String existsExcludePackages = ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES().getValue();
+        if (!StringUtils.isEmpty(existsExcludePackages)) {
+            DataWorkCloudApplication.setProperty(ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES().key(), existsExcludePackages);
+        }
+
+        String[] args = new String[]{
+                "--spring-conf","eureka.client.serviceUrl.defaultZone=http://ip:port/eureka/",
+                "--spring-conf", "logging.config=classpath:log4j2.xml",
+                "--spring-conf", "spring.profiles.active=engineconn",
+                "--spring-conf", "server.port=28899",
+                "--spring-conf", "spring.application.name=linkis-cg-engineconn"};
+        // load the Spring application classes
+        try {
+//            ECTaskEntranceInfoAccessHelper.initApp(args);
+        } catch (Exception e) {
+            System.out.println(e.getStackTrace());
+        }
+
+        ServiceInstance[] instances = Sender.getInstances(GovernanceCommonConf.ENGINE_CONN_SPRING_NAME().getValue());
+
+        System.out.println("<--------------------SpringBoot App init succeed-------------------->");
+    }
+
+    @Test
+    public void main() throws Exception {
+
+
+//        LinkisJobHistoryScanApplication.main(new String[]{"2021122919", "2021122921"});
+    }
+
+}
\ No newline at end of file
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/resources/linkis.properties b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/resources/linkis.properties
new file mode 100644
index 0000000..3e242fc
--- /dev/null
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/resources/linkis.properties
@@ -0,0 +1,9 @@
+wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/linkis/jobhistory/scan/app/jobhistory/dao/impl/*.xml
+wds.linkis.server.mybatis.typeAliasesPackage=org.apache.linkis.jobhistory.scan.app.jobhistory.entity
+wds.linkis.server.mybatis.BasePackage=org.apache.linkis.jobhistory.scan.app.jobhistory.dao
+wds.linkis.mysql.is.encrypt=false
+wds.linkis.server.mybatis.datasource.url=
+wds.linkis.server.mybatis.datasource.username=
+wds.linkis.server.mybatis.datasource.password=
+
+wds.linkis.server.version=v1
\ No newline at end of file
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/resources/log4j2.xml b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/resources/log4j2.xml
new file mode 100644
index 0000000..49eabc5
--- /dev/null
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/resources/log4j2.xml
@@ -0,0 +1,46 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Copyright 2019 WeBank
+  ~
+  ~ Licensed under the Apache License, Version 2.0 (the "License");
+  ~ you may not use this file except in compliance with the License.
+  ~ You may obtain a copy of the License at
+  ~
+  ~ http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<configuration status="error" monitorInterval="30">
+    <appenders>
+        <console name="RollingFile" target="SYSTEM_OUT">
+        </console>
+        <console name="Console-Plain" target="SYSTEM_OUT">
+            <PatternLayout pattern="%m%n"/>
+        </console>
+    </appenders>
+    <loggers>
+        <root level="INFO">
+            <appender-ref ref="RollingFile"/>
+        </root>
+        <logger name="org.apache.linkis.message.builder.DefaultMessageJob" level="warn" additivity="true">
+            <appender-ref ref="RollingFile"/>
+        </logger>
+
+        <logger name="org.apache.linkis.message.scheduler.DefaultMessageExecutor" level="warn"
+                additivity="true">
+            <appender-ref ref="RollingFile"/>
+        </logger>
+        <logger name="com.netflix.loadbalancer.DynamicServerListLoadBalancer" level="warn" additivity="true">
+            <appender-ref ref="RollingFile"/>
+        </logger>
+        <logger name="PlaintTextConsoleLogger" level="INFO" additivity="false">
+            <appender-ref ref="Console-Plain"/>
+        </logger>
+    </loggers>
+</configuration>
+
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessHelper.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessHelper.scala
new file mode 100644
index 0000000..a7fcdfe
--- /dev/null
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessHelper.scala
@@ -0,0 +1,63 @@
+package org.apache.linkis.engineconn.computation.executor.upstream.access
+
+import org.apache.commons.lang.StringUtils
+import org.apache.linkis.DataWorkCloudApplication
+import org.apache.linkis.common.ServiceInstance
+import org.apache.linkis.common.conf.{CommonVars, DWCArgumentsParser}
+import org.apache.linkis.common.utils.Utils
+import org.apache.linkis.engineconn.common.creation.DefaultEngineCreationContext
+import org.apache.linkis.engineconn.core.util.EngineConnUtils
+import org.apache.linkis.engineconn.launch.EngineConnServer.info
+import org.apache.linkis.governance.common.conf.GovernanceCommonConf
+import org.apache.linkis.governance.common.utils.EngineConnArgumentsParser
+import org.apache.linkis.manager.engineplugin.common.launch.process.Environment
+import org.apache.linkis.manager.label.builder.factory.{LabelBuilderFactory, LabelBuilderFactoryContext}
+import org.apache.linkis.manager.label.entity.Label
+import org.apache.linkis.server.conf.ServerConfiguration
+
+import scala.collection.mutable.ArrayBuffer
+
+
+object ECTaskEntranceInfoAccessHelper {
+  val engineCreationContext = new DefaultEngineCreationContext
+  val labelBuilderFactory: LabelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory
+
+  def initApp(args: Array[String]): Unit = {
+    val arguments = EngineConnArgumentsParser.getEngineConnArgumentsParser.parseToObj(args)
+    val engineConf = arguments.getEngineConnConfMap
+    engineCreationContext.setUser(engineConf.getOrElse("user", Utils.getJvmUser))
+    engineCreationContext.setTicketId(engineConf.getOrElse("ticketId", ""))
+    val host = CommonVars(Environment.ECM_HOST.toString, "127.0.0.1").getValue
+    val port = CommonVars(Environment.ECM_PORT.toString, "80").getValue
+    engineCreationContext.setEMInstance(ServiceInstance(GovernanceCommonConf.ENGINE_CONN_MANAGER_SPRING_NAME.getValue, s"$host:$port"))
+    val labels = new ArrayBuffer[Label[_]]
+    val labelArgs = engineConf.filter(_._1.startsWith(EngineConnArgumentsParser.LABEL_PREFIX))
+    if (labelArgs.nonEmpty) {
+      labelArgs.foreach { case (key, value) =>
+        labels += labelBuilderFactory.createLabel[Label[_]](key.replace(EngineConnArgumentsParser.LABEL_PREFIX, ""), value)
+      }
+      engineCreationContext.setLabels(labels.toList)
+    }
+    val jMap = new java.util.HashMap[String, String](engineConf.size)
+    jMap.putAll(engineConf)
+    engineCreationContext.setOptions(jMap)
+    engineCreationContext.setArgs(args)
+    //    EngineConnObject.setEngineCreationContext(engineCreationContext)
+    info("Finished to init engineCreationContext: " + EngineConnUtils.GSON.toJson(engineCreationContext))
+
+    info("Spring is enabled, now try to start SpringBoot.")
+    info("<--------------------Start SpringBoot App-------------------->")
+    val parser = DWCArgumentsParser.parse(engineCreationContext.getArgs)
+    DWCArgumentsParser.setDWCOptionMap(parser.getDWCConfMap)
+    val existsExcludePackages = ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES.getValue
+    if (!StringUtils.isEmpty(existsExcludePackages)) {
+      DataWorkCloudApplication.setProperty(ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES.key, existsExcludePackages)
+    }
+    // Load and start the Spring application classes
+    DataWorkCloudApplication.main(DWCArgumentsParser.formatSpringOptions(parser.getSpringConfMap))
+
+    info("<--------------------SpringBoot App init succeed-------------------->")
+  }
+
+
+}

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 12/18: 1. linkis-computation-engineconn - fix dependency for junit

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit 1cb0fb6178a1dd898c7853b936f3ae4bbdc2bc0d
Author: alexkun <xu...@qq.com>
AuthorDate: Fri Mar 4 20:10:33 2022 +0800

    1. linkis-computation-engineconn - fix dependency for junit
---
 .../linkis-engineconn/linkis-computation-engineconn/pom.xml        | 7 +++++++
 pom.xml                                                            | 1 +
 2 files changed, 8 insertions(+)

diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml
index 921ea3f..9291aae 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml
@@ -100,6 +100,13 @@
         </dependency>-->
 
 
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+            <version>${junit.version}</version>
+        </dependency>
+
     </dependencies>
 
     <build>
diff --git a/pom.xml b/pom.xml
index c481195..e5bc1d2 100644
--- a/pom.xml
+++ b/pom.xml
@@ -144,6 +144,7 @@
         <maven-surefire-plugin.version>2.22.1</maven-surefire-plugin.version>
         <jacoco.version>0.8.7</jacoco.version>
         <jacoco.skip>false</jacoco.skip>
+        <junit.version>4.12</junit.version>
     </properties>
 
     <dependencyManagement>

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 04/18: 1. linkis-computation-client - add some method of datasource module in sdk api

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit 7fb89c34cd182f5521b7ddc4aa9f097f5beaf57e
Author: alexkun <xu...@qq.com>
AuthorDate: Fri Mar 4 17:38:50 2022 +0800

    1. linkis-computation-client - add some method of datasource module in sdk api
---
 .../client/once/LinkisManagerClient.scala          |  2 +-
 .../org/apache/linkis/ujes/client/UJESClient.scala |  8 +++
 .../request/GetPartitionStatisticInfoAction.scala  | 74 ++++++++++++++++++++++
 .../client/request/GetTableBaseInfoAction.scala    | 67 ++++++++++++++++++++
 .../response/GetPartitionStatisticInfoResult.scala | 34 ++++++++++
 .../client/response/GetTableBaseInfoResult.scala   | 30 +++++++++
 6 files changed, 214 insertions(+), 1 deletion(-)

diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/LinkisManagerClient.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/LinkisManagerClient.scala
index 48de998..ebb6b34 100644
--- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/LinkisManagerClient.scala
+++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/LinkisManagerClient.scala
@@ -62,7 +62,7 @@ class LinkisManagerClientImpl(ujesClient: UJESClient) extends LinkisManagerClien
 
   override def killEngineConn(killEngineConnAction: KillEngineConnAction): KillEngineConnResult = execute(killEngineConnAction)
 
-  override def executeEngineConnOperation(engineOperateAction: EngineConnOperateAction): EngineConnOperateResult = execute(engineOperateAction)
+  override def executeEngineConnOperation(engineConnOperateAction: EngineConnOperateAction): EngineConnOperateResult = execute(engineConnOperateAction)
 
   override def close(): Unit = ujesClient.close()
 }
\ No newline at end of file
diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClient.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClient.scala
index 1dcb364..f3b33c8 100644
--- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClient.scala
+++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/UJESClient.scala
@@ -95,6 +95,14 @@ abstract class UJESClient extends Closeable {
     executeUJESJob(getTableStatisticInfoAction).asInstanceOf[GetTableStatisticInfoResult]
   }
 
+  def getTableBaseInfo(getTableBaseInfoAction: GetTableBaseInfoAction): GetTableBaseInfoResult = {
+    executeUJESJob(getTableBaseInfoAction).asInstanceOf[GetTableBaseInfoResult]
+  }
+
+  def getPartitionStatisticInfo(getPartitionStatisticInfoAction: GetPartitionStatisticInfoAction): GetPartitionStatisticInfoResult = {
+    executeUJESJob(getPartitionStatisticInfoAction).asInstanceOf[GetPartitionStatisticInfoResult]
+  }
+
 }
 object UJESClient {
   def apply(clientConfig: DWSClientConfig): UJESClient = new UJESClientImpl(clientConfig)
diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/GetPartitionStatisticInfoAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/GetPartitionStatisticInfoAction.scala
new file mode 100644
index 0000000..69de6b7
--- /dev/null
+++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/GetPartitionStatisticInfoAction.scala
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.ujes.client.request
+
+import org.apache.linkis.httpclient.request.GetAction
+import org.apache.linkis.ujes.client.exception.UJESClientBuilderException
+import org.apache.commons.lang.StringUtils
+
+
+class GetPartitionStatisticInfoAction extends GetAction with UJESJobAction {
+  override def suffixURLs: Array[String] = Array("datasource",  "getPartitionStatisticInfo")
+}
+
+object GetPartitionStatisticInfoAction {
+  def builder(): Builder = new Builder
+  class Builder private[GetPartitionStatisticInfoAction]() {
+    private var user: String = _
+    private var database: String = _
+    private var tableName: String = _
+    private var partitionPath: String = _
+
+    def setUser(user: String): Builder = {
+      this.user = user
+      this
+    }
+
+    def getUser(): String = user
+
+    def setDatabase(db: String): Builder = {
+      this.database = db
+      this
+    }
+
+    def setTable(table: String): Builder = {
+      this.tableName = table
+      this
+    }
+
+    def setPartitionPath(partitionPath: String): Builder = {
+      this.partitionPath = partitionPath
+      this
+    }
+
+    def builder(): GetPartitionStatisticInfoAction = {
+      if (StringUtils.isBlank(user)) throw new UJESClientBuilderException("user is needed!")
+      if (StringUtils.isBlank(database)) throw new UJESClientBuilderException("database is needed!")
+      if (StringUtils.isBlank(tableName)) throw new UJESClientBuilderException("table is needed!")
+      if (StringUtils.isBlank(partitionPath)) throw new UJESClientBuilderException("partitionPath is needed!")
+      val getPartitionStatisticInfoAction = new GetPartitionStatisticInfoAction
+      getPartitionStatisticInfoAction.setUser(user)
+      getPartitionStatisticInfoAction.setParameter("database", database)
+      getPartitionStatisticInfoAction.setParameter("tableName", tableName)
+      getPartitionStatisticInfoAction.setParameter("partitionPath", partitionPath)
+      getPartitionStatisticInfoAction
+    }
+
+  }
+
+}
diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/GetTableBaseInfoAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/GetTableBaseInfoAction.scala
new file mode 100644
index 0000000..8b0094e
--- /dev/null
+++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/GetTableBaseInfoAction.scala
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.ujes.client.request
+
+import org.apache.linkis.httpclient.request.GetAction
+import org.apache.linkis.ujes.client.exception.UJESClientBuilderException
+
+class GetTableBaseInfoAction extends GetAction with UJESJobAction {
+  override def suffixURLs: Array[String] = Array("datasource", "getTableBaseInfo")
+}
+
+
+object GetTableBaseInfoAction {
+  def builder(): Builder = new Builder
+  class Builder private[GetTableBaseInfoAction]() {
+
+    private var user: String = _
+
+    private var database: String = _
+
+    private var tablename: String = _
+
+    def setUser(user: String): Builder = {
+      this.user = user
+      this
+    }
+
+    def setDatabase(database: String): Builder = {
+      this.database = database
+      this
+    }
+
+    def setTablename(tablename: String): Builder = {
+      this.tablename = tablename
+      this
+    }
+
+    def build(): GetTableBaseInfoAction = {
+      if (user == null) throw new UJESClientBuilderException("user is needed!")
+      if (database == null) throw new UJESClientBuilderException("database is needed!")
+      if (tablename == null) throw new UJESClientBuilderException("tablename is needed!")
+      val getTableBaseInfoAction = new GetTableBaseInfoAction
+      getTableBaseInfoAction.setUser(user)
+      getTableBaseInfoAction.setParameter("database", database)
+      getTableBaseInfoAction.setParameter("tableName", tablename)
+      getTableBaseInfoAction
+    }
+
+  }
+}
+
+
diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/GetPartitionStatisticInfoResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/GetPartitionStatisticInfoResult.scala
new file mode 100644
index 0000000..d53fdb0
--- /dev/null
+++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/GetPartitionStatisticInfoResult.scala
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.ujes.client.response
+
+import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult
+import org.apache.linkis.httpclient.dws.response.DWSResult
+import org.apache.linkis.ujes.client.request.UserAction
+
+import java.util
+import scala.beans.BeanProperty
+
+@DWSHttpMessageResult("/api/rest_j/v\\d+/datasource/getPartitionStatisticInfo")
+class GetPartitionStatisticInfoResult extends DWSResult with UserAction {
+
+  @BeanProperty
+  var partitionStatisticInfo: util.Map[String, Any] = _
+
+}
+
diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/GetTableBaseInfoResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/GetTableBaseInfoResult.scala
new file mode 100644
index 0000000..d4f8672
--- /dev/null
+++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/GetTableBaseInfoResult.scala
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.ujes.client.response
+
+import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult
+import org.apache.linkis.httpclient.dws.response.DWSResult
+import org.apache.linkis.ujes.client.request.UserAction
+
+import java.util
+import scala.beans.BeanProperty
+
+@DWSHttpMessageResult("/api/rest_j/v\\d+/datasource/getTableBaseInfo")
+class GetTableBaseInfoResult extends DWSResult with UserAction {
+  @BeanProperty var tableBaseInfo: util.Map[String, Object] = _
+}

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 09/18: 1. linkis-engineplugin-spark - fix kill task bug

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit f42346acfd0505e9fa1f43e0d39d234c9ab47ab1
Author: alexkun <xu...@qq.com>
AuthorDate: Fri Mar 4 19:52:58 2022 +0800

    1. linkis-engineplugin-spark - fix kill task bug
---
 .../engineplugin/spark/executor/SQLSession.scala   | 32 ++++++++++++++++------
 .../spark/executor/SparkEngineConnExecutor.scala   |  8 ++++--
 .../execution/datasources/csv/DolphinToSpark.scala | 12 ++++----
 3 files changed, 36 insertions(+), 16 deletions(-)

diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SQLSession.scala b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SQLSession.scala
index 9d03e14..498c2c4 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SQLSession.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SQLSession.scala
@@ -97,15 +97,7 @@ object SQLSession extends Logging {
     Utils.tryThrow({
       while (index < maxResult && iterator.hasNext) {
         val row = iterator.next()
-        val r: Array[Any] = columns.indices.map { i =>
-          val data = row(i) match {
-            case value: String => value.replaceAll("\n|\t", " ")
-            case value: Double => nf.format(value)
-            case value: Any => value.toString
-            case _ => null
-          }
-          data
-        }.toArray
+        val r: Array[Any] = columns.indices.map{ i => toHiveString(row(i))}.toArray
         writer.addRecord(new TableRecord(r))
         index += 1
       }
@@ -119,6 +111,28 @@ object SQLSession extends Logging {
     engineExecutionContext.sendResultSet(writer)
   }
 
+
+  private def toHiveString(value: Any): String = {
+
+    value match {
+      case value: String => value.replaceAll("\n|\t", " ")
+      case value: Double => nf.format(value)
+      case value: java.math.BigDecimal => formatDecimal(value)
+      case value: Any => value.toString
+      case _ => null
+    }
+
+  }
+
+  private def formatDecimal(d: java.math.BigDecimal): String = {
+    if (null == d || d.compareTo(java.math.BigDecimal.ZERO) == 0) {
+      java.math.BigDecimal.ZERO.toPlainString
+    } else {
+      d.stripTrailingZeros().toPlainString
+    }
+  }
+
+
   def showHTML(sc: SparkContext, jobGroup: String, htmlContent: Any, engineExecutionContext: EngineExecutionContext): Unit = {
     val startTime = System.currentTimeMillis()
     val writer = engineExecutionContext.createResultSetWriter(ResultSetFactory.HTML_TYPE)
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkEngineConnExecutor.scala b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkEngineConnExecutor.scala
index bdc7044..7ec3f0d 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkEngineConnExecutor.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkEngineConnExecutor.scala
@@ -55,16 +55,17 @@ abstract class SparkEngineConnExecutor(val sc: SparkContext, id: Long) extends C
 
   private var executorLabels: util.List[Label[_]] = new util.ArrayList[Label[_]]()
 
+  private var thread: Thread = _
+
   override def init(): Unit = {
     info(s"Ready to change engine state!")
 //    setCodeParser()  // todo check
     super.init()
   }
 
-
-
   override def executeLine(engineExecutorContext: EngineExecutionContext, code: String): ExecuteResponse = Utils.tryFinally {
     this.engineExecutionContext = engineExecutorContext
+    thread = Thread.currentThread()
     if (sc.isStopped) {
       error("Spark application has already stopped, please restart it.")
       transition(NodeStatus.Failed)
@@ -208,6 +209,9 @@ abstract class SparkEngineConnExecutor(val sc: SparkContext, id: Long) extends C
   override def killTask(taskID: String): Unit = {
     if (!sc.isStopped) {
       sc.cancelAllJobs
+      if (null != thread) {
+        Utils.tryAndWarn(thread.interrupt())
+      }
       killRunningTask()
     }
     super.killTask(taskID)
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/org/apache/spark/sql/execution/datasources/csv/DolphinToSpark.scala b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/org/apache/spark/sql/execution/datasources/csv/DolphinToSpark.scala
index c75b6f7..d8761bf 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/org/apache/spark/sql/execution/datasources/csv/DolphinToSpark.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/org/apache/spark/sql/execution/datasources/csv/DolphinToSpark.scala
@@ -32,8 +32,8 @@ import org.apache.spark.sql.{DataFrame, Row, SparkSession}
   */
 object DolphinToSpark {
 
-  private val bigDecimalPrecision = 20
-  private val bigDecimalScale = 10
+  private val bigDecimalPrecision = CommonVars("wds.linkis.dolphin.decimal.precision", 32).getValue
+  private val bigDecimalScale = CommonVars("wds.linkis.dolphin.decimal.scale", 10).getValue
 
   def createTempView(spark: SparkSession, tableName: String, res: String): Unit = {
     createTempView(spark, tableName, res, false)
@@ -60,15 +60,17 @@ object DolphinToSpark {
 
   def toSparkType(dataType: wds.DataType): DataType = dataType match {
     case wds.NullType => NullType
-    case wds.BooleanType =>  BooleanType
+    //case wds.StringType | wds.CharType | wds.VarcharType | wds.StructType | wds.ListType | wds.ArrayType | wds.MapType => StringType
+    case wds.BooleanType => BooleanType
     case wds.ShortIntType => ShortType
     case wds.IntType => IntegerType
     case wds.LongType => LongType
     case wds.BigIntType => LongType
     case wds.FloatType => FloatType
-    case wds.DoubleType  => DoubleType
-    case wds.DecimalType => DecimalType(bigDecimalPrecision,bigDecimalScale)
+    case wds.DoubleType => DoubleType
+    case wds.DecimalType => DecimalType(bigDecimalPrecision, bigDecimalScale)
     case wds.DateType => DateType
+    //case wds.TimestampType => TimestampType
     case wds.BinaryType => BinaryType
     case _ => StringType
   }

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 16/18: Support to remove the limit #1573

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit a9a9767cc1c4d55644ca970522b91ede8103404f
Author: peacewong <wp...@gmail.com>
AuthorDate: Sat Mar 5 16:06:13 2022 +0800

    Support to remove the limit #1573
---
 .../entrance/interceptor/impl/SQLLimitEntranceInterceptor.scala      | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/SQLLimitEntranceInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/SQLLimitEntranceInterceptor.scala
index 7a53da1..d7e7612 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/SQLLimitEntranceInterceptor.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/SQLLimitEntranceInterceptor.scala
@@ -17,6 +17,7 @@
  
 package org.apache.linkis.entrance.interceptor.impl
 
+import org.apache.linkis.common.log.LogUtils
 import org.apache.linkis.entrance.conf.EntranceConfiguration
 import org.apache.linkis.entrance.interceptor.EntranceInterceptor
 import org.apache.linkis.governance.common.entity.job.JobRequest
@@ -27,11 +28,11 @@ class SQLLimitEntranceInterceptor extends EntranceInterceptor {
   private val  LIMIT_CREATORS = EntranceConfiguration.SQL_LIMIT_CREATOR.getValue
 
   override def apply(task: JobRequest, logAppender: java.lang.StringBuilder): JobRequest = {
-    /*val (user, creator) = LabelUtil.getUserCreator(task.getLabels)
+    val (user, creator) = LabelUtil.getUserCreator(task.getLabels)
     if (! LIMIT_CREATORS.contains(creator)) {
       logAppender.append(LogUtils.generateWarn(s"The code you submit will not be limited by the limit \n") )
       return task
-    }*/
+    }
     val codeType = {
       val codeType = LabelUtil.getCodeType(task.getLabels)
       if (null != codeType) {

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 18/18: optimize license

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit e311d0de3c61230ada6c35bc3382168df8ca677f
Author: peacewong <wp...@gmail.com>
AuthorDate: Sat Mar 5 16:22:12 2022 +0800

    optimize license
---
 .../assembly-combined/bin/linkis-cli               |  84 ++++++++++-------
 .../assembly-combined/bin/linkis-cli-pre           |  13 ++-
 .../conf/linkis-cg-engineconnmanager.properties    |  12 +--
 .../conf/linkis-cg-engineplugin.properties         |   2 -
 .../conf/linkis-cg-entrance.properties             |  16 +---
 .../conf/linkis-cg-linkismanager.properties        |   7 +-
 .../conf/linkis-mg-gateway.properties              |   6 +-
 .../assembly-combined/conf/linkis-ps-cs.properties |   1 -
 .../conf/linkis-ps-data-source-manager.properties  |   6 +-
 .../conf/linkis-ps-publicservice.properties        |  17 ----
 .../assembly-combined/conf/linkis.properties       |  24 ++---
 .../assembly-combined/conf/log4j2-console.xml      |  13 +--
 .../assembly-combined/conf/log4j2.xml              |   6 --
 .../assembly-combined/conf/token.properties        |   4 +-
 .../src/main/assembly/distribution.xml             |   1 -
 .../assembly-combined/sbin/common.sh               |   3 +-
 .../assembly-combined/sbin/ext/linkis-common-start |   4 +-
 .../assembly-combined/sbin/ext/linkis-mg-eureka    |  12 ++-
 .../assembly-combined/sbin/ext/linkis-mg-gateway   |   6 +-
 .../sbin/ext/linkis-ps-metadatamanager             |   9 +-
 .../assembly-combined/sbin/linkis-daemon.sh        |   2 +-
 .../assembly-combined/sbin/linkis-start-all.sh     |   2 +-
 .../assembly-combined/sbin/linkis-stop-all.sh      |  33 +------
 .../src/main/assembly/assembly.xml                 |  65 ++-----------
 assembly-combined-package/bin/checkEnv.sh          |   1 -
 assembly-combined-package/bin/install.sh           | 101 +++++----------------
 .../deploy-config/linkis-env.sh                    |   6 +-
 .../linkis-computation-engineconn/pom.xml          |  13 ---
 .../access/ECTaskEntranceInfoAccessTest.java       |  73 ---------------
 .../src/test/resources/linkis.properties           |  23 -----
 .../src/test/resources/log4j2.xml                  |  46 ----------
 .../conf/AccessibleExecutorConfiguration.scala     |   2 +-
 .../linkis/entrance/job/EntranceExecutionJob.java  |  34 +++----
 33 files changed, 150 insertions(+), 497 deletions(-)

diff --git a/assembly-combined-package/assembly-combined/bin/linkis-cli b/assembly-combined-package/assembly-combined/bin/linkis-cli
index f6fce03..2c15cf6 100644
--- a/assembly-combined-package/assembly-combined/bin/linkis-cli
+++ b/assembly-combined-package/assembly-combined/bin/linkis-cli
@@ -13,15 +13,26 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+
+#set -x
 export LANG=en_US.utf-8
 #set -x
 
 LINKIS_CLIENT='org.apache.linkis.cli.application.LinkisClientApplication'
 
+
+## color
+RED='\033[0;31m'
+NC='\033[0m' # No Color
+GREEN='\033[0;32m'
+#used as: echo -e "Apache ${RED}Linkis ${NC} Test \n"
+
+
 i=0
 for arg in "$@"
 do
-        ARG[i]=${arg}
+        args[i]=${arg}
+        input_args[i]=${arg}
         ((i++))
 done
 
@@ -30,26 +41,31 @@ done
 # find java_home
 #===============================================
 locate_java_home() {
-  local JAVA8_HOME_CANDIDATES='\
-    /usr/java/jdk1.8* \
-    /nemo/jdk1.8*'
-
-  JAVA_HOME_CANDIDATES="$JAVA8_HOME_CANDIDATES"
-
-  # attempt to find java 8
   flag=""
-  for candidate_regex in $JAVA_HOME_CANDIDATES ; do
-      for candidate in `ls -rd $candidate_regex 2>/dev/null`; do
-        if [ -e $candidate/bin/java ]; then
-          export JAVA_HOME=$candidate
-          flag="true"
-          break 2
-        fi
-      done
-  done
+  if [[ -e "$JAVA_HOME" && -e $JAVA_HOME/bin/java ]]; then
+    flag="true"
+  else
+    local JAVA8_HOME_CANDIDATES='\
+      /usr/java/jdk1.8* \
+      /nemo/jdk1.8*'
+
+    JAVA_HOME_CANDIDATES="$JAVA8_HOME_CANDIDATES"
+
+    # attempt to find java 8
+
+    for candidate_regex in $JAVA_HOME_CANDIDATES ; do
+        for candidate in `ls -rd $candidate_regex 2>/dev/null`; do
+          if [ -e $candidate/bin/java ]; then
+            export JAVA_HOME=$candidate
+            flag="true"
+            break 2
+          fi
+        done
+    done
+  fi
 
   if [ -z "$flag" ]; then
-    echo -e "\033[0;31;40mNo JDK 8 found. linkis-client requires Java 1.8\033[0m" 1>&2
+    echo -e "${RED}No JDK 8 found. linkis-client requires Java 1.8${NC}" 1>&2
     exit 1
   fi
 
@@ -71,19 +87,19 @@ EOF
 
 
 function call_linkis_client() {
-
-        LINKIS_DEPLOY_SUB_DIR='/linkis-computation-governance/linkis-client/linkis-cli/'
-        LINKIS_CLIENT_LOG_DIR="/appcom/logs/linkis-cli"
-        if [ ! -d $LINKIS_CLIENT_LOG_DIR ];then
-            LINKIS_CLIENT_LOG_DIR="${WORK_DIR}/logs"
-        fi
-        LINKIS_CLIENT_CONF_DIR="/appcom/config/linkisCli-config"
-        if [ ! -d $LINKIS_CLIENT_CONF_DIR ];then
-            LINKIS_CLIENT_CONF_DIR="${WORK_DIR}/conf/${LINKIS_DEPLOY_SUB_DIR}"
-        fi
-        LINKIS_CLIENT_CONF_FILES=${LINKIS_CLIENT_CONF_FILES:-"linkis-cli.properties"}
-
-        LINKIS_CLIENT_CLASSPATH="${LINKIS_CLIENT_CONF_DIR}:${WORK_DIR}/lib/${LINKIS_DEPLOY_SUB_DIR}*"
+        current_dir=`pwd`
+        workdir=`dirname "$0"`/../
+        workdir=`cd ${workdir};pwd`
+        cd ${current_dir}
+
+        LINKIS_DEPLOY_LIB_DIR='lib/linkis-computation-governance/linkis-client/linkis-cli/'
+        LINKIS_COMMON_LIB_DIR='lib/linkis-commons/public-module/'
+        LINKIS_DEPLOY_CONF_DIR='conf/linkis-cli'
+        LINKIS_DEPLOY_LOG_DIR='logs/linkis-cli'
+        LINKIS_CLIENT_CLASSPATH=${workdir}/${LINKIS_DEPLOY_CONF_DIR}:${workdir}/${LINKIS_DEPLOY_LIB_DIR}*:${workdir}/${LINKIS_COMMON_LIB_DIR}*:${CLASSPATH}
+        LINKIS_CLIENT_LOG_DIR=${LINKIS_CLIENT_LOG_DIR:-"${workdir}/${LINKIS_DEPLOY_LOG_DIR}"}
+        LINKIS_CLIENT_CONF_DIR=${LINKIS_CLIENT_CONF_DIR:-"${workdir}/${LINKIS_DEPLOY_CONF_DIR}"}
+        LINKIS_CLIENT_CONF_FILE=${LINKIS_CLIENT_CONF_FILE:-"linkis-cli.properties"}
 
         LINKIS_CLIENT_HEAP_OPTS="-server -Xms32m -Xmx2048m -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=${LINKIS_CLIENT_LOG_DIR} -XX:ErrorFile=${LINKIS_CLIENT_LOG_DIR}/ps_err_pid%p.log"
         LINKIS_CLIENT_GC_OPTS="-XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=80 -XX:+DisableExplicitGC"
@@ -91,7 +107,11 @@ function call_linkis_client() {
         #DEBUG_OPTS="-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=5005"
         LINKIS_CLIENT_OPTS=${LINKIS_CLIENT_OPTS:-" ${DEBUG_OPTS} "}
 
-        exec ${JAVA} ${LINKIS_CLIENT_HEAP_OPTS} ${LINKIS_CLIENT_GC_OPTS} ${LINKIS_CLIENT_OPTS} -classpath ${LINKIS_CLIENT_CLASSPATH} -Dconf.root=${LINKIS_CLIENT_CONF_DIR} ${LINKIS_CLIENT_LOG_OPTS}  ${LINKIS_CLIENT} "${ARG[@]}"
+
+        echo  "=====Java Start Command====="
+        echo "exec ${JAVA} ${LINKIS_CLIENT_HEAP_OPTS} ${LINKIS_CLIENT_GC_OPTS} ${LINKIS_CLIENT_OPTS} -classpath ${LINKIS_CLIENT_CLASSPATH} -Dconf.root=${LINKIS_CLIENT_CONF_DIR} -Dconf.file=${LINKIS_CLIENT_CONF_FILE} ${LINKIS_CLIENT_LOG_OPTS}  ${LINKIS_CLIENT} '${input_args[@]}'"
+
+        exec ${JAVA} ${LINKIS_CLIENT_HEAP_OPTS} ${LINKIS_CLIENT_GC_OPTS} ${LINKIS_CLIENT_OPTS} -classpath ${LINKIS_CLIENT_CLASSPATH} -Dconf.root=${LINKIS_CLIENT_CONF_DIR} -Dconf.file=${LINKIS_CLIENT_CONF_FILE} ${LINKIS_CLIENT_LOG_OPTS}  ${LINKIS_CLIENT} "${input_args[@]}"
 }
 
 if [ -z $LINKIS_HOME ]; then
diff --git a/assembly-combined-package/assembly-combined/bin/linkis-cli-pre b/assembly-combined-package/assembly-combined/bin/linkis-cli-pre
index 647b919..4273232 100755
--- a/assembly-combined-package/assembly-combined/bin/linkis-cli-pre
+++ b/assembly-combined-package/assembly-combined/bin/linkis-cli-pre
@@ -1,13 +1,12 @@
 #!/bin/bash
 #
-# Copyright 2019 WeBank
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
 # http://www.apache.org/licenses/LICENSE-2.0
-#
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
diff --git a/assembly-combined-package/assembly-combined/conf/linkis-cg-engineconnmanager.properties b/assembly-combined-package/assembly-combined/conf/linkis-cg-engineconnmanager.properties
index 639f467..8189cc8 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis-cg-engineconnmanager.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis-cg-engineconnmanager.properties
@@ -16,17 +16,7 @@
 #
 ##restful
 wds.linkis.server.restful.scan.packages=org.apache.linkis.em.restful
-wds.linkis.engineconn.root.dir=/data/bdp/linkis
-#wds.linkis.ecm.engineconn.create.duration=600000
-
-#wds.linkis.ecm.health.report.period=30
-
-#wds.linkis.ecm.cores.max=24
-
-#wds.linkis.ecm.memory.max=64424509440
-
-#wds.linkis.ecm.engineconn.instances.max=24
-wds.linkis.ecm.protected.load.enabled=false
+wds.linkis.engineconn.root.dir=/appcom/tmp
 ##Spring
 spring.server.port=9102
 ##set engine environment in econn start script, such as SPARK3_HOME,the value of env will read from ecm host by key.
diff --git a/assembly-combined-package/assembly-combined/conf/linkis-cg-engineplugin.properties b/assembly-combined-package/assembly-combined/conf/linkis-cg-engineplugin.properties
index d25b690..2919ccf 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis-cg-engineplugin.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis-cg-engineplugin.properties
@@ -24,7 +24,5 @@ wds.linkis.engineConn.plugin.cache.expire-in-seconds=100000
 wds.linkis.engineConn.dist.load.enable=true
 #wds.linkis.engineconn.home=/appcom/Install/LinkisInstall/lib/linkis-engineconn-plugins
 #wds.linkis.engineconn.plugin.loader.store.path=/appcom/Install/LinkisInstall/lib/linkis-engineconn-plugins
-
-wds.linkis.ms.parallelism.consumer.max=200
 ##Spring
 spring.server.port=9103
\ No newline at end of file
diff --git a/assembly-combined-package/assembly-combined/conf/linkis-cg-entrance.properties b/assembly-combined-package/assembly-combined/conf/linkis-cg-entrance.properties
index 8177a7f..2122464 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis-cg-entrance.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis-cg-entrance.properties
@@ -17,19 +17,7 @@
 ##restful
 wds.linkis.server.restful.scan.packages=org.apache.linkis.entrance.restful
 wds.linkis.server.socket.mode=false
-wds.linkis.entrance.config.log.path=hdfs:///appcom/logs/linkis
-#wds.linkis.resultSet.store.path=hdfs:///tmp/linkis
-wds.linkis.orchestrator.computation.operation.builder.class=org.apache.linkis.orchestrator.operation.TuningOperationBuilder
-
-
-wds.linkis.entrance.shell.danger.check.enabled=false
-#group capacity
-wds.linkis.concurrent.group.factory.capacity=30000
-
-wds.linkis.entrance.max.capacity=30000
-#errorcode
-wds.linkis.errorcode.future.timeout=5000
-
-wds.linkis.ms.parallelism.consumer.max=200
+#wds.linkis.entrance.config.log.path=hdfs:///tmp/linkis/
+wds.linkis.resultSet.store.path=hdfs:///tmp/linkis
 ##Spring
 spring.server.port=9104
\ No newline at end of file
diff --git a/assembly-combined-package/assembly-combined/conf/linkis-cg-linkismanager.properties b/assembly-combined-package/assembly-combined/conf/linkis-cg-linkismanager.properties
index 3bbb431..5653b8d 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis-cg-linkismanager.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis-cg-linkismanager.properties
@@ -21,9 +21,4 @@ wds.linkis.server.mybatis.mapperLocations=classpath:org/apache/linkis/manager/da
 wds.linkis.server.mybatis.typeAliasesPackage=
 wds.linkis.server.mybatis.BasePackage=org.apache.linkis.manager.dao,org.apache.linkis.resourcemanager.external.dao
 ##Spring
-spring.server.port=9101
-
-wds.linkis.ms.parallelism.consumer.max=200
-
-#Resource action record for debug
-wds.linkis.manager.rm.resource.action.record=false
+spring.server.port=9101
\ No newline at end of file
diff --git a/assembly-combined-package/assembly-combined/conf/linkis-mg-gateway.properties b/assembly-combined-package/assembly-combined/conf/linkis-mg-gateway.properties
index 625fd48..0eb4092 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis-mg-gateway.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis-mg-gateway.properties
@@ -32,8 +32,4 @@ wds.linkis.ldap.proxy.userNameFormat=
 wds.linkis.admin.user=hadoop
 #wds.linkis.admin.password=
 ##Spring
-spring.server.port=9001
-
-
-wds.linkis.gateway.conf.publicservice.list=query,application,filesystem,udf,variable,microservice,errorcode
-
+spring.server.port=9001
\ No newline at end of file
diff --git a/assembly-combined-package/assembly-combined/conf/linkis-ps-cs.properties b/assembly-combined-package/assembly-combined/conf/linkis-ps-cs.properties
index 7fc53ea..26b9c97 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis-ps-cs.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis-ps-cs.properties
@@ -22,6 +22,5 @@ wds.linkis.server.mybatis.typeAliasesPackage=org.apache.linkis.cs.persistence.en
 wds.linkis.server.mybatis.BasePackage=org.apache.linkis.cs.persistence.dao
 ##Spring
 spring.server.port=9108
-wds.linkis.ms.parallelism.consumer.max=200
 # ps-cs prefix must be started with 'cs_'
 spring.eureka.instance.metadata-map.route=cs_1_dev
\ No newline at end of file
diff --git a/assembly-combined-package/assembly-combined/conf/linkis-ps-data-source-manager.properties b/assembly-combined-package/assembly-combined/conf/linkis-ps-data-source-manager.properties
index d73091f..0d374b0 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis-ps-data-source-manager.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis-ps-data-source-manager.properties
@@ -30,12 +30,8 @@ hive.meta.user=
 hive.meta.password=
 wds.linkis.metadata.hive.encode.enabled=false
 
-#dsm
-wds.linkis.server.dsm.auth.admin=
-wds.linkis.server.mdm.service.app.name=linkis-ps-metadatamanager
-
 ##Spring
-spring.server.port=8196
+spring.server.port=9106
 spring.spring.main.allow-bean-definition-overriding=true
 spring.spring.jackson.serialization.FAIL_ON_EMPTY_BEANS=false
 spring.jackson.serialization.FAIL_ON_EMPTY_BEANS=false
diff --git a/assembly-combined-package/assembly-combined/conf/linkis-ps-publicservice.properties b/assembly-combined-package/assembly-combined/conf/linkis-ps-publicservice.properties
index 5f8ff60..f6f7129 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis-ps-publicservice.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis-ps-publicservice.properties
@@ -33,20 +33,3 @@ hive.meta.password=
 ##Spring
 spring.server.port=9105
 spring.spring.main.allow-bean-definition-overriding=true
-
-wds.linkis.io.extra.labels={tenant:"ioClient"}
-#jobhistory
-wds.linkis.query.code.store.length=2000
-
-wds.linkis.io.loadbalance.capacity=2
-
-#wds.linkis.workspace.resultset.download.maxsize.csv=300000
-
-#wds.linkis.workspace.resultset.download.maxsize.excel=300000
-
-wds.linkis.ms.parallelism.consumer.max=200
-
-#configuration
-wds.linkis.configuration.use.creator.default.value=false
-
-
diff --git a/assembly-combined-package/assembly-combined/conf/linkis.properties b/assembly-combined-package/assembly-combined/conf/linkis.properties
index 9956efc..208a261 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis.properties
@@ -35,32 +35,22 @@ wds.linkis.mysql.is.encrypt=false
 
 ##file path
 wds.linkis.filesystem.root.path=file:///tmp/linkis/
-wds.linkis.filesystem.hdfs.root.path=hdfs:///apps-data
+wds.linkis.filesystem.hdfs.root.path=hdfs:///tmp/linkis/
 ##bml path:default use hdfs
 wds.linkis.bml.is.hdfs=true
 wds.linkis.bml.hdfs.prefix=/apps-data
 #wds.linkis.bml.local.prefix=/data/dss/bml
 
 ##engine Version
-#wds.linkis.spark.engine.version=2.4.3
-#wds.linkis.hive.engine.version=2.3.3
-#wds.linkis.python.engine.version=python2
-#wds.linkis.appconn.engine.version=1
+#wds.linkis.spark.engine.version=
+#wds.linkis.hive.engine.version=
+#wds.linkis.python.engine.version=
+
 #LinkisHome
-wds.linkis.home=/appcom/Install/linkis
+wds.linkis.home=/appcom/Install/LinkisInstall
 #Linkis governance station administrators
 wds.linkis.governance.station.admin=hadoop
-#wds.linkis.orchestrator.task.consumer.wait=30
-wds.linkis.server.mybatis.datasource.minIdle=5
-
-wds.linkis.server.mybatis.datasource.maxActive=40
-
-wds.linkis.storage.enable.io.proxy=true
-
-wds.linkis.hadoop.hdfs.cache.enable=true
-
-wds.linkis.orchestrator.execution.task.runner.max.size=200
-
+wds.linkis.gateway.conf.publicservice.list=query,jobhistory,application,configuration,filesystem,udf,variable,microservice,errorcode,bml,datasource
 
 spring.spring.servlet.multipart.max-file-size=500MB
 spring.spring.servlet.multipart.max-request-size=500MB
diff --git a/assembly-combined-package/assembly-combined/conf/log4j2-console.xml b/assembly-combined-package/assembly-combined/conf/log4j2-console.xml
index f9a1c94..d903fa1 100644
--- a/assembly-combined-package/assembly-combined/conf/log4j2-console.xml
+++ b/assembly-combined-package/assembly-combined/conf/log4j2-console.xml
@@ -1,12 +1,13 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <!--
-  ~ Copyright 2019 WeBank
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
   ~
-  ~ Licensed under the Apache License, Version 2.0 (the "License");
-  ~ you may not use this file except in compliance with the License.
-  ~ You may obtain a copy of the License at
-  ~
-  ~ http://www.apache.org/licenses/LICENSE-2.0
+  ~   http://www.apache.org/licenses/LICENSE-2.0
   ~
   ~ Unless required by applicable law or agreed to in writing, software
   ~ distributed under the License is distributed on an "AS IS" BASIS,
diff --git a/assembly-combined-package/assembly-combined/conf/log4j2.xml b/assembly-combined-package/assembly-combined/conf/log4j2.xml
index 4f4cb3c..dbb9b10 100644
--- a/assembly-combined-package/assembly-combined/conf/log4j2.xml
+++ b/assembly-combined-package/assembly-combined/conf/log4j2.xml
@@ -32,13 +32,7 @@
         <root level="INFO">
             <appender-ref ref="RollingFile"/>
         </root>
-        <logger name="org.apache.linkis.message.builder.DefaultMessageJob" level="warn" additivity="true">
-            <appender-ref ref="RollingFile"/>
-        </logger>
 
-        <logger name="org.apache.linkis.message.scheduler.DefaultMessageExecutor" level="warn" additivity="true">
-            <appender-ref ref="RollingFile"/>
-        </logger>
         <logger name="com.netflix.loadbalancer.DynamicServerListLoadBalancer" level="warn" additivity="true">
             <appender-ref ref="RollingFile"/>
         </logger>
diff --git a/assembly-combined-package/assembly-combined/conf/token.properties b/assembly-combined-package/assembly-combined/conf/token.properties
index c623747..7d9019f 100644
--- a/assembly-combined-package/assembly-combined/conf/token.properties
+++ b/assembly-combined-package/assembly-combined/conf/token.properties
@@ -17,6 +17,4 @@ QML-AUTH=*
 BML-AUTH=*
 WS-AUTH=*
 dss-AUTH=*
-QUALITIS-AUTH=*
-VALIDATOR-AUTH=*
-DOPS-AUTH=*
\ No newline at end of file
+QUALITIS-AUTH=*
\ No newline at end of file
diff --git a/assembly-combined-package/assembly-combined/public-module-combined/src/main/assembly/distribution.xml b/assembly-combined-package/assembly-combined/public-module-combined/src/main/assembly/distribution.xml
index 590a961..d9cdcc6 100644
--- a/assembly-combined-package/assembly-combined/public-module-combined/src/main/assembly/distribution.xml
+++ b/assembly-combined-package/assembly-combined/public-module-combined/src/main/assembly/distribution.xml
@@ -23,7 +23,6 @@
     <id>module</id>
     <formats>
         <format>dir</format>
-        <format>zip</format>
     </formats>
     <includeBaseDirectory>false</includeBaseDirectory>
 <!--    <baseDirectory>lib</baseDirectory>-->
diff --git a/assembly-combined-package/assembly-combined/sbin/common.sh b/assembly-combined-package/assembly-combined/sbin/common.sh
index 59e8279..6d79a25 100644
--- a/assembly-combined-package/assembly-combined/sbin/common.sh
+++ b/assembly-combined-package/assembly-combined/sbin/common.sh
@@ -19,8 +19,7 @@ source ~/.bash_profile
 
 export local_host="`hostname --fqdn`"
 
-#ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}')
-ipaddr=`hostname -i`
+ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}')
 
 function isLocal(){
     if [ "$1" == "127.0.0.1" ];then
diff --git a/assembly-combined-package/assembly-combined/sbin/ext/linkis-common-start b/assembly-combined-package/assembly-combined/sbin/ext/linkis-common-start
index 8f26c6a..d2a15e2 100644
--- a/assembly-combined-package/assembly-combined/sbin/ext/linkis-common-start
+++ b/assembly-combined-package/assembly-combined/sbin/ext/linkis-common-start
@@ -21,10 +21,10 @@
 source $LINKIS_CONF_DIR/linkis-env.sh
 
 if [ "$LINKIS_LOG_DIR" = "" ]; then
-  export LINKIS_LOG_DIR="/data/bdp/logs/linkis"
+  export LINKIS_LOG_DIR="$LINKIS_HOME/logs"
 fi
 if [ ! -w "$LINKIS_LOG_DIR" ] ; then
-  sudo mkdir -p $LINKIS_LOG_DIR; sudo chown -R hadoop:hadoop $LINKIS_LOG_DIR
+  mkdir -p "$LINKIS_LOG_DIR"
 fi
 
 if test -z "$SERVER_HEAP_SIZE"
diff --git a/assembly-combined-package/assembly-combined/sbin/ext/linkis-mg-eureka b/assembly-combined-package/assembly-combined/sbin/ext/linkis-mg-eureka
index 2bb35b9..6dced4a 100644
--- a/assembly-combined-package/assembly-combined/sbin/ext/linkis-mg-eureka
+++ b/assembly-combined-package/assembly-combined/sbin/ext/linkis-mg-eureka
@@ -25,10 +25,10 @@ SERVER_SUFFIX="linkis-spring-cloud-services/linkis-mg-eureka"
 #export DEBUG_PORT=
 
 if [ "$LINKIS_LOG_DIR" = "" ]; then
-  export LINKIS_LOG_DIR="/data/bdp/logs/linkis"
+  export LINKIS_LOG_DIR="$LINKIS_HOME/logs"
 fi
 if [ ! -w "$LINKIS_LOG_DIR" ] ; then
-  sudo mkdir -p $LINKIS_LOG_DIR; sudo chown -R hadoop:hadoop $LINKIS_LOG_DIR
+  mkdir -p "$LINKIS_LOG_DIR"
 fi
 
 if test -z "$SERVER_HEAP_SIZE"
@@ -55,6 +55,12 @@ export SERVER_CLASS=org.apache.linkis.eureka.SpringCloudEurekaApplication
 ## conf dir
 export SERVER_CONF_PATH=$LINKIS_CONF_DIR
 
+## commons lib
+export LINKIS_COMMONS_LIB=$LINKIS_HOME/$LINKIS_PUBLIC_MODULE
+if [ ! -r "$LINKIS_COMMONS_LIB" ] ; then
+    echo "linkis commons lib does not exist: $LINKIS_COMMONS_LIB"
+    exit 1
+fi
 
 ## server lib
 export SERVER_LIB=$LINKIS_HOME/lib/$SERVER_SUFFIX
@@ -64,7 +70,7 @@ if [ ! -r "$SERVER_LIB" ] ; then
 fi
 
 ## set class path
-export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$SERVER_LIB/*
+export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$LINKIS_COMMONS_LIB/*:$SERVER_LIB/*
 
 SERVER_IP="`hostname --fqdn`"
 
diff --git a/assembly-combined-package/assembly-combined/sbin/ext/linkis-mg-gateway b/assembly-combined-package/assembly-combined/sbin/ext/linkis-mg-gateway
index 6c26c7b..d65b935 100644
--- a/assembly-combined-package/assembly-combined/sbin/ext/linkis-mg-gateway
+++ b/assembly-combined-package/assembly-combined/sbin/ext/linkis-mg-gateway
@@ -25,10 +25,10 @@ SERVER_SUFFIX="linkis-spring-cloud-services/linkis-mg-gateway"
 #export DEBUG_PORT=
 
 if [ "$LINKIS_LOG_DIR" = "" ]; then
-  export LINKIS_LOG_DIR="/data/bdp/logs/linkis"
+  export LINKIS_LOG_DIR="$LINKIS_HOME/logs"
 fi
 if [ ! -w "$LINKIS_LOG_DIR" ] ; then
-  sudo mkdir -p $LINKIS_LOG_DIR; sudo chown -R hadoop:hadoop $LINKIS_LOG_DIR
+  mkdir -p "$LINKIS_LOG_DIR"
 fi
 
 if test -z "$SERVER_HEAP_SIZE"
@@ -48,7 +48,7 @@ fi
 
 if test -z "$SERVER_JAVA_OPTS"
 then
-  export SERVER_JAVA_OPTS=" $JAVA_AGENT_OPTS -Dreactor.netty.ioWorkerCount=50 -DserviceName=$SERVER_NAME -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$LINKIS_LOG_DIR/${SERVER_NAME}-gc.log $DEBUG_CMD"
+  export SERVER_JAVA_OPTS=" $JAVA_AGENT_OPTS -DserviceName=$SERVER_NAME -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$LINKIS_LOG_DIR/${SERVER_NAME}-gc.log $DEBUG_CMD"
 fi
 
 
diff --git a/assembly-combined-package/assembly-combined/sbin/ext/linkis-ps-metadatamanager b/assembly-combined-package/assembly-combined/sbin/ext/linkis-ps-metadatamanager
index c6a467b..c6a666f 100644
--- a/assembly-combined-package/assembly-combined/sbin/ext/linkis-ps-metadatamanager
+++ b/assembly-combined-package/assembly-combined/sbin/ext/linkis-ps-metadatamanager
@@ -24,14 +24,7 @@ SERVER_SUFFIX="linkis-public-enhancements/linkis-ps-metadatamanager"
 
 
 export SERVER_CLASS=org.apache.linkis.metadatamanager.server.LinkisMetadataManagerApplication
-
-if [ "$LINKIS_LOG_DIR" = "" ]; then
-  export LINKIS_LOG_DIR="/data/bdp/logs/linkis"
-fi
-if [ ! -w "$LINKIS_LOG_DIR" ] ; then
-  sudo mkdir -p $LINKIS_LOG_DIR; sudo chown -R hadoop:hadoop $LINKIS_LOG_DIR
-fi
-
+export LINKIS_LOG_DIR=$LINKIS_HOME/logs
 if test -z "$SERVER_HEAP_SIZE"
 then
   export SERVER_HEAP_SIZE="512M"
diff --git a/assembly-combined-package/assembly-combined/sbin/linkis-daemon.sh b/assembly-combined-package/assembly-combined/sbin/linkis-daemon.sh
index f0dfd68..ee23aaa 100644
--- a/assembly-combined-package/assembly-combined/sbin/linkis-daemon.sh
+++ b/assembly-combined-package/assembly-combined/sbin/linkis-daemon.sh
@@ -114,7 +114,7 @@ function stop()
       if [[ -z "${pid}" ]]; then
         echo "server $SERVER_NAME is not running"
       else
-        wait_for_server_to_die $pid 300
+        wait_for_server_to_die $pid 40
         $(rm -f ${SERVER_PID})
         echo "server $SERVER_NAME is stopped."
       fi
diff --git a/assembly-combined-package/assembly-combined/sbin/linkis-start-all.sh b/assembly-combined-package/assembly-combined/sbin/linkis-start-all.sh
index cd253d0..bf7520f 100644
--- a/assembly-combined-package/assembly-combined/sbin/linkis-start-all.sh
+++ b/assembly-combined-package/assembly-combined/sbin/linkis-start-all.sh
@@ -69,7 +69,7 @@ SERVER_NAME="mg-gateway"
 SERVER_IP=$GATEWAY_INSTALL_IP
 startApp
 
-#publicservice
+#publicservice
 SERVER_NAME="ps-publicservice"
 SERVER_IP=$PUBLICSERVICE_INSTALL_IP
 startApp
diff --git a/assembly-combined-package/assembly-combined/sbin/linkis-stop-all.sh b/assembly-combined-package/assembly-combined/sbin/linkis-stop-all.sh
index fd43033..e17030e 100644
--- a/assembly-combined-package/assembly-combined/sbin/linkis-stop-all.sh
+++ b/assembly-combined-package/assembly-combined/sbin/linkis-stop-all.sh
@@ -30,17 +30,15 @@ if [ "$LINKIS_HOME" = "" ]; then
   export LINKIS_HOME=$INSTALL_HOME
 fi
 
-if [ -z "$LINKIS_CONF_DIR" ]
-then
-  LINKIS_CONF_DIR="$LINKIS_HOME/conf"
-fi
-
-
 info="We will stop all linkis applications, it will take some time, please wait"
 echo ${info}
 
+
+
+
 source ${LINKIS_HOME}/sbin/common.sh
 
+
 function stopApp(){
 echo "<-------------------------------->"
 echo "Begin to stop $SERVER_NAME"
@@ -55,27 +53,6 @@ executeCMD $SERVER_IP "$SERVER_STOP_CMD"
 echo "<-------------------------------->"
 }
 
-function clearResource(){
-echo "<-------------------------------->"
-echo "Begin to clear resource..."
-LINKIS_PROPERTIES_PATH="${LINKIS_CONF_DIR}/db.sh"
-source ${LINKIS_PROPERTIES_PATH}
-mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -D$MYSQL_DB -p$MYSQL_PASSWORD --default-character-set=utf8 -e "DELETE FROM linkis_cg_manager_label_resource"
-checkpoint1=$?
-mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -D$MYSQL_DB -p$MYSQL_PASSWORD --default-character-set=utf8 -e "DELETE FROM linkis_cg_manager_linkis_resources"
-checkpoint2=$?
-mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -D$MYSQL_DB -p$MYSQL_PASSWORD --default-character-set=utf8 -e "DELETE FROM linkis_cg_manager_service_instance_metrics"
-checkpoint3=$?
-mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -D$MYSQL_DB -p$MYSQL_PASSWORD --default-character-set=utf8 -e "DELETE FROM linkis_cg_manager_lock"
-if [ ${checkpoint1} -ne 0 -o ${checkpoint2} -ne 0 -o ${checkpoint3} -ne 0 -o $? -ne 0 ]
-then
-  echo "Failed to clear resource, pleck check your db.sh configuration."
-else
-  echo "Success to clear all resource!"
-fi
-echo "<-------------------------------->"
-}
-
 
 
 #gateway
@@ -119,6 +96,4 @@ export SERVER_NAME="mg-eureka"
 SERVER_IP=$EUREKA_INSTALL_IP
 stopApp
 
-clearResource
-
 echo "stop-all shell script executed completely"
diff --git a/assembly-combined-package/assembly-combined/src/main/assembly/assembly.xml b/assembly-combined-package/assembly-combined/src/main/assembly/assembly.xml
index f1d20c5..9f2d250 100644
--- a/assembly-combined-package/assembly-combined/src/main/assembly/assembly.xml
+++ b/assembly-combined-package/assembly-combined/src/main/assembly/assembly.xml
@@ -19,7 +19,6 @@
 <assembly>
   <id>dist</id>
   <formats>
-    <format>tar.gz</format>
     <format>dir</format>
   </formats>
   <includeBaseDirectory>false</includeBaseDirectory>
@@ -27,27 +26,17 @@
   <fileSets>
 
     <fileSet>
-      <directory>../..</directory>
-      <outputDirectory></outputDirectory>
+      <directory>
+        bin/
+      </directory>
+      <outputDirectory>bin</outputDirectory>
       <includes>
-        <include>README*</include>
-        <include>LICENSE*</include>
-        <include>NOTICE*</include>
+        <include>**/*</include>
       </includes>
+      <fileMode>0755</fileMode>
+      <lineEnding>unix</lineEnding>
     </fileSet>
 
-      <fileSet>
-          <directory>
-              bin/
-          </directory>
-          <outputDirectory>bin</outputDirectory>
-          <includes>
-              <include>**/*</include>
-          </includes>
-          <fileMode>0755</fileMode>
-          <lineEnding>unix</lineEnding>
-      </fileSet>
-
     <fileSet>
       <directory>
         conf/
@@ -73,19 +62,6 @@
           <lineEnding>unix</lineEnding>
       </fileSet>
 
-      <fileSet>
-          <directory>
-              bin/
-          </directory>
-          <outputDirectory>bin</outputDirectory>
-          <includes>
-              <include>**/*</include>
-          </includes>
-          <fileMode>0777</fileMode>
-          <directoryMode>0755</directoryMode>
-          <lineEnding>unix</lineEnding>
-      </fileSet>
-
     <fileSet>
       <directory>
         ../../db/
@@ -258,30 +234,7 @@
               <include>*</include>
           </includes>
       </fileSet>
-        <!-- data source manager-->
-      <fileSet>
-          <directory>
-              ../../linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/target/out/lib/
-          </directory>
-          <outputDirectory>
-              lib/linkis-public-enhancements/linkis-ps-datasourcemanager
-          </outputDirectory>
-          <includes>
-              <include>*</include>
-          </includes>
-      </fileSet>
-      <!-- metadata manager -->
-      <fileSet>
-          <directory>
-              ../../linkis-public-enhancements/linkis-datasource/linkis-metadata-manager/server/target/out/lib/
-          </directory>
-          <outputDirectory>
-              lib/linkis-public-enhancements/linkis-ps-metadatamanager
-          </outputDirectory>
-          <includes>
-              <include>*</include>
-          </includes>
-      </fileSet>
+
             <!--application-manager-->
       <fileSet>
           <directory>
@@ -350,7 +303,7 @@
               ../../linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/target/out/conf
           </directory>
           <outputDirectory>
-              conf/linkis-computation-governance/linkis-client/linkis-cli
+              conf/linkis-cli
           </outputDirectory>
           <includes>
               <include>*</include>
diff --git a/assembly-combined-package/bin/checkEnv.sh b/assembly-combined-package/bin/checkEnv.sh
index dbce4cb..83ffdb4 100644
--- a/assembly-combined-package/bin/checkEnv.sh
+++ b/assembly-combined-package/bin/checkEnv.sh
@@ -94,7 +94,6 @@ echo "check tar"
 need_cmd tar
 echo "check sed"
 need_cmd sed
-need_cmd dos2unix
 echo "<-----end to check used cmd---->"
 
 checkSpark
diff --git a/assembly-combined-package/bin/install.sh b/assembly-combined-package/bin/install.sh
index 46cadb0..d10f3d6 100644
--- a/assembly-combined-package/bin/install.sh
+++ b/assembly-combined-package/bin/install.sh
@@ -46,84 +46,30 @@ fi
 ## import common.sh
 source ${workDir}/bin/common.sh
 
+##load config
+echo "======= Step 1: Load deploy-config/* =========="
+export LINKIS_CONFIG_PATH=${LINKIS_CONFIG_PATH:-"${workDir}/deploy-config/linkis-env.sh"}
+export LINKIS_DB_CONFIG_PATH=${LINKIS_DB_CONFIG_PATH:-"${workDir}/deploy-config/db.sh"}
 
+source ${LINKIS_CONFIG_PATH}
+source ${LINKIS_DB_CONFIG_PATH}
 
-function checkPythonAndJava(){
-    python --version
-    isSuccess "execute python --version"
-    java -version
-    isSuccess "execute java --version"
-}
-
-function checkHadoopAndHive(){
-    hadoopVersion="`hdfs version`"
-    defaultHadoopVersion="2.7"
-    checkversion "$hadoopVersion" $defaultHadoopVersion hadoop
-    checkversion "$(whereis hive)" "1.2" hive
-}
-
-function checkversion(){
-versionStr=$1
-defaultVersion=$2
-module=$3
-
-result=$(echo $versionStr | grep "$defaultVersion")
-if [ -n "$result" ]; then
-    echo "$module version match"
-else
-   echo "WARN: Your $module version is not $defaultVersion, there may be compatibility issues:"
-   echo " 1: Continue installation, there may be compatibility issues"
-   echo " 2: Exit installation"
-   echo ""
-   read -p "Please input the choice:"  idx
-   if [[ '2' = "$idx" ]];then
-    echo "You chose  Exit installation"
-    exit 1
-   fi
-fi
-}
-
-function checkSpark(){
- spark-submit --version
- isSuccess "execute spark-submit --version"
-}
-
-say() {
-    printf 'check command fail \n %s\n' "$1"
-}
-
-err() {
-    say "$1" >&2
-    exit 1
-}
-
-check_cmd() {
-    command -v "$1" > /dev/null 2>&1
-}
-
-need_cmd() {
-    if ! check_cmd "$1"; then
-        err "need '$1' (command not found)"
-    fi
-}
-
+isSuccess "load config"
 
 
+echo "======= Step 2: Check env =========="
+## check env
 sh ${workDir}/bin/checkEnv.sh
 isSuccess "check env"
 
-##load config
-echo "step1:load config "
-export LINKIS_CONFIG_PATH=${LINKIS_CONFIG_PATH:-"${workDir}/config/linkis-env.sh"}
-export LINKIS_DB_CONFIG_PATH=${LINKIS_DB_CONFIG_PATH:-"${workDir}/config/db.sh"}
-source ${LINKIS_CONFIG_PATH}
-source ${LINKIS_DB_CONFIG_PATH}
-
-isSuccess "load config"
-
+until mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD  -e ";" ; do
+     echo "try to connect to linkis mysql $MYSQL_HOST:$MYSQL_PORT/$MYSQL_DB failed, please check db configuration in:$LINKIS_DB_CONFIG_PATH"
+     exit 1
+done
 
+echo "======= Step 3: Create necessary directory =========="
 
-echo "create hdfs  directory and local directory"
+echo "[WORKSPACE_USER_ROOT_PATH] try to create directory"
 if [ "$WORKSPACE_USER_ROOT_PATH" != "" ]
 then
   localRootDir=$WORKSPACE_USER_ROOT_PATH
@@ -159,7 +105,6 @@ echo "[HDFS_USER_ROOT_PATH] try to create directory"
      localRootDir=${HDFS_USER_ROOT_PATH#hdfs://}
      echo "[HDFS_USER_ROOT_PATH] try to create hdfs dir,cmd is: hdfs dfs -mkdir -p $localRootDir/$deployUser"
      hdfs dfs -mkdir -p $localRootDir/$deployUser
-     hdfs dfs -chmod -R 775 $localRootDir/$deployUser
    else
      echo "[HDFS_USER_ROOT_PATH] does not support $HDFS_USER_ROOT_PATH filesystem types"
    fi
@@ -182,8 +127,8 @@ echo "[RESULT_SET_ROOT_PATH] try to create directory"
    elif [[ $RESULT_SET_ROOT_PATH == hdfs://* ]];then
      localRootDir=${RESULT_SET_ROOT_PATH#hdfs://}
      echo "[RESULT_SET_ROOT_PATH] try to create hdfs dir,cmd is: hdfs dfs -mkdir -p $localRootDir/$deployUser"
-     hdfs dfs -mkdir -p $localRootDir/$deployUser
-     hdfs dfs -chmod 775 $localRootDir/$deployUser
+     hdfs dfs -mkdir -p $localRootDir
+     hdfs dfs -chmod 775 $localRootDir
    else
      echo "[RESULT_SET_ROOT_PATH] does not support $RESULT_SET_ROOT_PATH filesystem types"
    fi
@@ -218,13 +163,16 @@ if ! test -d ${LINKIS_PACKAGE}; then
     echo "**********${RED}Error${NC}: please put ${LINKIS_PACKAGE} in $workDir! "
     exit 1
 else
-    echo "Start to unzip ${LINKIS_PACKAGE} ."
-    tar -xzf ${LINKIS_PACKAGE}  -C $LINKIS_HOME
-    isSuccess "Unzip ${LINKIS_PACKAGE} to $LINKIS_HOME"
+    echo "Start to cp ${LINKIS_PACKAGE} to $LINKIS_HOME."
+    cp -r $LINKIS_PACKAGE/* $LINKIS_HOME
+    isSuccess "cp ${LINKIS_PACKAGE} to $LINKIS_HOME"
 fi
 
 cp ${LINKIS_CONFIG_PATH} $LINKIS_HOME/conf
 
+
+
+echo "======= Step 4: Create linkis table =========="
 ## sql init
 if [ "$YARN_RESTFUL_URL" != "" ]
 then
@@ -301,7 +249,8 @@ fi
 
 
 #Deal common config
-echo "Update config..."
+echo ""
+echo "======= Step 5: Update config =========="
 
 if test -z "$EUREKA_INSTALL_IP"
 then
diff --git a/assembly-combined-package/deploy-config/linkis-env.sh b/assembly-combined-package/deploy-config/linkis-env.sh
index 2d54289..a7716ac 100644
--- a/assembly-combined-package/deploy-config/linkis-env.sh
+++ b/assembly-combined-package/deploy-config/linkis-env.sh
@@ -38,12 +38,12 @@ HDFS_USER_ROOT_PATH=hdfs:///tmp/linkis ##hdfs:// required
 
 
 ### Path to store started engines and engine logs, must be local
-ENGINECONN_ROOT_PATH=/data/bdp/linkis
+ENGINECONN_ROOT_PATH=/appcom/tmp
 
 #ENTRANCE_CONFIG_LOG_PATH=hdfs:///tmp/linkis/
 
 ### Path to store job ResultSet:file or hdfs path
-#RESULT_SET_ROOT_PATH=hdfs:///tmp/linkis ##hdfs:// required
+RESULT_SET_ROOT_PATH=hdfs:///tmp/linkis ##hdfs:// required
 
 ### Provide the DB information of Hive metadata database.
 ### Attention! If there are special characters like "&", they need to be enclosed in quotation marks.
@@ -134,7 +134,7 @@ CS_PORT=9108
 export SERVER_HEAP_SIZE="512M"
 
 ##The decompression directory and the installation directory need to be inconsistent
-LINKIS_HOME=/appcom/Install/LinkisInstall
+#LINKIS_HOME=/appcom/Install/LinkisInstall
 
 LINKIS_VERSION=1.1.0
 
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml
index 9291aae..3f1837e 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml
@@ -93,19 +93,6 @@
             <version>${linkis.version}</version>
         </dependency>
 
-       <!-- <dependency>
-            <groupId>com.google.guava</groupId>
-            <artifactId>guava</artifactId>
-            <version>${guava.version}</version>
-        </dependency>-->
-
-
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <scope>test</scope>
-            <version>${junit.version}</version>
-        </dependency>
 
     </dependencies>
 
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/java/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessTest.java b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/java/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessTest.java
deleted file mode 100644
index 847d303..0000000
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/java/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessTest.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.engineconn.computation.executor.upstream.access;
-
-import org.apache.linkis.DataWorkCloudApplication;
-import org.apache.linkis.common.ServiceInstance;
-import org.apache.linkis.governance.common.conf.GovernanceCommonConf;
-import org.apache.linkis.rpc.Sender;
-import org.apache.linkis.server.conf.ServerConfiguration;
-
-import org.apache.commons.lang3.StringUtils;
-
-import org.junit.Before;
-import org.junit.Test;
-
-import static org.junit.Assert.*;
-
-public class ECTaskEntranceInfoAccessTest {
-
-    @Before
-    public void before() {
-        //        System.getProperties().setProperty("wds.linkis.server.conf",
-        // "linkis-et-jobhistory-scan.properties");
-        System.out.println("Spring is enabled, now try to start SpringBoot.");
-        System.out.println("<--------------------Start SpringBoot App-------------------->");
-        String existsExcludePackages = ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES().getValue();
-        if (!StringUtils.isEmpty(existsExcludePackages)) {
-            DataWorkCloudApplication.setProperty(
-                    ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES().key(), existsExcludePackages);
-        }
-
-        String[] args =
-                new String[] {
-                    "--spring-conf", "eureka.client.serviceUrl.defaultZone=http://ip:port/eureka/",
-                    "--spring-conf", "logging.config=classpath:log4j2.xml",
-                    "--spring-conf", "spring.profiles.active=engineconn",
-                    "--spring-conf", "server.port=28899",
-                    "--spring-conf", "spring.application.name=linkis-cg-engineconn"
-                };
-        // 加载spring类
-        try {
-            //            ECTaskEntranceInfoAccessHelper.initApp(args);
-        } catch (Exception e) {
-            System.out.println(e.getStackTrace());
-        }
-
-        ServiceInstance[] instances =
-                Sender.getInstances(GovernanceCommonConf.ENGINE_CONN_SPRING_NAME().getValue());
-
-        System.out.println("<--------------------SpringBoot App init succeed-------------------->");
-    }
-
-    @Test
-    public void main() throws Exception {
-
-        //        LinkisJobHistoryScanApplication.main(new String[]{"2021122919", "2021122921"});
-    }
-}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/resources/linkis.properties b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/resources/linkis.properties
deleted file mode 100644
index 138ea16..0000000
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/resources/linkis.properties
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-# http://www.apache.org/licenses/LICENSE-2.0
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/linkis/jobhistory/scan/app/jobhistory/dao/impl/*.xml
-wds.linkis.server.mybatis.typeAliasesPackage=org.apache.linkis.jobhistory.scan.app.jobhistory.entity
-wds.linkis.server.mybatis.BasePackage=org.apache.linkis.jobhistory.scan.app.jobhistory.dao
-wds.linkis.mysql.is.encrypt=false
-wds.linkis.server.mybatis.datasource.url=
-wds.linkis.server.mybatis.datasource.username=
-wds.linkis.server.mybatis.datasource.password=
-
-wds.linkis.server.version=v1
\ No newline at end of file
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/resources/log4j2.xml b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/resources/log4j2.xml
deleted file mode 100644
index 49eabc5..0000000
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/resources/log4j2.xml
+++ /dev/null
@@ -1,46 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-  ~ Copyright 2019 WeBank
-  ~
-  ~ Licensed under the Apache License, Version 2.0 (the "License");
-  ~ you may not use this file except in compliance with the License.
-  ~ You may obtain a copy of the License at
-  ~
-  ~ http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing, software
-  ~ distributed under the License is distributed on an "AS IS" BASIS,
-  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  ~ See the License for the specific language governing permissions and
-  ~ limitations under the License.
-  -->
-
-<configuration status="error" monitorInterval="30">
-    <appenders>
-        <console name="RollingFile" target="SYSTEM_OUT">
-        </console>
-        <console name="Console-Plain" target="SYSTEM_OUT">
-            <PatternLayout pattern="%m%n"/>
-        </console>
-    </appenders>
-    <loggers>
-        <root level="INFO">
-            <appender-ref ref="RollingFile"/>
-        </root>
-        <logger name="org.apache.linkis.message.builder.DefaultMessageJob" level="warn" additivity="true">
-            <appender-ref ref="RollingFile"/>
-        </logger>
-
-        <logger name="org.apache.linkis.message.scheduler.DefaultMessageExecutor" level="warn"
-                additivity="true">
-            <appender-ref ref="RollingFile"/>
-        </logger>
-        <logger name="com.netflix.loadbalancer.DynamicServerListLoadBalancer" level="warn" additivity="true">
-            <appender-ref ref="RollingFile"/>
-        </logger>
-        <logger name="PlaintTextConsoleLogger" level="INFO" additivity="false">
-            <appender-ref ref="Console-Plain"/>
-        </logger>
-    </loggers>
-</configuration>
-
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala
index 9380f13..dff9b79 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala
@@ -35,7 +35,7 @@ object AccessibleExecutorConfiguration {
   val ENGINECONN_LOG_SEND_SIZE = CommonVars[Int]("wds.linkis.engineconn.log.send.cache.size", 300)
 
 
-  val ENGINECONN_MAX_FREE_TIME = CommonVars("wds.linkis.engineconn.max.free.time", new TimeType("1h"))
+  val ENGINECONN_MAX_FREE_TIME = CommonVars("wds.linkis.engineconn.max.free.time", new TimeType("30m"))
 
   val ENGINECONN_LOCK_CHECK_INTERVAL = CommonVars("wds.linkis.engineconn.lock.free.interval", new TimeType("3m"))
 
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java
index 4fdb0eb..1aaa2cb 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java
@@ -150,26 +150,6 @@ public class EntranceExecutionJob extends EntranceJob implements LogHandler {
         updateNewestAccessByClientTimestamp();
     }
 
-    /*protected RequestPersistTask getRequestPersistTask() {
-        if(getTask() instanceof HaPersistenceTask) {
-            Task task = ((HaPersistenceTask) getTask()).task();
-            if(task instanceof RequestPersistTask) {
-                return (RequestPersistTask) task;
-            } else {
-                return null;
-            }
-        } else if(getTask() instanceof RequestPersistTask) {
-            return (RequestPersistTask) getTask();
-        } else {
-            return null;
-        }
-    }*/
-
-    @Override
-    public int getRunningSubJobIndex() {
-        return runningIndex;
-    }
-
     @Override
     public SubJobInfo getRunningSubJob() {
         if (runningIndex < getJobGroups().length) {
@@ -381,11 +361,14 @@ public class EntranceExecutionJob extends EntranceJob implements LogHandler {
         return progress;
     }
 
+    /**
+     * // The front end needs to obtain data //if (EntranceJob.JOB_COMPLETED_PROGRESS() ==
+     * getProgress()) { // return new JobProgressInfo[0]; //}
+     *
+     * @return
+     */
     @Override
     public JobProgressInfo[] getProgressInfo() {
-        if (EntranceJob.JOB_COMPLETED_PROGRESS() == getProgress()) {
-            return new JobProgressInfo[0];
-        }
         SubJobInfo[] subJobInfoArray = getJobGroups();
         if (subJobInfoArray.length > 0) {
             List<JobProgressInfo> progressInfoList = new ArrayList<>();
@@ -396,4 +379,9 @@ public class EntranceExecutionJob extends EntranceJob implements LogHandler {
         }
         return super.getProgressInfo();
     }
+
+    @Override
+    public int getRunningSubJobIndex() {
+        return runningIndex;
+    }
 }

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 13/18: Add missing license header.

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit 8124f4af2e34226d8e4597647a71ecd26c0051a1
Author: alexkun <xu...@qq.com>
AuthorDate: Fri Mar 4 21:15:22 2022 +0800

    Add missing license header.
---
 .../service/TaskExecutionServiceImpl.scala         |  5 ++--
 .../access/ECTaskEntranceInfoAccessTest.java       |  1 +
 .../src/test/resources/linkis.properties           | 14 +++++++++++
 .../access/ECTaskEntranceInfoAccessHelper.scala    | 28 ++++++++++++++++++----
 4 files changed, 41 insertions(+), 7 deletions(-)

diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala
index d9bcf32..407892e 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala
@@ -18,8 +18,8 @@
 package org.apache.linkis.engineconn.computation.executor.service
 
 import com.google.common.cache.{Cache, CacheBuilder}
-import org.apache.commons.lang.StringUtils
-import org.apache.commons.lang.exception.ExceptionUtils
+import org.apache.commons.lang3.StringUtils
+import org.apache.commons.lang3.exception.ExceptionUtils
 import org.apache.linkis.common.listener.Event
 import org.apache.linkis.common.utils.{Logging, Utils}
 import org.apache.linkis.engineconn.acessible.executor.listener.LogListener
@@ -33,6 +33,7 @@ import org.apache.linkis.engineconn.computation.executor.entity.{CommonEngineCon
 import org.apache.linkis.engineconn.computation.executor.execute.{ComputationExecutor, ConcurrentComputationExecutor}
 import org.apache.linkis.engineconn.computation.executor.hook.ExecutorLabelsRestHook
 import org.apache.linkis.engineconn.computation.executor.listener.{ResultSetListener, TaskProgressListener, TaskStatusListener}
+import org.apache.linkis.engineconn.computation.executor.upstream.event.TaskStatusChangedForUpstreamMonitorEvent
 import org.apache.linkis.engineconn.computation.executor.utlis.{ComputationEngineConstant, ComputationEngineUtils}
 import org.apache.linkis.engineconn.core.executor.ExecutorManager
 import org.apache.linkis.engineconn.executor.entity.ResourceFetchExecutor
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/java/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessTest.java b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/java/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessTest.java
index 1123a7e..51de963 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/java/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessTest.java
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/java/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessTest.java
@@ -1,3 +1,4 @@
+
 package org.apache.linkis.engineconn.computation.executor.upstream.access;
 
 import org.apache.linkis.DataWorkCloudApplication;
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/resources/linkis.properties b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/resources/linkis.properties
index 3e242fc..138ea16 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/resources/linkis.properties
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/resources/linkis.properties
@@ -1,3 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
 wds.linkis.server.mybatis.mapperLocations=classpath*:com/webank/wedatasphere/linkis/jobhistory/scan/app/jobhistory/dao/impl/*.xml
 wds.linkis.server.mybatis.typeAliasesPackage=org.apache.linkis.jobhistory.scan.app.jobhistory.entity
 wds.linkis.server.mybatis.BasePackage=org.apache.linkis.jobhistory.scan.app.jobhistory.dao
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessHelper.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessHelper.scala
index a7fcdfe..da3fa7f 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessHelper.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessHelper.scala
@@ -1,6 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package org.apache.linkis.engineconn.computation.executor.upstream.access
 
-import org.apache.commons.lang.StringUtils
+import org.apache.commons.lang3.StringUtils
 import org.apache.linkis.DataWorkCloudApplication
 import org.apache.linkis.common.ServiceInstance
 import org.apache.linkis.common.conf.{CommonVars, DWCArgumentsParser}
@@ -14,6 +31,7 @@ import org.apache.linkis.manager.engineplugin.common.launch.process.Environment
 import org.apache.linkis.manager.label.builder.factory.{LabelBuilderFactory, LabelBuilderFactoryContext}
 import org.apache.linkis.manager.label.entity.Label
 import org.apache.linkis.server.conf.ServerConfiguration
+import java.util
 
 import scala.collection.mutable.ArrayBuffer
 
@@ -30,16 +48,16 @@ object ECTaskEntranceInfoAccessHelper {
     val host = CommonVars(Environment.ECM_HOST.toString, "127.0.0.1").getValue
     val port = CommonVars(Environment.ECM_PORT.toString, "80").getValue
     engineCreationContext.setEMInstance(ServiceInstance(GovernanceCommonConf.ENGINE_CONN_MANAGER_SPRING_NAME.getValue, s"$host:$port"))
-    val labels = new ArrayBuffer[Label[_]]
+    val labels = new util.ArrayList[Label[_]]
     val labelArgs = engineConf.filter(_._1.startsWith(EngineConnArgumentsParser.LABEL_PREFIX))
     if (labelArgs.nonEmpty) {
       labelArgs.foreach { case (key, value) =>
-        labels += labelBuilderFactory.createLabel[Label[_]](key.replace(EngineConnArgumentsParser.LABEL_PREFIX, ""), value)
+        labels.add(labelBuilderFactory.createLabel[Label[_]](key.replace(EngineConnArgumentsParser.LABEL_PREFIX, ""), value))
       }
-      engineCreationContext.setLabels(labels.toList)
+      engineCreationContext.setLabels(labels)
     }
     val jMap = new java.util.HashMap[String, String](engineConf.size)
-    jMap.putAll(engineConf)
+    engineConf.foreach(kv => jMap.put(kv._1, kv._2))
     engineCreationContext.setOptions(jMap)
     engineCreationContext.setArgs(args)
     //    EngineConnObject.setEngineCreationContext(engineCreationContext)

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 17/18: fix:HttpMessageNotWritableException about x-msdownload

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit 92a853634054bd38f1c0a11ea5e0bd54ce72a051
Author: peacewong <wp...@gmail.com>
AuthorDate: Sat Mar 5 16:21:32 2022 +0800

    fix:HttpMessageNotWritableException about x-msdownload
---
 .../linkis/bml/client/impl/HttpBmlClient.scala     | 61 ++++++++++------------
 1 file changed, 28 insertions(+), 33 deletions(-)

diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/scala/org/apache/linkis/bml/client/impl/HttpBmlClient.scala b/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/scala/org/apache/linkis/bml/client/impl/HttpBmlClient.scala
index 522fd71..ea02226 100644
--- a/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/scala/org/apache/linkis/bml/client/impl/HttpBmlClient.scala
+++ b/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/scala/org/apache/linkis/bml/client/impl/HttpBmlClient.scala
@@ -17,7 +17,7 @@
 
 package org.apache.linkis.bml.client.impl
 
-import java.io.{ByteArrayInputStream, File, IOException, InputStream}
+import java.io.{OutputStream, File, IOException, InputStream}
 import java.util
 import org.apache.linkis.bml.client.AbstractBmlClient
 import org.apache.linkis.bml.common._
@@ -39,6 +39,7 @@ import org.apache.commons.io.IOUtils
 import org.apache.commons.lang.StringUtils
 import org.apache.http.client.methods.CloseableHttpResponse
 
+
 class HttpBmlClient(clientConfig: DWSClientConfig,
                    serverUrl: String,
                    properties: util.Map[String, Any]) extends AbstractBmlClient  with Logging {
@@ -125,7 +126,7 @@ class HttpBmlClient(clientConfig: DWSClientConfig,
     * @param overwrite 是否是追加
     * @return 返回的inputStream已经被全部读完,所以返回一个null,另外的fullFileName是整个文件的名字
     */
-  override def downloadResource(user: String, resourceId: String, version: String, path: String, overwrite:Boolean = false): BmlDownloadResponse = {
+  override def downloadResource(user: String, resourceId: String, version: String, path: String, overwrite: Boolean = false): BmlDownloadResponse = {
     val fsPath = new FsPath(path)
     val fileSystem = FSFactory.getFsByProxyUser(fsPath, user)
     fileSystem.init(new util.HashMap[String, String]())
@@ -135,12 +136,13 @@ class HttpBmlClient(clientConfig: DWSClientConfig,
     downloadAction.getParameters += "resourceId" -> resourceId
     if(StringUtils.isNotEmpty(version)) downloadAction.getParameters += "version" -> version
     downloadAction.setUser(user)
-    val downloadResult = dwsClient.execute(downloadAction)
-    val fullFilePath = new FsPath(fullFileName)
-    if (downloadResult != null){
-      val inputStream = downloadAction.getInputStream
-      val outputStream = fileSystem.write(fullFilePath, overwrite)
-      try{
+      var inputStream: InputStream = null
+      var outputStream: OutputStream = null
+      try {
+        dwsClient.execute(downloadAction)
+        val fullFilePath = new FsPath(fullFileName)
+        outputStream = fileSystem.write(fullFilePath, overwrite)
+        inputStream = downloadAction.getInputStream
         IOUtils.copy(inputStream, outputStream)
         downloadAction.getResponse match {
           case r: CloseableHttpResponse =>
@@ -148,24 +150,20 @@ class HttpBmlClient(clientConfig: DWSClientConfig,
           case o: Any =>
             info(s"Download response : ${o.getClass.getName} cannot close.")
         }
-      }catch{
-        case e:IOException => logger.error("failed to copy inputStream and outputStream (inputStream和outputStream流copy失败)", e)
+      } catch {
+        case e: IOException => logger.error("failed to copy inputStream and outputStream (inputStream和outputStream流copy失败)", e)
           val exception = BmlClientFailException("failed to copy inputStream and outputStream (inputStream和outputStream流copy失败)")
           exception.initCause(e)
-          throw e
-        case t:Throwable => logger.error("failed to copy stream (流复制失败)",t)
+          throw exception
+        case t: Throwable => logger.error("failed to copy stream (流复制失败)", t)
           throw t
-      }finally{
-        IOUtils.closeQuietly(inputStream)
-        IOUtils.closeQuietly(outputStream)
+      } finally {
+        if (null != inputStream) IOUtils.closeQuietly(inputStream)
+        if (null != outputStream) IOUtils.closeQuietly(outputStream)
         fileSystem.close()
       }
       BmlDownloadResponse(isSuccess = true, null, resourceId, version, fullFileName)
-    }else{
-      fileSystem.close()
-      BmlDownloadResponse(isSuccess = false, null, null, null, null)
     }
-  }
 
 
   override def downloadShareResource(user: String, resourceId: String, version: String, path: String,
@@ -179,30 +177,27 @@ class HttpBmlClient(clientConfig: DWSClientConfig,
     downloadAction.getParameters += "resourceId" -> resourceId
     if(StringUtils.isNotEmpty(version)) downloadAction.getParameters += "version" -> version
     downloadAction.setUser(user)
-    val downloadResult = dwsClient.execute(downloadAction)
-    val fullFilePath = new FsPath(fullFileName)
-    if (downloadResult != null){
-      val inputStream = downloadAction.getInputStream
-      val outputStream = fileSystem.write(fullFilePath, overwrite)
-      try{
+    var inputStream: InputStream = null
+    var outputStream: OutputStream = null
+    try {
+      dwsClient.execute(downloadAction)
+      val fullFilePath = new FsPath(fullFileName)
+      outputStream = fileSystem.write(fullFilePath, overwrite)
+      inputStream = downloadAction.getInputStream
         IOUtils.copy(inputStream, outputStream)
-      }catch{
+      } catch {
         case e:IOException => logger.error("failed to copy inputStream and outputStream (inputStream和outputStream流copy失败)", e)
           val exception = BmlClientFailException("failed to copy inputStream and outputStream (inputStream和outputStream流copy失败)")
           exception.initCause(e)
           throw e
         case t:Throwable => logger.error("failed to copy stream (流复制失败)",t)
           throw t
-      }finally{
-        IOUtils.closeQuietly(inputStream)
-        IOUtils.closeQuietly(outputStream)
+      } finally {
+       if (null != inputStream) IOUtils.closeQuietly(inputStream)
+       if (null != outputStream) IOUtils.closeQuietly(outputStream)
         fileSystem.close()
       }
       BmlDownloadResponse(isSuccess = true, null, resourceId, version, fullFileName)
-    }else{
-      fileSystem.close()
-      BmlDownloadResponse(isSuccess = false, null, null, null, null)
-    }
   }
 
 

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 14/18: fix some import.

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit a18497d8cbb9fb3fe310d5671dd726c9946bab9f
Author: alexkun <xu...@qq.com>
AuthorDate: Fri Mar 4 21:54:34 2022 +0800

    fix some import.
---
 .../apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala | 2 +-
 .../org/apache/linkis/entrance/conf/EntranceConfiguration.scala     | 1 +
 .../main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala  | 2 +-
 .../resourcemanager/external/service/ExternalResourceService.java   | 6 ++++++
 .../resourcemanager/external/yarn/YarnResourceRequester.scala       | 2 +-
 .../apache/spark/sql/execution/datasources/csv/DolphinToSpark.scala | 3 ++-
 6 files changed, 12 insertions(+), 4 deletions(-)

diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala
index 0bb815d..db52678 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala
@@ -18,9 +18,9 @@
 package org.apache.linkis.ecm.core.launch
 
 import java.io.OutputStream
-
 import org.apache.linkis.manager.engineplugin.common.launch.process.LaunchConstants
 import org.apache.commons.io.IOUtils
+import org.apache.linkis.ecm.core.conf.ECPCoreConf
 
 
 trait ProcessEngineCommandBuilder {
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala
index cdc83a7..ce956ea 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala
@@ -171,4 +171,5 @@ object EntranceConfiguration {
 
   val CLI_HEARTBEAT_THRESHOLD_SECONDS = CommonVars[Long] ("linkis.entrance.cli.heartbeat.threshold.sec", 30l).getValue
 
+  val LOG_PUSH_INTERVAL_TIME = CommonVars("wds.linkis.entrance.log.push.interval.time", 5 * 60 * 1000)
 }
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala
index e3f7b6e..070fbf0 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala
@@ -17,7 +17,7 @@
 
 package org.apache.linkis.entrance.log
 
-import org.apache.commons.lang.StringUtils
+import org.apache.commons.lang3.StringUtils
 import org.apache.linkis.entrance.conf.EntranceConfiguration
 
 import java.sql.Date
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/service/ExternalResourceService.java b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/service/ExternalResourceService.java
index 1f8e79c..16619f9 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/service/ExternalResourceService.java
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/service/ExternalResourceService.java
@@ -23,6 +23,7 @@ import org.apache.linkis.manager.common.exception.RMErrorException;
 import org.apache.linkis.resourcemanager.domain.RMLabelContainer;
 import org.apache.linkis.resourcemanager.external.domain.ExternalAppInfo;
 import org.apache.linkis.resourcemanager.external.domain.ExternalResourceIdentifier;
+import org.apache.linkis.resourcemanager.external.domain.ExternalResourceProvider;
 
 import java.util.List;
 import java.util.Map;
@@ -52,4 +53,9 @@ public interface ExternalResourceService {
             RMLabelContainer labelContainer,
             ExternalResourceIdentifier identifier)
             throws RMErrorException;
+
+    ExternalResourceProvider chooseProvider(
+            ResourceType resourceType,
+            RMLabelContainer labelContainer)
+            throws RMErrorException;
 }
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/org/apache/linkis/resourcemanager/external/yarn/YarnResourceRequester.scala b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/org/apache/linkis/resourcemanager/external/yarn/YarnResourceRequester.scala
index 29489d5..42f936e 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/org/apache/linkis/resourcemanager/external/yarn/YarnResourceRequester.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/org/apache/linkis/resourcemanager/external/yarn/YarnResourceRequester.scala
@@ -36,7 +36,7 @@ import sun.misc.BASE64Encoder
 
 import java.util
 import java.util.concurrent.{ConcurrentHashMap, TimeUnit}
-import scala.collection.JavaConverters._
+import scala.collection.JavaConversions._
 import scala.collection.mutable.ArrayBuffer
 
 class YarnResourceRequester extends ExternalResourceRequester with Logging {
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/org/apache/spark/sql/execution/datasources/csv/DolphinToSpark.scala b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/org/apache/spark/sql/execution/datasources/csv/DolphinToSpark.scala
index d8761bf..9421faa 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/org/apache/spark/sql/execution/datasources/csv/DolphinToSpark.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/org/apache/spark/sql/execution/datasources/csv/DolphinToSpark.scala
@@ -17,8 +17,9 @@
  
 package org.apache.spark.sql.execution.datasources.csv
 
-import java.util
+import org.apache.linkis.common.conf.CommonVars
 
+import java.util
 import org.apache.linkis.engineplugin.spark.config.SparkConfiguration
 import org.apache.linkis.storage.resultset.ResultSetReader
 import org.apache.linkis.storage.resultset.table.{TableMetaData, TableRecord}

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 10/18: 1. linkis-instance-label-server - fix equal method 2. linkis-jobhistory - fix job time

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit 7eddfb6e966d0074f56fbfa873d874da407fafef
Author: alexkun <xu...@qq.com>
AuthorDate: Fri Mar 4 19:54:33 2022 +0800

    1. linkis-instance-label-server - fix equal method
    2. linkis-jobhistory - fix job time
---
 .../instance/label/entity/InsPersistenceLabel.java       |  2 +-
 .../linkis/jobhistory/conversions/TaskConversions.scala  | 16 ++++++++++++----
 2 files changed, 13 insertions(+), 5 deletions(-)

diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/entity/InsPersistenceLabel.java b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/entity/InsPersistenceLabel.java
index 9c8fddc..b23ea21 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/entity/InsPersistenceLabel.java
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/entity/InsPersistenceLabel.java
@@ -91,6 +91,6 @@ public class InsPersistenceLabel extends GenericLabel {
                                 && this.getStringValue().equals(otherInsLabel.getStringValue()));
             }
         }
-        return super.equals(other);
+        return false;
     }
 }
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/conversions/TaskConversions.scala b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/conversions/TaskConversions.scala
index a5301e0..4f3f3e5 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/conversions/TaskConversions.scala
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/org/apache/linkis/jobhistory/conversions/TaskConversions.scala
@@ -273,10 +273,18 @@ object TaskConversions extends Logging {
     if(null != metrics && metrics.containsKey(TaskConstant.ENTRANCEJOB_SUBMIT_TIME) && metrics.get(TaskConstant.ENTRANCEJOB_SUBMIT_TIME) != null){
       createTime = dealString2Date(metrics.get(TaskConstant.ENTRANCEJOB_SUBMIT_TIME).toString)
     }
-    if(isJobFinished(job.getStatus) && null != completeTime && null != createTime){
-      taskVO.setCostTime(completeTime.getTime - createTime.getTime)
-    }else if (null != createTime){
-      taskVO.setCostTime(System.currentTimeMillis() - createTime.getTime)
+    if (null != createTime) {
+      if(isJobFinished(job.getStatus)) {
+        if (null != completeTime) {
+          taskVO.setCostTime(completeTime.getTime - createTime.getTime)
+        } else if (null != job.getUpdatedTime) {
+          taskVO.setCostTime(job.getUpdatedTime.getTime - createTime.getTime)
+        } else {
+          taskVO.setCostTime(System.currentTimeMillis() - createTime.getTime)
+        }
+      } else{
+        taskVO.setCostTime(System.currentTimeMillis() - createTime.getTime)
+      }
     }
 
     val entranceName = JobhistoryConfiguration.ENTRANCE_SPRING_NAME.getValue

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 08/18: 1. linkis-entrance - add heart-beat support for linkis-cli task

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit 0aadd69e8e1b33ac5d043a1065622f414b7f904b
Author: alexkun <xu...@qq.com>
AuthorDate: Fri Mar 4 19:50:58 2022 +0800

    1. linkis-entrance - add heart-beat support for linkis-cli task
---
 .../annotation/CliHeartBeatMonitorAnnotation.java  |  50 +++++++
 .../entrance/conf/EntranceSpringConfiguration.java |  68 +++++-----
 .../entrance/exception/EntranceErrorCode.java      |   8 +-
 .../linkis/entrance/job/EntranceExecutionJob.java  | 150 +++++++++------------
 .../persistence/QueryPersistenceManager.java       |  16 ++-
 .../entrance/restful/EntranceRestfulApi.java       |   3 +
 .../apache/linkis/entrance/EntranceServer.scala    |   6 +
 .../cli/heartbeat/CliHeartbeatMonitor.scala        | 136 +++++++++++++++++++
 .../cli/heartbeat/HeartbeatLossHandler.scala       |  24 ++++
 .../entrance/cli/heartbeat/KillHandler.scala       |  33 +++++
 .../entrance/conf/EntranceConfiguration.scala      |  11 +-
 .../linkis/entrance/execute/EntranceJob.scala      |  17 ++-
 .../impl/SQLLimitEntranceInterceptor.scala         |   8 ++
 13 files changed, 394 insertions(+), 136 deletions(-)

diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/annotation/CliHeartBeatMonitorAnnotation.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/annotation/CliHeartBeatMonitorAnnotation.java
new file mode 100644
index 0000000..9f5c537
--- /dev/null
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/annotation/CliHeartBeatMonitorAnnotation.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.entrance.annotation;
+
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.beans.factory.annotation.Qualifier;
+import org.springframework.context.annotation.Bean;
+import org.springframework.core.annotation.AliasFor;
+import org.springframework.stereotype.Component;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Target({ElementType.METHOD, ElementType.TYPE})
+@Retention(RetentionPolicy.RUNTIME)
+@Bean(value = CliHeartBeatMonitorAnnotation.BEAN_NAME)
+@Component(value = CliHeartBeatMonitorAnnotation.BEAN_NAME)
+public @interface CliHeartBeatMonitorAnnotation {
+    String BEAN_NAME = "cliHeartBeatMonitor";
+    @AliasFor(annotation = Component.class)
+    String value() default BEAN_NAME;
+
+    @Target({ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD, ElementType.TYPE, ElementType.PARAMETER})
+    @Retention(RetentionPolicy.RUNTIME)
+    @Qualifier(BEAN_NAME)
+    @Autowired
+    @interface CliHeartBeatMonitorAutowiredAnnotation {
+        @AliasFor(annotation = Qualifier.class)
+        String value() default BEAN_NAME;
+        @AliasFor(annotation = Autowired.class)
+        boolean required() default true;
+    }
+}
\ No newline at end of file
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java
index 1e72279..810c35b 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java
@@ -19,6 +19,8 @@ package org.apache.linkis.entrance.conf;
 
 import org.apache.linkis.entrance.EntranceParser;
 import org.apache.linkis.entrance.annotation.*;
+import org.apache.linkis.entrance.cli.heartbeat.CliHeartbeatMonitor;
+import org.apache.linkis.entrance.cli.heartbeat.KillHandler;
 import org.apache.linkis.entrance.event.*;
 import org.apache.linkis.entrance.execute.impl.EntranceExecutorManagerImpl;
 import org.apache.linkis.entrance.interceptor.EntranceInterceptor;
@@ -74,34 +76,37 @@ public class EntranceSpringConfiguration {
         return new EntranceResultSetEngine();
     }
 
+    @CliHeartBeatMonitorAnnotation
+    @ConditionalOnMissingBean(name = {CliHeartBeatMonitorAnnotation.BEAN_NAME})
+    public CliHeartbeatMonitor generateCliHeartbeatMonitor(){
+        CliHeartbeatMonitor cliHeartbeatMonitor = new CliHeartbeatMonitor(new KillHandler());
+        cliHeartbeatMonitor.start();
+        return cliHeartbeatMonitor;
+    }
+
     @PersistenceManagerBeanAnnotation
     @ConditionalOnMissingBean(name = {PersistenceManagerBeanAnnotation.BEAN_NAME})
-    public PersistenceManager generatePersistenceManager(
-            @PersistenceEngineBeanAnnotation.PersistenceEngineAutowiredAnnotation
-                    PersistenceEngine persistenceEngine,
-            @ResultSetEngineBeanAnnotation.ResultSetEngineAutowiredAnnotation
-                    ResultSetEngine resultSetEngine) {
+    public PersistenceManager generatePersistenceManager(@PersistenceEngineBeanAnnotation.PersistenceEngineAutowiredAnnotation PersistenceEngine persistenceEngine,
+                                                         @ResultSetEngineBeanAnnotation.ResultSetEngineAutowiredAnnotation ResultSetEngine resultSetEngine,
+                                                         @CliHeartBeatMonitorAnnotation.CliHeartBeatMonitorAutowiredAnnotation CliHeartbeatMonitor cliHeartbeatMonitor){
         logger.info("init PersistenceManager.");
         QueryPersistenceManager persistenceManager = new QueryPersistenceManager();
         persistenceManager.setPersistenceEngine(persistenceEngine);
         persistenceManager.setResultSetEngine(resultSetEngine);
+        persistenceManager.setCliHeartbeatMonitor(cliHeartbeatMonitor);
         return persistenceManager;
     }
 
     @EntranceParserBeanAnnotation
     @ConditionalOnMissingBean(name = {EntranceParserBeanAnnotation.BEAN_NAME})
-    public EntranceParser generateEntranceParser(
-            @PersistenceManagerBeanAnnotation.PersistenceManagerAutowiredAnnotation
-                    PersistenceManager persistenceManager) {
+    public EntranceParser generateEntranceParser(@PersistenceManagerBeanAnnotation.PersistenceManagerAutowiredAnnotation PersistenceManager persistenceManager){
         return new CommonEntranceParser(persistenceManager);
     }
 
     @EntranceListenerBusBeanAnnotation
     @ConditionalOnMissingBean(name = {EntranceListenerBusBeanAnnotation.BEAN_NAME})
-    public EntranceEventListenerBus<EntranceEventListener, EntranceEvent>
-            generateEntranceEventListenerBus() {
-        EntranceEventListenerBus<EntranceEventListener, EntranceEvent> entranceEventListenerBus =
-                new EntranceEventListenerBus<EntranceEventListener, EntranceEvent>();
+    public EntranceEventListenerBus<EntranceEventListener, EntranceEvent> generateEntranceEventListenerBus() {
+        EntranceEventListenerBus<EntranceEventListener, EntranceEvent> entranceEventListenerBus = new EntranceEventListenerBus<EntranceEventListener, EntranceEvent>();
         entranceEventListenerBus.start();
         return entranceEventListenerBus;
     }
@@ -136,11 +141,8 @@ public class EntranceSpringConfiguration {
 
     @ErrorCodeListenerBeanAnnotation
     @ConditionalOnMissingBean(name = {ErrorCodeListenerBeanAnnotation.BEAN_NAME})
-    public ErrorCodeListener generateErrorCodeListener(
-            @PersistenceManagerBeanAnnotation.PersistenceManagerAutowiredAnnotation
-                    PersistenceManager persistenceManager,
-            @EntranceParserBeanAnnotation.EntranceParserAutowiredAnnotation
-                    EntranceParser entranceParser) {
+    public ErrorCodeListener generateErrorCodeListener(@PersistenceManagerBeanAnnotation.PersistenceManagerAutowiredAnnotation PersistenceManager persistenceManager,
+                                                       @EntranceParserBeanAnnotation.EntranceParserAutowiredAnnotation EntranceParser entranceParser) {
         PersistenceErrorCodeListener errorCodeListener = new PersistenceErrorCodeListener();
         errorCodeListener.setEntranceParser(entranceParser);
         errorCodeListener.setPersistenceManager(persistenceManager);
@@ -150,21 +152,18 @@ public class EntranceSpringConfiguration {
     @ErrorCodeManagerBeanAnnotation
     @ConditionalOnMissingBean(name = {ErrorCodeManagerBeanAnnotation.BEAN_NAME})
     public ErrorCodeManager generateErrorCodeManager() {
-        try {
-            Class.forName("org.apache.linkis.errorcode.client.handler.LinkisErrorCodeHandler");
+       /* try {
+            Class.forName("com.webank.wedatasphere.linkis.errorcode.client.handler.LinkisErrorCodeHandler");
         } catch (final Exception e) {
             logger.error("failed to init linkis error code handler", e);
-        }
+        }*/
         return FlexibleErrorCodeManager$.MODULE$;
     }
 
     @LogManagerBeanAnnotation
     @ConditionalOnMissingBean(name = {LogManagerBeanAnnotation.BEAN_NAME})
-    public LogManager generateLogManager(
-            @ErrorCodeListenerBeanAnnotation.ErrorCodeListenerAutowiredAnnotation
-                    ErrorCodeListener errorCodeListener,
-            @ErrorCodeManagerBeanAnnotation.ErrorCodeManagerAutowiredAnnotation
-                    ErrorCodeManager errorCodeManager) {
+    public LogManager generateLogManager(@ErrorCodeListenerBeanAnnotation.ErrorCodeListenerAutowiredAnnotation ErrorCodeListener errorCodeListener,
+                                         @ErrorCodeManagerBeanAnnotation.ErrorCodeManagerAutowiredAnnotation ErrorCodeManager errorCodeManager){
         CacheLogManager logManager = new CacheLogManager();
         logManager.setErrorCodeListener(errorCodeListener);
         logManager.setErrorCodeManager(errorCodeManager);
@@ -186,30 +185,23 @@ public class EntranceSpringConfiguration {
 
     @SchedulerContextBeanAnnotation
     @ConditionalOnMissingBean(name = {SchedulerContextBeanAnnotation.BEAN_NAME})
-    public SchedulerContext generateSchedulerContext(
-            @GroupFactoryBeanAnnotation.GroupFactoryAutowiredAnnotation GroupFactory groupFactory,
-            @EntranceExecutorManagerBeanAnnotation.EntranceExecutorManagerAutowiredAnnotation
-                    ExecutorManager executorManager,
-            @ConsumerManagerBeanAnnotation.ConsumerManagerAutowiredAnnotation
-                    ConsumerManager consumerManager) {
+    public SchedulerContext generateSchedulerContext(@GroupFactoryBeanAnnotation.GroupFactoryAutowiredAnnotation GroupFactory groupFactory,
+                                                     @EntranceExecutorManagerBeanAnnotation.EntranceExecutorManagerAutowiredAnnotation ExecutorManager executorManager,
+                                                     @ConsumerManagerBeanAnnotation.ConsumerManagerAutowiredAnnotation ConsumerManager consumerManager) {
         return new EntranceSchedulerContext(groupFactory, consumerManager, executorManager);
     }
 
     @EntranceExecutorManagerBeanAnnotation
     @ConditionalOnMissingBean(name = {EntranceExecutorManagerBeanAnnotation.BEAN_NAME})
-    public ExecutorManager generateExecutorManager(
-            @GroupFactoryBeanAnnotation.GroupFactoryAutowiredAnnotation GroupFactory groupFactory) {
-        EngineConnManagerBuilder engineConnManagerBuilder =
-                EngineConnManagerBuilder$.MODULE$.builder();
+    public ExecutorManager generateExecutorManager(@GroupFactoryBeanAnnotation.GroupFactoryAutowiredAnnotation GroupFactory groupFactory) {
+        EngineConnManagerBuilder engineConnManagerBuilder = EngineConnManagerBuilder$.MODULE$.builder();
         engineConnManagerBuilder.setPolicy(Policy.Process);
         return new EntranceExecutorManagerImpl(groupFactory, engineConnManagerBuilder.build());
     }
 
     @SchedulerBeanAnnotation
     @ConditionalOnMissingBean(name = {SchedulerBeanAnnotation.BEAN_NAME})
-    public Scheduler generateScheduler(
-            @SchedulerContextBeanAnnotation.SchedulerContextAutowiredAnnotation
-                    SchedulerContext schedulerContext) {
+    public Scheduler generateScheduler(@SchedulerContextBeanAnnotation.SchedulerContextAutowiredAnnotation SchedulerContext schedulerContext) {
         Scheduler scheduler = new ParallelScheduler(schedulerContext);
         scheduler.init();
         scheduler.start();
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/exception/EntranceErrorCode.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/exception/EntranceErrorCode.java
index cf097cf..5f160fc 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/exception/EntranceErrorCode.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/exception/EntranceErrorCode.java
@@ -28,9 +28,11 @@ public enum EntranceErrorCode {
     INIT_JOB_ERROR(20012, "Init job error "),
     RESULT_NOT_PERSISTED_ERROR(20013, "Result not persisted error "),
     GROUP_NOT_FOUND(20014, "group not found"),
-    EXECUTION_CODE_ISNULL(
-            20015, "execute code is null, nothing will be execute!(执行代码为空,没有任何代码会被执行)"),
-    JOB_UPDATE_FAILED(20016, "job update failed");
+    EXECUTION_CODE_ISNULL(20015, "execute code is null, nothing will be execute!(执行代码为空,没有任何代码会被执行)"),
+    JOB_UPDATE_FAILED(20016, "job update failed"),
+    VARIABLE_NULL_EXCEPTION(20017, "variable is null")
+
+    ;
     private int errCode;
     private String desc;
 
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java
index dc56253..a292198 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java
@@ -107,46 +107,54 @@ public class EntranceExecutionJob extends EntranceJob implements LogHandler {
     @Override
     public void init() throws EntranceErrorException {
         List<EntranceErrorException> errList = new ArrayList<>();
-        SubJobInfo[] subJobInfos =
-                Arrays.stream(getCodeParser().parse(getJobRequest().getExecutionCode()))
-                        .map(
-                                code -> {
-                                    SubJobInfo subJobInfo = new SubJobInfo();
-                                    // todo don't need whole jobRequest, but need executeUser
-                                    subJobInfo.setJobReq(getJobRequest());
-                                    subJobInfo.setStatus(SchedulerEventState.Inited().toString());
-                                    subJobInfo.setCode(code);
-                                    // persist and update jobDetail
-                                    SubJobDetail subJobDetail = createNewJobDetail();
-                                    subJobInfo.setSubJobDetail(subJobDetail);
-                                    subJobInfo.setProgress(0.0f);
-                                    subJobDetail.setExecutionContent(code);
-                                    subJobDetail.setJobGroupId(getJobRequest().getId());
-                                    subJobDetail.setStatus(SchedulerEventState.Inited().toString());
-                                    subJobDetail.setCreatedTime(
-                                            new Date(System.currentTimeMillis()));
-                                    subJobDetail.setUpdatedTime(
-                                            new Date(System.currentTimeMillis()));
-                                    try {
-                                        persistenceManager
-                                                .createPersistenceEngine()
-                                                .persist(subJobInfo);
-                                    } catch (Exception e1) {
-                                        errList.add(
-                                                new EntranceErrorException(
-                                                        EntranceErrorCode.INIT_JOB_ERROR
-                                                                .getErrCode(),
-                                                        "Init subjob error, please submit it again(任务初始化失败,请稍后重试). "
-                                                                + e1.getMessage()));
-                                    }
-                                    return subJobInfo;
-                                })
-                        .toArray(SubJobInfo[]::new);
+        SubJobInfo[] subJobInfos = Arrays.stream(getCodeParser().parse(getJobRequest().getExecutionCode())).map(code -> {
+            SubJobInfo subJobInfo = new SubJobInfo();
+            // todo don't need whole jobRequest, but need executeUser
+            subJobInfo.setJobReq(getJobRequest());
+            subJobInfo.setStatus(SchedulerEventState.Inited().toString());
+            subJobInfo.setCode(code);
+            // persist and update jobDetail
+            SubJobDetail subJobDetail = createNewJobDetail();
+            subJobInfo.setSubJobDetail(subJobDetail);
+            subJobInfo.setProgress(0.0f);
+            subJobDetail.setExecutionContent(code);
+            subJobDetail.setJobGroupId(getJobRequest().getId());
+            subJobDetail.setStatus(SchedulerEventState.Inited().toString());
+            subJobDetail.setCreatedTime(new Date(System.currentTimeMillis()));
+            subJobDetail.setUpdatedTime(new Date(System.currentTimeMillis()));
+            try {
+                persistenceManager.createPersistenceEngine().persist(subJobInfo);
+            } catch (Exception e1) {
+                errList.add(new EntranceErrorException(EntranceErrorCode.INIT_JOB_ERROR.getErrCode(), "Init subjob error, please submit it again(任务初始化失败,请稍后重试). " + e1.getMessage()));
+            }
+            return subJobInfo;
+        }).toArray(SubJobInfo[]::new);
         if (errList.size() > 0) {
             logger.error(errList.get(0).getDesc());
             throw errList.get(0);
         }
         setJobGroups(subJobInfos);
+        updateNewestAccessByClientTimestamp();
+    }
+
+    /*protected RequestPersistTask getRequestPersistTask() {
+        if(getTask() instanceof HaPersistenceTask) {
+            Task task = ((HaPersistenceTask) getTask()).task();
+            if(task instanceof RequestPersistTask) {
+                return (RequestPersistTask) task;
+            } else {
+                return null;
+            }
+        } else if(getTask() instanceof RequestPersistTask) {
+            return (RequestPersistTask) getTask();
+        } else {
+            return null;
+        }
+    }*/
+
+    @Override
+    public int getRunningSubJobIndex() {
+        return runningIndex;
     }
 
     @Override
@@ -179,8 +187,7 @@ public class EntranceExecutionJob extends EntranceJob implements LogHandler {
                 runtimeMapTmp.put(entry.getKey(), entry.getValue().toString());
             }
         }
-        String resultSetPathRoot =
-                GovernanceCommonConf.RESULT_SET_STORE_PATH().getValue(runtimeMapTmp);
+        String resultSetPathRoot = GovernanceCommonConf.RESULT_SET_STORE_PATH().getValue(runtimeMapTmp);
         Map<String, Object> jobMap = new HashMap<String, Object>();
         jobMap.put(RequestTask$.MODULE$.RESULT_SET_STORE_PATH(), resultSetPathRoot);
         runtimeMapOri.put(QueryParams$.MODULE$.JOB_KEY(), jobMap);
@@ -222,11 +229,10 @@ public class EntranceExecutionJob extends EntranceJob implements LogHandler {
             isHead = true;
             isTail = true;
         }
-        BindEngineLabel bindEngineLabel =
-                new BindEngineLabel()
-                        .setJobGroupId(getJobRequest().getId().toString())
-                        .setIsJobGroupHead(String.valueOf(isHead))
-                        .setIsJobGroupEnd(String.valueOf(isTail));
+        BindEngineLabel bindEngineLabel = new BindEngineLabel()
+                .setJobGroupId(getJobRequest().getId().toString())
+                .setIsJobGroupHead(String.valueOf(isHead))
+                .setIsJobGroupEnd(String.valueOf(isTail));
         if (isHead) {
             jobMap.put(GovernanceConstant.RESULTSET_INDEX(), 0);
             setResultSize(0);
@@ -273,39 +279,16 @@ public class EntranceExecutionJob extends EntranceJob implements LogHandler {
             getJobRequest().setMetrics(new HashMap<>());
         }
         Map<String, Object> metricsMap = getJobRequest().getMetrics();
-        String createTime =
-                metricsMap.containsKey(TaskConstant.ENTRANCEJOB_SUBMIT_TIME)
-                        ? simpleDateFormat.format(
-                                metricsMap.get(TaskConstant.ENTRANCEJOB_SUBMIT_TIME))
-                        : "not created";
-        String scheduleTime =
-                metricsMap.containsKey(TaskConstant.ENTRANCEJOB_SCHEDULE_TIME)
-                        ? simpleDateFormat.format(
-                                metricsMap.get(TaskConstant.ENTRANCEJOB_SCHEDULE_TIME))
-                        : "not scheduled";
-        String startTime =
-                metricsMap.containsKey(TaskConstant.ENTRANCEJOB_TO_ORCHESTRATOR)
-                        ? simpleDateFormat.format(
-                                metricsMap.get(TaskConstant.ENTRANCEJOB_TO_ORCHESTRATOR))
-                        : "not submitted to orchestrator";
-        String endTime =
-                metricsMap.containsKey(TaskConstant.ENTRANCEJOB_COMPLETE_TIME)
-                        ? simpleDateFormat.format(
-                                metricsMap.get(TaskConstant.ENTRANCEJOB_COMPLETE_TIME))
-                        : "on running or not started";
+        String createTime = metricsMap.containsKey(TaskConstant.ENTRANCEJOB_SUBMIT_TIME) ? simpleDateFormat.format(metricsMap.get(TaskConstant.ENTRANCEJOB_SUBMIT_TIME)) : "not created";
+        String scheduleTime = metricsMap.containsKey(TaskConstant.ENTRANCEJOB_SCHEDULE_TIME) ? simpleDateFormat.format(metricsMap.get(TaskConstant.ENTRANCEJOB_SCHEDULE_TIME)) : "not scheduled";
+        String startTime = metricsMap.containsKey(TaskConstant.ENTRANCEJOB_TO_ORCHESTRATOR) ? simpleDateFormat.format(metricsMap.get(TaskConstant.ENTRANCEJOB_TO_ORCHESTRATOR)) : "not submitted to orchestrator";
+        String endTime = metricsMap.containsKey(TaskConstant.ENTRANCEJOB_COMPLETE_TIME) ? simpleDateFormat.format(metricsMap.get(TaskConstant.ENTRANCEJOB_COMPLETE_TIME)) : "on running or not started";
         String runTime;
-        if (metricsMap.containsKey(TaskConstant.ENTRANCEJOB_COMPLETE_TIME)) {
-            runTime =
-                    Utils.msDurationToString(
-                            (((Date) metricsMap.get(TaskConstant.ENTRANCEJOB_COMPLETE_TIME)))
-                                            .getTime()
-                                    - (((Date)
-                                                    metricsMap.get(
-                                                            TaskConstant.ENTRANCEJOB_SUBMIT_TIME)))
-                                            .getTime());
-        } else {
-            runTime =
-                    "The task did not end normally and the usage time could not be counted.(任务并未正常结束,无法统计使用时间)";
+        if (metricsMap.containsKey(TaskConstant.ENTRANCEJOB_COMPLETE_TIME)){
+            runTime = Utils.msDurationToString((((Date) metricsMap.get(TaskConstant.ENTRANCEJOB_COMPLETE_TIME))).getTime()
+            - (((Date) metricsMap.get(TaskConstant.ENTRANCEJOB_SUBMIT_TIME))).getTime());
+        }else{
+            runTime = "The task did not end normally and the usage time could not be counted.(任务并未正常结束,无法统计使用时间)";
         }
         String metric =
                 "Task creation time(任务创建时间): "
@@ -333,6 +316,9 @@ public class EntranceExecutionJob extends EntranceJob implements LogHandler {
             // todo  Do a lot of aftercare work when close(close时候要做很多的善后工作)
             if (this.getLogWriter().isDefined()) {
                 IOUtils.closeQuietly(this.getLogWriter().get());
+                //this.setLogWriter(null);
+            } else {
+                logger.info("job:" + jobRequest().getId() + "LogWriter is null");
             }
             if (this.getLogReader().isDefined()) {
                 IOUtils.closeQuietly(getLogReader().get());
@@ -357,27 +343,19 @@ public class EntranceExecutionJob extends EntranceJob implements LogHandler {
         return progress;
     }
 
-    /**
-     * // The front end needs to obtain data //if (EntranceJob.JOB_COMPLETED_PROGRESS() ==
-     * getProgress()) { // return new JobProgressInfo[0]; //}
-     *
-     * @return
-     */
     @Override
     public JobProgressInfo[] getProgressInfo() {
+        if (EntranceJob.JOB_COMPLETED_PROGRESS() == getProgress()) {
+            return new JobProgressInfo[0];
+        }
         SubJobInfo[] subJobInfoArray = getJobGroups();
         if (subJobInfoArray.length > 0) {
             List<JobProgressInfo> progressInfoList = new ArrayList<>();
             for (SubJobInfo subJobInfo : subJobInfoArray) {
                 progressInfoList.addAll(subJobInfo.getProgressInfoMap().values());
             }
-            return progressInfoList.toArray(new JobProgressInfo[] {});
+            return progressInfoList.toArray(new JobProgressInfo[]{});
         }
         return super.getProgressInfo();
     }
-
-    @Override
-    public int getRunningSubJobIndex() {
-        return runningIndex;
-    }
 }
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java
index dc1a65f..68d4907 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java
@@ -20,6 +20,7 @@ package org.apache.linkis.entrance.persistence;
 import org.apache.linkis.common.exception.ErrorException;
 import org.apache.linkis.common.io.FsPath;
 import org.apache.linkis.entrance.EntranceContext;
+import org.apache.linkis.entrance.cli.heartbeat.CliHeartbeatMonitor;
 import org.apache.linkis.entrance.cs.CSEntranceHelper;
 import org.apache.linkis.entrance.execute.EntranceJob;
 import org.apache.linkis.entrance.log.FlexibleErrorCodeManager;
@@ -44,8 +45,17 @@ public class QueryPersistenceManager extends PersistenceManager {
     private PersistenceEngine persistenceEngine;
     private ResultSetEngine resultSetEngine;
     private static final Logger logger = LoggerFactory.getLogger(QueryPersistenceManager.class);
-    //  private EntranceWebSocketService entranceWebSocketService; //TODO The latter version, to be
-    // removed, webSocket unified walk ListenerBus(后面的版本,要去掉,webSocket统一走ListenerBus)
+  //  private EntranceWebSocketService entranceWebSocketService; //TODO The latter version, to be removed, webSocket unified walk ListenerBus(后面的版本,要去掉,webSocket统一走ListenerBus)
+
+    private CliHeartbeatMonitor cliHeartbeatMonitor;
+
+    public CliHeartbeatMonitor getCliHeartbeatMonitor() {
+        return cliHeartbeatMonitor;
+    }
+
+    public void setCliHeartbeatMonitor(CliHeartbeatMonitor cliHeartbeatMonitor) {
+        this.cliHeartbeatMonitor = cliHeartbeatMonitor;
+    }
 
     public void setPersistenceEngine(PersistenceEngine persistenceEngine) {
         this.persistenceEngine = persistenceEngine;
@@ -163,6 +173,7 @@ public class QueryPersistenceManager extends PersistenceManager {
     @Override
     public void onJobInited(Job job) {
         updateJobStatus(job);
+        cliHeartbeatMonitor.registerIfCliJob(job);
     }
 
     @Override
@@ -198,6 +209,7 @@ public class QueryPersistenceManager extends PersistenceManager {
         } catch (Throwable e) {
             logger.error("Failed to register cs rs data ", e);
         }
+        cliHeartbeatMonitor.unRegisterIfCliJob(job);
         updateJobStatus(job);
     }
 
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java
index c7760da..4ea0366 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java
@@ -199,6 +199,9 @@ public class EntranceRestfulApi implements EntranceRestfulRemote {
             return message;
         }
         if (job.isDefined()) {
+            if (job.get() instanceof EntranceJob) {
+                ((EntranceJob)job.get()).updateNewestAccessByClientTimestamp();
+            }
             message = Message.ok();
             message.setMethod("/api/entrance/" + id + "/status");
             message.data("status", job.get().getState().toString()).data("execID", id);
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala
index f0024cc..2eb3bb9 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala
@@ -105,6 +105,12 @@ abstract class EntranceServer extends Logging {
       getEntranceContext.getOrCreateScheduler().submit(job)
       val msg = s"Job with jobId : ${job.getId} and execID : ${job.getId()} submitted "
       logger.info(msg)
+      /**
+        * job.afterStateChanged() method is only called in job.run(), and job.run() is called only after the job is scheduled,
+        * so it suggests that we lack a hook for job init; currently we call this to trigger JobListener.onJobInited()
+        * */
+      job.getJobListener.foreach(_.onJobInited(job))
+
       job match {
         case entranceJob: EntranceJob =>
           entranceJob.getJobRequest.setReqId(job.getId())
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cli/heartbeat/CliHeartbeatMonitor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cli/heartbeat/CliHeartbeatMonitor.scala
new file mode 100644
index 0000000..ded4d04
--- /dev/null
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cli/heartbeat/CliHeartbeatMonitor.scala
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.entrance.cli.heartbeat
+
+import java.util
+import java.util.concurrent.{ConcurrentHashMap, ScheduledThreadPoolExecutor, TimeUnit}
+import javax.annotation.PostConstruct
+
+import org.apache.linkis.common.utils.{Logging, Utils}
+import org.apache.linkis.entrance.conf.EntranceConfiguration
+import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceErrorException}
+import org.apache.linkis.entrance.execute.EntranceJob
+import org.apache.linkis.scheduler.queue.Job
+import org.apache.commons.lang3.StringUtils
+import org.apache.commons.lang3.concurrent.BasicThreadFactory
+
+import scala.collection.JavaConverters._
+
+class CliHeartbeatMonitor(handler: HeartbeatLossHandler) extends Logging {
+  private val infoMap = new ConcurrentHashMap[String, EntranceJob]
+  private val clientHeartbeatThreshold = 1000 * EntranceConfiguration.CLI_HEARTBEAT_THRESHOLD_SECONDS
+  private val clientHeartbeatDaemon = new ScheduledThreadPoolExecutor(1, new BasicThreadFactory.Builder().namingPattern("entrance-cli-heartbeat-%d").daemon(true).build)
+
+  def panicIfNull(obj: Any, msg: String): Unit = {
+    if (obj == null) {
+          throw new EntranceErrorException(EntranceErrorCode.VARIABLE_NULL_EXCEPTION.getErrCode, msg)
+    }
+  }
+
+  /*
+  register for scan
+   */
+  def registerIfCliJob(job: Job): Unit = {
+    if (job.isInstanceOf[EntranceJob]) {
+      val entranceJob = job.asInstanceOf[EntranceJob]
+      if (isCliJob(entranceJob)) {
+        val id = job.getJobInfo.getId
+        if (infoMap.containsKey(id)) {
+          error("registered duplicate job!! job-id: " + id)
+        } else {
+          infoMap.put(id, entranceJob)
+          info("registered cli job: " + id)
+        }
+      }
+    }
+  }
+
+  /*
+  remove from scan list
+ */
+  def unRegisterIfCliJob(job: Job): Unit = {
+    if (job.isInstanceOf[EntranceJob]) {
+      val entranceJob = job.asInstanceOf[EntranceJob]
+      if (isCliJob(entranceJob)) {
+        infoMap.remove(job.getJobInfo.getId)
+        info("unregistered cli job: " + job.getJobInfo.getId)
+      }
+    }
+  }
+
+  /*
+  probably will not use it but instead update heartbeat for all jobs and
+  scan only cli jobs
+   */
+  def updateHeartbeatIfCliJob(job: Job): Unit = {
+    if (job.isInstanceOf[EntranceJob]) {
+      val entranceJob = job.asInstanceOf[EntranceJob]
+      if (isCliJob(entranceJob)) {
+        val id = job.getJobInfo.getId
+        if (!infoMap.containsKey(id)) error("heartbeat on non-existing job!! job-id: " + id)
+        else infoMap.get(id).updateNewestAccessByClientTimestamp()
+      }
+    }
+  }
+
+  def start(): Unit = {
+    panicIfNull(handler, "handler should not be null")
+    clientHeartbeatDaemon.scheduleAtFixedRate(new Runnable {
+      override def run(): Unit = Utils.tryCatch(scanOneIteration) {
+        t => error("ClientHeartbeatMonitor failed to scan for one iteration", t)
+      }
+    }, 0, 5, TimeUnit.SECONDS)
+    info("started cliHeartbeatMonitor")
+    Utils.addShutdownHook(() -> this.shutdown())
+  }
+
+  def scanOneIteration(): Unit = { //        LOG.info("ClientHeartbeatMonitor starts scanning for one iteration");
+    val currentTime = System.currentTimeMillis
+    val entries = infoMap.entrySet.iterator
+    val problemJobs = new util.ArrayList[EntranceJob]
+    while (entries.hasNext) {
+      val entry = entries.next
+      debug("Scanned job: " + entry.getKey());
+      if (!isAlive(currentTime, entry.getValue)) {
+        info("Found linkis-cli connection lost job: " + entry.getKey())
+        problemJobs.add(entry.getValue)
+      }
+    }
+
+    val iterator = problemJobs.iterator
+    while (iterator.hasNext) {
+      //remove to avoid handling the same job twice
+      infoMap.remove(iterator.next)
+    }
+
+    if (problemJobs.size > 0) {
+      handler.handle(problemJobs.asScala.toList)
+    }
+    debug("ClientHeartbeatMonitor ends scanning for one iteration")
+  }
+
+  private def isCliJob(job: EntranceJob): Boolean = StringUtils.equalsIgnoreCase(job.getCreator, "LINKISCLI")
+  private def isAlive(currentTime: Long, job: EntranceJob): Boolean = {
+    val lastAliveTime = job.getNewestAccessByClientTimestamp
+    return currentTime - lastAliveTime <= clientHeartbeatThreshold
+  }
+
+  def shutdown(): Unit = {
+    clientHeartbeatDaemon.shutdownNow
+  }
+}
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cli/heartbeat/HeartbeatLossHandler.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cli/heartbeat/HeartbeatLossHandler.scala
new file mode 100644
index 0000000..a9276ca
--- /dev/null
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cli/heartbeat/HeartbeatLossHandler.scala
@@ -0,0 +1,24 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.entrance.cli.heartbeat
+
+import org.apache.linkis.entrance.execute.EntranceJob
+
+trait HeartbeatLossHandler {
+  def handle(jobs: List[EntranceJob]): Unit
+}
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cli/heartbeat/KillHandler.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cli/heartbeat/KillHandler.scala
new file mode 100644
index 0000000..5f46280
--- /dev/null
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cli/heartbeat/KillHandler.scala
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.entrance.cli.heartbeat
+import org.apache.linkis.common.utils.{Logging, Utils}
+import org.apache.linkis.entrance.execute.EntranceJob
+
+class KillHandler extends HeartbeatLossHandler with Logging{
+  override def handle(jobs: List[EntranceJob]): Unit = {
+    for (job <- jobs) {
+      if (job != null) {
+        info("Killing job: " + job.getJobInfo.getId)
+        Utils.tryCatch(job.onFailure("Job is killed because of client-server connection lost", null)) {
+          t => error("failed to kill job: " + job.getJobInfo.getId, t)
+        }
+      }
+    }
+  }
+}
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala
index 34a435f..cdc83a7 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala
@@ -60,6 +60,8 @@ object EntranceConfiguration {
     * requestApplicationName(Creator) 默认的服务名,默认为IDE
     */
   val DEFAULT_REQUEST_APPLICATION_NAME = CommonVars("wds.linkis.default.requestApplication.name", "IDE")
+
+  val SQL_LIMIT_CREATOR = CommonVars("wds.linkis.sql.limit.creator", "IDE,Visualis")
   /**
     * runType
     */
@@ -117,8 +119,8 @@ object EntranceConfiguration {
 
 
   val SHELL_DANGER_CHECK_SWITCH = CommonVars("wds.linkis.entrance.shell.danger.check.enabled", true)
-  val SHELL_DANGER_USAGE = CommonVars("wds.linkis.shell.danger.usage", "rm,sh,find,kill,python,for,source,hdfs,hadoop,spark-sql,spark-submit,pyspark,spark-shell,hive,yarn,df,dd")
-  val SHELL_WHITE_USAGE = CommonVars("wds.linkis.shell.white.usage", "cd,ls,echo")
+  val SHELL_DANGER_USAGE = CommonVars("wds.linkis.shell.danger.usage", "bdp-client")
+  val SHELL_WHITE_USAGE = CommonVars("wds.linkis.shell.white.usage", "sqoop,cd,ll,ls,echo,cat,tree,diff,who,grep,whoami,set,pwd,cut,file,head,less,if,while")
 
   val FLOW_EXECUTION_CREATOR = CommonVars("wds.linkis.entrance.flow.creator", "nodeexecution")
 
@@ -127,7 +129,7 @@ object EntranceConfiguration {
 
   val SKIP_AUTH = CommonVars("wds.linkis.entrance.skip.auth", false)
 
-  val PROGRESS_PUSH = CommonVars[String]("wds.linkis.entrance.push.progress", "true")
+  val PROGRESS_PUSH = CommonVars[String]("wds.linkis.entrance.push.progress", "false")
 
   val CONCURRENT_FACTORY_MAX_CAPACITY = CommonVars("wds.linkis.concurrent.group.factory.capacity", 1000)
 
@@ -167,7 +169,6 @@ object EntranceConfiguration {
 
   val ENABLE_HDFS_LOG_CACHE = CommonVars[Boolean] ("linkis.entrance.enable.hdfs.log.cache", true).getValue
 
-  val LOG_PUSH_INTERVAL_TIME = CommonVars("wds.linkis.entrance.log.push.interval.time", 5 * 60 * 1000)
-
+  val CLI_HEARTBEAT_THRESHOLD_SECONDS = CommonVars[Long] ("linkis.entrance.cli.heartbeat.threshold.sec", 30l).getValue
 
 }
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceJob.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceJob.scala
index 0afd81b..a3107eb 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceJob.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceJob.scala
@@ -19,7 +19,7 @@ package org.apache.linkis.entrance.execute
 
 import java.util
 import java.util.Date
-import java.util.concurrent.atomic.AtomicInteger
+import java.util.concurrent.atomic.{AtomicInteger, AtomicLong}
 
 import org.apache.linkis.common.log.LogUtils
 import org.apache.linkis.common.utils.Utils
@@ -64,6 +64,13 @@ abstract class EntranceJob extends Job {
   //  private var resultSize = -1
   private var entranceContext: EntranceContext = _
 
+  /**
+    * Record newest time that a client access status of this job
+    * Can be used to monitor client status.
+    * e.g. server can detect if linkis-cli process has abnormally ended then kill the job
+    * */
+  private val newestAccessByClientTimestamp: AtomicLong = new AtomicLong(-1l) //volatile
+
   def setEntranceListenerBus(entranceListenerBus: EntranceEventListenerBus[EntranceEventListener, EntranceEvent]): Unit =
     this.entranceListenerBus = Option(entranceListenerBus)
 
@@ -75,6 +82,13 @@ abstract class EntranceJob extends Job {
 
   def getEntranceContext: EntranceContext = this.entranceContext
 
+  def getNewestAccessByClientTimestamp: Long = this.newestAccessByClientTimestamp.get()
+
+  def updateNewestAccessByClientTimestamp(): Unit = {
+    val newTime = System.currentTimeMillis()
+    newestAccessByClientTimestamp.set(newTime)
+  }
+
   def getRunningSubJobIndex: Int
 
   def getRunningSubJob: SubJobInfo = {
@@ -168,7 +182,6 @@ abstract class EntranceJob extends Job {
   }
 
   override def onFailure(errorMsg: String, t: Throwable): Unit = {
-    //updateJobRequestStatus(SchedulerEventState.Failed.toString)
     val generatedMsg = LogUtils.generateERROR(s"Sorry, your job executed failed with reason: $errorMsg")
     getLogListener.foreach(_.onLogUpdate(this, generatedMsg))
     super.onFailure(errorMsg, t)
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/SQLLimitEntranceInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/SQLLimitEntranceInterceptor.scala
index 15ffd44..7a53da1 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/SQLLimitEntranceInterceptor.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/SQLLimitEntranceInterceptor.scala
@@ -17,13 +17,21 @@
  
 package org.apache.linkis.entrance.interceptor.impl
 
+import org.apache.linkis.entrance.conf.EntranceConfiguration
 import org.apache.linkis.entrance.interceptor.EntranceInterceptor
 import org.apache.linkis.governance.common.entity.job.JobRequest
 import org.apache.linkis.manager.label.utils.LabelUtil
 
 
 class SQLLimitEntranceInterceptor extends EntranceInterceptor {
+  private val  LIMIT_CREATORS = EntranceConfiguration.SQL_LIMIT_CREATOR.getValue
+
   override def apply(task: JobRequest, logAppender: java.lang.StringBuilder): JobRequest = {
+    /*val (user, creator) = LabelUtil.getUserCreator(task.getLabels)
+    if (! LIMIT_CREATORS.contains(creator)) {
+      logAppender.append(LogUtils.generateWarn(s"The code you submit will not be limited by the limit \n") )
+      return task
+    }*/
     val codeType = {
       val codeType = LabelUtil.getCodeType(task.getLabels)
       if (null != codeType) {

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 15/18: 1. reformat some code. 2. fix some config.

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit f409fc82d8dc75b727773a034756ab5fd2211ef4
Author: alexkun <xu...@qq.com>
AuthorDate: Sat Mar 5 13:44:40 2022 +0800

    1. reformat some code.
    2. fix some config.
---
 .../assembly-combined/bin/linkis-cli               |   2 +-
 .../conf/linkis-cg-entrance.properties             |   2 +-
 .../assembly-combined/conf/log4j2-console.xml      |   4 +-
 .../assembly-combined/conf/log4j2.xml              |   4 +-
 .../linkis/server/conf/ServerConfiguration.scala   |   4 +-
 .../access/ECTaskEntranceInfoAccessTest.java       |  61 ++++++-----
 .../annotation/CliHeartBeatMonitorAnnotation.java  |  12 ++-
 .../entrance/conf/EntranceSpringConfiguration.java |  59 +++++++----
 .../entrance/exception/EntranceErrorCode.java      |   6 +-
 .../linkis/entrance/job/EntranceExecutionJob.java  | 114 ++++++++++++++-------
 .../persistence/QueryPersistenceManager.java       |   3 +-
 .../entrance/restful/EntranceRestfulApi.java       |   2 +-
 .../entity/persistence/PersistenceLabel.java       |   2 +-
 .../request/ExternalResourceRequester.java         |   9 +-
 .../external/service/ExternalResourceService.java  |   4 +-
 .../service/impl/ExternalResourceServiceImpl.java  |  17 +--
 16 files changed, 194 insertions(+), 111 deletions(-)

diff --git a/assembly-combined-package/assembly-combined/bin/linkis-cli b/assembly-combined-package/assembly-combined/bin/linkis-cli
index dbf3392..f6fce03 100644
--- a/assembly-combined-package/assembly-combined/bin/linkis-cli
+++ b/assembly-combined-package/assembly-combined/bin/linkis-cli
@@ -16,7 +16,7 @@
 export LANG=en_US.utf-8
 #set -x
 
-LINKIS_CLIENT='com.webank.wedatasphere.linkis.cli.application.LinkisClientApplication'
+LINKIS_CLIENT='org.apache.linkis.cli.application.LinkisClientApplication'
 
 i=0
 for arg in "$@"
diff --git a/assembly-combined-package/assembly-combined/conf/linkis-cg-entrance.properties b/assembly-combined-package/assembly-combined/conf/linkis-cg-entrance.properties
index aee36d9..8177a7f 100644
--- a/assembly-combined-package/assembly-combined/conf/linkis-cg-entrance.properties
+++ b/assembly-combined-package/assembly-combined/conf/linkis-cg-entrance.properties
@@ -19,7 +19,7 @@ wds.linkis.server.restful.scan.packages=org.apache.linkis.entrance.restful
 wds.linkis.server.socket.mode=false
 wds.linkis.entrance.config.log.path=hdfs:///appcom/logs/linkis
 #wds.linkis.resultSet.store.path=hdfs:///tmp/linkis
-wds.linkis.orchestrator.computation.operation.builder.class=com.webank.wedatasphere.linkis.orchestrator.operation.TuningOperationBuilder
+wds.linkis.orchestrator.computation.operation.builder.class=org.apache.linkis.orchestrator.operation.TuningOperationBuilder
 
 
 wds.linkis.entrance.shell.danger.check.enabled=false
diff --git a/assembly-combined-package/assembly-combined/conf/log4j2-console.xml b/assembly-combined-package/assembly-combined/conf/log4j2-console.xml
index 80641de..f9a1c94 100644
--- a/assembly-combined-package/assembly-combined/conf/log4j2-console.xml
+++ b/assembly-combined-package/assembly-combined/conf/log4j2-console.xml
@@ -32,11 +32,11 @@
         <root level="INFO">
             <appender-ref ref="RollingFile"/>
         </root>
-        <logger name="com.webank.wedatasphere.linkis.message.builder.DefaultMessageJob" level="warn" additivity="true">
+        <logger name="org.apache.linkis.message.builder.DefaultMessageJob" level="warn" additivity="true">
             <appender-ref ref="RollingFile"/>
         </logger>
 
-        <logger name="com.webank.wedatasphere.linkis.message.scheduler.DefaultMessageExecutor" level="warn" additivity="true">
+        <logger name="org.apache.linkis.message.scheduler.DefaultMessageExecutor" level="warn" additivity="true">
             <appender-ref ref="RollingFile"/>
         </logger>
         <logger name="com.netflix.loadbalancer.DynamicServerListLoadBalancer" level="warn" additivity="true">
diff --git a/assembly-combined-package/assembly-combined/conf/log4j2.xml b/assembly-combined-package/assembly-combined/conf/log4j2.xml
index 2ba429e..4f4cb3c 100644
--- a/assembly-combined-package/assembly-combined/conf/log4j2.xml
+++ b/assembly-combined-package/assembly-combined/conf/log4j2.xml
@@ -32,11 +32,11 @@
         <root level="INFO">
             <appender-ref ref="RollingFile"/>
         </root>
-        <logger name="com.webank.wedatasphere.linkis.message.builder.DefaultMessageJob" level="warn" additivity="true">
+        <logger name="org.apache.linkis.message.builder.DefaultMessageJob" level="warn" additivity="true">
             <appender-ref ref="RollingFile"/>
         </logger>
 
-        <logger name="com.webank.wedatasphere.linkis.message.scheduler.DefaultMessageExecutor" level="warn" additivity="true">
+        <logger name="org.apache.linkis.message.scheduler.DefaultMessageExecutor" level="warn" additivity="true">
             <appender-ref ref="RollingFile"/>
         </logger>
         <logger name="com.netflix.loadbalancer.DynamicServerListLoadBalancer" level="warn" additivity="true">
diff --git a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala
index e816c3c..75df6d9 100644
--- a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala
+++ b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala
@@ -23,7 +23,7 @@ import java.lang.Boolean
 import org.apache.linkis.common.conf.{CommonVars, Configuration, TimeType}
 import org.apache.linkis.common.utils.{DESUtil, Logging, Utils}
 import org.apache.linkis.server.exception.BDPInitServerException
-import org.apache.commons.lang.StringUtils
+import org.apache.commons.lang3.StringUtils
 import sun.misc.BASE64Encoder
 
 
@@ -99,8 +99,6 @@ object ServerConfiguration extends Logging{
   val BDP_SERVER_WAR_TEMPDIR = CommonVars("wds.linkis.server.war.tempdir", new File(BDP_SERVER_HOME.getValue, "web/webapps").getPath)
   val BDP_SERVER_SERVER_DEFAULT_DIR_ALLOWED = CommonVars("wds.linkis.server.default.dir.allowed", "false")
   val BDP_SERVER_WEB_SESSION_TIMEOUT = CommonVars("wds.linkis.server.web.session.timeout", new TimeType("2h"))
-  //val LINKIS_SERVER_SESSION_TICKETID_KEY = CommonVars("wds.linkis.session.ticket.key", "linkis_user_session_ticket_id_v1")
-  val LINKIS_SERVER_SESSION_TICKETID_KEY = CommonVars("wds.linkis.session.ticket.key", "bdp-user-ticket-id")
 
   val BDP_SERVER_EVENT_QUEUE_SIZE = CommonVars("wds.linkis.server.event.queue.size", 5000)
   val BDP_SERVER_EVENT_CONSUMER_THREAD_SIZE = CommonVars("wds.linkis.server.event.consumer.thread", 10)
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/java/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessTest.java b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/java/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessTest.java
index 51de963..847d303 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/java/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessTest.java
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/java/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessTest.java
@@ -1,55 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 
 package org.apache.linkis.engineconn.computation.executor.upstream.access;
 
 import org.apache.linkis.DataWorkCloudApplication;
 import org.apache.linkis.common.ServiceInstance;
-import org.apache.linkis.common.conf.DWCArgumentsParser;
-import org.apache.linkis.engineconn.common.creation.DefaultEngineCreationContext;
-import org.apache.linkis.engineconn.common.creation.EngineCreationContext;
 import org.apache.linkis.governance.common.conf.GovernanceCommonConf;
-import org.apache.linkis.governance.common.utils.EngineConnArgumentsParser;
 import org.apache.linkis.rpc.Sender;
 import org.apache.linkis.server.conf.ServerConfiguration;
-import org.apache.linkis.server.utils.LinkisMainHelper;
+
 import org.apache.commons.lang3.StringUtils;
+
 import org.junit.Before;
 import org.junit.Test;
-import org.springframework.cloud.client.discovery.DiscoveryClient;
-
-import java.util.Arrays;
-import java.util.List;
 
 import static org.junit.Assert.*;
 
-/**
- * Created by shangda on 2022/2/9.
- */
 public class ECTaskEntranceInfoAccessTest {
 
     @Before
     public void before() {
-//        System.getProperties().setProperty("wds.linkis.server.conf", "linkis-et-jobhistory-scan.properties");
+        //        System.getProperties().setProperty("wds.linkis.server.conf",
+        // "linkis-et-jobhistory-scan.properties");
         System.out.println("Spring is enabled, now try to start SpringBoot.");
         System.out.println("<--------------------Start SpringBoot App-------------------->");
         String existsExcludePackages = ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES().getValue();
         if (!StringUtils.isEmpty(existsExcludePackages)) {
-            DataWorkCloudApplication.setProperty(ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES().key(), existsExcludePackages);
+            DataWorkCloudApplication.setProperty(
+                    ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES().key(), existsExcludePackages);
         }
 
-        String[] args = new String[]{
-                "--spring-conf","eureka.client.serviceUrl.defaultZone=http://ip:port/eureka/",
-                "--spring-conf", "logging.config=classpath:log4j2.xml",
-                "--spring-conf", "spring.profiles.active=engineconn",
-                "--spring-conf", "server.port=28899",
-                "--spring-conf", "spring.application.name=linkis-cg-engineconn"};
+        String[] args =
+                new String[] {
+                    "--spring-conf", "eureka.client.serviceUrl.defaultZone=http://ip:port/eureka/",
+                    "--spring-conf", "logging.config=classpath:log4j2.xml",
+                    "--spring-conf", "spring.profiles.active=engineconn",
+                    "--spring-conf", "server.port=28899",
+                    "--spring-conf", "spring.application.name=linkis-cg-engineconn"
+                };
         // 加载spring类
         try {
-//            ECTaskEntranceInfoAccessHelper.initApp(args);
+            //            ECTaskEntranceInfoAccessHelper.initApp(args);
         } catch (Exception e) {
             System.out.println(e.getStackTrace());
         }
 
-        ServiceInstance[] instances = Sender.getInstances(GovernanceCommonConf.ENGINE_CONN_SPRING_NAME().getValue());
+        ServiceInstance[] instances =
+                Sender.getInstances(GovernanceCommonConf.ENGINE_CONN_SPRING_NAME().getValue());
 
         System.out.println("<--------------------SpringBoot App init succeed-------------------->");
     }
@@ -57,8 +68,6 @@ public class ECTaskEntranceInfoAccessTest {
     @Test
     public void main() throws Exception {
 
-
-//        LinkisJobHistoryScanApplication.main(new String[]{"2021122919", "2021122921"});
+        //        LinkisJobHistoryScanApplication.main(new String[]{"2021122919", "2021122921"});
     }
-
-}
\ No newline at end of file
+}
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/annotation/CliHeartBeatMonitorAnnotation.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/annotation/CliHeartBeatMonitorAnnotation.java
index 9f5c537..e263b32 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/annotation/CliHeartBeatMonitorAnnotation.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/annotation/CliHeartBeatMonitorAnnotation.java
@@ -34,17 +34,25 @@ import java.lang.annotation.Target;
 @Component(value = CliHeartBeatMonitorAnnotation.BEAN_NAME)
 public @interface CliHeartBeatMonitorAnnotation {
     String BEAN_NAME = "cliHeartBeatMonitor";
+
     @AliasFor(annotation = Component.class)
     String value() default BEAN_NAME;
 
-    @Target({ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD, ElementType.TYPE, ElementType.PARAMETER})
+    @Target({
+        ElementType.CONSTRUCTOR,
+        ElementType.FIELD,
+        ElementType.METHOD,
+        ElementType.TYPE,
+        ElementType.PARAMETER
+    })
     @Retention(RetentionPolicy.RUNTIME)
     @Qualifier(BEAN_NAME)
     @Autowired
     @interface CliHeartBeatMonitorAutowiredAnnotation {
         @AliasFor(annotation = Qualifier.class)
         String value() default BEAN_NAME;
+
         @AliasFor(annotation = Autowired.class)
         boolean required() default true;
     }
-}
\ No newline at end of file
+}
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java
index 810c35b..02ec26a 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java
@@ -78,7 +78,7 @@ public class EntranceSpringConfiguration {
 
     @CliHeartBeatMonitorAnnotation
     @ConditionalOnMissingBean(name = {CliHeartBeatMonitorAnnotation.BEAN_NAME})
-    public CliHeartbeatMonitor generateCliHeartbeatMonitor(){
+    public CliHeartbeatMonitor generateCliHeartbeatMonitor() {
         CliHeartbeatMonitor cliHeartbeatMonitor = new CliHeartbeatMonitor(new KillHandler());
         cliHeartbeatMonitor.start();
         return cliHeartbeatMonitor;
@@ -86,9 +86,13 @@ public class EntranceSpringConfiguration {
 
     @PersistenceManagerBeanAnnotation
     @ConditionalOnMissingBean(name = {PersistenceManagerBeanAnnotation.BEAN_NAME})
-    public PersistenceManager generatePersistenceManager(@PersistenceEngineBeanAnnotation.PersistenceEngineAutowiredAnnotation PersistenceEngine persistenceEngine,
-                                                         @ResultSetEngineBeanAnnotation.ResultSetEngineAutowiredAnnotation ResultSetEngine resultSetEngine,
-                                                         @CliHeartBeatMonitorAnnotation.CliHeartBeatMonitorAutowiredAnnotation CliHeartbeatMonitor cliHeartbeatMonitor){
+    public PersistenceManager generatePersistenceManager(
+            @PersistenceEngineBeanAnnotation.PersistenceEngineAutowiredAnnotation
+                    PersistenceEngine persistenceEngine,
+            @ResultSetEngineBeanAnnotation.ResultSetEngineAutowiredAnnotation
+                    ResultSetEngine resultSetEngine,
+            @CliHeartBeatMonitorAnnotation.CliHeartBeatMonitorAutowiredAnnotation
+                    CliHeartbeatMonitor cliHeartbeatMonitor) {
         logger.info("init PersistenceManager.");
         QueryPersistenceManager persistenceManager = new QueryPersistenceManager();
         persistenceManager.setPersistenceEngine(persistenceEngine);
@@ -99,14 +103,18 @@ public class EntranceSpringConfiguration {
 
     @EntranceParserBeanAnnotation
     @ConditionalOnMissingBean(name = {EntranceParserBeanAnnotation.BEAN_NAME})
-    public EntranceParser generateEntranceParser(@PersistenceManagerBeanAnnotation.PersistenceManagerAutowiredAnnotation PersistenceManager persistenceManager){
+    public EntranceParser generateEntranceParser(
+            @PersistenceManagerBeanAnnotation.PersistenceManagerAutowiredAnnotation
+                    PersistenceManager persistenceManager) {
         return new CommonEntranceParser(persistenceManager);
     }
 
     @EntranceListenerBusBeanAnnotation
     @ConditionalOnMissingBean(name = {EntranceListenerBusBeanAnnotation.BEAN_NAME})
-    public EntranceEventListenerBus<EntranceEventListener, EntranceEvent> generateEntranceEventListenerBus() {
-        EntranceEventListenerBus<EntranceEventListener, EntranceEvent> entranceEventListenerBus = new EntranceEventListenerBus<EntranceEventListener, EntranceEvent>();
+    public EntranceEventListenerBus<EntranceEventListener, EntranceEvent>
+            generateEntranceEventListenerBus() {
+        EntranceEventListenerBus<EntranceEventListener, EntranceEvent> entranceEventListenerBus =
+                new EntranceEventListenerBus<EntranceEventListener, EntranceEvent>();
         entranceEventListenerBus.start();
         return entranceEventListenerBus;
     }
@@ -141,8 +149,11 @@ public class EntranceSpringConfiguration {
 
     @ErrorCodeListenerBeanAnnotation
     @ConditionalOnMissingBean(name = {ErrorCodeListenerBeanAnnotation.BEAN_NAME})
-    public ErrorCodeListener generateErrorCodeListener(@PersistenceManagerBeanAnnotation.PersistenceManagerAutowiredAnnotation PersistenceManager persistenceManager,
-                                                       @EntranceParserBeanAnnotation.EntranceParserAutowiredAnnotation EntranceParser entranceParser) {
+    public ErrorCodeListener generateErrorCodeListener(
+            @PersistenceManagerBeanAnnotation.PersistenceManagerAutowiredAnnotation
+                    PersistenceManager persistenceManager,
+            @EntranceParserBeanAnnotation.EntranceParserAutowiredAnnotation
+                    EntranceParser entranceParser) {
         PersistenceErrorCodeListener errorCodeListener = new PersistenceErrorCodeListener();
         errorCodeListener.setEntranceParser(entranceParser);
         errorCodeListener.setPersistenceManager(persistenceManager);
@@ -152,8 +163,8 @@ public class EntranceSpringConfiguration {
     @ErrorCodeManagerBeanAnnotation
     @ConditionalOnMissingBean(name = {ErrorCodeManagerBeanAnnotation.BEAN_NAME})
     public ErrorCodeManager generateErrorCodeManager() {
-       /* try {
-            Class.forName("com.webank.wedatasphere.linkis.errorcode.client.handler.LinkisErrorCodeHandler");
+        /* try {
+            Class.forName("org.apache.linkis.errorcode.client.handler.LinkisErrorCodeHandler");
         } catch (final Exception e) {
             logger.error("failed to init linkis error code handler", e);
         }*/
@@ -162,8 +173,11 @@ public class EntranceSpringConfiguration {
 
     @LogManagerBeanAnnotation
     @ConditionalOnMissingBean(name = {LogManagerBeanAnnotation.BEAN_NAME})
-    public LogManager generateLogManager(@ErrorCodeListenerBeanAnnotation.ErrorCodeListenerAutowiredAnnotation ErrorCodeListener errorCodeListener,
-                                         @ErrorCodeManagerBeanAnnotation.ErrorCodeManagerAutowiredAnnotation ErrorCodeManager errorCodeManager){
+    public LogManager generateLogManager(
+            @ErrorCodeListenerBeanAnnotation.ErrorCodeListenerAutowiredAnnotation
+                    ErrorCodeListener errorCodeListener,
+            @ErrorCodeManagerBeanAnnotation.ErrorCodeManagerAutowiredAnnotation
+                    ErrorCodeManager errorCodeManager) {
         CacheLogManager logManager = new CacheLogManager();
         logManager.setErrorCodeListener(errorCodeListener);
         logManager.setErrorCodeManager(errorCodeManager);
@@ -185,23 +199,30 @@ public class EntranceSpringConfiguration {
 
     @SchedulerContextBeanAnnotation
     @ConditionalOnMissingBean(name = {SchedulerContextBeanAnnotation.BEAN_NAME})
-    public SchedulerContext generateSchedulerContext(@GroupFactoryBeanAnnotation.GroupFactoryAutowiredAnnotation GroupFactory groupFactory,
-                                                     @EntranceExecutorManagerBeanAnnotation.EntranceExecutorManagerAutowiredAnnotation ExecutorManager executorManager,
-                                                     @ConsumerManagerBeanAnnotation.ConsumerManagerAutowiredAnnotation ConsumerManager consumerManager) {
+    public SchedulerContext generateSchedulerContext(
+            @GroupFactoryBeanAnnotation.GroupFactoryAutowiredAnnotation GroupFactory groupFactory,
+            @EntranceExecutorManagerBeanAnnotation.EntranceExecutorManagerAutowiredAnnotation
+                    ExecutorManager executorManager,
+            @ConsumerManagerBeanAnnotation.ConsumerManagerAutowiredAnnotation
+                    ConsumerManager consumerManager) {
         return new EntranceSchedulerContext(groupFactory, consumerManager, executorManager);
     }
 
     @EntranceExecutorManagerBeanAnnotation
     @ConditionalOnMissingBean(name = {EntranceExecutorManagerBeanAnnotation.BEAN_NAME})
-    public ExecutorManager generateExecutorManager(@GroupFactoryBeanAnnotation.GroupFactoryAutowiredAnnotation GroupFactory groupFactory) {
-        EngineConnManagerBuilder engineConnManagerBuilder = EngineConnManagerBuilder$.MODULE$.builder();
+    public ExecutorManager generateExecutorManager(
+            @GroupFactoryBeanAnnotation.GroupFactoryAutowiredAnnotation GroupFactory groupFactory) {
+        EngineConnManagerBuilder engineConnManagerBuilder =
+                EngineConnManagerBuilder$.MODULE$.builder();
         engineConnManagerBuilder.setPolicy(Policy.Process);
         return new EntranceExecutorManagerImpl(groupFactory, engineConnManagerBuilder.build());
     }
 
     @SchedulerBeanAnnotation
     @ConditionalOnMissingBean(name = {SchedulerBeanAnnotation.BEAN_NAME})
-    public Scheduler generateScheduler(@SchedulerContextBeanAnnotation.SchedulerContextAutowiredAnnotation SchedulerContext schedulerContext) {
+    public Scheduler generateScheduler(
+            @SchedulerContextBeanAnnotation.SchedulerContextAutowiredAnnotation
+                    SchedulerContext schedulerContext) {
         Scheduler scheduler = new ParallelScheduler(schedulerContext);
         scheduler.init();
         scheduler.start();
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/exception/EntranceErrorCode.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/exception/EntranceErrorCode.java
index 5f160fc..1e47743 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/exception/EntranceErrorCode.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/exception/EntranceErrorCode.java
@@ -28,11 +28,11 @@ public enum EntranceErrorCode {
     INIT_JOB_ERROR(20012, "Init job error "),
     RESULT_NOT_PERSISTED_ERROR(20013, "Result not persisted error "),
     GROUP_NOT_FOUND(20014, "group not found"),
-    EXECUTION_CODE_ISNULL(20015, "execute code is null, nothing will be execute!(执行代码为空,没有任何代码会被执行)"),
+    EXECUTION_CODE_ISNULL(
+            20015, "execute code is null, nothing will be execute!(执行代码为空,没有任何代码会被执行)"),
     JOB_UPDATE_FAILED(20016, "job update failed"),
-    VARIABLE_NULL_EXCEPTION(20017, "variable is null")
+    VARIABLE_NULL_EXCEPTION(20017, "variable is null");
 
-    ;
     private int errCode;
     private String desc;
 
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java
index a292198..4fdb0eb 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/job/EntranceExecutionJob.java
@@ -107,28 +107,41 @@ public class EntranceExecutionJob extends EntranceJob implements LogHandler {
     @Override
     public void init() throws EntranceErrorException {
         List<EntranceErrorException> errList = new ArrayList<>();
-        SubJobInfo[] subJobInfos = Arrays.stream(getCodeParser().parse(getJobRequest().getExecutionCode())).map(code -> {
-            SubJobInfo subJobInfo = new SubJobInfo();
-            // todo don't need whole jobRequest, but need executeUser
-            subJobInfo.setJobReq(getJobRequest());
-            subJobInfo.setStatus(SchedulerEventState.Inited().toString());
-            subJobInfo.setCode(code);
-            // persist and update jobDetail
-            SubJobDetail subJobDetail = createNewJobDetail();
-            subJobInfo.setSubJobDetail(subJobDetail);
-            subJobInfo.setProgress(0.0f);
-            subJobDetail.setExecutionContent(code);
-            subJobDetail.setJobGroupId(getJobRequest().getId());
-            subJobDetail.setStatus(SchedulerEventState.Inited().toString());
-            subJobDetail.setCreatedTime(new Date(System.currentTimeMillis()));
-            subJobDetail.setUpdatedTime(new Date(System.currentTimeMillis()));
-            try {
-                persistenceManager.createPersistenceEngine().persist(subJobInfo);
-            } catch (Exception e1) {
-                errList.add(new EntranceErrorException(EntranceErrorCode.INIT_JOB_ERROR.getErrCode(), "Init subjob error, please submit it again(任务初始化失败,请稍后重试). " + e1.getMessage()));
-            }
-            return subJobInfo;
-        }).toArray(SubJobInfo[]::new);
+        SubJobInfo[] subJobInfos =
+                Arrays.stream(getCodeParser().parse(getJobRequest().getExecutionCode()))
+                        .map(
+                                code -> {
+                                    SubJobInfo subJobInfo = new SubJobInfo();
+                                    // todo don't need whole jobRequest, but need executeUser
+                                    subJobInfo.setJobReq(getJobRequest());
+                                    subJobInfo.setStatus(SchedulerEventState.Inited().toString());
+                                    subJobInfo.setCode(code);
+                                    // persist and update jobDetail
+                                    SubJobDetail subJobDetail = createNewJobDetail();
+                                    subJobInfo.setSubJobDetail(subJobDetail);
+                                    subJobInfo.setProgress(0.0f);
+                                    subJobDetail.setExecutionContent(code);
+                                    subJobDetail.setJobGroupId(getJobRequest().getId());
+                                    subJobDetail.setStatus(SchedulerEventState.Inited().toString());
+                                    subJobDetail.setCreatedTime(
+                                            new Date(System.currentTimeMillis()));
+                                    subJobDetail.setUpdatedTime(
+                                            new Date(System.currentTimeMillis()));
+                                    try {
+                                        persistenceManager
+                                                .createPersistenceEngine()
+                                                .persist(subJobInfo);
+                                    } catch (Exception e1) {
+                                        errList.add(
+                                                new EntranceErrorException(
+                                                        EntranceErrorCode.INIT_JOB_ERROR
+                                                                .getErrCode(),
+                                                        "Init subjob error, please submit it again(任务初始化失败,请稍后重试). "
+                                                                + e1.getMessage()));
+                                    }
+                                    return subJobInfo;
+                                })
+                        .toArray(SubJobInfo[]::new);
         if (errList.size() > 0) {
             logger.error(errList.get(0).getDesc());
             throw errList.get(0);
@@ -187,7 +200,8 @@ public class EntranceExecutionJob extends EntranceJob implements LogHandler {
                 runtimeMapTmp.put(entry.getKey(), entry.getValue().toString());
             }
         }
-        String resultSetPathRoot = GovernanceCommonConf.RESULT_SET_STORE_PATH().getValue(runtimeMapTmp);
+        String resultSetPathRoot =
+                GovernanceCommonConf.RESULT_SET_STORE_PATH().getValue(runtimeMapTmp);
         Map<String, Object> jobMap = new HashMap<String, Object>();
         jobMap.put(RequestTask$.MODULE$.RESULT_SET_STORE_PATH(), resultSetPathRoot);
         runtimeMapOri.put(QueryParams$.MODULE$.JOB_KEY(), jobMap);
@@ -229,10 +243,11 @@ public class EntranceExecutionJob extends EntranceJob implements LogHandler {
             isHead = true;
             isTail = true;
         }
-        BindEngineLabel bindEngineLabel = new BindEngineLabel()
-                .setJobGroupId(getJobRequest().getId().toString())
-                .setIsJobGroupHead(String.valueOf(isHead))
-                .setIsJobGroupEnd(String.valueOf(isTail));
+        BindEngineLabel bindEngineLabel =
+                new BindEngineLabel()
+                        .setJobGroupId(getJobRequest().getId().toString())
+                        .setIsJobGroupHead(String.valueOf(isHead))
+                        .setIsJobGroupEnd(String.valueOf(isTail));
         if (isHead) {
             jobMap.put(GovernanceConstant.RESULTSET_INDEX(), 0);
             setResultSize(0);
@@ -279,16 +294,39 @@ public class EntranceExecutionJob extends EntranceJob implements LogHandler {
             getJobRequest().setMetrics(new HashMap<>());
         }
         Map<String, Object> metricsMap = getJobRequest().getMetrics();
-        String createTime = metricsMap.containsKey(TaskConstant.ENTRANCEJOB_SUBMIT_TIME) ? simpleDateFormat.format(metricsMap.get(TaskConstant.ENTRANCEJOB_SUBMIT_TIME)) : "not created";
-        String scheduleTime = metricsMap.containsKey(TaskConstant.ENTRANCEJOB_SCHEDULE_TIME) ? simpleDateFormat.format(metricsMap.get(TaskConstant.ENTRANCEJOB_SCHEDULE_TIME)) : "not scheduled";
-        String startTime = metricsMap.containsKey(TaskConstant.ENTRANCEJOB_TO_ORCHESTRATOR) ? simpleDateFormat.format(metricsMap.get(TaskConstant.ENTRANCEJOB_TO_ORCHESTRATOR)) : "not submitted to orchestrator";
-        String endTime = metricsMap.containsKey(TaskConstant.ENTRANCEJOB_COMPLETE_TIME) ? simpleDateFormat.format(metricsMap.get(TaskConstant.ENTRANCEJOB_COMPLETE_TIME)) : "on running or not started";
+        String createTime =
+                metricsMap.containsKey(TaskConstant.ENTRANCEJOB_SUBMIT_TIME)
+                        ? simpleDateFormat.format(
+                                metricsMap.get(TaskConstant.ENTRANCEJOB_SUBMIT_TIME))
+                        : "not created";
+        String scheduleTime =
+                metricsMap.containsKey(TaskConstant.ENTRANCEJOB_SCHEDULE_TIME)
+                        ? simpleDateFormat.format(
+                                metricsMap.get(TaskConstant.ENTRANCEJOB_SCHEDULE_TIME))
+                        : "not scheduled";
+        String startTime =
+                metricsMap.containsKey(TaskConstant.ENTRANCEJOB_TO_ORCHESTRATOR)
+                        ? simpleDateFormat.format(
+                                metricsMap.get(TaskConstant.ENTRANCEJOB_TO_ORCHESTRATOR))
+                        : "not submitted to orchestrator";
+        String endTime =
+                metricsMap.containsKey(TaskConstant.ENTRANCEJOB_COMPLETE_TIME)
+                        ? simpleDateFormat.format(
+                                metricsMap.get(TaskConstant.ENTRANCEJOB_COMPLETE_TIME))
+                        : "on running or not started";
         String runTime;
-        if (metricsMap.containsKey(TaskConstant.ENTRANCEJOB_COMPLETE_TIME)){
-            runTime = Utils.msDurationToString((((Date) metricsMap.get(TaskConstant.ENTRANCEJOB_COMPLETE_TIME))).getTime()
-            - (((Date) metricsMap.get(TaskConstant.ENTRANCEJOB_SUBMIT_TIME))).getTime());
-        }else{
-            runTime = "The task did not end normally and the usage time could not be counted.(任务并未正常结束,无法统计使用时间)";
+        if (metricsMap.containsKey(TaskConstant.ENTRANCEJOB_COMPLETE_TIME)) {
+            runTime =
+                    Utils.msDurationToString(
+                            (((Date) metricsMap.get(TaskConstant.ENTRANCEJOB_COMPLETE_TIME)))
+                                            .getTime()
+                                    - (((Date)
+                                                    metricsMap.get(
+                                                            TaskConstant.ENTRANCEJOB_SUBMIT_TIME)))
+                                            .getTime());
+        } else {
+            runTime =
+                    "The task did not end normally and the usage time could not be counted.(任务并未正常结束,无法统计使用时间)";
         }
         String metric =
                 "Task creation time(任务创建时间): "
@@ -316,7 +354,7 @@ public class EntranceExecutionJob extends EntranceJob implements LogHandler {
             // todo  Do a lot of aftercare work when close(close时候要做很多的善后工作)
             if (this.getLogWriter().isDefined()) {
                 IOUtils.closeQuietly(this.getLogWriter().get());
-                //this.setLogWriter(null);
+                // this.setLogWriter(null);
             } else {
                 logger.info("job:" + jobRequest().getId() + "LogWriter is null");
             }
@@ -354,7 +392,7 @@ public class EntranceExecutionJob extends EntranceJob implements LogHandler {
             for (SubJobInfo subJobInfo : subJobInfoArray) {
                 progressInfoList.addAll(subJobInfo.getProgressInfoMap().values());
             }
-            return progressInfoList.toArray(new JobProgressInfo[]{});
+            return progressInfoList.toArray(new JobProgressInfo[] {});
         }
         return super.getProgressInfo();
     }
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java
index 68d4907..f9c9881 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java
@@ -45,7 +45,8 @@ public class QueryPersistenceManager extends PersistenceManager {
     private PersistenceEngine persistenceEngine;
     private ResultSetEngine resultSetEngine;
     private static final Logger logger = LoggerFactory.getLogger(QueryPersistenceManager.class);
-  //  private EntranceWebSocketService entranceWebSocketService; //TODO The latter version, to be removed, webSocket unified walk ListenerBus(后面的版本,要去掉,webSocket统一走ListenerBus)
+    //  private EntranceWebSocketService entranceWebSocketService; //TODO The latter version, to be
+    // removed, webSocket unified walk ListenerBus(后面的版本,要去掉,webSocket统一走ListenerBus)
 
     private CliHeartbeatMonitor cliHeartbeatMonitor;
 
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java
index 4ea0366..9b3ae4e 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java
@@ -200,7 +200,7 @@ public class EntranceRestfulApi implements EntranceRestfulRemote {
         }
         if (job.isDefined()) {
             if (job.get() instanceof EntranceJob) {
-                ((EntranceJob)job.get()).updateNewestAccessByClientTimestamp();
+                ((EntranceJob) job.get()).updateNewestAccessByClientTimestamp();
             }
             message = Message.ok();
             message.setMethod("/api/entrance/" + id + "/status");
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceLabel.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceLabel.java
index c2fddb1..5844cad 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceLabel.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceLabel.java
@@ -94,7 +94,7 @@ public class PersistenceLabel extends GenericLabel {
     @Override
     public boolean equals(Object o) {
         if (this == o) return true;
-        if (o == null || getClass() != o.getClass())  {
+        if (o == null || getClass() != o.getClass()) {
             return false;
         } else if (!super.equals(o)) {
             return false;
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/request/ExternalResourceRequester.java b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/request/ExternalResourceRequester.java
index 2b076ec..3a85b27 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/request/ExternalResourceRequester.java
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/request/ExternalResourceRequester.java
@@ -26,8 +26,13 @@ import org.apache.linkis.resourcemanager.external.domain.ExternalResourceProvide
 import java.util.List;
 
 public interface ExternalResourceRequester {
-    NodeResource requestResourceInfo(ExternalResourceIdentifier identifier, ExternalResourceProvider provider);
-    List<ExternalAppInfo> requestAppInfo(ExternalResourceIdentifier identifier, ExternalResourceProvider provider);
+    NodeResource requestResourceInfo(
+            ExternalResourceIdentifier identifier, ExternalResourceProvider provider);
+
+    List<ExternalAppInfo> requestAppInfo(
+            ExternalResourceIdentifier identifier, ExternalResourceProvider provider);
+
     ResourceType getResourceType();
+
     Boolean reloadExternalResourceAddress(ExternalResourceProvider provider);
 }
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/service/ExternalResourceService.java b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/service/ExternalResourceService.java
index 16619f9..52b2c58 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/service/ExternalResourceService.java
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/service/ExternalResourceService.java
@@ -55,7 +55,5 @@ public interface ExternalResourceService {
             throws RMErrorException;
 
     ExternalResourceProvider chooseProvider(
-            ResourceType resourceType,
-            RMLabelContainer labelContainer)
-            throws RMErrorException;
+            ResourceType resourceType, RMLabelContainer labelContainer) throws RMErrorException;
 }
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/service/impl/ExternalResourceServiceImpl.java b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/service/impl/ExternalResourceServiceImpl.java
index 2a7ca53..023a898 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/service/impl/ExternalResourceServiceImpl.java
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/service/impl/ExternalResourceServiceImpl.java
@@ -17,7 +17,6 @@
 
 package org.apache.linkis.resourcemanager.external.service.impl;
 
-import com.fasterxml.jackson.core.JsonParseException;
 import org.apache.linkis.manager.common.conf.RMConfiguration;
 import org.apache.linkis.manager.common.entity.resource.NodeResource;
 import org.apache.linkis.manager.common.entity.resource.ResourceType;
@@ -43,6 +42,7 @@ import org.springframework.beans.factory.InitializingBean;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.stereotype.Component;
 
+import com.fasterxml.jackson.core.JsonParseException;
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
@@ -118,7 +118,8 @@ public class ExternalResourceServiceImpl implements ExternalResourceService, Ini
                                         externalResourceRequester.requestResourceInfo(
                                                 identifier, provider),
                                 (i) ->
-                                        externalResourceRequester.reloadExternalResourceAddress(provider));
+                                        externalResourceRequester.reloadExternalResourceAddress(
+                                                provider));
         return resource;
     }
 
@@ -149,18 +150,21 @@ public class ExternalResourceServiceImpl implements ExternalResourceService, Ini
                                         externalResourceRequester.requestAppInfo(
                                                 identifier, provider),
                                 (i) ->
-                                        externalResourceRequester.reloadExternalResourceAddress(provider));
+                                        externalResourceRequester.reloadExternalResourceAddress(
+                                                provider));
         return appInfos;
     }
 
-    private Object retry(int retryNum, Function function, Function reloadExternalAddress) throws RMErrorException {
+    private Object retry(int retryNum, Function function, Function reloadExternalAddress)
+            throws RMErrorException {
         int times = 0;
         String errorMsg = "Failed to request external resource";
         while (times < retryNum) {
             try {
                 return function.apply(null);
             } catch (Exception e) {
-                if ((JsonParseException.class.isInstance(e.getCause()) && e.getCause().getMessage().contains("This is standby RM"))
+                if ((JsonParseException.class.isInstance(e.getCause())
+                                && e.getCause().getMessage().contains("This is standby RM"))
                         || ConnectException.class.isInstance(e.getCause())) {
                     if (null != reloadExternalAddress) {
                         try {
@@ -181,7 +185,8 @@ public class ExternalResourceServiceImpl implements ExternalResourceService, Ini
     }
 
     @Override
-    public ExternalResourceProvider chooseProvider(ResourceType resourceType, RMLabelContainer labelContainer) throws RMErrorException {
+    public ExternalResourceProvider chooseProvider(
+            ResourceType resourceType, RMLabelContainer labelContainer) throws RMErrorException {
         Label label = labelContainer.find(ClusterLabel.class);
         ClusterLabel realClusterLabel = null;
         if (label == null) {

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 07/18: 1. linkis-module - refactor USER_TICKET_ID_STRING in cookie 2. linkis-scheduler - add interface getJobListener 3. linkis-storage - refactor method toValue to fix decimal bug 4. linkis-httpclient - refactor method execute to handle parse exception for response

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit 29025eb1f1ba5298b708793e4626756edefa5ad8
Author: alexkun <xu...@qq.com>
AuthorDate: Fri Mar 4 17:57:34 2022 +0800

    1. linkis-module - refactor USER_TICKET_ID_STRING in cookie
    2. linkis-scheduler - add interface getJobListener
    3. linkis-storage - refactor method toValue to fix decimal bug
    4. linkis-httpclient - refactor method execute to handle parse exception for response
---
 .../org/apache/linkis/httpclient/AbstractHttpClient.scala  | 13 ++++++++-----
 .../apache/linkis/server/conf/ServerConfiguration.scala    |  2 ++
 .../main/scala/org/apache/linkis/scheduler/queue/Job.scala |  2 ++
 .../org/apache/linkis/storage/csv/StorageCSVWriter.scala   |  4 ++--
 .../scala/org/apache/linkis/storage/domain/DataType.scala  | 14 ++++++++++++--
 .../apache/linkis/storage/excel/StorageExcelWriter.scala   |  8 ++------
 .../scala/org/apache/linkis/storage/source/FileSplit.scala |  4 ++--
 7 files changed, 30 insertions(+), 17 deletions(-)

diff --git a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/AbstractHttpClient.scala b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/AbstractHttpClient.scala
index 622053c..6b5ffe7 100644
--- a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/AbstractHttpClient.scala
+++ b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/AbstractHttpClient.scala
@@ -19,7 +19,6 @@ package org.apache.linkis.httpclient
 
 import java.net.URI
 import java.util
-
 import org.apache.linkis.common.conf.{CommonVars, Configuration}
 import org.apache.linkis.common.io.{Fs, FsPath}
 import org.apache.linkis.httpclient.authentication.{AbstractAuthenticationStrategy, AuthenticationAction, HttpAuthentication}
@@ -30,7 +29,7 @@ import org.apache.linkis.httpclient.loadbalancer.{AbstractLoadBalancer, DefaultL
 import org.apache.linkis.httpclient.request._
 import org.apache.linkis.httpclient.response._
 import org.apache.commons.io.IOUtils
-import org.apache.commons.lang.StringUtils
+import org.apache.commons.lang3.StringUtils
 import org.apache.http.client.{CookieStore, ResponseHandler}
 import org.apache.http.client.config.RequestConfig
 import org.apache.http.client.entity.{DeflateDecompressingEntity, EntityBuilder, GzipDecompressingEntity, UrlEncodedFormEntity}
@@ -43,7 +42,7 @@ import org.apache.http.impl.client.{BasicCookieStore, CloseableHttpClient, HttpC
 import org.apache.http.message.BasicNameValuePair
 import org.apache.http.util.EntityUtils
 import org.apache.http.{HttpResponse, _}
-import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.common.utils.{Logging, Utils}
 
 import scala.collection.JavaConversions._
 
@@ -108,8 +107,12 @@ abstract class AbstractHttpClient(clientConfig: ClientConfig, clientName: String
       val response = executeRequest(req, Some(waitTime).filter(_ > 0))
       if (response.getStatusLine.getStatusCode == 401) {
         tryLogin(action, getRequestUrl(action), true)
-        logger.info("The user is not logged in, client  retry Login")
-        throw new HttpClientRetryException("The user is not logged in,Client had reTry Login, you can set a retry")
+        logger.info("The user is not logged in, please log in first, you can set a retry")
+        val msg = Utils.tryCatch(EntityUtils.toString(response.getEntity)) {
+          t => warn("failed to parse entity", t)
+          ""
+        }
+        throw new HttpClientRetryException("The user is not logged in, please log in first, you can set a retry, message: " + msg)
       }
       val taken = System.currentTimeMillis - startTime
       attempts.add(taken)
diff --git a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala
index 94fcb34..e816c3c 100644
--- a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala
+++ b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/conf/ServerConfiguration.scala
@@ -99,6 +99,8 @@ object ServerConfiguration extends Logging{
   val BDP_SERVER_WAR_TEMPDIR = CommonVars("wds.linkis.server.war.tempdir", new File(BDP_SERVER_HOME.getValue, "web/webapps").getPath)
   val BDP_SERVER_SERVER_DEFAULT_DIR_ALLOWED = CommonVars("wds.linkis.server.default.dir.allowed", "false")
   val BDP_SERVER_WEB_SESSION_TIMEOUT = CommonVars("wds.linkis.server.web.session.timeout", new TimeType("2h"))
+  //val LINKIS_SERVER_SESSION_TICKETID_KEY = CommonVars("wds.linkis.session.ticket.key", "linkis_user_session_ticket_id_v1")
+  val LINKIS_SERVER_SESSION_TICKETID_KEY = CommonVars("wds.linkis.session.ticket.key", "bdp-user-ticket-id")
 
   val BDP_SERVER_EVENT_QUEUE_SIZE = CommonVars("wds.linkis.server.event.queue.size", 5000)
   val BDP_SERVER_EVENT_CONSUMER_THREAD_SIZE = CommonVars("wds.linkis.server.event.consumer.thread", 10)
diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Job.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Job.scala
index d5c4682..3a506e3 100644
--- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Job.scala
+++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Job.scala
@@ -138,6 +138,8 @@ abstract class Job extends Runnable with SchedulerEvent with Closeable with Logg
 
   def setJobListener(jobListener: JobListener) = this.jobListener = Some(jobListener)
 
+  def getJobListener = jobListener
+
   def setLogListener(logListener: LogListener) = this.logListener = Some(logListener)
 
   def getLogListener = logListener
diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala
index 04cd5f6..5591409 100644
--- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala
+++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala
@@ -21,6 +21,7 @@ import java.io._
 
 import org.apache.linkis.common.io.{MetaData, Record}
 import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.storage.domain.DataType
 import org.apache.linkis.storage.resultset.table.{TableMetaData, TableRecord}
 import org.apache.commons.io.IOUtils
 
@@ -57,9 +58,8 @@ class StorageCSVWriter(val charset: String, val separator: String, val outputStr
 
   @scala.throws[IOException]
   override def addRecord(record: Record): Unit = {
-    val body = record.asInstanceOf[TableRecord].row.map(_.toString) //read时候进行null替换等等
+    val body = record.asInstanceOf[TableRecord].row.map(DataType.valueToString)
     write(body)
-    //IOUtils.write(compact(body).getBytes(charset),outputStream)
   }
 
   override def flush(): Unit = {
diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/DataType.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/DataType.scala
index 3074862..67740ea 100644
--- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/DataType.scala
+++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/DataType.scala
@@ -20,12 +20,13 @@ package org.apache.linkis.storage.domain
 import java.sql.{Date, Timestamp}
 
 import org.apache.linkis.common.utils.{Logging, Utils}
-
+import java.math.{BigDecimal => JavaBigDecimal}
 
 object DataType extends Logging{
 
 
   val NULL_VALUE = "NULL"
+  val LOWCASE_NULL_VALUE = "null"
   //TODO Change to fine-grained regular expressions(改为精细化正则表达式)
   val DECIMAL_REGEX = "^decimal\\(\\d*\\,\\d*\\)".r.unanchored
 
@@ -80,7 +81,7 @@ object DataType extends Logging{
     case LongType | BigIntType => if(isNumberNull(value)) null else value.toLong
     case FloatType => if(isNumberNull(value)) null else value.toFloat
     case DoubleType  => if(isNumberNull(value)) null else value.toDouble
-    case DecimalType => if(isNumberNull(value)) null else BigDecimal(value)
+    case DecimalType => if(isNumberNull(value)) null else new JavaBigDecimal(value)
     case DateType => if(isNumberNull(value)) null else Date.valueOf(value)
     case TimestampType => if(isNumberNull(value)) null else Timestamp.valueOf(value).toString.stripSuffix(".0")
     case BinaryType => if(isNull(value)) null else value.getBytes()
@@ -99,6 +100,15 @@ object DataType extends Logging{
     false
   }
 
+  def valueToString(value: Any): String = {
+    if (null == value) return LOWCASE_NULL_VALUE
+    value match {
+      case javaDecimal: JavaBigDecimal =>
+        javaDecimal.toPlainString
+      case _ => value.toString
+    }
+  }
+
 }
 
 abstract class DataType(val typeName:String,
diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelWriter.scala
index 3f3c97f..4efc0fc 100644
--- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelWriter.scala
+++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/excel/StorageExcelWriter.scala
@@ -70,8 +70,7 @@ class StorageExcelWriter(val charset: String, val sheetName: String, val dateFor
   def createCellStyle(dataType: DataType): CellStyle = {
     val style = workBook.createCellStyle()
     format = workBook.createDataFormat()
-    dataType match {
-      case BigIntType | TinyIntType | ShortIntType | IntType | LongType  => style.setDataFormat(format.getFormat("0"))
+    dataType.toString match {
       case _ => style.setDataFormat(format.getFormat("@"))
     }
     style
@@ -115,10 +114,7 @@ class StorageExcelWriter(val charset: String, val sheetName: String, val dateFor
     for (elem <- excelRecord) {
       val cell = tableBody.createCell(colunmPoint)
       val dataType = types.apply(colunmPoint)
-      dataType match {
-        case BigIntType | TinyIntType | ShortIntType | IntType | LongType => cell.setCellValue(if (elem.toString.equals("NULL")) 0 else elem.toString.toDouble)
-        case _ => cell.setCellValue(elem.toString) //read时候进行null替换等等
-      }
+      cell.setCellValue(DataType.valueToString(elem))
       cell.setCellStyle(getCellStyle(dataType))
       colunmPoint += 1
     }
diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala
index ff0b70b..2ddcd68 100644
--- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala
+++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala
@@ -21,7 +21,7 @@ import java.io.Closeable
 import java.util
 
 import org.apache.linkis.common.io.{FsReader, FsWriter, MetaData, Record}
-import org.apache.linkis.storage.domain.Column
+import org.apache.linkis.storage.domain.{Column, DataType}
 import org.apache.linkis.storage.resultset.table.{TableMetaData, TableRecord}
 import org.apache.linkis.storage.script.{ScriptMetaData, VariableParser}
 import org.apache.linkis.storage.{LineMetaData, LineRecord}
@@ -109,7 +109,7 @@ class FileSplit(var fsReader: FsReader[_ <: MetaData, _ <: Record], var `type`:
 
   def collectRecord(record: Record): Array[String] = {
     record match {
-      case t: TableRecord => t.row.map(_.toString)
+      case t: TableRecord => t.row.map(DataType.valueToString)
       case l: LineRecord => Array(l.getLine)
     }
   }

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 05/18: 1. linkis-engineconn-manager - add core dump save for engineconn

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit a55e8776df3fbbdca63d1718383faf29f9246676
Author: alexkun <xu...@qq.com>
AuthorDate: Fri Mar 4 17:49:10 2022 +0800

    1. linkis-engineconn-manager - add core dump save for engineconn
---
 .../apache/linkis/ecm/core/conf/ECPCoreConf.scala  | 26 ++++++++++++++++++++++
 .../core/launch/ProcessEngineCommandBuilder.scala  |  4 ++++
 .../server/operator/EngineConnLogOperator.scala    | 14 +++++-------
 .../impl/ProcessEngineConnLaunchService.scala      |  6 ++---
 4 files changed, 38 insertions(+), 12 deletions(-)

diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/conf/ECPCoreConf.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/conf/ECPCoreConf.scala
new file mode 100644
index 0000000..ce8020d
--- /dev/null
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/conf/ECPCoreConf.scala
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.ecm.core.conf
+
+import org.apache.linkis.common.conf.CommonVars
+
+object ECPCoreConf {
+
+  val CORE_DUMP_DISABLE = CommonVars("linkis.ec.core.dump.disable", true).getValue
+
+}
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala
index fd7709b..0bb815d 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala
@@ -65,6 +65,10 @@ class UnixProcessEngineCommandBuilder extends ShellProcessEngineCommandBuilder {
 
   newLine("#!/bin/bash")
 
+  if (ECPCoreConf.CORE_DUMP_DISABLE) {
+    newLine("ulimit -c 0")
+  }
+
   private def addErrorCheck(): Unit = {
     newLine("linkis_engineconn_errorcode=$?")
     newLine("if [ $linkis_engineconn_errorcode -ne 0 ]")
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala
index c446abc..60d6c2d 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala
@@ -47,7 +47,7 @@ class EngineConnLogOperator extends Operator with Logging {
     if (lastRows > EngineConnLogOperator.MAX_LOG_FETCH_SIZE.getValue) {
       throw new ECMErrorException(ECMErrorCode.EC_FETCH_LOG_FAILED, s"Cannot fetch more than ${EngineConnLogOperator.MAX_LOG_FETCH_SIZE.getValue} lines of logs.")
     } else if (lastRows > 0) {
-      val logs = Utils.exec(Array("tail", "-f", logPath.getPath), 5000).split("\n")
+      val logs = Utils.exec(Array("tail", "-n", lastRows + "", logPath.getPath), 5000).split("\n")
       return Map("logs" -> logs, "rows" -> logs.length)
     }
     val pageSize = getAs("pageSize", 100)
@@ -103,15 +103,11 @@ class EngineConnLogOperator extends Operator with Logging {
 
   private def includeLine(line: String,
                           onlyKeywordList: Array[String], ignoreKeywordList: Array[String]): Boolean = {
-    if (onlyKeywordList.nonEmpty && onlyKeywordList.exists(line.contains)) {
-      true
-    } else if (ignoreKeywordList.nonEmpty && !ignoreKeywordList.exists(line.contains)) {
-      true
-    } else if (onlyKeywordList.isEmpty && ignoreKeywordList.isEmpty) {
-      true
-    } else {
-      false
+    var accept: Boolean = ignoreKeywordList.isEmpty || !ignoreKeywordList.exists(line.contains)
+    if (accept) {
+      accept = onlyKeywordList.isEmpty || onlyKeywordList.exists(line.contains)
     }
+    accept
   }
   private def getLogPath(implicit parameters: Map[String, Any]): File = {
     if (engineConnListService == null) {
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala
index 4651bb6..b796c18 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala
@@ -101,9 +101,9 @@ abstract class ProcessEngineConnLaunchService extends AbstractEngineConnLaunchSe
     }
     Utils.tryThrow(Utils.waitUntil(() => engineConn.getStatus != Starting, Duration(timeout, TimeUnit.MILLISECONDS))) {
       case e: TimeoutException =>
-        throw new ECMErrorException(ECMErrorConstants.ECM_ERROR, s"wait for $engineConn initial timeout.")
-      case e: InterruptedException =>
-        throw new ECMErrorException(ECMErrorConstants.ECM_ERROR, s"wait for $engineConn initial interrupted.")
+        throw new ECMErrorException(ECMErrorCode.EC_START_TIME_OUT, s"wait for engineConn initial timeout(请求引擎超时,可能是因为队列资源不足导致,请重试) $engineConn .")
+      case e: InterruptedException => //比如被ms cancel
+        throw new ECMErrorException(ECMErrorCode.EC_INTERRUPT_TIME_OUT, s"wait for initial interrupted(请求引擎被中断,可能是因为你操作了引擎取消操作,请重试) $engineConn .")
       case t: Throwable =>
         logger.error(s"unexpected error, now shutdown it.")
         throw t

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 06/18: 1. linkis-manager-common - refactor equals method in PersistenceLabel

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit 294da24269f17bafecd85b25be01435cfc296f21
Author: alexkun <xu...@qq.com>
AuthorDate: Fri Mar 4 17:50:53 2022 +0800

    1. linkis-manager-common - refactor equals method in PersistenceLabel
---
 .../linkis/manager/common/entity/persistence/PersistenceLabel.java | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceLabel.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceLabel.java
index 4f0c1cb..c2fddb1 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceLabel.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceLabel.java
@@ -94,8 +94,11 @@ public class PersistenceLabel extends GenericLabel {
     @Override
     public boolean equals(Object o) {
         if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
-        if (!super.equals(o)) return false;
+        if (o == null || getClass() != o.getClass())  {
+            return false;
+        } else if (!super.equals(o)) {
+            return false;
+        }
 
         PersistenceLabel that = (PersistenceLabel) o;
 

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org


[incubator-linkis] 01/18: 1. linkis-resource-manager - Refactor YarnResourceRequester to support high-available address of resourcemanager 2. linkis-manager-common - Change default yarn name from 'sit' to 'default'

Posted by pe...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.1.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git

commit b9ed5b8a51f68e4a92b64cb5898cfeda7173495c
Author: alexkun <xu...@qq.com>
AuthorDate: Thu Mar 3 20:28:37 2022 +0800

    1. linkis-resource-manager - Refactor YarnResourceRequester to support high-available address of resourcemanager
    2. linkis-manager-common - Change default yarn name from 'sit' to 'default'
---
 .../manager/common/conf/RMConfiguration.scala      |  5 +-
 .../request/ExternalResourceRequester.java         |  9 +--
 .../service/impl/ExternalResourceServiceImpl.java  | 26 ++++++--
 .../external/yarn/YarnResourceRequester.scala      | 78 ++++++++++++++++++++--
 .../service/impl/DefaultResourceManager.scala      |  1 -
 5 files changed, 99 insertions(+), 20 deletions(-)

diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/conf/RMConfiguration.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/conf/RMConfiguration.scala
index 24af6ed..4b90f58 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/conf/RMConfiguration.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/conf/RMConfiguration.scala
@@ -43,7 +43,7 @@ object RMConfiguration {
   val USER_AVAILABLE_YARN_INSTANCE_MEMORY = CommonVars("wds.linkis.rm.yarnqueue.memory.max", new ByteType("450g"))
   val USER_AVAILABLE_YARN_INSTANCE = CommonVars("wds.linkis.rm.yarnqueue.instance.max", 30)
   val USER_AVAILABLE_YARN_QUEUE_NAME = CommonVars("wds.linkis.rm.yarnqueue", "default")
-  val USER_AVAILABLE_CLUSTER_NAME = CommonVars("wds.linkis.rm.cluster", "sit")
+  val USER_AVAILABLE_CLUSTER_NAME = CommonVars("wds.linkis.rm.cluster", "default")
 
   val USER_MODULE_WAIT_USED = CommonVars("wds.linkis.rm.user.module.wait.used", 60 * 10L)
   val USER_MODULE_WAIT_RELEASE = CommonVars("wds.linkis.rm.user.module.wait.used", -1L)
@@ -66,8 +66,9 @@ object RMConfiguration {
   //publics service
   val HIVE_ENGINE_MAINTAIN_TIME_STR = CommonVars("wds.linkis.hive.maintain.time.key", "wds.linkis.hive.maintain.time")
 
-  val DEFAULT_YARN_CLUSTER_NAME = CommonVars("wds.linkis.rm.default.yarn.cluster.name", "sit")
+  val DEFAULT_YARN_CLUSTER_NAME = CommonVars("wds.linkis.rm.default.yarn.cluster.name", "default")
   val DEFAULT_YARN_TYPE = CommonVars("wds.linkis.rm.default.yarn.cluster.type", "Yarn")
   val EXTERNAL_RETRY_NUM = CommonVars("wds.linkis.rm.external.retry.num", 3)
+  val DEFAULT_YARN_RM_WEB_ADDRESS_DELIMITER = CommonVars("wds.linkis.rm.default.yarn.webaddress.delimiter", ";")
 
 }
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/request/ExternalResourceRequester.java b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/request/ExternalResourceRequester.java
index 9d84d1c..2b076ec 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/request/ExternalResourceRequester.java
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/request/ExternalResourceRequester.java
@@ -26,11 +26,8 @@ import org.apache.linkis.resourcemanager.external.domain.ExternalResourceProvide
 import java.util.List;
 
 public interface ExternalResourceRequester {
-    NodeResource requestResourceInfo(
-            ExternalResourceIdentifier identifier, ExternalResourceProvider provider);
-
-    List<ExternalAppInfo> requestAppInfo(
-            ExternalResourceIdentifier identifier, ExternalResourceProvider provider);
-
+    NodeResource requestResourceInfo(ExternalResourceIdentifier identifier, ExternalResourceProvider provider);
+    List<ExternalAppInfo> requestAppInfo(ExternalResourceIdentifier identifier, ExternalResourceProvider provider);
     ResourceType getResourceType();
+    Boolean reloadExternalResourceAddress(ExternalResourceProvider provider);
 }
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/service/impl/ExternalResourceServiceImpl.java b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/service/impl/ExternalResourceServiceImpl.java
index 9ed0ef4..2a7ca53 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/service/impl/ExternalResourceServiceImpl.java
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/org/apache/linkis/resourcemanager/external/service/impl/ExternalResourceServiceImpl.java
@@ -17,6 +17,7 @@
 
 package org.apache.linkis.resourcemanager.external.service.impl;
 
+import com.fasterxml.jackson.core.JsonParseException;
 import org.apache.linkis.manager.common.conf.RMConfiguration;
 import org.apache.linkis.manager.common.entity.resource.NodeResource;
 import org.apache.linkis.manager.common.entity.resource.ResourceType;
@@ -48,6 +49,7 @@ import com.google.common.cache.LoadingCache;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.net.ConnectException;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ExecutionException;
@@ -114,7 +116,9 @@ public class ExternalResourceServiceImpl implements ExternalResourceService, Ini
                                 (Integer) RMConfiguration.EXTERNAL_RETRY_NUM().getValue(),
                                 (i) ->
                                         externalResourceRequester.requestResourceInfo(
-                                                identifier, provider));
+                                                identifier, provider),
+                                (i) ->
+                                        externalResourceRequester.reloadExternalResourceAddress(provider));
         return resource;
     }
 
@@ -143,17 +147,29 @@ public class ExternalResourceServiceImpl implements ExternalResourceService, Ini
                                 (Integer) RMConfiguration.EXTERNAL_RETRY_NUM().getValue(),
                                 (i) ->
                                         externalResourceRequester.requestAppInfo(
-                                                identifier, provider));
+                                                identifier, provider),
+                                (i) ->
+                                        externalResourceRequester.reloadExternalResourceAddress(provider));
         return appInfos;
     }
 
-    private Object retry(int retryNum, Function function) throws RMErrorException {
+    private Object retry(int retryNum, Function function, Function reloadExternalAddress) throws RMErrorException {
         int times = 0;
         String errorMsg = "Failed to request external resource";
         while (times < retryNum) {
             try {
                 return function.apply(null);
             } catch (Exception e) {
+                if ((JsonParseException.class.isInstance(e.getCause()) && e.getCause().getMessage().contains("This is standby RM"))
+                        || ConnectException.class.isInstance(e.getCause())) {
+                    if (null != reloadExternalAddress) {
+                        try {
+                            reloadExternalAddress.apply(null);
+                        } catch (Exception e1) {
+                            logger.error("ReloadExternalAddress failed. {}", e.getMessage(), e);
+                        }
+                    }
+                }
                 errorMsg =
                         "Failed to request external resource"
                                 + ExceptionUtils.getRootCauseMessage(e);
@@ -164,8 +180,8 @@ public class ExternalResourceServiceImpl implements ExternalResourceService, Ini
         throw new RMErrorException(11006, errorMsg);
     }
 
-    private ExternalResourceProvider chooseProvider(
-            ResourceType resourceType, RMLabelContainer labelContainer) throws RMErrorException {
+    @Override
+    public ExternalResourceProvider chooseProvider(ResourceType resourceType, RMLabelContainer labelContainer) throws RMErrorException {
         Label label = labelContainer.find(ClusterLabel.class);
         ClusterLabel realClusterLabel = null;
         if (label == null) {
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/org/apache/linkis/resourcemanager/external/yarn/YarnResourceRequester.scala b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/org/apache/linkis/resourcemanager/external/yarn/YarnResourceRequester.scala
index 4cf7c3c..29489d5 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/org/apache/linkis/resourcemanager/external/yarn/YarnResourceRequester.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/org/apache/linkis/resourcemanager/external/yarn/YarnResourceRequester.scala
@@ -17,6 +17,7 @@
  
 package org.apache.linkis.resourcemanager.external.yarn
 
+import org.apache.commons.lang3.StringUtils
 import org.apache.linkis.common.utils.{Logging, Utils}
 import org.apache.linkis.manager.common.entity.resource.{CommonNodeResource, NodeResource, ResourceType, YarnResource}
 import org.apache.linkis.resourcemanager.external.domain.{ExternalAppInfo, ExternalResourceIdentifier, ExternalResourceProvider}
@@ -25,6 +26,7 @@ import org.apache.http.{HttpHeaders, HttpResponse}
 import org.apache.http.client.methods.HttpGet
 import org.apache.http.impl.client.HttpClients
 import org.apache.http.util.EntityUtils
+import org.apache.linkis.manager.common.conf.RMConfiguration
 import org.apache.linkis.manager.common.exception.{RMErrorException, RMWarnException}
 import org.apache.linkis.resourcemanager.utils.RequestKerberosUrlUtils
 import org.json4s.JValue
@@ -32,26 +34,33 @@ import org.json4s.JsonAST._
 import org.json4s.jackson.JsonMethods.parse
 import sun.misc.BASE64Encoder
 
-import scala.collection.JavaConversions._
+import java.util
+import java.util.concurrent.{ConcurrentHashMap, TimeUnit}
+import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
 
 class YarnResourceRequester extends ExternalResourceRequester with Logging {
 
+  private val HASTATE_ACTIVE = "active"
+
   private var provider: ExternalResourceProvider = _
+  private val rmAddressMap: util.Map[String, String] = new ConcurrentHashMap[String, String]()
+
 
   private def getAuthorizationStr = {
-    val user = this.provider.getConfigMap.getOrDefault("user","").asInstanceOf[String]
-    val pwd = this.provider.getConfigMap.getOrDefault("pwd","").asInstanceOf[String]
+    val user = this.provider.getConfigMap.getOrDefault("user", "").asInstanceOf[String]
+    val pwd = this.provider.getConfigMap.getOrDefault("pwd", "").asInstanceOf[String]
     val authKey = user + ":" + pwd
     val base64Encoder = new BASE64Encoder()
     base64Encoder.encode(authKey.getBytes)
   }
 
   override def requestResourceInfo(identifier: ExternalResourceIdentifier, provider: ExternalResourceProvider): NodeResource = {
-    val rmWebAddress = provider.getConfigMap.get("rmWebAddress").asInstanceOf[String]
+    val rmWebHaAddress = provider.getConfigMap.get("rmWebAddress").asInstanceOf[String]
+    this.provider = provider
+    val rmWebAddress = getAndUpdateActiveRmWebAddress(rmWebHaAddress)
     info(s"rmWebAddress: $rmWebAddress")
     val queueName = identifier.asInstanceOf[YarnResourceIdentifier].getQueueName
-    this.provider = provider
 
     def getYarnResource(jValue: Option[JValue]) = jValue.map(r => new YarnResource((r \ "memory").asInstanceOf[JInt].values.toLong * 1024l * 1024l, (r \ "vCores").asInstanceOf[JInt].values.toInt, 0, queueName))
 
@@ -171,7 +180,10 @@ class YarnResourceRequester extends ExternalResourceRequester with Logging {
   }
 
   override def requestAppInfo(identifier: ExternalResourceIdentifier, provider: ExternalResourceProvider): java.util.List[ExternalAppInfo] = {
-    val rmWebAddress = provider.getConfigMap.get("rmWebAddress").asInstanceOf[String]
+    val rmWebHaAddress = provider.getConfigMap.get("rmWebAddress").asInstanceOf[String]
+
+    val rmWebAddress = getAndUpdateActiveRmWebAddress(rmWebHaAddress)
+
     val queueName = identifier.asInstanceOf[YarnResourceIdentifier].getQueueName
 
     def getYarnResource(jValue: Option[JValue]) = jValue.map(r => new YarnResource((r \ "allocatedMB").asInstanceOf[JInt].values.toLong * 1024l * 1024l, (r \ "allocatedVCores").asInstanceOf[JInt].values.toInt, 0, queueName))
@@ -241,6 +253,60 @@ class YarnResourceRequester extends ExternalResourceRequester with Logging {
     }
     parse(EntityUtils.toString(httpResponse.getEntity()))
   }
+
+  def getAndUpdateActiveRmWebAddress(haAddress: String): String = {
+    // todo check if it will stuck for many requests
+    var activeAddress = rmAddressMap.get(haAddress)
+    if (StringUtils.isBlank(activeAddress)) haAddress.intern().synchronized {
+      if (StringUtils.isBlank(activeAddress)) {
+        if (logger.isDebugEnabled()) {
+          logger.debug(s"Cannot find value of haAddress : ${haAddress} in cacheMap with size ${rmAddressMap.size()}")
+        }
+        if (StringUtils.isNotBlank(haAddress)) {
+          haAddress.split(RMConfiguration.DEFAULT_YARN_RM_WEB_ADDRESS_DELIMITER.getValue).foreach(address => {
+            Utils.tryCatch {
+              val response = getResponseByUrl("info", address)
+              response \ "clusterInfo" \ "haState" match {
+                case state: JString =>
+                  if (HASTATE_ACTIVE.equalsIgnoreCase(state.s)) {
+                    activeAddress = address
+                  } else {
+                    logger.warn(s"Resourcemanager : ${address} haState : ${state.s}")
+                  }
+                case _ =>
+              }
+            } {
+              case e: Exception =>
+                logger.error("Get Yarn resourcemanager info error, " + e.getMessage, e)
+            }
+          })
+        }
+        if (StringUtils.isNotBlank(activeAddress)) {
+          if (logger.isDebugEnabled()) {
+            logger.debug(s"Put (${haAddress}, ${activeAddress}) to cacheMap.")
+          }
+          rmAddressMap.put(haAddress, activeAddress)
+        } else {
+          throw new RMErrorException(11007, s"Get active Yarn resourcemanager from : ${haAddress} exception.(从 ${haAddress} 获取主Yarn resourcemanager异常)")
+        }
+      }
+    }
+    if (logger.isDebugEnabled()) {
+      logger.debug(s"Get active rm address : ${activeAddress} from haAddress : ${haAddress}")
+    }
+    activeAddress
+  }
+
+  override def reloadExternalResourceAddress(provider: ExternalResourceProvider): java.lang.Boolean = {
+    if (null == provider) {
+      rmAddressMap.clear()
+    } else {
+      val rmWebHaAddress = provider.getConfigMap.get("rmWebAddress").asInstanceOf[String]
+      rmAddressMap.remove(rmWebHaAddress)
+      getAndUpdateActiveRmWebAddress(rmWebHaAddress)
+    }
+    true
+  }
 }
 object YarnResourceRequester extends Logging {
 
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/org/apache/linkis/resourcemanager/service/impl/DefaultResourceManager.scala b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/org/apache/linkis/resourcemanager/service/impl/DefaultResourceManager.scala
index 84a121a..fe46db6 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/org/apache/linkis/resourcemanager/service/impl/DefaultResourceManager.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/org/apache/linkis/resourcemanager/service/impl/DefaultResourceManager.scala
@@ -227,7 +227,6 @@ class DefaultResourceManager extends ResourceManager with Logging with Initializ
           }
         } {
           case exception: RMWarnException => return NotEnoughResource(exception.getMessage)
-          case exception: Exception => throw exception
         }
         val usedResource = labelResourceService.getLabelResource(label)
         if (usedResource == null) {

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org