Posted to commits@linkis.apache.org by ca...@apache.org on 2022/10/14 09:16:07 UTC

[incubator-linkis] branch dev-1.3.1-errorcode updated: [ISSUE-3402][spark]module errorcode optimization and documentation (#3585)

This is an automated email from the ASF dual-hosted git repository.

casion pushed a commit to branch dev-1.3.1-errorcode
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git


The following commit(s) were added to refs/heads/dev-1.3.1-errorcode by this push:
     new 29ba7f3d9 [ISSUE-3402][spark]module errorcode optimization and documentation (#3585)
29ba7f3d9 is described below

commit 29ba7f3d9964a6fc1250c1e84b292c9d2bf7f400
Author: 成彬彬 <10...@users.noreply.github.com>
AuthorDate: Fri Oct 14 17:16:00 2022 +0800

    [ISSUE-3402][spark]module errorcode optimization and documentation (#3585)
---
 docs/errorcode/spark-errorcode.md                  |  19 ++++
 .../spark/errorcode/SparkErrorCodeSummary.java     | 107 +++++++++++++++++++++
 .../engineplugin/spark/cs/CSTableParser.scala      |   6 +-
 .../spark/exception/NoSupportEngineException.scala |   7 +-
 .../engineplugin/spark/executor/SQLSession.scala   |  13 ++-
 .../spark/executor/SparkPythonExecutor.scala       |   7 +-
 .../spark/executor/SparkScalaExecutor.scala        |  15 ++-
 .../spark/factory/SparkEngineConnFactory.scala     |   9 +-
 .../spark/factory/SparkPythonExecutorFactory.scala |   5 +-
 .../spark/factory/SparkSqlExecutorFactory.scala    |   5 +-
 .../spark/mdq/MDQPreExecutionHook.scala            |  10 +-
 11 files changed, 179 insertions(+), 24 deletions(-)
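
Every file below applies the same pattern: a hard-coded error code and its inline message are replaced by a constant from the new SparkErrorCodeSummary enum. A minimal before/after sketch of that pattern, condensed from the SQLSession.scala hunk below:

    import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary._

    // before: magic number and duplicated message text at the call site
    throw new SparkEngineException(40002, "dataFrame to local exception", t)

    // after: code and message both come from the shared enum
    throw new SparkEngineException(
      DATAFRAME_EXCEPTION.getErrorCode,
      DATAFRAME_EXCEPTION.getErrorDesc,
      t
    )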

diff --git a/docs/errorcode/spark-errorcode.md b/docs/errorcode/spark-errorcode.md
new file mode 100644
index 000000000..de1929113
--- /dev/null
+++ b/docs/errorcode/spark-errorcode.md
@@ -0,0 +1,19 @@
+## spark errorcode
+
+| module name(模块名) | error code(错误码) | description(描述) | enumeration name(枚举) | Exception Class(类名) |
+| -------- | -------- | ----- |-----|-----|
+|spark |40001|read record exception(读取记录异常)|READ_RECORD_EXCEPTION|SparkErrorCodeSummary|
+|spark |40002|dataFrame to local exception(dataFrame 到本地异常)|DATAFRAME_EXCEPTION|SparkErrorCodeSummary|
+|spark |40003| |OUT_ID|SparkErrorCodeSummary|
+|spark |40004|Spark application has already stopped, please restart it(Spark 应用程序已停止,请重新启动)|SPARK_STOPPED|SparkErrorCodeSummary|
+|spark |40005|execute sparkScala failed!(执行 sparkScala 失败!)|EXECUTE_SPARKSCALA_FAILED|SparkErrorCodeSummary|
+|spark |40006|sparkILoop is null(sparkILoop 为空)|SPARK_IS_NULL|SparkErrorCodeSummary|
+|spark |40007|The csTable that name is {} not found in cs(在 cs 中找不到名称为 {} 的 csTable) |CSTABLE_NOT_FOUND|SparkErrorCodeSummary|
+|spark |40008|Pyspark process has stopped, query failed!(Pyspark 进程已停止,查询失败!)|PYSPARK_STOPPED|SparkErrorCodeSummary|
+|spark |40009|sparkSession can not be null(sparkSession 不能为空)|CAN_NOT_NULL|SparkErrorCodeSummary|
+|spark |40010|The request to the MDQ service to parse into executable SQL failed(向MDQ服务请求解析为可以执行的sql时失败)|REQUEST_MDQ_FAILED|SparkErrorCodeSummary|
+|spark |80002|spark repl classdir create exception(spark repl classdir 创建异常)|SPARK_CREATE_EXCEPTION|SparkErrorCodeSummary|
+|spark |420001|Invalid EngineConn engine session obj, failed to create sparkSql executor(EngineConn 引擎会话 obj 无效,无法创建 sparkSql 执行程序)|INVALID_CREATE_SPARKSQL|SparkErrorCodeSummary|
+|spark |420002|Invalid EngineConn engine session obj, failed to create sparkPython executor(EngineConn 引擎会话 obj 无效,无法创建 sparkPython 执行程序)|INVALID_CREATE_SPARKPYTHON|SparkErrorCodeSummary|
+
+
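
Since the enum exists precisely so that each failure has one unique code, a small sanity check along these lines (an illustrative test sketch, not part of this commit) can guard that invariant:

    import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary

    object ErrorCodeInvariant {
      def main(args: Array[String]): Unit = {
        // Fail if two enum constants ever share an error code.
        val codes = SparkErrorCodeSummary.values().map(_.getErrorCode)
        assert(codes.distinct.length == codes.length, "duplicate spark error codes")
      }
    }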
diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/errorcode/SparkErrorCodeSummary.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/errorcode/SparkErrorCodeSummary.java
new file mode 100644
index 000000000..3d9f7b92d
--- /dev/null
+++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/errorcode/SparkErrorCodeSummary.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.engineplugin.spark.errorcode;
+
+public enum SparkErrorCodeSummary {
+  READ_RECORD_EXCEPTION(40001, "read record exception(读取记录异常)", "read record exception(读取记录异常)"),
+  DATAFRAME_EXCEPTION(
+      40002,
+      "dataFrame to local exception(dataFrame 到本地异常)",
+      "dataFrame to local exception(dataFrame 到本地异常)"),
+  OUT_ID(40003, "", ""),
+
+  SPARK_STOPPED(
+      40004,
+      "Spark application has already stopped, please restart it(Spark 应用程序已停止,请重新启动)",
+      "Spark application has already stopped, please restart it(Spark 应用程序已停止,请重新启动)"),
+  EXECUTE_SPARKSCALA_FAILED(
+      40005,
+      "execute sparkScala failed!(执行 sparkScala 失败!)",
+      "execute sparkScala failed!(执行 sparkScala 失败!)"),
+  SPARK_IS_NULL(40006, "sparkILoop is null(sparkILoop 为空)", "sparkILoop is null(sparkILoop 为空)"),
+
+  CSTABLE_NOT_FOUND(
+      40007,
+      "The csTable that name is {} not found in cs(在 cs 中找不到名称为 {} 的 csTable)",
+      "The csTable that name is {} not found in cs(在 cs 中找不到名称为 {} 的 csTable)"),
+  PYSPARK_STOPPED(
+      40008,
+      "Pyspark process has stopped, query failed!(Pyspark 进程已停止,查询失败!)",
+      "Pyspark process has stopped, query failed!(Pyspark 进程已停止,查询失败!)"),
+  CAN_NOT_NULL(
+      40009,
+      "sparkSession can not be null(sparkSession 不能为空)",
+      "sparkSession can not be null(sparkSession 不能为空)"),
+  REQUEST_MDQ_FAILED(
+      40010,
+      "The request to the MDQ service to parse into executable SQL failed(向MDQ服务请求解析为可以执行的sql时失败)",
+      "The request to the MDQ service to parse into executable SQL failed(向MDQ服务请求解析为可以执行的sql时失败)"),
+  SPARK_CREATE_EXCEPTION(
+      80002,
+      "spark repl classdir create exception(spark repl classdir 创建异常)",
+      "spark repl classdir create exception(spark repl classdir 创建异常)"),
+  INVALID_CREATE_SPARKSQL(
+      420001,
+      "Invalid EngineConn engine session obj, failed to create sparkSql executor(EngineConn 引擎会话 obj 无效,无法创建 sparkSql 执行程序)",
+      "Invalid EngineConn engine session obj, failed to create sparkSql executor(EngineConn 引擎会话 obj 无效,无法创建 sparkSql 执行程序)"),
+  INVALID_CREATE_SPARKPYTHON(
+      420002,
+      "Invalid EngineConn engine session obj, failed to create sparkPython executor(EngineConn 引擎会话 obj 无效,无法创建 sparkPython 执行程序)",
+      "Invalid EngineConn engine session obj, failed to create sparkPython executor(EngineConn 引擎会话 obj 无效,无法创建 sparkPython 执行程序)");
+  /** error code(错误码) */
+  private int errorCode;
+  /** error description(错误描述) */
+  private String errorDesc;
+  /** Possible reasons for the error(错误可能出现的原因) */
+  private String comment;
+
+  SparkErrorCodeSummary(int errorCode, String errorDesc, String comment) {
+    this.errorCode = errorCode;
+    this.errorDesc = errorDesc;
+    this.comment = comment;
+  }
+
+  public int getErrorCode() {
+    return errorCode;
+  }
+
+  public void setErrorCode(int errorCode) {
+    this.errorCode = errorCode;
+  }
+
+  public String getErrorDesc() {
+    return errorDesc;
+  }
+
+  public void setErrorDesc(String errorDesc) {
+    this.errorDesc = errorDesc;
+  }
+
+  public String getComment() {
+    return comment;
+  }
+
+  public void setComment(String comment) {
+    this.comment = comment;
+  }
+
+  @Override
+  public String toString() {
+    return "errorCode: " + this.errorCode + ", errorDesc:" + this.errorDesc;
+  }
+}
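
The markdown table in docs/errorcode/spark-errorcode.md mirrors these constants one-to-one, so the two can drift apart. A throwaway sketch (assumed tooling, not part of this commit) that regenerates the table rows from the enum itself:

    import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary

    object PrintErrorCodeTable {
      def main(args: Array[String]): Unit = {
        // One markdown row per enum constant, matching the docs layout.
        SparkErrorCodeSummary.values().foreach { s =>
          println(s"|spark |${s.getErrorCode}|${s.getErrorDesc}|${s.name}|SparkErrorCodeSummary|")
        }
      }
    }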
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/cs/CSTableParser.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/cs/CSTableParser.scala
index a862a45b3..e79260a6a 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/cs/CSTableParser.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/cs/CSTableParser.scala
@@ -22,6 +22,7 @@ import org.apache.linkis.cs.client.service.CSTableService
 import org.apache.linkis.cs.common.entity.metadata.CSTable
 import org.apache.linkis.cs.common.utils.CSCommonUtils
 import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext
+import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary._
 import org.apache.linkis.engineplugin.spark.exception.ExecuteError
 
 import org.apache.commons.lang3.StringUtils
@@ -72,7 +73,10 @@ object CSTableParser extends Logging {
       val table = getCSTable(csTempTable, contextIDValueStr, nodeNameStr)
       if (null == table) {
         // scalastyle:off throwerror
-        throw new ExecuteError(40007, s"The csTable that name is $csTempTable not found in cs")
+        throw new ExecuteError(
+          CSTABLE_NOT_FOUND.getErrorCode,
+          s"The csTable that name is $csTempTable not found in cs"
+        )
       }
       registerTempTable(table)
       parsedTables.append(csTempTable)
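
Note that CSTableParser keeps its interpolated message rather than reusing CSTABLE_NOT_FOUND.getErrorDesc: the enum text still carries a {} placeholder and no formatting is applied to it. If the enum text were reused, a plain substitution would do (a hypothetical sketch, not what this commit does):

    // Hypothetical: fill the {} placeholders in the enum text with the table name.
    val msg = CSTABLE_NOT_FOUND.getErrorDesc.replace("{}", csTempTable)
    throw new ExecuteError(CSTABLE_NOT_FOUND.getErrorCode, msg)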
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/exception/NoSupportEngineException.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/exception/NoSupportEngineException.scala
index e0cf5de97..ecb62db6a 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/exception/NoSupportEngineException.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/exception/NoSupportEngineException.scala
@@ -18,6 +18,7 @@
 package org.apache.linkis.engineplugin.spark.exception
 
 import org.apache.linkis.common.exception.ErrorException
+import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary._
 
 /**
  */
@@ -27,8 +28,10 @@ case class NoSupportEngineException(errCode: Int, desc: String)
 case class NotSupportSparkTypeException(errorCode: Int, desc: String)
     extends ErrorException(errorCode, desc)
 
-case class NotSupportSparkSqlTypeException(desc: String) extends ErrorException(420001, desc)
+case class NotSupportSparkSqlTypeException(desc: String)
+    extends ErrorException(INVALID_CREATE_SPARKSQL.getErrorCode, desc)
 
-case class NotSupportSparkPythonTypeException(desc: String) extends ErrorException(420002, desc)
+case class NotSupportSparkPythonTypeException(desc: String)
+    extends ErrorException(INVALID_CREATE_SPARKPYTHON.getErrorCode, desc)
 
 case class NotSupportSparkScalaTypeException(desc: String) extends ErrorException(420003, desc)
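
NotSupportSparkScalaTypeException is left with a literal 420003: neither the table nor the enum defines a constant for it in this commit. If one were added later (INVALID_CREATE_SPARKSCALA is a hypothetical name), the case class would follow the same pattern as its two siblings:

    // Hypothetical: assumes an INVALID_CREATE_SPARKSCALA(420003, ...) constant existed.
    case class NotSupportSparkScalaTypeException(desc: String)
        extends ErrorException(INVALID_CREATE_SPARKSCALA.getErrorCode, desc)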
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SQLSession.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SQLSession.scala
index 1201665c5..cf29c511f 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SQLSession.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SQLSession.scala
@@ -20,6 +20,7 @@ package org.apache.linkis.engineplugin.spark.executor
 import org.apache.linkis.common.utils.{Logging, Utils}
 import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext
 import org.apache.linkis.engineplugin.spark.config.SparkConfiguration
+import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary._
 import org.apache.linkis.engineplugin.spark.exception.SparkEngineException
 import org.apache.linkis.engineplugin.spark.utils.EngineUtils
 import org.apache.linkis.governance.common.exception.LinkisJobRetryException
@@ -62,7 +63,11 @@ object SQLSession extends Logging {
     //    sc.setJobGroup(jobGroup, "Get IDE-SQL Results.", false)
 
     val iterator = Utils.tryThrow(dataFrame.toLocalIterator) { t =>
-      throw new SparkEngineException(40002, s"dataFrame to local exception", t)
+      throw new SparkEngineException(
+        DATAFRAME_EXCEPTION.getErrorCode,
+        DATAFRAME_EXCEPTION.getErrorDesc,
+        t
+      )
     }
     // var columns: List[Attribute] = null
     // get field names
@@ -122,7 +127,11 @@ object SQLSession extends Logging {
         index += 1
       }
     }) { t =>
-      throw new SparkEngineException(40001, s"read record  exception", t)
+      throw new SparkEngineException(
+        READ_RECORD_EXCEPTION.getErrorCode,
+        READ_RECORD_EXCEPTION.getErrorDesc,
+        t
+      )
     }
     logger.warn(s"Time taken: ${System.currentTimeMillis() - startTime}, Fetched $index row(s).")
     // to register TempTable
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
index c7b72bf5f..c1b1b96cf 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
@@ -27,6 +27,7 @@ import org.apache.linkis.engineplugin.spark.Interpreter.PythonInterpreter._
 import org.apache.linkis.engineplugin.spark.common.{Kind, PySpark}
 import org.apache.linkis.engineplugin.spark.config.SparkConfiguration
 import org.apache.linkis.engineplugin.spark.entity.SparkEngineSession
+import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary._
 import org.apache.linkis.engineplugin.spark.exception.ExecuteError
 import org.apache.linkis.engineplugin.spark.imexport.CsvRelation
 import org.apache.linkis.engineplugin.spark.utils.EngineUtils
@@ -229,7 +230,9 @@ class SparkPythonExecutor(val sparkEngineSession: SparkEngineSession, val id: In
       //      close
       Utils.tryFinally({
         if (promise != null && !promise.isCompleted) {
-          promise.failure(new ExecuteError(40007, "Pyspark process  has stopped, query failed!"))
+          promise.failure(
+            new ExecuteError(PYSPARK_STOPPED.getErrorCode, PYSPARK_STOPPED.getErrorDesc)
+          )
         }
       }) {
         close
@@ -329,7 +332,7 @@ class SparkPythonExecutor(val sparkEngineSession: SparkEngineSession, val id: In
         logger.info("promise is completed and should start another python gateway")
         close
       } else {
-        promise.failure(ExecuteError(40003, out))
+        promise.failure(ExecuteError(OUT_ID.getErrorCode, out))
       }
     }
   }
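
This call site also explains the empty description on OUT_ID (40003) in the table and enum above: the message is the captured Python output itself, passed in as out, so there is no fixed text to store on the constant.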
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkScalaExecutor.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkScalaExecutor.scala
index de2271e54..19ea35a20 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkScalaExecutor.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkScalaExecutor.scala
@@ -24,6 +24,7 @@ import org.apache.linkis.engineconn.core.executor.ExecutorManager
 import org.apache.linkis.engineplugin.spark.common.{Kind, SparkScala}
 import org.apache.linkis.engineplugin.spark.config.SparkConfiguration
 import org.apache.linkis.engineplugin.spark.entity.SparkEngineSession
+import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary._
 import org.apache.linkis.engineplugin.spark.exception.{
   ApplicationAlreadyStoppedException,
   ExecuteError,
@@ -127,7 +128,7 @@ class SparkScalaExecutor(sparkEngineSession: SparkEngineSession, id: Long)
         }
       }
     } else {
-      throw new SparkSessionNullException(40006, "sparkILoop is null")
+      throw new SparkSessionNullException(SPARK_IS_NULL.getErrorCode, SPARK_IS_NULL.getErrorDesc)
     }
     Utils.waitUntil(
       () => sparkILoopInited && sparkILoop.intp != null,
@@ -182,8 +183,8 @@ class SparkScalaExecutor(sparkEngineSession: SparkEngineSession, id: Long)
     if (sparkContext.isStopped) {
       logger.error("Spark application has already stopped, please restart it.")
       throw new ApplicationAlreadyStoppedException(
-        40004,
-        "Spark application has already stopped, please restart it."
+        SPARK_STOPPED.getErrorCode,
+        SPARK_STOPPED.getErrorDesc
       )
     }
     executeCount += 1
@@ -236,7 +237,13 @@ class SparkScalaExecutor(sparkEngineSession: SparkEngineSession, id: Long)
           } else {
             logger.error("No error message is captured, please see the detailed log")
           }
-          ErrorExecuteResponse(errorMsg, ExecuteError(40005, "execute sparkScala failed!"))
+          ErrorExecuteResponse(
+            errorMsg,
+            ExecuteError(
+              EXECUTE_SPARKSCALA_FAILED.getErrorCode,
+              EXECUTE_SPARKSCALA_FAILED.getErrorDesc
+            )
+          )
       }
     }
     // reset the java stdout
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/factory/SparkEngineConnFactory.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/factory/SparkEngineConnFactory.scala
index 9aae45664..32ba4d0cf 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/factory/SparkEngineConnFactory.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/factory/SparkEngineConnFactory.scala
@@ -22,6 +22,7 @@ import org.apache.linkis.common.utils.{Logging, Utils}
 import org.apache.linkis.engineconn.common.creation.EngineCreationContext
 import org.apache.linkis.engineplugin.spark.config.SparkConfiguration
 import org.apache.linkis.engineplugin.spark.entity.SparkEngineSession
+import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary._
 import org.apache.linkis.engineplugin.spark.exception.{
   SparkCreateFileException,
   SparkSessionNullException
@@ -86,7 +87,7 @@ class SparkEngineConnFactory extends MultiExecutorEngineConnFactory with Logging
     )
     val sparkSession = createSparkSession(outputDir, sparkConf)
     if (sparkSession == null) {
-      throw new SparkSessionNullException(40009, "sparkSession can not be null")
+      throw new SparkSessionNullException(CAN_NOT_NULL.getErrorCode, CAN_NOT_NULL.getErrorDesc)
     }
 
     val sc = sparkSession.sparkContext
@@ -174,7 +175,11 @@ class SparkEngineConnFactory extends MultiExecutorEngineConnFactory with Logging
       output
     }(t => {
       logger.warn("create spark repl classdir failed", t)
-      throw new SparkCreateFileException(80002, s"spark repl classdir create exception", t)
+      throw new SparkCreateFileException(
+        SPARK_CREATE_EXCEPTION.getErrorCode,
+        SPARK_CREATE_EXCEPTION.getErrorDesc,
+        t
+      )
       null
     })
   }
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/factory/SparkPythonExecutorFactory.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/factory/SparkPythonExecutorFactory.scala
index 793e556bf..82588f7c1 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/factory/SparkPythonExecutorFactory.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/factory/SparkPythonExecutorFactory.scala
@@ -22,6 +22,7 @@ import org.apache.linkis.engineconn.common.engineconn.EngineConn
 import org.apache.linkis.engineconn.computation.executor.creation.ComputationExecutorFactory
 import org.apache.linkis.engineconn.computation.executor.execute.ComputationExecutor
 import org.apache.linkis.engineplugin.spark.entity.SparkEngineSession
+import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary._
 import org.apache.linkis.engineplugin.spark.exception.NotSupportSparkPythonTypeException
 import org.apache.linkis.engineplugin.spark.executor.SparkPythonExecutor
 import org.apache.linkis.manager.label.entity.Label
@@ -40,9 +41,7 @@ class SparkPythonExecutorFactory extends ComputationExecutorFactory {
       case sparkEngineSession: SparkEngineSession =>
         new SparkPythonExecutor(sparkEngineSession, id)
       case _ =>
-        throw NotSupportSparkPythonTypeException(
-          "Invalid EngineConn engine session obj, failed to create sparkPython executor"
-        )
+        throw NotSupportSparkPythonTypeException(INVALID_CREATE_SPARKPYTHON.getErrorDesc)
     }
   }
 
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/factory/SparkSqlExecutorFactory.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/factory/SparkSqlExecutorFactory.scala
index f75540295..b98bd63cb 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/factory/SparkSqlExecutorFactory.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/factory/SparkSqlExecutorFactory.scala
@@ -22,6 +22,7 @@ import org.apache.linkis.engineconn.common.engineconn.EngineConn
 import org.apache.linkis.engineconn.computation.executor.creation.ComputationExecutorFactory
 import org.apache.linkis.engineconn.computation.executor.execute.ComputationExecutor
 import org.apache.linkis.engineplugin.spark.entity.SparkEngineSession
+import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary._
 import org.apache.linkis.engineplugin.spark.exception.NotSupportSparkSqlTypeException
 import org.apache.linkis.engineplugin.spark.executor.SparkSqlExecutor
 import org.apache.linkis.manager.label.entity.Label
@@ -42,9 +43,7 @@ class SparkSqlExecutorFactory extends ComputationExecutorFactory {
       case sparkEngineSession: SparkEngineSession =>
         new SparkSqlExecutor(sparkEngineSession, id)
       case _ =>
-        throw NotSupportSparkSqlTypeException(
-          "Invalid EngineConn engine session obj, failed to create sparkSql executor"
-        )
+        throw NotSupportSparkSqlTypeException(INVALID_CREATE_SPARKSQL.getErrorDesc)
     }
   }
 
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/mdq/MDQPreExecutionHook.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/mdq/MDQPreExecutionHook.scala
index d27086f76..836f74868 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/mdq/MDQPreExecutionHook.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/mdq/MDQPreExecutionHook.scala
@@ -21,10 +21,10 @@ import org.apache.linkis.common.utils.{Logging, Utils}
 import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext
 import org.apache.linkis.engineplugin.spark.common.SparkKind
 import org.apache.linkis.engineplugin.spark.config.SparkConfiguration
+import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary._
 import org.apache.linkis.engineplugin.spark.exception.MDQErrorException
 import org.apache.linkis.engineplugin.spark.extension.SparkPreExecutionHook
 import org.apache.linkis.manager.label.entity.engine.CodeLanguageLabel
-import org.apache.linkis.manager.label.utils.LabelUtil
 import org.apache.linkis.protocol.mdq.{DDLRequest, DDLResponse}
 import org.apache.linkis.rpc.Sender
 import org.apache.linkis.storage.utils.StorageUtils
@@ -73,16 +73,16 @@ class MDQPreExecutionHook extends SparkPreExecutionHook with Logging {
     } { case e: Exception =>
       logger.error(s"Call MDQ rpc failed, ${e.getMessage}", e)
       throw new MDQErrorException(
-        40010,
-        s"The request to the MDQ service to parse into executable SQL failed(向MDQ服务请求解析为可以执行的sql时失败), ${e.getMessage}"
+        REQUEST_MDQ_FAILED.getErrorCode,
+        REQUEST_MDQ_FAILED.getErrorDesc + s", ${e.getMessage}"
       )
     }
     resp match {
       case DDLResponse(postCode) => postCode
       case _ =>
         throw new MDQErrorException(
-          40010,
-          "The request to the MDQ service failed to resolve into executable SQL(向MDQ服务请求解析为可以执行的sql时失败)"
+          REQUEST_MDQ_FAILED.getErrorCode,
+          REQUEST_MDQ_FAILED.getErrorDesc
         )
     }
   }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org