You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@linkis.apache.org by ca...@apache.org on 2023/06/22 07:00:35 UTC
[linkis] branch master updated: Fix the abnormal problem caused by non-directories in the EC material directory (#4693)
This is an automated email from the ASF dual-hosted git repository.
casion pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/linkis.git
The following commit(s) were added to refs/heads/master by this push:
new e772781cd Fix the abnormal problem caused by non-directories in the EC material directory (#4693)
e772781cd is described below
commit e772781cdef1734bcce448cae794a9f2b4a1b318
Author: peacewong <wp...@gmail.com>
AuthorDate: Thu Jun 22 15:00:30 2023 +0800
Fix the exception caused by non-directory entries in the EC material directory (#4693)
* optimize code
* remove try and warn
* add bml prefix
* Fix issue #4692: a non-directory entry in the EC material directory caused an error
* update branches
---
.github/workflows/auto-format-pr.yaml | 2 +-
.github/workflows/codeql-analysis.yml | 4 +-
.github/workflows/integration-test.yml | 6 +-
.github/workflows/publish-docker.yaml | 3 +-
.../linkis/entrance/interceptor/impl/Explain.scala | 65 +++++++++++-----------
.../AbstractEngineConnBmlResourceGenerator.java | 2 +-
.../DefaultEngineConnBmlResourceGenerator.java | 11 ++++
.../conf/linkis-ps-publicservice.properties | 4 ++
.../hive/executor/HiveEngineConnExecutor.scala | 10 ++--
9 files changed, 57 insertions(+), 50 deletions(-)
diff --git a/.github/workflows/auto-format-pr.yaml b/.github/workflows/auto-format-pr.yaml
index 301d91c76..722ef12b0 100644
--- a/.github/workflows/auto-format-pr.yaml
+++ b/.github/workflows/auto-format-pr.yaml
@@ -19,7 +19,7 @@ name: Create Code Format Apply PullRequest
on:
pull_request:
- branches: [dev,dev-*]
+ branches: [master,dev-*]
types: [closed]
jobs:
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index f5be6ff8a..3dac63672 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -18,9 +18,9 @@ name: CodeQL Analysis
on:
pull_request:
- branches: [dev,dev-*]
+ branches: [master,dev-*]
push:
- branches: [dev,dev-*]
+ branches: [master,dev-*]
jobs:
analyze:
diff --git a/.github/workflows/integration-test.yml b/.github/workflows/integration-test.yml
index 1fb0b6605..1487261d9 100644
--- a/.github/workflows/integration-test.yml
+++ b/.github/workflows/integration-test.yml
@@ -19,11 +19,9 @@ name: Integration Test
on:
push:
- branches:
- - dev-1.4.0
+ branches: [master,dev-*]
pull_request:
- branches:
- - dev-1.4.0
+ branches: [master,dev-*]
#concurrency:
# group: test-${{ github.head_ref || github.run_id }}
diff --git a/.github/workflows/publish-docker.yaml b/.github/workflows/publish-docker.yaml
index f9e2a5f7b..34276fc54 100644
--- a/.github/workflows/publish-docker.yaml
+++ b/.github/workflows/publish-docker.yaml
@@ -18,8 +18,7 @@
name: Publish Docker
on:
push:
- branches:
- - dev-1.4.0
+ branches: [master,dev-*]
env:
MAVEN_OPTS: -Dhttp.keepAlive=false -Dmaven.wagon.http.pool=false -Dmaven.wagon.http.retryHandler.class=standard -Dmaven.wagon.http.retryHandler.count=3 -Dmaven.wagon.httpconnectionManager.ttlSeconds=120
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala
index 1914730d5..8436ccc71 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala
@@ -31,6 +31,7 @@ import org.apache.linkis.governance.common.entity.job.JobRequest
import org.apache.commons.lang3.StringUtils
+import java.util.Locale
import java.util.regex.Pattern
import scala.collection.mutable.ArrayBuffer
@@ -56,8 +57,6 @@ object SparkExplain extends Explain {
private val sy = Pattern.compile("sys\\.")
private val scCancelAllJobs = Pattern.compile("sc\\.cancelAllJobs(\\s*)")
private val runtime = Pattern.compile("Runtime\\.getRuntime")
- private val LINE_BREAK = "\n"
- private val LOG: Logger = LoggerFactory.getLogger(getClass)
override def authPass(code: String, error: StringBuilder): Boolean = {
if (EntranceConfiguration.SKIP_AUTH.getHotValue()) {
@@ -100,7 +99,6 @@ object SQLExplain extends Explain {
private val LIMIT: String = "limit"
private val LIMIT_UPPERCASE: String = "LIMIT"
private val IDE_ALLOW_NO_LIMIT = "--set wds.linkis.engine.no.limit.allow=true"
- private val LOG: Logger = LoggerFactory.getLogger(getClass)
override def authPass(code: String, error: StringBuilder): Boolean = {
true
@@ -147,7 +145,6 @@ object SQLExplain extends Explain {
s"You submitted a sql without limit, DSS will add limit 5000 to your sql"
) + "\n"
)
- // 将注释先干掉,然后再进行添加limit
val realCode = cleanComment(trimCode)
fixedCode += (realCode + SQL_APPEND_LIMIT)
} else if (isSelectOverLimit(singleCode) && !isNoLimitAllowed) {
@@ -175,7 +172,6 @@ object SQLExplain extends Explain {
s"You submitted a sql without limit, DSS will add limit 5000 to your sql"
) + "\n"
)
- // 将注释先干掉,然后再进行添加limit
val realCode = cleanComment(trimCode)
fixedCode += (realCode + SQL_APPEND_LIMIT)
} else if (isSelectOverLimit(singleCode) && !isNoLimitAllowed) {
@@ -214,28 +210,23 @@ object SQLExplain extends Explain {
array.toArray
}
- private def addNoLimit(code: String) = code + NO_LIMIT_STRING
-
protected def needNoLimit(code: String): Boolean = code.endsWith(NO_LIMIT_STRING)
def isSelectCmd(code: String): Boolean = {
if (StringUtils.isEmpty(code)) {
return false
}
- // 如果一段sql是 --xxx回车select * from default.users,那么他也是select语句
val realCode = cleanComment(code)
- // 以前,在判断,对于select* from xxx这样的SQL时会出现问题的,但是这种语法hive是支持的
- realCode.trim.split("\\s+")(0).toLowerCase.contains("select")
+ realCode.trim.split("\\s+")(0).toLowerCase().contains("select")
}
- def continueWhenError = false
+ def continueWhenError: Boolean = false
def isSelectCmdNoLimit(cmd: String): Boolean = {
if (StringUtils.isEmpty(cmd)) {
return false
}
val realCode = cmd.trim
- // limit往往就是在sql语句中最后的,所以需要进行最后的判断
val arr = realCode.split("\\s+")
val words = new ArrayBuffer[String]()
arr foreach { w =>
@@ -254,8 +245,9 @@ object SQLExplain extends Explain {
private def cleanComment(sql: String): String = {
val cleanSql = new StringBuilder
sql.trim.split(LINE_BREAK) foreach { singleSql =>
- if (!singleSql.trim().startsWith(COMMENT_FLAG))
+ if (!singleSql.trim().startsWith(COMMENT_FLAG)) {
cleanSql.append(singleSql).append(LINE_BREAK)
+ }
}
cleanSql.toString().trim
}
@@ -266,8 +258,8 @@ object SQLExplain extends Explain {
}
var overLimit: Boolean = false
var code = cmd.trim
- if (code.toLowerCase.contains("limit")) {
- code = code.substring(code.toLowerCase().lastIndexOf("limit")).trim
+ if (code.toLowerCase(Locale.getDefault).contains(LIMIT)) {
+ code = code.substring(code.toLowerCase((Locale.getDefault)).lastIndexOf(LIMIT)).trim
}
val hasLimit = code.toLowerCase().matches("limit\\s+\\d+\\s*;?")
if (hasLimit) {
@@ -292,13 +284,14 @@ object SQLExplain extends Explain {
* String
*/
def repairSelectOverLimit(cmd: String): String = {
- var code = cmd.trim
+ val code = cmd.trim
var preCode = ""
var tailCode = ""
- var limitNum = SQL_DEFAULT_LIMIT.getValue
- if (code.toLowerCase.contains("limit")) {
- preCode = code.substring(0, code.toLowerCase().lastIndexOf("limit")).trim
- tailCode = code.substring(code.toLowerCase().lastIndexOf("limit")).trim
+ val limitNum = SQL_DEFAULT_LIMIT.getValue
+ val lowerCaseCode = code.toLowerCase(Locale.getDefault)
+ if (lowerCaseCode.contains(LIMIT)) {
+ preCode = code.substring(0, lowerCaseCode.lastIndexOf(LIMIT)).trim
+ tailCode = code.substring(lowerCaseCode.lastIndexOf(LIMIT)).trim
}
if (isUpperSelect(cmd)) preCode + " LIMIT " + limitNum else preCode + " limit " + limitNum
}
@@ -356,44 +349,48 @@ object PythonExplain extends Explain {
IMPORT_SYS_MOUDLE
.findAllIn(code)
.nonEmpty || FROM_SYS_IMPORT.findAllIn(code).nonEmpty
- )
+ ) {
throw PythonCodeCheckException(20070, "can not use sys module")
- else if (
+ } else if (
IMPORT_OS_MOUDLE.findAllIn(code).nonEmpty || FROM_OS_IMPORT.findAllIn(code).nonEmpty
- )
+ ) {
throw PythonCodeCheckException(20071, "can not use os module")
- else if (
+ } else if (
IMPORT_PROCESS_MODULE
.findAllIn(code)
.nonEmpty || FROM_MULTIPROCESS_IMPORT.findAllIn(code).nonEmpty
- )
+ ) {
throw PythonCodeCheckException(20072, "can not use process module")
- else if (SC_STOP.findAllIn(code).nonEmpty)
+ } else if (SC_STOP.findAllIn(code).nonEmpty) {
throw PythonCodeCheckException(20073, "You can not stop SparkContext, It's dangerous")
- else if (FROM_NUMPY_IMPORT.findAllIn(code).nonEmpty)
+ } else if (FROM_NUMPY_IMPORT.findAllIn(code).nonEmpty) {
throw PythonCodeCheckException(20074, "Numpy packages cannot be imported in this way")
+ }
}
})
code.split(System.lineSeparator()) foreach { code =>
- if (IMPORT_SYS_MOUDLE.findAllIn(code).nonEmpty || FROM_SYS_IMPORT.findAllIn(code).nonEmpty)
+ if (IMPORT_SYS_MOUDLE.findAllIn(code).nonEmpty || FROM_SYS_IMPORT.findAllIn(code).nonEmpty) {
throw PythonCodeCheckException(20070, "can not use sys module")
- else if (IMPORT_OS_MOUDLE.findAllIn(code).nonEmpty || FROM_OS_IMPORT.findAllIn(code).nonEmpty)
+ } else if (
+ IMPORT_OS_MOUDLE.findAllIn(code).nonEmpty || FROM_OS_IMPORT.findAllIn(code).nonEmpty
+ ) {
throw PythonCodeCheckException(20071, "can not use os moudle")
- else if (
+ } else if (
IMPORT_PROCESS_MODULE.findAllIn(code).nonEmpty || FROM_MULTIPROCESS_IMPORT
.findAllIn(code)
.nonEmpty
- )
+ ) {
throw PythonCodeCheckException(20072, "can not use process module")
- else if (
+ } else if (
IMPORT_SUBPORCESS_MODULE.findAllIn(code).nonEmpty || FROM_SUBPROCESS_IMPORT
.findAllIn(code)
.nonEmpty
- )
+ ) {
throw PythonCodeCheckException(20072, "can not use subprocess module")
- else if (SC_STOP.findAllIn(code).nonEmpty)
+ } else if (SC_STOP.findAllIn(code).nonEmpty) {
throw PythonCodeCheckException(20073, "You can not stop SparkContext, It's dangerous")
+ }
}
true
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.java
index ae9f01ff6..2c5d74320 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.java
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.java
@@ -127,7 +127,7 @@ public abstract class AbstractEngineConnBmlResourceGenerator
@Override
public String[] getEngineConnTypeListFromDisk() {
return Arrays.stream(new File(getEngineConnsHome()).listFiles())
- .filter(file -> !file.isHidden())
+ .filter(file -> !file.isHidden() && file.isDirectory())
.map(file -> file.getName())
.toArray(String[]::new);
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/DefaultEngineConnBmlResourceGenerator.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/DefaultEngineConnBmlResourceGenerator.java
index f044536d5..c8ebc5063 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/DefaultEngineConnBmlResourceGenerator.java
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/engineplugin/server/localize/DefaultEngineConnBmlResourceGenerator.java
@@ -19,6 +19,7 @@ package org.apache.linkis.engineplugin.server.localize;
import org.apache.linkis.common.utils.ZipUtils;
import org.apache.linkis.manager.engineplugin.common.exception.EngineConnPluginErrorException;
+import org.apache.linkis.manager.engineplugin.errorcode.EngineconnCoreErrorCodeSummary;
import java.io.File;
import java.text.MessageFormat;
@@ -46,6 +47,10 @@ public class DefaultEngineConnBmlResourceGenerator extends AbstractEngineConnBml
File versionFile = new File(path);
logger.info("generate, versionFile:" + path);
+ if (!versionFile.isDirectory()) {
+ logger.warn("File is not dir {},skip to upload", path);
+ continue;
+ }
String key = versionFile.getName();
try {
@@ -69,6 +74,12 @@ public class DefaultEngineConnBmlResourceGenerator extends AbstractEngineConnBml
private EngineConnLocalizeResource[] generateDir(String path) {
File distFile = new File(path);
+ if (!distFile.isDirectory()) {
+ logger.warn("File is not dir {},skip to upload", path);
+ throw new EngineConnPluginErrorException(
+ EngineconnCoreErrorCodeSummary.DIST_IRREGULAR_EXIST.getErrorCode(),
+ path + " is not dir, to delete this file then retry");
+ }
logger.info("generateDir, distFile:" + path);
File[] validFiles =
distFile.listFiles(
diff --git a/linkis-dist/package/conf/linkis-ps-publicservice.properties b/linkis-dist/package/conf/linkis-ps-publicservice.properties
index 76a09110f..63546ac56 100644
--- a/linkis-dist/package/conf/linkis-ps-publicservice.properties
+++ b/linkis-dist/package/conf/linkis-ps-publicservice.properties
@@ -41,6 +41,9 @@ wds.linkis.cs.deserialize.replace_package_header.enable=false
wds.linkis.rpc.conf.enable.local.message=true
wds.linkis.rpc.conf.local.app.list=linkis-ps-publicservice
+# bml storage path prefix
+#wds.linkis.bml.hdfs.prefix=/apps-data
+
##Spring
spring.server.port=9105
spring.spring.main.allow-bean-definition-overriding=true
@@ -50,3 +53,4 @@ spring.eureka.instance.metadata-map.route=cs_1_dev
## you may set service version if you want to distinguish different configuration version
spring.eureka.instance.metadata-map.linkis.conf.version=v1
+
diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala
index ff64958a1..bf4eff0cb 100644
--- a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala
+++ b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala
@@ -644,12 +644,10 @@ class HiveDriverProxy(driver: Any) extends Logging {
}
def getResults(res: util.List[_]): Boolean = {
- Utils.tryAndWarn {
- driver.getClass
- .getMethod("getResults", classOf[util.List[_]])
- .invoke(driver, res.asInstanceOf[AnyRef])
- .asInstanceOf[Boolean]
- }
+ driver.getClass
+ .getMethod("getResults", classOf[util.List[_]])
+ .invoke(driver, res.asInstanceOf[AnyRef])
+ .asInstanceOf[Boolean]
}
def close(): Unit = {
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org