Posted to commits@linkis.apache.org by ja...@apache.org on 2022/09/24 01:28:16 UTC
[incubator-linkis] branch dev-1.3.1 updated: [WIP] Fix Problems introduced by Scala code format alarm clear (#3480)
This is an automated email from the ASF dual-hosted git repository.
jackxu2011 pushed a commit to branch dev-1.3.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git
The following commit(s) were added to refs/heads/dev-1.3.1 by this push:
new 6ab1c107f [WIP] Fix Problems introduced by Scala code format alarm clear (#3480)
6ab1c107f is described below
commit 6ab1c107fbe0249e2cf58ac77115790bc08423b3
Author: peacewong <wp...@gmail.com>
AuthorDate: Sat Sep 24 09:28:08 2022 +0800
[WIP] Fix Problems introduced by Scala code format alarm clear (#3480)
* Reformat code to fix errors introduced by the formatting pass
---
.../linkis/storage/script/VariableParser.scala | 53 ++++----------
.../apache/linkis/storage/source/FileSplit.scala | 2 +-
.../apache/linkis/ecm/server/util/ECMUtils.scala | 7 +-
.../executor/execute/EngineExecutionContext.scala | 4 ++
.../upstream/access/ECTaskEntranceInfoAccess.scala | 2 +-
.../acessible/executor/log/LogHelper.scala | 1 -
.../entrance/execute/DefaultEntranceExecutor.scala | 7 +-
.../linkis/entrance/execute/EntranceExecutor.scala | 2 +-
.../entrance/execute/EntranceExecutorManager.scala | 15 +---
.../execute/ExecuteRequestInterceptor.scala | 2 +-
.../apache/linkis/entrance/execute/MarkReq.scala | 81 ----------------------
.../EntranceUserParallelOrchestratorPlugin.scala | 2 +-
.../entrance/parser/CommonEntranceParser.scala | 12 ++--
.../entrance/timeout/JobTimeoutManager.scala | 10 +--
.../service/engine/DefaultEngineReuseService.scala | 20 +++---
.../service/impl/DefaultNodeLabelService.scala | 6 +-
.../service/impl/DefaultUserLabelService.scala | 2 +-
.../spark/executor/SparkPythonExecutor.scala | 5 +-
.../io/iteraceptor/IOMethodInterceptor.scala | 28 ++++----
.../apache/linkis/orchestrator/domain/JobReq.scala | 2 +-
.../ecm/LoadBalanceLabelEngineConnManager.scala | 2 +-
.../apache/linkis/filesystem/bml/BMLHelper.scala | 44 ++++++------
.../linkis/filesystem/service/FsService.scala | 8 +--
.../variable/service/VariableServiceImpl.scala | 4 +-
.../linkis/httpclient/dws/DWSHttpClient.scala | 16 ++---
25 files changed, 110 insertions(+), 227 deletions(-)
diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/VariableParser.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/VariableParser.scala
index ca89bd2f8..d96c52df9 100644
--- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/VariableParser.scala
+++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/script/VariableParser.scala
@@ -68,30 +68,32 @@ object VariableParser {
import scala.collection.JavaConverters._
val vars = new util.HashMap[String, String]
val confs = new util.HashMap[String, Object]
- variables.filter(_.sort == null).foreach(f => vars.asScala += f.key -> f.value)
+ variables.filter(_.sort == null).foreach(f => vars.put(f.key, f.value))
variables.filter(_.sort != null).foreach { f =>
f.sort match {
case STARTUP | RUNTIME | SPECIAL =>
if (confs.get(f.sort) == null) {
- confs.asScala += f.sort -> createMap(f)
+ confs.put(f.sort, createMap(f))
} else {
- confs.get(f.sort).asInstanceOf[util.HashMap[String, Object]].asScala += f.key -> f.value
+ confs.get(f.sort).asInstanceOf[util.HashMap[String, Object]].put(f.key, f.value)
}
case _ =>
if (confs.get(f.sortParent) == null) {
- confs.asScala += f.sortParent -> new util.HashMap[String, Object]
+ confs.put(f.sortParent, new util.HashMap[String, Object])
confs
.get(f.sortParent)
.asInstanceOf[util.HashMap[String, Object]]
- .asScala += f.sort -> createMap(f)
+ .put(f.sort, createMap(f))
} else {
val subMap = confs.get(f.sortParent).asInstanceOf[util.HashMap[String, Object]]
- if (subMap.get(f.sort) == null) subMap.asScala += f.sort -> createMap(f)
- else
+ if (subMap.get(f.sort) == null) {
+ subMap.put(f.sort, createMap(f))
+ } else {
subMap
.get(f.sort)
.asInstanceOf[util.HashMap[String, Object]]
- .asScala += f.key -> f.value
+ .put(f.key, f.value)
+ }
}
}
}
@@ -101,39 +103,10 @@ object VariableParser {
params
}
- import scala.collection.JavaConverters._
-
private def createMap(variable: Variable): util.Map[String, Object] = {
-
val map = new util.HashMap[String, Object]
- map.asScala += variable.key -> variable.value
- }.asJava
-
- /* def main(args: Array[String]): Unit = {
- val a = Array(
- Variable("variable", null, "a", "b"),
- Variable("variable", null, "a1", "b1"),
- Variable("configuration", "startup", "e", "f"),
- Variable("configuration", "runtime", "a", "b"),
- Variable("runtime", "env", "g2", "h3"),
- Variable("startup", "hello", "g2", "h3"))
- // val a = Array[Variable]()
- // println(new GsonBuilder().setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ").serializeNulls.create.toJson(getMap(a)))
- // val variables: Array[Variable] = getVariables(getMap(a))
- // val variables: Array[Variable] = getVariables(getMap(a))
- // print(variables)
- /* val writer = ScriptFsWriter.getScriptFsWriter(new FsPath("file:///tmp/hello.py"), "utf-8", new FileOutputStream("E:\\aaa.py"))
-
- writer.addMetaData(new ScriptMetaData(a))
- writer.addRecord(new ScriptRecord("hello"))
- writer.addRecord(new ScriptRecord("hello"))
- writer.addRecord(new ScriptRecord("hello"))
- writer.addRecord(new ScriptRecord("hello"))*/
- val reader = ScriptFsReader.getScriptFsReader(new FsPath("file:///tmp/aaa.py"),"utf-8",new FileInputStream("E:\\aaa.py"))
- reader.getMetaData.asInstanceOf[ScriptMetaData].getMetaData
- val map = getMap(reader.getMetaData.asInstanceOf[ScriptMetaData].getMetaData)
- println(new GsonBuilder().setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ").serializeNulls.create.toJson(map))
- print(reader.getRecord.asInstanceOf[ScriptRecord].getLine)
- } */
+ map.put(variable.key, variable.value)
+ map
+ }
}
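
Background on the VariableParser change above: asScala wraps a java.util.Map in a mutable Scala view, so the old `map.asScala += key -> value` form did write through to the Java map; it was just an indirect way to express a single insertion. A minimal self-contained sketch of the two forms (object and variable names here are illustrative, not from the Linkis codebase):

import java.util

import scala.collection.JavaConverters._

object WrapperVsPut {

  def main(args: Array[String]): Unit = {
    val javaMap = new util.HashMap[String, String]

    // asScala returns a mutable.Map view backed by javaMap, so += does
    // write through to the underlying Java map ...
    javaMap.asScala += "k1" -> "v1"

    // ... but a plain put expresses the same single insertion without
    // allocating the wrapper.
    javaMap.put("k2", "v2")

    println(javaMap) // {k1=v1, k2=v2}
  }

}

The same simplification recurs in the FileSplit, ECMUtils, BMLHelper and FsService hunks below.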
diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala
index 0c7e3e7a1..358206357 100644
--- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala
+++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSplit.scala
@@ -63,7 +63,7 @@ class FileSplit(
}
def addParams(key: String, value: String): Unit = {
- this.params.asScala += key -> value
+ this.params.put(key, value)
}
def `while`[M](m: MetaData => M, r: Record => Unit): M = {
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/util/ECMUtils.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/util/ECMUtils.scala
index f5c936114..7917faf40 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/util/ECMUtils.scala
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/util/ECMUtils.scala
@@ -46,9 +46,10 @@ object ECMUtils {
}
if (!response.isSuccess) throw new ECMErrorException(911115, "failed to downLoad(下载失败)")
val map = new util.HashMap[String, Object]
- map.asScala += "path" -> response.fullFilePath
- map.asScala += "is" -> response.inputStream
- }.asJava
+ map.put("path", response.fullFilePath)
+ map.put("is", response.inputStream)
+ map
+ }
def downLoadBmlResourceToLocal(resource: BmlResource, userName: String, path: String)(implicit
fs: FileSystem
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala
index e54f67597..56d3b1fed 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala
@@ -83,6 +83,10 @@ class EngineExecutionContext(executor: ComputationExecutor, executorUser: String
})
}
+ /**
+ * Note: the resultSetWriter will be closed at the end of this method.
+ * @param resultSetWriter the writer holding the result set to send
+ */
def sendResultSet(resultSetWriter: ResultSetWriter[_ <: MetaData, _ <: Record]): Unit = {
logger.info("Start to send res to entrance")
val fileName = new File(resultSetWriter.toFSPath.getPath).getName
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccess.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccess.scala
index 981f389c1..cfa3c94a6 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccess.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccess.scala
@@ -105,7 +105,7 @@ class ECTaskEntranceInfoAccess extends ConnectionInfoAccess with Logging {
"invalid data-type: " + request.getClass.getCanonicalName
)
}
- JavaConverters.asScalaIteratorConverter(ret.iterator()).asScala.toList
+ ret.iterator().asScala.toList
}
private def getDWCServiceInstance(serviceInstance: SpringCloudServiceInstance): ServiceInstance =
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala
index 41232eee4..3e414baed 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/log/LogHelper.scala
@@ -59,7 +59,6 @@ object LogHelper extends Logging {
}
if (logs != null && logs.size > 0) {
val sb: StringBuilder = new StringBuilder
- import scala.collection.JavaConverters._
logs.asScala map (log => log + "\n") foreach sb.append
logListener.onLogUpdate(TaskLogUpdateEvent(null, sb.toString))
}
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala
index d22a67f70..49fc59ebb 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala
@@ -54,11 +54,8 @@ import org.apache.commons.lang3.exception.ExceptionUtils
import java.util
import java.util.Date
-class DefaultEntranceExecutor(
- id: Long,
- mark: MarkReq,
- entranceExecutorManager: EntranceExecutorManager
-) extends EntranceExecutor(id, mark)
+class DefaultEntranceExecutor(id: Long)
+ extends EntranceExecutor(id)
with SingleTaskOperateSupport
with Logging {
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala
index 2b80d731a..44cb3620c 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala
@@ -34,7 +34,7 @@ import org.apache.commons.io.IOUtils
import org.apache.commons.lang3.StringUtils
import org.apache.commons.lang3.exception.ExceptionUtils
-abstract class EntranceExecutor(val id: Long, val mark: MarkReq) extends Executor with Logging {
+abstract class EntranceExecutor(val id: Long) extends Executor with Logging {
private implicit var userWithCreator: UserWithCreator = _
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala
index 43dd27572..ed1ff4a45 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala
@@ -46,16 +46,6 @@ abstract class EntranceExecutorManager(groupFactory: GroupFactory)
}
}
- protected def createMarkReq(jobReq: JobRequest): MarkReq = {
- val markReq = new MarkReq
- markReq.setCreateService(EntranceConfiguration.DEFAULT_CREATE_SERVICE.getValue)
- // todo get default config from db
- markReq.setProperties(jobReq.getParams)
- markReq.setUser(jobReq.getExecuteUser)
- markReq.setLabels(LabelUtils.labelsToMap(jobReq.getLabels))
- markReq
- }
-
override def askExecutor(schedulerEvent: SchedulerEvent): Option[Executor] =
schedulerEvent match {
case job: Job =>
@@ -109,11 +99,8 @@ abstract class EntranceExecutorManager(groupFactory: GroupFactory)
case job: EntranceJob =>
job.getJobRequest match {
case jobRequest: JobRequest =>
- // CreateMarkReq
- val markReq = createMarkReq(jobRequest)
- // getMark
val entranceEntranceExecutor =
- new DefaultEntranceExecutor(idGenerator.incrementAndGet(), markReq, this)
+ new DefaultEntranceExecutor(idGenerator.incrementAndGet())
// getEngineConn Executor
job.getLogListener.foreach(
_.onLogUpdate(job, "Your job is being scheduled by orchestrator.")
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/ExecuteRequestInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/ExecuteRequestInterceptor.scala
index 151d319cc..823055729 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/ExecuteRequestInterceptor.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/ExecuteRequestInterceptor.scala
@@ -96,7 +96,7 @@ object RuntimePropertiesExecuteRequestInterceptor extends ExecuteRequestIntercep
override def apply(requestTask: RequestTask, executeRequest: ExecuteRequest): RequestTask =
executeRequest match {
case runtime: RuntimePropertiesExecuteRequest =>
- mapAsScalaMapConverter(runtime.properties).asScala.foreach { case (k, v) =>
+ runtime.properties.asScala.foreach { case (k, v) =>
requestTask.data(k, v)
}
requestTask
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/MarkReq.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/MarkReq.scala
deleted file mode 100644
index d04511f29..000000000
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/MarkReq.scala
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.linkis.entrance.execute
-
-import java.util
-
-import scala.beans.BeanProperty
-import scala.collection.JavaConverters._
-
-class MarkReq {
-
- /**
- * Contains only the StartUp and RunTime parameters
- */
- @BeanProperty
- var properties: util.Map[String, Object] = null
-
- /**
- * The labels required to start the engineConn
- */
- @BeanProperty
- var labels: util.Map[String, AnyRef] = null
-
- /**
- * executeUser
- */
- @BeanProperty
- var user: String = null
-
- /**
- * The service that initiated the start, e.g. linkis-entrance
- */
- @BeanProperty
- var createService: String = null
-
- @BeanProperty
- var description: String = null
-
- override def equals(obj: Any): Boolean = {
- var flag = false
- if (null != obj && obj.isInstanceOf[MarkReq]) {
- val other = obj.asInstanceOf[MarkReq]
-
- if (other.getUser != getUser) {
- return flag
- }
-
- if (other.getLabels != null && getLabels != null) {
- if (getLabels.size() != other.getLabels.size()) {
- return false
- }
- val iterator = other.getLabels.asScala.iterator
- while (iterator.hasNext) {
- val next = iterator.next()
- if (null == next._2 || !next._2.equals(getLabels.get(next._1))) {
- return false
- }
- }
- }
- flag = true
- }
- flag
- }
-
- override def hashCode(): Int = super.hashCode()
-}
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala
index 46f0fd716..f1c7378c3 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala
@@ -56,7 +56,7 @@ class EntranceUserParallelOrchestratorPlugin extends UserParallelOrchestratorPlu
.newBuilder()
.maximumSize(1000)
.expireAfterAccess(1, TimeUnit.HOURS)
- .refreshAfterWrite(EntranceConfiguration.USER_PARALLEL_REFLESH_TIME.getValue, TimeUnit.MINUTES)
+ .expireAfterWrite(EntranceConfiguration.USER_PARALLEL_REFLESH_TIME.getValue, TimeUnit.MINUTES)
.build(new CacheLoader[String, Integer]() {
override def load(key: String): Integer = {
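
For context on the cache change above: refreshAfterWrite keeps serving the stale value and triggers a reload on the first access after the interval, while expireAfterWrite evicts the entry outright so the next get blocks until load recomputes it. A minimal sketch of both builders, assuming a trivial loader (the real loader's body sits outside the quoted hunk):

import java.util.concurrent.TimeUnit

import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache}

object CacheExpiryDemo {

  // expireAfterWrite: the entry is evicted after the duration and the
  // next get() blocks while load() recomputes it.
  val expiring: LoadingCache[String, Integer] = CacheBuilder
    .newBuilder()
    .maximumSize(1000)
    .expireAfterWrite(30, TimeUnit.MINUTES)
    .build(new CacheLoader[String, Integer]() {
      override def load(key: String): Integer = key.length
    })

  // refreshAfterWrite: the stale value keeps being served and a reload
  // is triggered on the first access after the duration.
  val refreshing: LoadingCache[String, Integer] = CacheBuilder
    .newBuilder()
    .maximumSize(1000)
    .refreshAfterWrite(30, TimeUnit.MINUTES)
    .build(new CacheLoader[String, Integer]() {
      override def load(key: String): Integer = key.length
    })

  def main(args: Array[String]): Unit =
    println(expiring.get("linkis")) // 6
}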
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala
index c145f2319..f62cb2b83 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/parser/CommonEntranceParser.scala
@@ -94,7 +94,7 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager)
if (executionContent.containsKey(TaskConstant.CODE)) {
code = executionContent.get(TaskConstant.CODE).asInstanceOf[String]
runType = executionContent.get(TaskConstant.RUNTYPE).asInstanceOf[String]
- if (StringUtils.isEmpty(code)) {
+ if (StringUtils.isBlank(code)) {
throw new EntranceIllegalParamException(20007, "param executionCode can not be empty ")
}
} else {
@@ -125,7 +125,7 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager)
}
private def checkEngineTypeLabel(labels: util.Map[String, Label[_]]): Unit = {
- val engineTypeLabel = labels.asScala.getOrElse(LabelKeyConstant.ENGINE_TYPE_KEY, null)
+ val engineTypeLabel = labels.getOrDefault(LabelKeyConstant.ENGINE_TYPE_KEY, null)
if (null == engineTypeLabel) {
val msg = s"You need to specify engineTypeLabel in labels, such as spark-2.4.3"
throw new EntranceIllegalParamException(
@@ -145,7 +145,7 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager)
runType: String,
labels: util.Map[String, Label[_]]
): Unit = {
- val engineRunTypeLabel = labels.asScala.getOrElse(LabelKeyConstant.CODE_TYPE_KEY, null)
+ val engineRunTypeLabel = labels.getOrDefault(LabelKeyConstant.CODE_TYPE_KEY, null)
if (StringUtils.isBlank(runType) && null == engineRunTypeLabel) {
val msg = s"You need to specify runType in execution content, such as sql"
logger.warn(msg)
@@ -171,8 +171,8 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager)
executeUser: String,
labels: util.Map[String, Label[_]]
): Unit = {
- var userCreatorLabel = labels.asScala
- .getOrElse(LabelKeyConstant.USER_CREATOR_TYPE_KEY, null)
+ var userCreatorLabel = labels
+ .getOrDefault(LabelKeyConstant.USER_CREATOR_TYPE_KEY, null)
.asInstanceOf[UserCreatorLabel]
if (null == userCreatorLabel) {
userCreatorLabel = labelBuilderFactory.createLabel(classOf[UserCreatorLabel])
@@ -210,7 +210,7 @@ class CommonEntranceParser(val persistenceManager: PersistenceManager)
.asInstanceOf[util.Map[String, String]]
val executeApplicationName =
params.get(TaskConstant.EXECUTEAPPLICATIONNAME).asInstanceOf[String]
- if (StringUtils.isEmpty(creator)) {
+ if (StringUtils.isBlank(creator)) {
creator = EntranceConfiguration.DEFAULT_REQUEST_APPLICATION_NAME.getValue
}
// When the execution type is IDE, executioncode and scriptpath cannot be empty at the same time
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala
index af8288d42..aaaf131bd 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala
@@ -43,9 +43,9 @@ class JobTimeoutManager extends Logging {
def add(jobKey: String, job: EntranceJob): Unit = {
logger.info(s"Adding timeout job: ${job.getId()}")
- if (!timeoutJobByName.asScala.contains(jobKey)) {
+ if (!timeoutJobByName.containsKey(jobKey)) {
synchronized {
- if (!timeoutJobByName.asScala.contains(jobKey)) {
+ if (!timeoutJobByName.containsKey(jobKey)) {
timeoutJobByName.put(jobKey, job)
}
}
@@ -65,7 +65,7 @@ class JobTimeoutManager extends Logging {
}
def jobExist(jobKey: String): Boolean = {
- timeoutJobByName.asScala.contains(jobKey)
+ timeoutJobByName.containsKey(jobKey)
}
def jobCompleteDelete(jobkey: String): Unit = {
@@ -154,9 +154,9 @@ object JobTimeoutManager {
// If the timeout label set by the user is invalid, execution is not allowed
def checkTimeoutLabel(labels: util.Map[String, Label[_]]): Unit = {
val jobQueuingTimeoutLabel =
- labels.asScala.getOrElse(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, null)
+ labels.getOrDefault(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, null)
val jobRunningTimeoutLabel =
- labels.asScala.getOrElse(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, null)
+ labels.getOrDefault(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, null)
val posNumPattern = "^[0-9]+$"
if (
(null != jobQueuingTimeoutLabel && !jobQueuingTimeoutLabel.getStringValue.matches(
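
A note on the add method shown earlier in this file: containsKey plus synchronized plus a re-check is the classic check-then-act guard. If timeoutJobByName is a ConcurrentHashMap (its declaration is outside this hunk, so that is an assumption), the same effect is available as one atomic call. A hypothetical sketch:

import java.util.concurrent.ConcurrentHashMap

object PutIfAbsentSketch {

  final case class Job(id: String)

  private val jobs = new ConcurrentHashMap[String, Job]()

  // One atomic call replaces the containsKey / synchronized / re-check
  // sequence: the first registration wins, later ones are ignored.
  def add(jobKey: String, job: Job): Unit =
    if (jobs.putIfAbsent(jobKey, job) != null) {
      println(s"$jobKey already registered; keeping the first entry")
    }

  def main(args: Array[String]): Unit = {
    add("job-1", Job("job-1"))
    add("job-1", Job("duplicate"))
    println(jobs.get("job-1")) // Job(job-1)
  }

}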
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineReuseService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineReuseService.scala
index c02222740..65e41ae1c 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineReuseService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineReuseService.scala
@@ -71,33 +71,35 @@ class DefaultEngineReuseService extends AbstractEngineService with EngineReuseSe
override def reuseEngine(engineReuseRequest: EngineReuseRequest, sender: Sender): EngineNode = {
logger.info(s"Start to reuse Engine for request: $engineReuseRequest")
val labelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory
- var labelList: util.List[Label[_]] = LabelUtils.distinctLabel(
- labelBuilderFactory.getLabels(engineReuseRequest.getLabels),
- userLabelService.getUserLabels(engineReuseRequest.getUser)
- )
+ val labelList = LabelUtils
+ .distinctLabel(
+ labelBuilderFactory.getLabels(engineReuseRequest.getLabels),
+ userLabelService.getUserLabels(engineReuseRequest.getUser)
+ )
+ .asScala
val exclusionInstances: Array[String] =
- labelList.asScala.find(_.isInstanceOf[ReuseExclusionLabel]) match {
+ labelList.find(_.isInstanceOf[ReuseExclusionLabel]) match {
case Some(l) =>
l.asInstanceOf[ReuseExclusionLabel].getInstances
case None =>
Array.empty[String]
}
- labelList = labelList.asScala.filter(_.isInstanceOf[EngineNodeLabel]).asJava
+ var filterLabelList = labelList.filter(_.isInstanceOf[EngineNodeLabel]).asJava
val engineConnAliasLabel = labelBuilderFactory.createLabel(classOf[AliasServiceInstanceLabel])
engineConnAliasLabel.setAlias(GovernanceCommonConf.ENGINE_CONN_SPRING_NAME.getValue)
- labelList.add(engineConnAliasLabel)
+ filterLabelList.add(engineConnAliasLabel)
// label chooser
if (null != engineReuseLabelChoosers) {
engineReuseLabelChoosers.asScala.foreach { chooser =>
- labelList = chooser.chooseLabels(labelList)
+ filterLabelList = chooser.chooseLabels(filterLabelList)
}
}
- val instances = nodeLabelService.getScoredNodeMapsByLabels(labelList)
+ val instances = nodeLabelService.getScoredNodeMapsByLabels(filterLabelList)
if (null != instances && null != exclusionInstances && exclusionInstances.nonEmpty) {
val instancesKeys = instances.asScala.keys.toArray
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelService.scala
index f58e90065..5a3e6d715 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelService.scala
@@ -118,7 +118,7 @@ class DefaultNodeLabelService extends NodeLabelService with Logging {
val willBeAdd = newKeyList.diff(oldKeyList)
val willBeUpdate = oldKeyList.diff(willBeDelete)
val modifiableKeyList = LabelUtils.listAllUserModifiableLabel()
- if (!CollectionUtils.isEmpty(willBeDelete.asJava)) {
+ if (null != willBeDelete && willBeDelete.nonEmpty) {
nodeLabels.asScala.foreach(nodeLabel => {
if (
modifiableKeyList.contains(nodeLabel.getLabelKey) && willBeDelete
@@ -135,7 +135,7 @@ class DefaultNodeLabelService extends NodeLabelService with Logging {
* update step:
* 1.delete relations of old labels 2.add new relation between new labels and instance
*/
- if (!CollectionUtils.isEmpty(willBeUpdate.asJava)) {
+ if (null != willBeUpdate && willBeUpdate.nonEmpty) {
labels.asScala.foreach(label => {
if (
modifiableKeyList.contains(label.getLabelKey) && willBeUpdate
@@ -156,7 +156,7 @@ class DefaultNodeLabelService extends NodeLabelService with Logging {
}
})
}
- if (!CollectionUtils.isEmpty(willBeAdd.asJava)) {
+ if (null != willBeAdd && willBeAdd.nonEmpty) {
labels.asScala
.filter(label => willBeAdd.contains(label.getLabelKey))
.foreach(label => {
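
On the emptiness checks above: a Scala collection answers nonEmpty directly, so converting with asJava just to call CollectionUtils.isEmpty allocated a wrapper for no benefit. A small sketch of the rewritten guard (names are illustrative):

object NullSafeEmptiness {

  def main(args: Array[String]): Unit = {
    val willBeAdd: Seq[String] = Seq("engineType")

    // Scala collections answer emptiness directly; no asJava round trip
    // is needed. (diff on a Seq never yields null, so the null guard is
    // purely defensive.)
    if (willBeAdd != null && willBeAdd.nonEmpty) {
      println(s"persisting ${willBeAdd.size} new label key(s)")
    }
  }

}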
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultUserLabelService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultUserLabelService.scala
index 7190fa0c8..b0cb0524b 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultUserLabelService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultUserLabelService.scala
@@ -65,7 +65,7 @@ class DefaultUserLabelService extends UserLabelService with Logging {
// 4. Find the duplicated key and remove its relation
duplicatedKeyLabel.foreach(l => {
labelManagerPersistence.removeLabelFromUser(user, util.Arrays.asList(l.getId))
- userRelationLabels.asScala.toList.asJava.remove(duplicatedKeyLabel.get)
+ userRelationLabels.remove(duplicatedKeyLabel.get)
})
// 5. Insert the new relation; a duplicateKey exception must be thrown so the transaction rolls back
labelManagerPersistence.addLabelToUser(user, util.Arrays.asList(dbLabel.getId))
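
The DefaultUserLabelService fix above is a genuine bug fix, not just formatting: asScala.toList materializes an immutable copy and asJava wraps that copy, so the remove never touches userRelationLabels (and the wrapped copy even rejects the mutation at runtime when the element is found). A runnable sketch of the pitfall, using plain strings in place of labels:

import java.util

import scala.collection.JavaConverters._

object CopyVsDirectRemoval {

  def main(args: Array[String]): Unit = {
    val labels = new util.ArrayList[String](util.Arrays.asList("a", "b"))

    // toList materializes an immutable Scala copy and asJava wraps that
    // copy, so the original list can never be modified this way.
    try labels.asScala.toList.asJava.remove("b")
    catch { case _: UnsupportedOperationException => println("remove on the wrapped copy throws") }
    println(labels) // [a, b] -- unchanged

    // Removing directly from the original list is what was intended.
    labels.remove("b")
    println(labels) // [a]
  }

}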
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
index 1b421ad3c..c7b72bf5f 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
@@ -37,6 +37,7 @@ import org.apache.linkis.storage.resultset.ResultSetWriter
import org.apache.commons.exec.CommandLine
import org.apache.commons.io.IOUtils
import org.apache.commons.lang3.{RandomStringUtils, StringUtils}
+import org.apache.spark.SparkConf
import org.apache.spark.api.java.JavaSparkContext
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.execution.datasources.csv.UDF
@@ -89,9 +90,9 @@ class SparkPythonExecutor(val sparkEngineSession: SparkEngineSession, val id: In
}
}
- def getSparkConf: Unit = sc.getConf
+ def getSparkConf: SparkConf = sc.getConf
- def getJavaSparkContext: Unit = new JavaSparkContext(sc)
+ def getJavaSparkContext: JavaSparkContext = new JavaSparkContext(sc)
def getSparkSession: Object = if (sparkSession != null) sparkSession
else () => throw new IllegalAccessException("not supported keyword spark in spark1.x versions")
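
The two getter fixes above correct a common Scala pitfall, also fixed for JobReq.getDefaultPriority further down: an explicit `: Unit` return type makes the compiler discard the body's value, so callers silently receive () instead of the SparkConf or JavaSparkContext they expect. A dependency-free sketch (the Conf class is a stand-in):

object ReturnTypeDemo {

  final class Conf { override def toString: String = "conf" }

  private val underlying = new Conf

  // Declared Unit: the body is evaluated, its value discarded, and the
  // caller receives () -- this compiles without warning by default.
  def getConfBroken: Unit = underlying

  // Declared with the real type, the value reaches the caller.
  def getConf: Conf = underlying

  def main(args: Array[String]): Unit = {
    println(getConfBroken) // ()
    println(getConf)       // conf
  }

}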
diff --git a/linkis-extensions/linkis-io-file-client/src/main/scala/org/apache/linkis/storage/io/iteraceptor/IOMethodInterceptor.scala b/linkis-extensions/linkis-io-file-client/src/main/scala/org/apache/linkis/storage/io/iteraceptor/IOMethodInterceptor.scala
index b081aded9..96146bb3c 100644
--- a/linkis-extensions/linkis-io-file-client/src/main/scala/org/apache/linkis/storage/io/iteraceptor/IOMethodInterceptor.scala
+++ b/linkis-extensions/linkis-io-file-client/src/main/scala/org/apache/linkis/storage/io/iteraceptor/IOMethodInterceptor.scala
@@ -36,7 +36,6 @@ import org.springframework.cglib.proxy.{MethodInterceptor, MethodProxy}
import java.io.{InputStream, IOException, OutputStream}
import java.lang.reflect.Method
import java.net.InetAddress
-import java.util
import scala.beans.BeanProperty
import scala.collection.JavaConverters._
@@ -48,7 +47,7 @@ class IOMethodInterceptor(fsType: String) extends MethodInterceptor with Logging
@BeanProperty var ioClient: IOClient = _
- private val properties: java.util.Map[String, String] = new util.HashMap[String, String]
+ private val properties: mutable.HashMap[String, String] = mutable.HashMap[String, String]()
private var inited = false
@@ -69,7 +68,7 @@ class IOMethodInterceptor(fsType: String) extends MethodInterceptor with Logging
label.setJobGroupId(IOClientUtils.generateJobGrupID())
}
- def getProxyUser: String = StorageConfiguration.PROXY_USER.getValue(properties)
+ def getProxyUser: String = StorageConfiguration.PROXY_USER.getValue(properties.asJava)
def getCreatorUser: String = StorageUtils.getJvmUser
@@ -103,7 +102,7 @@ class IOMethodInterceptor(fsType: String) extends MethodInterceptor with Logging
}
def initFS(methodName: String = "init"): Unit = {
- if (!properties.asScala.contains(StorageConfiguration.PROXY_USER.key)) {
+ if (!properties.contains(StorageConfiguration.PROXY_USER.key)) {
throw new StorageErrorException(
52002,
"no user set, we cannot get the permission information."
@@ -120,7 +119,7 @@ class IOMethodInterceptor(fsType: String) extends MethodInterceptor with Logging
getProxyUser,
getLocalIP,
methodName,
- Array(properties.asScala.toMap)
+ Array(properties.toMap)
),
bindEngineLabel
)
@@ -167,7 +166,7 @@ class IOMethodInterceptor(fsType: String) extends MethodInterceptor with Logging
case "init" =>
case "storageName" => return fsType
case "setUser" =>
- properties.asScala += StorageConfiguration.PROXY_USER.key -> args(0).asInstanceOf[String];
+ properties += StorageConfiguration.PROXY_USER.key -> args(0).asInstanceOf[String];
return Unit
case _ =>
if (inited) {
@@ -180,22 +179,23 @@ class IOMethodInterceptor(fsType: String) extends MethodInterceptor with Logging
method.getName match {
case "init" =>
val user =
- if (properties.asScala.contains(StorageConfiguration.PROXY_USER.key)) {
- StorageConfiguration.PROXY_USER.getValue(properties.asScala.toMap)
- } else null
+ if (properties.contains(StorageConfiguration.PROXY_USER.key)) {
+ StorageConfiguration.PROXY_USER.getValue(properties.toMap)
+ } else {
+ null
+ }
if (args.length > 0 && args(0).isInstanceOf[java.util.Map[String, String]]) {
- properties.asScala ++= args(0).asInstanceOf[java.util.Map[String, String]].asScala
+ properties ++= args(0).asInstanceOf[java.util.Map[String, String]].asScala
}
- if (StringUtils.isNotEmpty(user)) {
- properties.asScala += StorageConfiguration.PROXY_USER.key -> user
+ if (StringUtils.isNoneBlank(user)) {
+ properties += StorageConfiguration.PROXY_USER.key -> user
}
initFS()
logger.warn(s"For user($user)inited a $fsType storage($id) .")
Unit
case "fsName" => fsType
case "setUser" =>
- properties.asScala += StorageConfiguration.PROXY_USER.key -> args(0).asInstanceOf[String];
- Unit
+ properties += StorageConfiguration.PROXY_USER.key -> args(0).asInstanceOf[String]; Unit
case "read" =>
if (!inited) throw new IllegalAccessException("storage has not been inited.")
new IOInputStream(args)
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/domain/JobReq.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/domain/JobReq.scala
index be24baa35..a2dbaa54f 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/domain/JobReq.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/domain/JobReq.scala
@@ -94,5 +94,5 @@ object AbstractJobReq {
}
object JobReq {
- def getDefaultPriority: Unit = 0
+ def getDefaultPriority: Int = 0
}
diff --git a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/org/apache/linkis/orchestrator/ecm/LoadBalanceLabelEngineConnManager.scala b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/org/apache/linkis/orchestrator/ecm/LoadBalanceLabelEngineConnManager.scala
index 50069b813..c19692ed6 100644
--- a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/org/apache/linkis/orchestrator/ecm/LoadBalanceLabelEngineConnManager.scala
+++ b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/org/apache/linkis/orchestrator/ecm/LoadBalanceLabelEngineConnManager.scala
@@ -233,7 +233,7 @@ class LoadBalanceLabelEngineConnManager extends ComputationEngineConnManager wit
getMarkCache()
.values()
.asScala
- .foreach(_.asScala.foreach(s => instances.asJava.add(s.getInstance)))
+ .foreach(_.asScala.foreach(s => instances.append(s.getInstance)))
instances.toArray
}
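
On the LoadBalanceLabelEngineConnManager change above: instances.asJava.add(...) did work, because asJava on a Scala buffer returns a write-through wrapper, but it allocated a wrapper per element just to append. A minimal sketch, assuming instances is an ArrayBuffer (its declaration sits outside this hunk):

import scala.collection.mutable.ArrayBuffer

import scala.collection.JavaConverters._

object AppendVsAsJavaAdd {

  def main(args: Array[String]): Unit = {
    val instances = ArrayBuffer[String]()

    // asJava wraps the buffer, so add() does write through -- but it
    // allocates a wrapper on every call just to perform an append.
    instances.asJava.add("ecm-1:9102")

    // Appending to the buffer directly says the same thing plainly.
    instances.append("ecm-2:9102")

    println(instances) // ArrayBuffer(ecm-1:9102, ecm-2:9102)
  }

}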
diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/bml/BMLHelper.scala b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/bml/BMLHelper.scala
index c269c2b35..7c5cd4069 100644
--- a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/bml/BMLHelper.scala
+++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/bml/BMLHelper.scala
@@ -27,8 +27,6 @@ import java.io.{ByteArrayInputStream, InputStream}
import java.util
import java.util.UUID
-import scala.collection.JavaConverters._
-
@Component
class BMLHelper {
@@ -38,9 +36,10 @@ class BMLHelper {
val resource: BmlUploadResponse = client.uploadResource(userName, fileName, inputStream)
if (!resource.isSuccess) throw WorkspaceExceptionManager.createException(80021)
val map = new util.HashMap[String, Object]
- map.asScala += "resourceId" -> resource.resourceId
- map.asScala += "version" -> resource.version
- }.asJava
+ map.put("resourceId", resource.resourceId)
+ map.put("version", resource.version)
+ map
+ }
def upload(
userName: String,
@@ -53,9 +52,10 @@ class BMLHelper {
client.uploadShareResource(userName, projectName, fileName, inputStream)
if (!resource.isSuccess) throw WorkspaceExceptionManager.createException(80021)
val map = new util.HashMap[String, Object]
- map.asScala += "resourceId" -> resource.resourceId
- map.asScala += "version" -> resource.version
- }.asJava
+ map.put("resourceId", resource.resourceId)
+ map.put("version", resource.version)
+ map
+ }
def upload(
userName: String,
@@ -66,9 +66,10 @@ class BMLHelper {
val resource: BmlUploadResponse = client.uploadResource(userName, fileName, inputStream)
if (!resource.isSuccess) throw WorkspaceExceptionManager.createException(80021)
val map = new util.HashMap[String, Object]
- map.asScala += "resourceId" -> resource.resourceId
- map.asScala += "version" -> resource.version
- }.asJava
+ map.put("resourceId", resource.resourceId)
+ map.put("version", resource.version)
+ map
+ }
def update(
userName: String,
@@ -80,9 +81,10 @@ class BMLHelper {
client.updateShareResource(userName, resourceId, "", inputStream)
if (!resource.isSuccess) throw WorkspaceExceptionManager.createException(80022)
val map = new util.HashMap[String, Object]
- map.asScala += "resourceId" -> resource.resourceId
- map.asScala += "version" -> resource.version
- }.asJava
+ map.put("resourceId", resource.resourceId)
+ map.put("version", resource.version)
+ map
+ }
def update(userName: String, resourceId: String, content: String): util.Map[String, Object] = {
val inputStream = new ByteArrayInputStream(content.getBytes("utf-8"))
@@ -95,9 +97,10 @@ class BMLHelper {
)
if (!resource.isSuccess) throw WorkspaceExceptionManager.createException(80022)
val map = new util.HashMap[String, Object]
- map.asScala += "resourceId" -> resource.resourceId
- map.asScala += "version" -> resource.version
- }.asJava
+ map.put("resourceId", resource.resourceId)
+ map.put("version", resource.version)
+ map
+ }
def query(userName: String, resourceId: String, version: String): util.Map[String, Object] = {
val client: BmlClient = createBMLClient(userName)
@@ -106,9 +109,10 @@ class BMLHelper {
else resource = client.downloadShareResource(userName, resourceId, version)
if (!resource.isSuccess) throw WorkspaceExceptionManager.createException(80023)
val map = new util.HashMap[String, Object]
- map.asScala += "path" -> resource.fullFilePath
- map.asScala += "stream" -> resource.inputStream
- }.asJava
+ map.put("path", resource.fullFilePath)
+ map.put("stream", resource.inputStream)
+ map
+ }
private def inputstremToString(inputStream: InputStream): String =
scala.io.Source.fromInputStream(inputStream).mkString
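
Since the BMLHelper hunks above assemble the same two-entry map in five separate methods, a small private helper could remove the duplication. A possible follow-up sketch, not part of this commit; BmlMapHelper and resourceMap are hypothetical names:

import java.util

object BmlMapHelper {

  // Builds the { resourceId, version } payload returned by the upload
  // and update methods above.
  def resourceMap(resourceId: String, version: String): util.Map[String, Object] = {
    val map = new util.HashMap[String, Object]
    map.put("resourceId", resourceId)
    map.put("version", version)
    map
  }

  def main(args: Array[String]): Unit =
    println(resourceMap("res-1", "v000001")) // e.g. {resourceId=res-1, version=v000001}
}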
diff --git a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/service/FsService.scala b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/service/FsService.scala
index 574953ce0..aeaa990e3 100644
--- a/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/service/FsService.scala
+++ b/linkis-public-enhancements/linkis-script-dev/linkis-storage-script-dev-server/src/main/scala/org/apache/linkis/filesystem/service/FsService.scala
@@ -40,7 +40,7 @@ class FsService extends Logging {
if (FsCache.fsInfo.get(user) != null) {
// Skipping extra checks in the outer layer is also fine; the point is to lock the user's fs group.
FsCache.fsInfo.get(user) synchronized {
- if (FsCache.fsInfo.get(user).filter(_.fs.fsName().equals(fsPath.getFsType)).isEmpty) {
+ if (!FsCache.fsInfo.get(user).exists(_.fs.fsName().equals(fsPath.getFsType))) {
FsCache.fsInfo.get(user) += produceFSInfo(user, fsPath)
} else {
FsCache.fsInfo
@@ -52,14 +52,14 @@ class FsService extends Logging {
} else {
FsCache.fsInfo synchronized {
if (FsCache.fsInfo.get(user) == null) {
- FsCache.fsInfo.asScala += user -> ArrayBuffer(produceFSInfo(user, fsPath))
+ FsCache.fsInfo.put(user, ArrayBuffer(produceFSInfo(user, fsPath)))
}
}
// (lines 43-49) Prevent file and hdfs from entering line 37 at the same time, which would make line 51 throw an index-out-of-bounds error
- if (FsCache.fsInfo.get(user).filter(_.fs.fsName().equals(fsPath.getFsType)).isEmpty) {
+ if (!FsCache.fsInfo.get(user).exists(_.fs.fsName().equals(fsPath.getFsType))) {
FsCache.fsInfo.get(user) synchronized {
- if (FsCache.fsInfo.get(user).filter(_.fs.fsName().equals(fsPath.getFsType)).isEmpty) {
+ if (!FsCache.fsInfo.get(user).exists(_.fs.fsName().equals(fsPath.getFsType))) {
FsCache.fsInfo.get(user) += produceFSInfo(user, fsPath)
}
}
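
On the FsService condition above: filter(...).isEmpty builds an intermediate collection only to test membership, while exists short-circuits on the first match. A small illustration (the FsInfo case class is a stand-in for the cached fs entries):

object ExistsVsFilter {

  final case class FsInfo(fsName: String)

  def main(args: Array[String]): Unit = {
    val group = Seq(FsInfo("file"), FsInfo("hdfs"))

    // filter(...).isEmpty builds a whole intermediate collection just
    // to test membership ...
    val missingOld = group.filter(_.fsName == "hdfs").isEmpty

    // ... while exists short-circuits on the first match.
    val missingNew = !group.exists(_.fsName == "hdfs")

    println((missingOld, missingNew)) // (false,false)
  }

}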
diff --git a/linkis-public-enhancements/linkis-variable/src/main/scala/org/apache/linkis/variable/service/VariableServiceImpl.scala b/linkis-public-enhancements/linkis-variable/src/main/scala/org/apache/linkis/variable/service/VariableServiceImpl.scala
index 9fbba7448..7651ab538 100644
--- a/linkis-public-enhancements/linkis-variable/src/main/scala/org/apache/linkis/variable/service/VariableServiceImpl.scala
+++ b/linkis-public-enhancements/linkis-variable/src/main/scala/org/apache/linkis/variable/service/VariableServiceImpl.scala
@@ -110,7 +110,7 @@ class VariableServiceImpl extends VariableService with Logging {
for (ele <- userVariables.asScala) {
if (f.getKey.equals(ele.getKey)) {
flag = false
- updateGlobalVariable(f, ele.getValueID: Long)
+ updateGlobalVariable(f, ele.getValueID)
break()
}
}
@@ -126,7 +126,7 @@ class VariableServiceImpl extends VariableService with Logging {
break()
}
}
- if (flag) removeGlobalVariable(f.getKeyID: Long)
+ if (flag) removeGlobalVariable(f.getKeyID)
}
}
diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/DWSHttpClient.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/DWSHttpClient.scala
index 93c1de817..8d1c87c48 100644
--- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/DWSHttpClient.scala
+++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/DWSHttpClient.scala
@@ -39,7 +39,7 @@ import org.apache.http.{HttpException, HttpResponse}
import java.util
-import scala.collection.{JavaConversions, JavaConverters}
+import scala.collection.JavaConverters._
class DWSHttpClient(clientConfig: DWSClientConfig, clientName: String)
extends AbstractHttpClient(clientConfig, clientName)
@@ -101,15 +101,11 @@ class DWSHttpClient(clientConfig: DWSClientConfig, clientName: String)
transfer(value, map)
value
case list: util.List[util.Map[String, Object]] =>
- val results = JavaConverters
- .asScalaBufferConverter(list)
- .asScala
- .map { map =>
- val value = clazz.getConstructor().newInstance().asInstanceOf[Result]
- transfer(value, map)
- value
- }
- .toArray
+ val results = list.asScala.map { map =>
+ val value = clazz.getConstructor().newInstance().asInstanceOf[Result]
+ transfer(value, map)
+ value
+ }.toArray
new ListResult(responseBody, results)
}
}
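
Finally, on the DWSHttpClient change above: scala.collection.JavaConverters offers the explicit asScalaBufferConverter(...) calls the removed code used, and the wildcard import enables the equivalent .asScala enrichment syntax, which is what most of this commit converges on. A compact comparison:

import java.util

import scala.collection.JavaConverters._

object ConverterStyles {

  def main(args: Array[String]): Unit = {
    val list: util.List[String] = util.Arrays.asList("a", "b")

    // Explicit converter call, as the removed code did:
    val viaExplicit =
      scala.collection.JavaConverters.asScalaBufferConverter(list).asScala.map(_.toUpperCase)

    // With the wildcard import, the enrichment syntax reads naturally:
    val viaImport = list.asScala.map(_.toUpperCase)

    println(viaExplicit == viaImport) // true
  }

}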