Posted to commits@linkis.apache.org by pe...@apache.org on 2022/08/31 09:55:05 UTC

[incubator-linkis] branch dev-1.3.1 updated: feat: clear Scala code format alarms in linkis-metadata (#3134)

This is an automated email from the ASF dual-hosted git repository.

peacewong pushed a commit to branch dev-1.3.1
in repository https://gitbox.apache.org/repos/asf/incubator-linkis.git


The following commit(s) were added to refs/heads/dev-1.3.1 by this push:
     new a3b2bbb29 feat: clear Scala code format alarms in linkis-metadata (#3134)
a3b2bbb29 is described below

commit a3b2bbb29329af77010c900c70b6c9fa1b5fa866
Author: ruY <43...@users.noreply.github.com>
AuthorDate: Wed Aug 31 17:55:00 2022 +0800

    feat: clear Scala code format alarms in linkis-metadata (#3134)
---
 .../linkis/metadata/ddl/ImportDDLCreator.scala     | 43 ++++++++++++++--------
 .../linkis/metadata/ddl/ScalaDDLCreator.scala      | 18 ++-------
 2 files changed, 31 insertions(+), 30 deletions(-)

diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/ddl/ImportDDLCreator.scala b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/ddl/ImportDDLCreator.scala
index 46bfbb573..2020aeae3 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/ddl/ImportDDLCreator.scala
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/ddl/ImportDDLCreator.scala
@@ -36,7 +36,7 @@ import java.text.SimpleDateFormat
 import java.util
 import java.util.Date
 
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
 
 object ImportDDLCreator extends DDLCreator {
@@ -73,13 +73,17 @@ object FileImportDDLHelper extends ImportHelper with Logging {
     val importInfo = mdqTableBO.getImportInfo
     val args = importInfo.getArgs
     val _source =
-      if (StringUtils.isEmpty(importInfo.getSource))
+      if (StringUtils.isEmpty(importInfo.getSource)) {
         throw MdqIllegalParamException("import hive source is null")
-      else importInfo.getSource
+      } else {
+        importInfo.getSource
+      }
     val _destination =
-      if (StringUtils.isEmpty(importInfo.getDestination))
+      if (StringUtils.isEmpty(importInfo.getDestination)) {
         throw MdqIllegalParamException("import hive destination is null")
-      else importInfo.getDestination
+      } else {
+        importInfo.getDestination
+      }
     val source = "val source = \"\"\"" + _source + "\"\"\"\n"
     createTableCode.append(source)
     val storePath = storeExecutionCode(_destination, user)
@@ -102,7 +106,7 @@ object FileImportDDLHelper extends ImportHelper with Logging {
       s"end to generate code for ${mdqTableBO.getTableBaseInfo.getBase.getName} code is $resultCode"
     )
     resultCode
-    //    if(storePath == null){
+    //    if(storePath == null) {
     //      newExecutionCode += "org.apache.linkis.engine.imexport.LoadData.loadDataToTable(spark,source,destination)"
     //    }else{
     //      newExecutionCode += "org.apache.linkis.engine.imexport.LoadData.loadDataToTableByFile(spark,destination,source)"
@@ -174,13 +178,17 @@ object HiveImportDDLHelper extends ImportHelper with SQLConst with Logging {
       )
     }
     val sourceDatabase =
-      if (StringUtils.isEmpty(args.get(DATABASE)))
+      if (StringUtils.isEmpty(args.get(DATABASE))) {
         throw MdqIllegalParamException("hive create table source database is null")
-      else args.get(DATABASE)
+      } else {
+        args.get(DATABASE)
+      }
     val sourceTableName =
-      if (StringUtils.isEmpty(args.get(TABLE)))
+      if (StringUtils.isEmpty(args.get(TABLE))) {
         throw MdqIllegalParamException("hive create table source table name is null")
-      else args.get(TABLE)
+      } else {
+        args.get(TABLE)
+      }
     // Check whether the target table is partitioned; if it is, create the table first
     val isPartitionTable = mdqTableBO.getTableBaseInfo.getBase.getPartitionTable
     if (isPartitionTable != null && isPartitionTable == true) {
@@ -199,7 +207,7 @@ object HiveImportDDLHelper extends ImportHelper with SQLConst with Logging {
       var dsCount = 0
       var partitionValue: String = null
       // Create the table
-      fields foreach { field =>
+      fields.asScala foreach { field =>
         val name = field.getName
         val _type = field.getType
         val desc = field.getComment
@@ -213,8 +221,9 @@ object HiveImportDDLHelper extends ImportHelper with SQLConst with Logging {
           }
         } else {
           dsCount += 1
-          if (StringUtils.isNotBlank(field.getPartitionsValue))
+          if (StringUtils.isNotBlank(field.getPartitionsValue)) {
             partitionValue = field.getPartitionsValue
+          }
         }
       }
       executeCode
@@ -227,9 +236,11 @@ object HiveImportDDLHelper extends ImportHelper with SQLConst with Logging {
       // Check whether the source table is partitioned
       val isSourceTablePartition: Boolean = checkPartitionTable(fields)
       val standardDs =
-        if (StringUtils.isNotBlank(partitionValue)) partitionValue
-        else
+        if (StringUtils.isNotBlank(partitionValue)) {
+          partitionValue
+        } else {
           new SimpleDateFormat("yyyyMMdd").format(new java.util.Date(System.currentTimeMillis()))
+        }
       if (!isSourceTablePartition) {
         // Insert the data
         executeCode.append(SPARK_SQL).append(LEFT_PARENTHESES).append(MARKS)
@@ -318,7 +329,7 @@ object HiveImportDDLHelper extends ImportHelper with SQLConst with Logging {
         executeCode.append("*").append(SPACE)
       } else {
         val fieldArr = new ArrayBuffer[String]()
-        fields filter (_ != null) foreach (fieldArr += _.getName)
+        fields.asScala filter (_ != null) foreach (fieldArr += _.getName)
         executeCode.append(fieldArr.mkString(", ")).append(SPACE)
       }
       executeCode
@@ -338,7 +349,7 @@ object HiveImportDDLHelper extends ImportHelper with SQLConst with Logging {
 
   def checkPartitionTable(fields: util.List[MdqTableFieldsInfoBO]): Boolean = {
     var count = 0
-    fields foreach { field =>
+    fields.asScala foreach { field =>
       if (field.getPartitionField != null && field.getPartitionField) count += 1
     }
     count >= 2
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/ddl/ScalaDDLCreator.scala b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/ddl/ScalaDDLCreator.scala
index c00721d0b..439826482 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/ddl/ScalaDDLCreator.scala
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/scala/org/apache/linkis/metadata/ddl/ScalaDDLCreator.scala
@@ -24,7 +24,7 @@ import org.apache.linkis.metadata.exception.MdqIllegalParamException
 
 import org.apache.commons.lang3.StringUtils
 
-import scala.collection.JavaConversions._
+import scala.collection.JavaConverters._
 import scala.collection.mutable.ArrayBuffer
 
 object ScalaDDLCreator extends DDLCreator with SQLConst with Logging {
@@ -40,7 +40,7 @@ object ScalaDDLCreator extends DDLCreator with SQLConst with Logging {
     createTableCode.append(LEFT_PARENTHESES)
     val partitions = new ArrayBuffer[MdqTableFieldsInfoBO]()
     val fieldsArray = new ArrayBuffer[String]()
-    fields foreach { field =>
+    fields.asScala foreach { field =>
       if (field.getPartitionField != null && field.getPartitionField == true) partitions += field
       else {
         val name = field.getName
@@ -59,8 +59,9 @@ object ScalaDDLCreator extends DDLCreator with SQLConst with Logging {
       partitions foreach { p =>
         val name = p.getName
         val _type = p.getType
-        if (StringUtils.isEmpty(name) || StringUtils.isEmpty(_type))
+        if (StringUtils.isEmpty(name) || StringUtils.isEmpty(_type)) {
           throw MdqIllegalParamException("partition name or type is null")
+        }
         partitionArr += (name + SPACE + _type)
       }
       createTableCode
@@ -104,15 +105,4 @@ object ScalaDDLCreator extends DDLCreator with SQLConst with Logging {
     finalCode
   }
 
-  def main(args: Array[String]): Unit = {
-    val filePath = "E:\\data\\json\\data.json"
-    val json = scala.io.Source.fromFile(filePath).mkString
-    println(json)
-
-    // val obj = new Gson().fromJson(json, classOf[MdqTableVO])
-    // val sql = createDDL(obj, "hadoop")
-    // println(System.currentTimeMillis())
-    // println(sql)
-  }
-
 }
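
For readers skimming the diff: the substantive change is the migration from the
implicit conversions in scala.collection.JavaConversions._ to the explicit
scala.collection.JavaConverters._ API, which requires an explicit .asScala call
wherever a Java collection is traversed; the remaining hunks add braces around
single-statement if/else branches and drop a debug main method, which together
clear the style-checker warnings the commit title refers to. Below is a minimal,
self-contained sketch of the conversion pattern; Field is a hypothetical
stand-in for a Java bean such as MdqTableFieldsInfoBO, not code from the
repository.

import java.util

import scala.collection.JavaConverters._

object JavaConvertersExample {

  // Hypothetical stand-in for a Java bean such as MdqTableFieldsInfoBO.
  final case class Field(name: String, partitionField: java.lang.Boolean)

  // Mirrors the checkPartitionTable pattern above: JavaConverters makes the
  // Java-to-Scala bridge explicit via .asScala, where the removed
  // JavaConversions import used to convert implicitly and silently.
  def countPartitionFields(fields: util.List[Field]): Int = {
    var count = 0
    fields.asScala foreach { field =>
      if (field.partitionField != null && field.partitionField) count += 1
    }
    count
  }

  def main(args: Array[String]): Unit = {
    val fields = util.Arrays.asList(
      Field("id", null),
      Field("ds", true),
      Field("hour", true)
    )
    println(countPartitionFields(fields)) // prints 2
  }
}

scala.collection.JavaConversions has been deprecated since Scala 2.12, so
beyond silencing the style checker, the explicit form also removes a source of
compiler deprecation warnings as the codebase moves to newer Scala versions.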


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org