You are viewing a plain text version of this content; the canonical link to the original is available in the HTML version of this message.
Posted to commits@linkis.apache.org by pe...@apache.org on 2023/03/06 12:33:39 UTC
[linkis] branch dev-1.3.2 updated: fix exportDataFromFile support with encoding (#4310)
This is an automated email from the ASF dual-hosted git repository.
peacewong pushed a commit to branch dev-1.3.2
in repository https://gitbox.apache.org/repos/asf/linkis.git
The following commit(s) were added to refs/heads/dev-1.3.2 by this push:
new 61e182f55 fix exportDataFromFile support with encoding (#4310)
61e182f55 is described below
commit 61e182f555ec7cb0747564d3e40ad3b25c428311
Author: Casion <ca...@gmail.com>
AuthorDate: Mon Mar 6 20:33:30 2023 +0800
fix exportDataFromFile support with encoding (#4310)
---
.../linkis/engineplugin/spark/imexport/ExportData.scala | 16 +++++++++-------
1 file changed, 9 insertions(+), 7 deletions(-)
diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/imexport/ExportData.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/imexport/ExportData.scala
index 187277349..9d77c6257 100644
--- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/imexport/ExportData.scala
+++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/imexport/ExportData.scala
@@ -72,16 +72,18 @@ object ExportData extends Logging {
val sheetName = LoadData.getMapValue[String](dest, "sheetName", "Sheet1")
val fieldDelimiter = LoadData.getMapValue[String](dest, "fieldDelimiter", ",")
val nullValue = LoadData.getMapValue[String](dest, "nullValue", "SHUFFLEOFF")
+ val encoding = LoadData.getMapValue[String](dest, "encoding", "uft-8")
+ var options = Map(
+ "fieldDelimiter" -> fieldDelimiter,
+ "exportNullValue" -> nullValue,
+ "encoding" -> encoding
+ )
if (isCsv) {
- CsvRelation.saveDFToCsv(
- spark,
- df,
- path,
- hasHeader,
- isOverwrite,
- option = Map("fieldDelimiter" -> fieldDelimiter, "exportNullValue" -> nullValue)
+ logger.info(
+ s"Try to saveDFToCsv with path:${path},hasHeader:${hasHeader},isOverwrite:${isOverwrite},options:${options}"
)
+ CsvRelation.saveDFToCsv(spark, df, path, hasHeader, isOverwrite, options)
} else {
df.write
.format("com.webank.wedatasphere.spark.excel")
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@linkis.apache.org
For additional commands, e-mail: commits-help@linkis.apache.org