You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@seatunnel.apache.org by ki...@apache.org on 2022/01/20 15:04:10 UTC

[incubator-seatunnel] branch dev updated: [Improve] Remove the useless commas (#1124)

This is an automated email from the ASF dual-hosted git repository.

kirs pushed a commit to branch dev
in repository https://gitbox.apache.org/repos/asf/incubator-seatunnel.git


The following commit(s) were added to refs/heads/dev by this push:
     new 38a57a7  [Improve] Remove the useless commas (#1124)
38a57a7 is described below

commit 38a57a7b78f34a567579618a84096999cb656f7e
Author: Benedict Jin <as...@apache.org>
AuthorDate: Thu Jan 20 23:04:05 2022 +0800

    [Improve] Remove the useless commas (#1124)
---
 .../src/main/scala/org/apache/seatunnel/spark/BaseSparkSink.scala       | 2 +-
 .../src/main/scala/org/apache/seatunnel/spark/BaseSparkSource.scala     | 2 +-
 .../src/main/scala/org/apache/seatunnel/spark/BaseSparkTransform.scala  | 2 +-
 .../src/main/scala/org/apache/seatunnel/spark/source/MongoDB.scala      | 2 +-
 .../src/main/scala/org/apache/seatunnel/spark/source/Neo4j.scala        | 2 +-
 5 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/BaseSparkSink.scala b/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/BaseSparkSink.scala
index fc8cf17..aa4aae6 100644
--- a/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/BaseSparkSink.scala
+++ b/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/BaseSparkSink.scala
@@ -28,6 +28,6 @@ trait BaseSparkSink[OUT] extends BaseSink[SparkEnvironment] {
 
   override def getConfig: Config = config
 
-  def output(data: Dataset[Row], env: SparkEnvironment): OUT;
+  def output(data: Dataset[Row], env: SparkEnvironment): OUT
 
 }
diff --git a/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/BaseSparkSource.scala b/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/BaseSparkSource.scala
index 0b6afa5..9d75a2e 100644
--- a/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/BaseSparkSource.scala
+++ b/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/BaseSparkSource.scala
@@ -27,6 +27,6 @@ trait BaseSparkSource[Data] extends BaseSource[SparkEnvironment] {
 
   override def getConfig: Config = config
 
-  def getData(env: SparkEnvironment): Data;
+  def getData(env: SparkEnvironment): Data
 
 }
diff --git a/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/BaseSparkTransform.scala b/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/BaseSparkTransform.scala
index d964d93..89d99f2 100644
--- a/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/BaseSparkTransform.scala
+++ b/seatunnel-apis/seatunnel-api-spark/src/main/scala/org/apache/seatunnel/spark/BaseSparkTransform.scala
@@ -28,6 +28,6 @@ trait BaseSparkTransform extends BaseTransform[SparkEnvironment] {
 
   override def getConfig: Config = config
 
-  def process(data: Dataset[Row], env: SparkEnvironment): Dataset[Row];
+  def process(data: Dataset[Row], env: SparkEnvironment): Dataset[Row]
 
 }
diff --git a/seatunnel-connectors/seatunnel-connector-spark-mongodb/src/main/scala/org/apache/seatunnel/spark/source/MongoDB.scala b/seatunnel-connectors/seatunnel-connector-spark-mongodb/src/main/scala/org/apache/seatunnel/spark/source/MongoDB.scala
index b175f91..c177bcc 100644
--- a/seatunnel-connectors/seatunnel-connector-spark-mongodb/src/main/scala/org/apache/seatunnel/spark/source/MongoDB.scala
+++ b/seatunnel-connectors/seatunnel-connector-spark-mongodb/src/main/scala/org/apache/seatunnel/spark/source/MongoDB.scala
@@ -62,7 +62,7 @@ class MongoDB extends SparkBatchSource {
       MongoSpark.builder().sparkSession(env.getSparkSession).readConfig(readConfig).build().toDF(
         schema)
     } else {
-      MongoSpark.load(env.getSparkSession, readConfig);
+      MongoSpark.load(env.getSparkSession, readConfig)
     }
   }
 
diff --git a/seatunnel-connectors/seatunnel-connector-spark-neo4j/src/main/scala/org/apache/seatunnel/spark/source/Neo4j.scala b/seatunnel-connectors/seatunnel-connector-spark-neo4j/src/main/scala/org/apache/seatunnel/spark/source/Neo4j.scala
index b5e9562..20b0797 100644
--- a/seatunnel-connectors/seatunnel-connector-spark-neo4j/src/main/scala/org/apache/seatunnel/spark/source/Neo4j.scala
+++ b/seatunnel-connectors/seatunnel-connector-spark-neo4j/src/main/scala/org/apache/seatunnel/spark/source/Neo4j.scala
@@ -45,7 +45,7 @@ class Neo4j extends SparkBatchSource {
 
     val checkMustConfigOneOfParams: CheckResult = CheckConfigUtil.checkOne(config, "query", "labels", "relationship")
     val checkMustConfigAllParams: CheckResult = CheckConfigUtil.check(config, "result_table_name", "url")
-    CheckConfigUtil.mergeCheckMessage(checkMustConfigAllParams, checkMustConfigOneOfParams);
+    CheckConfigUtil.mergeCheckMessage(checkMustConfigAllParams, checkMustConfigOneOfParams)
 
   }