You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by sr...@apache.org on 2023/06/30 14:14:33 UTC

[spark] branch master updated: [SPARK-44257][BUILD] Update some maven plugins & scalafmt to newest version

This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 05f5dccbd34 [SPARK-44257][BUILD] Update some maven plugins & scalafmt to newest version
05f5dccbd34 is described below

commit 05f5dccbd34218c7d399228529853bdb1595f3a2
Author: panbingkun <pb...@gmail.com>
AuthorDate: Fri Jun 30 09:14:22 2023 -0500

    [SPARK-44257][BUILD] Update some maven plugins & scalafmt to newest version
    
    ### What changes were proposed in this pull request?
    The PR aims to update some Maven plugins & scalafmt to the newest versions, including:
    - maven-clean-plugin from 3.2.0 to 3.3.1
    - maven-shade-plugin from 3.4.1 to 3.5.0
    - scalafmt from 3.7.4 to 3.7.5
    
    ### Why are the changes needed?
    1. maven-clean-plugin
    https://github.com/apache/maven-clean-plugin/releases/tag/maven-clean-plugin-3.3.1
    
    2. maven-shade-plugin
    https://github.com/apache/maven-shade-plugin/releases/tag/maven-shade-plugin-3.5.0
    
    3. scalafmt
    https://github.com/scalameta/scalafmt/releases/tag/v3.7.5
    Router: make sure to indent comments after lambda (https://github.com/scalameta/scalafmt/pull/3556) kitbellew
    Fix proposed version syntax (https://github.com/scalameta/scalafmt/pull/3555) JD557
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Pass GA.
    
    Closes #41803 from panbingkun/SPARK-44257.
    
    Authored-by: panbingkun <pb...@gmail.com>
    Signed-off-by: Sean Owen <sr...@gmail.com>
---
 .../src/main/scala/org/apache/spark/sql/Dataset.scala    | 16 +++++++---------
 .../scala/org/apache/spark/sql/catalog/Catalog.scala     |  7 +++----
 .../org/apache/spark/sql/internal/CatalogImpl.scala      |  7 +++----
 dev/.scalafmt.conf                                       |  2 +-
 pom.xml                                                  |  4 ++--
 5 files changed, 16 insertions(+), 20 deletions(-)

diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
index eba425ce127..b959974dc30 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/Dataset.scala
@@ -535,7 +535,7 @@ class Dataset[T] private[sql] (
       assert(result.schema.size == 1)
       // scalastyle:off println
       println(result.toArray.head)
-    // scalastyle:on println
+      // scalastyle:on println
     }
   }
 
@@ -2214,10 +2214,9 @@ class Dataset[T] private[sql] (
    * tied to this Spark application.
    *
    * Global temporary view is cross-session. Its lifetime is the lifetime of the Spark
-   * application,
-   * i.e. it will be automatically dropped when the application terminates. It's tied to a system
-   * preserved database `global_temp`, and we must use the qualified name to refer a global temp
-   * view, e.g. `SELECT * FROM global_temp.view1`.
+   * application, i.e. it will be automatically dropped when the application terminates. It's tied
+   * to a system preserved database `global_temp`, and we must use the qualified name to refer a
+   * global temp view, e.g. `SELECT * FROM global_temp.view1`.
    *
    * @throws AnalysisException
    *   if the view name is invalid or already exists
@@ -2235,10 +2234,9 @@ class Dataset[T] private[sql] (
    * temporary view is tied to this Spark application.
    *
    * Global temporary view is cross-session. Its lifetime is the lifetime of the Spark
-   * application,
-   * i.e. it will be automatically dropped when the application terminates. It's tied to a system
-   * preserved database `global_temp`, and we must use the qualified name to refer a global temp
-   * view, e.g. `SELECT * FROM global_temp.view1`.
+   * application, i.e. it will be automatically dropped when the application terminates. It's tied
+   * to a system preserved database `global_temp`, and we must use the qualified name to refer a
+   * global temp view, e.g. `SELECT * FROM global_temp.view1`.
    *
    * @group basic
    * @since 3.4.0
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
index 268f162cbfa..11c3f4e3d18 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
@@ -543,10 +543,9 @@ abstract class Catalog {
    * cached before, then it will also be uncached.
    *
    * Global temporary view is cross-session. Its lifetime is the lifetime of the Spark
-   * application,
-   * i.e. it will be automatically dropped when the application terminates. It's tied to a system
-   * preserved database `global_temp`, and we must use the qualified name to refer a global temp
-   * view, e.g. `SELECT * FROM global_temp.view1`.
+   * application, i.e. it will be automatically dropped when the application terminates. It's tied
+   * to a system preserved database `global_temp`, and we must use the qualified name to refer a
+   * global temp view, e.g. `SELECT * FROM global_temp.view1`.
    *
    * @param viewName
    *   the unqualified name of the temporary view to be dropped.
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
index f287568d629..8706000ae5b 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
@@ -521,10 +521,9 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
    * cached before, then it will also be uncached.
    *
    * Global temporary view is cross-session. Its lifetime is the lifetime of the Spark
-   * application,
-   * i.e. it will be automatically dropped when the application terminates. It's tied to a system
-   * preserved database `global_temp`, and we must use the qualified name to refer a global temp
-   * view, e.g. `SELECT * FROM global_temp.view1`.
+   * application, i.e. it will be automatically dropped when the application terminates. It's tied
+   * to a system preserved database `global_temp`, and we must use the qualified name to refer a
+   * global temp view, e.g. `SELECT * FROM global_temp.view1`.
    *
    * @param viewName
    *   the unqualified name of the temporary view to be dropped.
diff --git a/dev/.scalafmt.conf b/dev/.scalafmt.conf
index 160d89ecf7f..c3b26002a76 100644
--- a/dev/.scalafmt.conf
+++ b/dev/.scalafmt.conf
@@ -32,4 +32,4 @@ fileOverride {
     runner.dialect = scala213
   }
 }
-version = 3.7.4
+version = 3.7.5
diff --git a/pom.xml b/pom.xml
index 1c60a5c7db7..159b59ae46a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -3079,7 +3079,7 @@
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-clean-plugin</artifactId>
-          <version>3.2.0</version>
+          <version>3.3.1</version>
           <configuration>
             <filesets>
               <fileset>
@@ -3162,7 +3162,7 @@
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-shade-plugin</artifactId>
-          <version>3.4.1</version>
+          <version>3.5.0</version>
           <dependencies>
             <dependency>
               <groupId>org.ow2.asm</groupId>


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org