Posted to commits@spark.apache.org by ya...@apache.org on 2023/03/24 06:46:14 UTC

[spark] branch branch-3.4 updated: [SPARK-42861][SQL] Use private[sql] instead of protected[sql] to avoid generating API doc

This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch branch-3.4
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.4 by this push:
     new b74f7922577 [SPARK-42861][SQL] Use private[sql] instead of protected[sql] to avoid generating API doc
b74f7922577 is described below

commit b74f79225771a95e2bc045c806637d93e5dfaaa3
Author: Wenchen Fan <we...@databricks.com>
AuthorDate: Fri Mar 24 14:45:43 2023 +0800

    [SPARK-42861][SQL] Use private[sql] instead of protected[sql] to avoid generating API doc
    
    ### What changes were proposed in this pull request?
    
    This is the only issue I found during SQL module API auditing via https://github.com/apache/spark-website/pull/443/commits/615986022c573aedaff8d2b917a0d2d9dc2b67ef. Somehow, `protected[sql]` members also show up in the generated API doc, which is unexpected. `private[sql]` solves the problem, and I generated the doc locally to verify it.
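    
    For context, a minimal sketch of the two qualified modifiers (a hypothetical `Widget` class, not Spark code). As observed above, Scaladoc still emits documentation for `protected` members, while `private` members, including qualified ones like `private[sql]`, are omitted:
    
    ```scala
    package org.apache.spark.sql
    
    class Widget {
      // Accessible from package `sql` and from subclasses; per the behavior
      // described above, such members still appear in the generated API doc.
      protected[sql] def internalSet(key: String, value: String): Unit = ()
    
      // Accessible only from within package `sql` (and its sub-packages);
      // excluded from the generated API doc.
      private[sql] def internalGet(key: String): Option[String] = None
    }
    ```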
    
    Another API issue has been fixed by https://github.com/apache/spark/pull/40499
    
    ### Why are the changes needed?
    
    Fix the generated API doc.
    
    ### Does this PR introduce _any_ user-facing change?
    
    no
    
    ### How was this patch tested?
    
    N/A
    
    Closes #40541 from cloud-fan/auditing.
    
    Authored-by: Wenchen Fan <we...@databricks.com>
    Signed-off-by: Kent Yao <ya...@apache.org>
    (cherry picked from commit f7421b498a15ea687eaf811a1b2c77091945ef90)
    Signed-off-by: Kent Yao <ya...@apache.org>
---
 .../src/main/scala/org/apache/spark/sql/RuntimeConfig.scala    | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala b/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
index 532cc7e08e1..f879a13097b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
@@ -64,7 +64,7 @@ class RuntimeConfig private[sql](sqlConf: SQLConf = new SQLConf) {
   /**
    * Sets the given Spark runtime configuration property.
    */
-  protected[sql] def set[T](entry: ConfigEntry[T], value: T): Unit = {
+  private[sql] def set[T](entry: ConfigEntry[T], value: T): Unit = {
     requireNonStaticConf(entry.key)
     sqlConf.setConf(entry, value)
   }
@@ -94,18 +94,18 @@ class RuntimeConfig private[sql](sqlConf: SQLConf = new SQLConf) {
    * Returns the value of Spark runtime configuration property for the given key.
    */
   @throws[NoSuchElementException]("if the key is not set")
-  protected[sql] def get[T](entry: ConfigEntry[T]): T = {
+  private[sql] def get[T](entry: ConfigEntry[T]): T = {
     sqlConf.getConf(entry)
   }
 
-  protected[sql] def get[T](entry: OptionalConfigEntry[T]): Option[T] = {
+  private[sql] def get[T](entry: OptionalConfigEntry[T]): Option[T] = {
     sqlConf.getConf(entry)
   }
 
   /**
    * Returns the value of Spark runtime configuration property for the given key.
    */
-  protected[sql] def get[T](entry: ConfigEntry[T], default: T): T = {
+  private[sql] def get[T](entry: ConfigEntry[T], default: T): T = {
     sqlConf.getConf(entry, default)
   }
 
@@ -153,7 +153,7 @@ class RuntimeConfig private[sql](sqlConf: SQLConf = new SQLConf) {
   /**
    * Returns whether a particular key is set.
    */
-  protected[sql] def contains(key: String): Boolean = {
+  private[sql] def contains(key: String): Boolean = {
     sqlConf.contains(key)
   }
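
As an illustration of why this is not a user-facing change: `private[sql]` members stay accessible to code inside the `org.apache.spark.sql` package, so internal call sites keep compiling. A minimal, hypothetical caller (not part of this commit):

```scala
package org.apache.spark.sql

// Hypothetical internal caller: `private[sql]` members of RuntimeConfig,
// such as `contains`, remain visible anywhere inside package `sql`.
object RuntimeConfigProbe {
  def isShufflePartitionsSet(conf: RuntimeConfig): Boolean =
    conf.contains("spark.sql.shuffle.partitions")
}
```

Code outside package `sql` could not call these members before either, and subclassing from outside is already ruled out by the class's `private[sql]` constructor, so only the generated documentation changes.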
 


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org