Posted to commits@spark.apache.org by gu...@apache.org on 2022/06/03 08:49:14 UTC

[spark] branch master updated: [SPARK-39371][DOCS][CORE] Review and fix issues in Scala/Java API docs of Core module

This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 1fbb1d46feb [SPARK-39371][DOCS][CORE] Review and fix issues in Scala/Java API docs of Core module
1fbb1d46feb is described below

commit 1fbb1d46feb992c3441f2a4f2c5d5179da465d4b
Author: Yuanjian Li <yu...@databricks.com>
AuthorDate: Fri Jun 3 17:49:01 2022 +0900

    [SPARK-39371][DOCS][CORE] Review and fix issues in Scala/Java API docs of Core module
    
    ### What changes were proposed in this pull request?
    
    Compare the 3.3.0 API docs with those of the latest released version, 3.2.1, and fix the following issues:
    
    * Add missing `Since` annotations for new APIs
    * Remove internal classes/objects that leaked into the API docs (see the sketch below)
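    
    For context, the two kinds of fixes look roughly like this (a minimal sketch; `MyNewApi` and `MyInternalHelper` are hypothetical names, not classes touched by this patch):
    
    ```scala
    // Hypothetical package; `private[spark]` needs an enclosing org.apache.spark package.
    package org.apache.spark.example
    
    import org.apache.spark.annotation.Since
    
    // Fix 1: a new public API gets a `@Since` annotation so the generated
    // docs record the release that introduced it.
    @Since("3.3.0")
    class MyNewApi
    
    // Fix 2: an internal helper that leaked into the published API docs is
    // narrowed to package-private, which also removes it from the docs.
    private[spark] object MyInternalHelper
    ```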
    
    ### Why are the changes needed?
    
    Improve API docs
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    Existing unit tests
    
    Closes #36757 from xuanyuanking/doc.
    
    Authored-by: Yuanjian Li <yu...@databricks.com>
    Signed-off-by: Hyukjin Kwon <gu...@apache.org>
---
 core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala   | 2 +-
 .../storage/BlockSavedOnDecommissionedBlockManagerException.scala   | 2 +-
 .../src/main/java/org/apache/spark/launcher/AbstractLauncher.java   | 2 +-
 .../src/main/java/org/apache/spark/launcher/InProcessLauncher.java  | 2 +-
 .../src/main/java/org/apache/spark/launcher/JavaModuleOptions.java  | 2 ++
 .../scala/org/apache/spark/sql/diagnostic/DiagnosticListener.scala  | 4 ++--
 .../scala/org/apache/spark/sql/diagnostic/DiagnosticStore.scala     | 6 +++---
 7 files changed, 11 insertions(+), 9 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala b/core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala
index aecef8ed2d6..1da02884462 100644
--- a/core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala
+++ b/core/src/main/scala/org/apache/spark/errors/SparkCoreErrors.scala
@@ -30,7 +30,7 @@ import org.apache.spark.storage.{BlockId, BlockManagerId, BlockNotFoundException
 /**
  * Object for grouping error messages from (most) exceptions thrown during query execution.
  */
-object SparkCoreErrors {
+private[spark] object SparkCoreErrors {
   def unexpectedPy4JServerError(other: Object): Throwable = {
     new RuntimeException(s"Unexpected Py4J server ${other.getClass}")
   }
diff --git a/core/src/main/scala/org/apache/spark/storage/BlockSavedOnDecommissionedBlockManagerException.scala b/core/src/main/scala/org/apache/spark/storage/BlockSavedOnDecommissionedBlockManagerException.scala
index 4684d9c6775..21a022864bb 100644
--- a/core/src/main/scala/org/apache/spark/storage/BlockSavedOnDecommissionedBlockManagerException.scala
+++ b/core/src/main/scala/org/apache/spark/storage/BlockSavedOnDecommissionedBlockManagerException.scala
@@ -17,5 +17,5 @@
 
 package org.apache.spark.storage
 
-class BlockSavedOnDecommissionedBlockManagerException(blockId: BlockId)
+private[spark] class BlockSavedOnDecommissionedBlockManagerException(blockId: BlockId)
   extends Exception(s"Block $blockId cannot be saved on decommissioned executor")
diff --git a/launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java b/launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java
index eee15419209..a944950cf15 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java
@@ -26,7 +26,7 @@ import static org.apache.spark.launcher.CommandBuilderUtils.*;
 /**
  * Base class for launcher implementations.
  *
- * @since Spark 2.3.0
+ * @since 2.3.0
  */
 public abstract class AbstractLauncher<T extends AbstractLauncher<T>> {
 
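The tag fix above is worth a note: Javadoc and Scaladoc prepend their own "Since:" label to the tag's value, so `@since Spark 2.3.0` would render as "Since: Spark 2.3.0". A minimal sketch with a hypothetical class:

```scala
// The bare version number is the convention, since the doc tooling
// supplies the "Since:" label itself.
/**
 * A hypothetical public API.
 *
 * @since 2.3.0
 */
class MyPublicApi
```
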
diff --git a/launcher/src/main/java/org/apache/spark/launcher/InProcessLauncher.java b/launcher/src/main/java/org/apache/spark/launcher/InProcessLauncher.java
index 688e1f763c2..6867518b321 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/InProcessLauncher.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/InProcessLauncher.java
@@ -37,7 +37,7 @@ import java.util.logging.Logger;
  * driver memory or configs which modify the driver's class path) do not take effect. Logging
  * configuration is also inherited from the parent application.
  *
- * @since Spark 2.3.0
+ * @since 2.3.0
  */
 public class InProcessLauncher extends AbstractLauncher<InProcessLauncher> {
 
diff --git a/launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java b/launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java
index c7d3df99c6e..978466cd77c 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/JavaModuleOptions.java
@@ -21,6 +21,8 @@ package org.apache.spark.launcher;
  * This helper class is used to place all the `--add-opens` options
  * required by Spark when using Java 17. `DEFAULT_MODULE_OPTIONS` has added
  * `-XX:+IgnoreUnrecognizedVMOptions` to be compatible with Java 8 and Java 11.
+ *
+ * @since 3.3.0
  */
 public class JavaModuleOptions {
     private static final String[] DEFAULT_MODULE_OPTIONS = {
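
A rough sketch of the flag style this class collects (the authoritative list is the `DEFAULT_MODULE_OPTIONS` array above; the exact entries may differ):

```scala
// Sketch only; not the exact contents of DEFAULT_MODULE_OPTIONS.
object ModuleOptionsSketch {
  val moduleOptions: Seq[String] = Seq(
    // Per the class comment above, this keeps Java 8/11 from rejecting
    // options they do not recognize.
    "-XX:+IgnoreUnrecognizedVMOptions",
    // On Java 17, open JDK-internal packages that Spark accesses reflectively.
    "--add-opens=java.base/java.lang=ALL-UNNAMED",
    "--add-opens=java.base/java.nio=ALL-UNNAMED"
  )
  // Typically injected into driver and executor JVMs, e.g. through
  // spark.driver.extraJavaOptions and spark.executor.extraJavaOptions.
}
```
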
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/diagnostic/DiagnosticListener.scala b/sql/core/src/main/scala/org/apache/spark/sql/diagnostic/DiagnosticListener.scala
index 7ce1093e879..6899790603b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/diagnostic/DiagnosticListener.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/diagnostic/DiagnosticListener.scala
@@ -30,7 +30,7 @@ import org.apache.spark.status.{ElementTrackingStore, KVUtils}
  *
  * @param kvStore used to store the diagnostic information
  */
-class DiagnosticListener(
+private[spark] class DiagnosticListener(
     conf: SparkConf,
     kvStore: ElementTrackingStore) extends SparkListener {
 
@@ -107,6 +107,6 @@ class DiagnosticListener(
   }
 }
 
-object DiagnosticListener {
+private[spark] object DiagnosticListener {
   val QUEUE_NAME = "diagnostics"
 }
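
`DiagnosticListener` follows the usual `SparkListener` pattern: receive events from the listener bus and persist whatever is needed. A minimal sketch with a hypothetical listener that prints instead of writing to the `ElementTrackingStore`:

```scala
import org.apache.spark.scheduler.{SparkListener, SparkListenerJobEnd}

// Hypothetical listener; a real one, like DiagnosticListener above,
// would write the event data to its KVStore instead of printing.
class JobEndLogger extends SparkListener {
  override def onJobEnd(jobEnd: SparkListenerJobEnd): Unit = {
    println(s"Job ${jobEnd.jobId} ended at ${jobEnd.time}")
  }
}
```

A listener like this is registered programmatically with `sparkContext.addSparkListener(new JobEndLogger)` or through the `spark.extraListeners` configuration.
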
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/diagnostic/DiagnosticStore.scala b/sql/core/src/main/scala/org/apache/spark/sql/diagnostic/DiagnosticStore.scala
index c13cc8a7f39..53ff787fe67 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/diagnostic/DiagnosticStore.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/diagnostic/DiagnosticStore.scala
@@ -28,7 +28,7 @@ import org.apache.spark.util.kvstore.{KVIndex, KVStore}
  * information. There's no state kept in this class, so it's ok to have multiple instances
  * of it in an application.
  */
-class DiagnosticStore(store: KVStore) {
+private[spark] class DiagnosticStore(store: KVStore) {
 
   def diagnosticsList(offset: Int, length: Int): Seq[ExecutionDiagnosticData] = {
     KVUtils.viewToSeq(store.view(classOf[ExecutionDiagnosticData]).skip(offset).max(length))
@@ -51,7 +51,7 @@ class DiagnosticStore(store: KVStore) {
 }
 
 /* Represents the diagnostic data of a SQL execution */
-class ExecutionDiagnosticData(
+private[spark] class ExecutionDiagnosticData(
     @KVIndexParam val executionId: Long,
     val physicalPlan: String,
     val submissionTime: Long,
@@ -59,7 +59,7 @@ class ExecutionDiagnosticData(
     val errorMessage: Option[String])
 
 /* Represents the plan change of an adaptive execution */
-class AdaptiveExecutionUpdate(
+private[spark] class AdaptiveExecutionUpdate(
     @KVIndexParam("id")
     val executionId: Long,
     @KVIndexParam(value = "updateTime", parent = "id")

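For context on the store access pattern above: `DiagnosticStore.diagnosticsList` pages through records by calling `skip`/`max` on a `KVStore` view. A self-contained sketch with a hypothetical record type (annotating a getter with `@KVIndex` directly, rather than through the `@KVIndexParam` alias used above):

```scala
import org.apache.spark.util.kvstore.{InMemoryStore, KVIndex}

// Hypothetical record; the @KVIndex getter is the natural key the
// store indexes and orders by.
class Record(val id: Long, val payload: String) {
  @KVIndex def key: Long = id
}

object KVStoreSketch {
  def main(args: Array[String]): Unit = {
    val store = new InMemoryStore()
    (1L to 10L).foreach(i => store.write(new Record(i, s"payload-$i")))

    // Paginated read, mirroring diagnosticsList(offset, length) above.
    val iter = store.view(classOf[Record]).skip(3).max(4).closeableIterator()
    try {
      while (iter.hasNext) {
        println(iter.next().key)  // expected: 4, 5, 6, 7
      }
    } finally {
      iter.close()
    }
  }
}
```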
