You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by gu...@apache.org on 2022/06/03 00:43:10 UTC
[spark] branch master updated: [SPARK-39367][DOCS][SQL] Review and fix issues in Scala/Java API docs of SQL module
This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 4c7888dd915 [SPARK-39367][DOCS][SQL] Review and fix issues in Scala/Java API docs of SQL module
4c7888dd915 is described below
commit 4c7888dd9159dc203628b0d84f0ee2f90ab4bf13
Author: Gengliang Wang <ge...@apache.org>
AuthorDate: Fri Jun 3 09:42:44 2022 +0900
[SPARK-39367][DOCS][SQL] Review and fix issues in Scala/Java API docs of SQL module
### What changes were proposed in this pull request?
Compared the 3.3.0 API docs with those of the latest release version, 3.2.1. Fix the following issues:
* Add missing Since annotation for new APIs
* Remove the leaking class/object in API doc
### Why are the changes needed?
Improve API docs
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
Existing UT
Closes #36754 from gengliangwang/apiDoc.
Authored-by: Gengliang Wang <ge...@apache.org>
Signed-off-by: Hyukjin Kwon <gu...@apache.org>
---
.../avro/src/main/scala/org/apache/spark/sql/avro/AvroUtils.scala | 2 +-
.../org/apache/spark/sql/connector/util/V2ExpressionSQLBuilder.java | 2 ++
.../java/org/apache/spark/sql/connector/write/RowLevelOperation.java | 2 ++
.../src/main/java/org/apache/spark/sql/util/NumericHistogram.java | 4 ++++
.../main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java | 2 ++
.../scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala | 2 +-
.../src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala | 2 +-
.../main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala | 2 +-
.../main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala | 2 +-
sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala | 2 +-
10 files changed, 16 insertions(+), 6 deletions(-)
diff --git a/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroUtils.scala b/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroUtils.scala
index d03902faab9..56d177da143 100644
--- a/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroUtils.scala
+++ b/connector/avro/src/main/scala/org/apache/spark/sql/avro/AvroUtils.scala
@@ -207,7 +207,7 @@ private[sql] object AvroUtils extends Logging {
}
/** Wrapper for a pair of matched fields, one Catalyst and one corresponding Avro field. */
- case class AvroMatchedField(
+ private[sql] case class AvroMatchedField(
catalystField: StructField,
catalystPosition: Int,
avroField: Schema.Field)
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/util/V2ExpressionSQLBuilder.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/util/V2ExpressionSQLBuilder.java
index 396b1d9cdd0..7e489399cfe 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/util/V2ExpressionSQLBuilder.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/util/V2ExpressionSQLBuilder.java
@@ -30,6 +30,8 @@ import org.apache.spark.sql.types.DataType;
/**
* The builder to generate SQL from V2 expressions.
+ *
+ * @since 3.3.0
*/
public class V2ExpressionSQLBuilder {
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/RowLevelOperation.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/RowLevelOperation.java
index 04bbab11e10..7acd27759a1 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/RowLevelOperation.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/write/RowLevelOperation.java
@@ -34,6 +34,8 @@ public interface RowLevelOperation {
/**
* A row-level SQL command.
+ *
+ * @since 3.3.0
*/
enum Command {
DELETE, UPDATE, MERGE
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/util/NumericHistogram.java b/sql/catalyst/src/main/java/org/apache/spark/sql/util/NumericHistogram.java
index 947c2db29de..444263f3111 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/util/NumericHistogram.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/util/NumericHistogram.java
@@ -44,10 +44,14 @@ import java.util.Random;
* 4. In Hive's code, the method [[merge()]] passes a serialized histogram;
* in Spark, this method passes a deserialized histogram.
* Here we change the code that merges bins.
+ *
+ * @since 3.3.0
*/
public class NumericHistogram {
/**
* The Coord class defines a histogram bin, which is just an (x,y) pair.
+ *
+ * @since 3.3.0
*/
public static class Coord implements Comparable {
public double x;
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java
index 7f841266008..32f6e71f77a 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java
@@ -25,6 +25,8 @@ import org.apache.spark.unsafe.types.UTF8String;
/**
* This class wraps an array of {@link ColumnVector} and provides a row view.
+ *
+ * @since 3.3.0
*/
@DeveloperApi
public final class ColumnarBatchRow extends InternalRow {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index a9885c6c41a..eba13148a1e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -46,7 +46,7 @@ import org.apache.spark.sql.types._
* As commands are executed eagerly, this also includes errors thrown during the execution of
* commands, which users can see immediately.
*/
-object QueryCompilationErrors extends QueryErrorsBase {
+private[sql] object QueryCompilationErrors extends QueryErrorsBase {
def groupingIDMismatchError(groupingID: GroupingID, groupByExprs: Seq[Expression]): Throwable = {
new AnalysisException(
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala
index 7369fd82cb8..558401cb4e9 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryErrorsBase.scala
@@ -42,7 +42,7 @@ import org.apache.spark.sql.types.{DataType, DoubleType, FloatType}
* 7. SQL expressions shall be wrapped by double quotes.
* For example: "earnings + 1".
*/
-trait QueryErrorsBase {
+private[sql] trait QueryErrorsBase {
// Converts an error class parameter to its SQL representation
def toSQLValue(v: Any, t: DataType): String = Literal.create(v, t) match {
case Literal(null, _) => "NULL"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 937dee32177..d57b6a66600 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -66,7 +66,7 @@ import org.apache.spark.util.CircularBuffer
* This does not include exceptions thrown during the eager execution of commands, which are
* grouped into [[QueryCompilationErrors]].
*/
-object QueryExecutionErrors extends QueryErrorsBase {
+private[sql] object QueryExecutionErrors extends QueryErrorsBase {
def cannotEvaluateExpressionError(expression: Expression): Throwable = {
new SparkUnsupportedOperationException(errorClass = "INTERNAL_ERROR",
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
index 8fa28c0d347..b7037cdaeb1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryParsingErrors.scala
@@ -28,7 +28,7 @@ import org.apache.spark.sql.types.StringType
* Object for grouping all error messages of the query parsing.
* Currently it includes all ParseException.
*/
-object QueryParsingErrors extends QueryErrorsBase {
+private[sql] object QueryParsingErrors extends QueryErrorsBase {
def invalidInsertIntoError(ctx: InsertIntoContext): Throwable = {
new ParseException("Invalid InsertIntoContext", ctx)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
index 0c9cd31dcfb..c54ac84c735 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/jdbc/JdbcDialects.scala
@@ -222,7 +222,7 @@ abstract class JdbcDialect extends Serializable with Logging{
case _ => value
}
- class JDBCSQLBuilder extends V2ExpressionSQLBuilder {
+ private[jdbc] class JDBCSQLBuilder extends V2ExpressionSQLBuilder {
override def visitLiteral(literal: Literal[_]): String = {
compileValue(
CatalystTypeConverters.convertToScala(literal.value(), literal.dataType())).toString
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org