Posted to commits@spark.apache.org by gu...@apache.org on 2022/04/20 02:03:17 UTC

[spark] branch master updated: [SPARK-37613][SQL][FOLLOWUP] Supplement docs for regr_count

This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 1b106ea32d5 [SPARK-37613][SQL][FOLLOWUP] Supplement docs for regr_count
1b106ea32d5 is described below

commit 1b106ea32d567dd32ac697ed0d6cfd40ea7e6e08
Author: Jiaan Geng <be...@163.com>
AuthorDate: Wed Apr 20 11:02:58 2022 +0900

    [SPARK-37613][SQL][FOLLOWUP] Supplement docs for regr_count
    
    ### What changes were proposed in this pull request?
    https://github.com/apache/spark/pull/34880 added support for the ANSI aggregate function regr_count.
    However, the docs for regr_count were not detailed enough.
    
    ### Why are the changes needed?
    Make the docs of regr_count more detailed: the updated usage string spells out that `y` is the dependent variable and `x` is the independent variable.
    
    ### Does this PR introduce _any_ user-facing change?
    'No'.
    Documentation-only change.
    
    ### How was this patch tested?
    N/A
    
    Closes #36258 from beliefer/SPARK-37613_followup.
    
    Authored-by: Jiaan Geng <be...@163.com>
    Signed-off-by: Hyukjin Kwon <gu...@apache.org>
---
 .../spark/sql/catalyst/expressions/aggregate/linearRegression.scala | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/linearRegression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/linearRegression.scala
index 4c1749fa00e..098fc17b98a 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/linearRegression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/linearRegression.scala
@@ -22,10 +22,9 @@ import org.apache.spark.sql.catalyst.expressions.{And, Expression, ExpressionDes
 import org.apache.spark.sql.catalyst.trees.BinaryLike
 import org.apache.spark.sql.types.{AbstractDataType, DoubleType, NumericType}
 
+// scalastyle:off line.size.limit
 @ExpressionDescription(
-  usage = """
-    _FUNC_(expr) - Returns the number of non-null number pairs in a group.
-  """,
+  usage = "_FUNC_(y, x) - Returns the number of non-null number pairs in a group, where `y` is the dependent variable and `x` is the independent variable.",
   examples = """
     Examples:
       > SELECT _FUNC_(y, x) FROM VALUES (1, 2), (2, 2), (2, 3), (2, 4) AS tab(y, x);
@@ -37,6 +36,7 @@ import org.apache.spark.sql.types.{AbstractDataType, DoubleType, NumericType}
   """,
   group = "agg_funcs",
   since = "3.3.0")
+// scalastyle:on line.size.limit
 case class RegrCount(left: Expression, right: Expression)
   extends AggregateFunction
   with RuntimeReplaceableAggregate
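
For reference, the behavior described by the updated usage string can be exercised end-to-end with a small Scala sketch (not part of this commit). It assumes Spark 3.3.0 or later, where regr_count is available; the object and app names are illustrative only.

    import org.apache.spark.sql.SparkSession

    // Minimal sketch: run regr_count queries like the ones in the updated
    // ExpressionDescription through spark.sql.
    object RegrCountDocExample {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .appName("regr_count doc example")
          .master("local[*]")
          .getOrCreate()

        // All four (y, x) pairs are non-null, so regr_count returns 4,
        // matching the example shown in the diff above.
        spark.sql(
          "SELECT regr_count(y, x) FROM VALUES (1, 2), (2, 2), (2, 3), (2, 4) AS tab(y, x)"
        ).show()

        // A pair with a NULL on either side is not counted, so this returns 3.
        spark.sql(
          "SELECT regr_count(y, x) FROM VALUES (1, 2), (2, NULL), (2, 3), (2, 4) AS tab(y, x)"
        ).show()

        spark.stop()
      }
    }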

