Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2022/06/02 02:13:50 UTC

[GitHub] [spark] beliefer commented on a diff in pull request #36714: [SPARK-39320][SQL] Support aggregate function `MEDIAN`

beliefer commented on code in PR #36714:
URL: https://github.com/apache/spark/pull/36714#discussion_r887439535


##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/percentiles.scala:
##########
@@ -359,6 +359,32 @@ case class Percentile(
   )
 }
 
+// scalastyle:off line.size.limit
+@ExpressionDescription(
+  usage = "_FUNC_(col) - Returns the median of numeric or ansi interval column `col`.",
+  examples = """
+    Examples:
+      > SELECT _FUNC_(col) FROM VALUES (0), (10) AS tab(col);
+       5.0
+      > SELECT _FUNC_(col) FROM VALUES (INTERVAL '0' MONTH), (INTERVAL '10' MONTH) AS tab(col);
+       0-5
+  """,
+  group = "agg_funcs",
+  since = "3.4.0")
+// scalastyle:on line.size.limit
+case class Median(child: Expression)
+  extends AggregateFunction
+    with RuntimeReplaceableAggregate
+    with ImplicitCastInputTypes
+    with UnaryLike[Expression] {
+  private lazy val percentile = new Percentile(child, Literal(0.5, DoubleType))
+  override def replacement: Expression = percentile
+  override def nodeName: String = "median"
+  override def inputTypes: Seq[AbstractDataType] = percentile.inputTypes.take(1)

Review Comment:
   It seems we could support ansi interval too, e.g.
   `TypeCollection(NumericType, YearMonthIntervalType, DayTimeIntervalType)`
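   
   A minimal sketch of what that suggestion could look like (assuming `Median` keeps delegating to `Percentile` and that the file already imports `TypeCollection`, `NumericType`, `YearMonthIntervalType` and `DayTimeIntervalType` from `org.apache.spark.sql.types`; not the final implementation):
   
   ```scala
   // Accept numeric columns as well as ANSI year-month and day-time interval columns.
   override def inputTypes: Seq[AbstractDataType] =
     Seq(TypeCollection(NumericType, YearMonthIntervalType, DayTimeIntervalType))
   ```
   
   This would mirror how `Average` declares its accepted input types for ANSI intervals.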



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org

