You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by do...@apache.org on 2019/05/10 16:31:20 UTC
[spark] branch branch-2.4 updated: [SPARK-27673][SQL] Add `since`
info to random, regex, null expressions
This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch branch-2.4
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-2.4 by this push:
new f2cd16f [SPARK-27673][SQL] Add `since` info to random, regex, null expressions
f2cd16f is described below
commit f2cd16f665fb6d3361c49c54bbcf86f17d4931d1
Author: HyukjinKwon <gu...@apache.org>
AuthorDate: Fri May 10 09:24:04 2019 -0700
[SPARK-27673][SQL] Add `since` info to random, regex, null expressions
We should add `since` info to all expressions.
SPARK-7886 Rand / Randn
https://github.com/apache/spark/commit/af3746ce0d724dc624658a2187bde188ab26d084 RLike, Like (I manually checked that it exists from 1.0.0)
SPARK-8262 Split
SPARK-8256 RegExpReplace
SPARK-8255 RegExpExtract
https://github.com/apache/spark/commit/9aadcffabd226557174f3ff566927f873c71672e Coalesce / IsNull / IsNotNull (I manually checked that it exists from 1.0.0)
SPARK-14541 IfNull / NullIf / Nvl / Nvl2
SPARK-9080 IsNaN
SPARK-9168 NaNvl
N/A
Closes #24579 from HyukjinKwon/SPARK-27673.
Authored-by: HyukjinKwon <gu...@apache.org>
Signed-off-by: Dongjoon Hyun <dh...@apple.com>
(cherry picked from commit c71f217de1e0b2265f585369aa556ed26db98589)
Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
.../sql/catalyst/expressions/nullExpressions.scala | 27 ++++++++++++++--------
.../catalyst/expressions/randomExpressions.scala | 6 +++--
.../catalyst/expressions/regexpExpressions.scala | 15 ++++++++----
3 files changed, 32 insertions(+), 16 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala
index b683d2a..293d28e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/nullExpressions.scala
@@ -42,7 +42,8 @@ import org.apache.spark.sql.types._
Examples:
> SELECT _FUNC_(NULL, 1, NULL);
1
- """)
+ """,
+ since = "1.0.0")
// scalastyle:on line.size.limit
case class Coalesce(children: Seq[Expression]) extends ComplexTypeMergingExpression {
@@ -127,7 +128,8 @@ case class Coalesce(children: Seq[Expression]) extends ComplexTypeMergingExpress
Examples:
> SELECT _FUNC_(NULL, array('2'));
["2"]
- """)
+ """,
+ since = "2.0.0")
case class IfNull(left: Expression, right: Expression, child: Expression)
extends RuntimeReplaceable {
@@ -146,7 +148,8 @@ case class IfNull(left: Expression, right: Expression, child: Expression)
Examples:
> SELECT _FUNC_(2, 2);
NULL
- """)
+ """,
+ since = "2.0.0")
case class NullIf(left: Expression, right: Expression, child: Expression)
extends RuntimeReplaceable {
@@ -165,7 +168,8 @@ case class NullIf(left: Expression, right: Expression, child: Expression)
Examples:
> SELECT _FUNC_(NULL, array('2'));
["2"]
- """)
+ """,
+ since = "2.0.0")
case class Nvl(left: Expression, right: Expression, child: Expression) extends RuntimeReplaceable {
def this(left: Expression, right: Expression) = {
@@ -184,7 +188,8 @@ case class Nvl(left: Expression, right: Expression, child: Expression) extends R
Examples:
> SELECT _FUNC_(NULL, 2, 1);
1
- """)
+ """,
+ since = "2.0.0")
// scalastyle:on line.size.limit
case class Nvl2(expr1: Expression, expr2: Expression, expr3: Expression, child: Expression)
extends RuntimeReplaceable {
@@ -207,7 +212,8 @@ case class Nvl2(expr1: Expression, expr2: Expression, expr3: Expression, child:
Examples:
> SELECT _FUNC_(cast('NaN' as double));
true
- """)
+ """,
+ since = "1.5.0")
case class IsNaN(child: Expression) extends UnaryExpression
with Predicate with ImplicitCastInputTypes {
@@ -249,7 +255,8 @@ case class IsNaN(child: Expression) extends UnaryExpression
Examples:
> SELECT _FUNC_(cast('NaN' as double), 123);
123.0
- """)
+ """,
+ since = "1.5.0")
case class NaNvl(left: Expression, right: Expression)
extends BinaryExpression with ImplicitCastInputTypes {
@@ -309,7 +316,8 @@ case class NaNvl(left: Expression, right: Expression)
Examples:
> SELECT _FUNC_(1);
false
- """)
+ """,
+ since = "1.0.0")
case class IsNull(child: Expression) extends UnaryExpression with Predicate {
override def nullable: Boolean = false
@@ -335,7 +343,8 @@ case class IsNull(child: Expression) extends UnaryExpression with Predicate {
Examples:
> SELECT _FUNC_(1);
true
- """)
+ """,
+ since = "1.0.0")
case class IsNotNull(child: Expression) extends UnaryExpression with Predicate {
override def nullable: Boolean = false
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/randomExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/randomExpressions.scala
index b70c341..c02d2f0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/randomExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/randomExpressions.scala
@@ -78,7 +78,8 @@ trait ExpressionWithRandomSeed {
> SELECT _FUNC_(null);
0.8446490682263027
""",
- note = "The function is non-deterministic in general case.")
+ note = "The function is non-deterministic in general case.",
+ since = "1.5.0")
// scalastyle:on line.size.limit
case class Rand(child: Expression) extends RDG with ExpressionWithRandomSeed {
@@ -118,7 +119,8 @@ object Rand {
> SELECT _FUNC_(null);
1.1164209726833079
""",
- note = "The function is non-deterministic in general case.")
+ note = "The function is non-deterministic in general case.",
+ since = "1.5.0")
// scalastyle:on line.size.limit
case class Randn(child: Expression) extends RDG with ExpressionWithRandomSeed {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala
index bf0c35f..e80543c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/regexpExpressions.scala
@@ -101,7 +101,8 @@ abstract class StringRegexExpression extends BinaryExpression
""",
note = """
Use RLIKE to match with standard regular expressions.
- """)
+ """,
+ since = "1.0.0")
case class Like(left: Expression, right: Expression) extends StringRegexExpression {
override def escape(v: String): String = StringUtils.escapeLikeRegex(v)
@@ -179,7 +180,8 @@ case class Like(left: Expression, right: Expression) extends StringRegexExpressi
""",
note = """
Use LIKE to match with simple string pattern.
- """)
+ """,
+ since = "1.0.0")
case class RLike(left: Expression, right: Expression) extends StringRegexExpression {
override def escape(v: String): String = v
@@ -237,7 +239,8 @@ case class RLike(left: Expression, right: Expression) extends StringRegexExpress
Examples:
> SELECT _FUNC_('oneAtwoBthreeC', '[ABC]');
["one","two","three",""]
- """)
+ """,
+ since = "1.5.0")
case class StringSplit(str: Expression, pattern: Expression)
extends BinaryExpression with ImplicitCastInputTypes {
@@ -274,7 +277,8 @@ case class StringSplit(str: Expression, pattern: Expression)
Examples:
> SELECT _FUNC_('100-200', '(\\d+)', 'num');
num-num
- """)
+ """,
+ since = "1.5.0")
// scalastyle:on line.size.limit
case class RegExpReplace(subject: Expression, regexp: Expression, rep: Expression)
extends TernaryExpression with ImplicitCastInputTypes {
@@ -373,7 +377,8 @@ case class RegExpReplace(subject: Expression, regexp: Expression, rep: Expressio
Examples:
> SELECT _FUNC_('100-200', '(\\d+)-(\\d+)', 1);
100
- """)
+ """,
+ since = "1.5.0")
case class RegExpExtract(subject: Expression, regexp: Expression, idx: Expression)
extends TernaryExpression with ImplicitCastInputTypes {
def this(s: Expression, r: Expression) = this(s, r, Literal(1))
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org