Posted to commits@spark.apache.org by do...@apache.org on 2020/03/16 18:24:09 UTC
[spark] branch branch-3.0 updated: [SPARK-31146][SQL] Leverage the helper method for aliasing in built-in SQL expressions
This is an automated email from the ASF dual-hosted git repository.
dongjoon pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.0 by this push:
new 9cf5d17 [SPARK-31146][SQL] Leverage the helper method for aliasing in built-in SQL expressions
9cf5d17 is described below
commit 9cf5d170910a65792d894d129a976ec486b4abc6
Author: HyukjinKwon <gu...@apache.org>
AuthorDate: Mon Mar 16 11:22:34 2020 -0700
[SPARK-31146][SQL] Leverage the helper method for aliasing in built-in SQL expressions
### What changes were proposed in this pull request?
This PR is kind of a followup of #26808. It leverages the helper method for aliasing in built-in SQL expressions so that the alias is used as the output column name where applicable.
- `Expression`, `UnaryMathExpression` and `BinaryMathExpression` look up the alias from the tree-node tags by default.
- When the naming differs in an expression's implementation, the lookup has to be overridden for that expression specifically, e.g. `CallMethodViaReflection`, `Remainder`, `CurrentTimestamp`, `FormatString` and `XPathDouble` (see the sketch after this list).
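As a minimal, self-contained sketch of the mechanism (`FuncAliasSketch`, `Expr` and the plain tag map below are stand-ins, not Spark's actual `TreeNode` tag machinery): the registry sets a `FUNC_ALIAS` tag when an alias is registered with `setAlias = true`, and the default `prettyName` prefers that tag over the class-derived name.

```scala
// Sketch only: models how a FUNC_ALIAS tag carries a registered alias
// through to prettyName. FuncAliasSketch, Expr and the tag map are
// simplified stand-ins for Spark's TreeNode tags and FunctionRegistry.
object FuncAliasSketch {
  case object FUNC_ALIAS

  abstract class Expr {
    private var tags = Map.empty[Any, String]
    def setTagValue(tag: Any, value: String): Unit = tags += (tag -> value)
    def getTagValue(tag: Any): Option[String] = tags.get(tag)
    // Default behaviour after this change: prefer the registered alias,
    // otherwise fall back to the lower-cased node name.
    def prettyName: String =
      getTagValue(FUNC_ALIAS).getOrElse(getClass.getSimpleName.stripSuffix("$").toLowerCase)
  }

  case class Rand() extends Expr

  def main(args: Array[String]): Unit = {
    val plain = Rand()                         // registered as "rand": no tag set
    val aliased = Rand()
    aliased.setTagValue(FUNC_ALIAS, "random")  // registered as "random" with setAlias = true
    println(plain.prettyName)                  // rand
    println(aliased.prettyName)                // random
  }
}
```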
This PR fixes the aliases of the functions below:
| class | alias |
|--------------------------|------------------|
|`Rand` |`random` |
|`Ceil` |`ceiling` |
|`Remainder` |`mod` |
|`Pow` |`pow` |
|`Signum` |`sign` |
|`Chr` |`char` |
|`Length` |`char_length` |
|`Length` |`character_length`|
|`FormatString` |`printf` |
|`Substring` |`substr` |
|`Upper` |`ucase` |
|`XPathDouble` |`xpath_number` |
|`DayOfMonth` |`day` |
|`CurrentTimestamp` |`now` |
|`Size` |`cardinality` |
|`Sha1` |`sha` |
|`CallMethodViaReflection` |`java_method` |
Note: the `EqualTo` aliases `=` and `==` were excluded because they cannot leverage this helper method; fixing them would require changing the parser.
Note: this PR also excludes some instances such as `ToDegrees`, `ToRadians`, `UnaryMinus` and `UnaryPositive` that need an explicit name override, to keep the scope of this PR smaller.
### Why are the changes needed?
To respect the expression name (alias) used in the query.
### Does this PR introduce any user-facing change?
Yes, it changes the output column name for the aliases listed above, as shown below.
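As an illustration (a hypothetical spark-shell session, assuming a running `SparkSession` bound to `spark`), the alias now surfaces as the output column name, matching the updated `operators.sql.out` golden file in the diff below:

```scala
// Illustrative only: the column name follows the alias used in the query.
val df = spark.sql("SELECT ceiling(0)")
// Before this change the column was named CEIL(CAST(0 AS DOUBLE));
// after it, the alias is respected:
df.columns.foreach(println)  // ceiling(CAST(0 AS DOUBLE))
```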
### How was this patch tested?
Manually tested, and unit tests were added.
Closes #27901 from HyukjinKwon/31146.
Authored-by: HyukjinKwon <gu...@apache.org>
Signed-off-by: Dongjoon Hyun <do...@apache.org>
(cherry picked from commit 6704103499d2003b1879ff0b4b8e29141e401b9f)
Signed-off-by: Dongjoon Hyun <do...@apache.org>
---
.../sql/catalyst/analysis/FunctionRegistry.scala | 38 ++++++++++++----------
.../expressions/CallMethodViaReflection.scala | 4 +--
.../sql/catalyst/expressions/Expression.scala | 5 +--
.../sql/catalyst/expressions/aggregate/First.scala | 2 --
.../sql/catalyst/expressions/aggregate/Last.scala | 2 --
.../sql/catalyst/expressions/arithmetic.scala | 14 +++++++-
.../catalyst/expressions/datetimeExpressions.scala | 4 ++-
.../sql/catalyst/expressions/mathExpressions.scala | 10 +++---
.../catalyst/expressions/stringExpressions.scala | 7 ++--
.../spark/sql/catalyst/expressions/xml/xpath.scala | 5 +--
.../resources/sql-tests/results/operators.sql.out | 14 ++++----
.../sql-tests/results/postgreSQL/insert.sql.out | 2 +-
.../sql-tests/results/postgreSQL/numeric.sql.out | 2 +-
.../sql-tests/results/postgreSQL/strings.sql.out | 4 +--
.../sql-tests/results/string-functions.sql.out | 6 ++--
.../typeCoercion/native/implicitTypeCasts.sql.out | 2 +-
.../scala/org/apache/spark/sql/ExplainSuite.scala | 4 +--
17 files changed, 70 insertions(+), 55 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
index 6c4aee4..c11186e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala
@@ -218,7 +218,7 @@ object FunctionRegistry {
expression[PosExplode]("posexplode"),
expressionGeneratorOuter[PosExplode]("posexplode_outer"),
expression[Rand]("rand"),
- expression[Rand]("random"),
+ expression[Rand]("random", true),
expression[Randn]("randn"),
expression[Stack]("stack"),
expression[CaseWhen]("when"),
@@ -235,7 +235,7 @@ object FunctionRegistry {
expression[BRound]("bround"),
expression[Cbrt]("cbrt"),
expression[Ceil]("ceil"),
- expression[Ceil]("ceiling"),
+ expression[Ceil]("ceiling", true),
expression[Cos]("cos"),
expression[Cosh]("cosh"),
expression[Conv]("conv"),
@@ -252,12 +252,12 @@ object FunctionRegistry {
expression[Log1p]("log1p"),
expression[Log2]("log2"),
expression[Log]("ln"),
- expression[Remainder]("mod"),
+ expression[Remainder]("mod", true),
expression[UnaryMinus]("negative"),
expression[Pi]("pi"),
expression[Pmod]("pmod"),
expression[UnaryPositive]("positive"),
- expression[Pow]("pow"),
+ expression[Pow]("pow", true),
expression[Pow]("power"),
expression[ToRadians]("radians"),
expression[Rint]("rint"),
@@ -265,7 +265,7 @@ object FunctionRegistry {
expression[ShiftLeft]("shiftleft"),
expression[ShiftRight]("shiftright"),
expression[ShiftRightUnsigned]("shiftrightunsigned"),
- expression[Signum]("sign"),
+ expression[Signum]("sign", true),
expression[Signum]("signum"),
expression[Sin]("sin"),
expression[Sinh]("sinh"),
@@ -323,12 +323,12 @@ object FunctionRegistry {
// string functions
expression[Ascii]("ascii"),
- expression[Chr]("char"),
+ expression[Chr]("char", true),
expression[Chr]("chr"),
expression[Base64]("base64"),
expression[BitLength]("bit_length"),
- expression[Length]("char_length"),
- expression[Length]("character_length"),
+ expression[Length]("char_length", true),
+ expression[Length]("character_length", true),
expression[ConcatWs]("concat_ws"),
expression[Decode]("decode"),
expression[Elt]("elt"),
@@ -351,7 +351,7 @@ object FunctionRegistry {
expression[JsonTuple]("json_tuple"),
expression[ParseUrl]("parse_url"),
expression[StringLocate]("position"),
- expression[FormatString]("printf"),
+ expression[FormatString]("printf", true),
expression[RegExpExtract]("regexp_extract"),
expression[RegExpReplace]("regexp_replace"),
expression[StringRepeat]("repeat"),
@@ -364,21 +364,21 @@ object FunctionRegistry {
expression[SoundEx]("soundex"),
expression[StringSpace]("space"),
expression[StringSplit]("split"),
- expression[Substring]("substr"),
+ expression[Substring]("substr", true),
expression[Substring]("substring"),
expression[Left]("left"),
expression[Right]("right"),
expression[SubstringIndex]("substring_index"),
expression[StringTranslate]("translate"),
expression[StringTrim]("trim"),
- expression[Upper]("ucase"),
+ expression[Upper]("ucase", true),
expression[UnBase64]("unbase64"),
expression[Unhex]("unhex"),
expression[Upper]("upper"),
expression[XPathList]("xpath"),
expression[XPathBoolean]("xpath_boolean"),
expression[XPathDouble]("xpath_double"),
- expression[XPathDouble]("xpath_number"),
+ expression[XPathDouble]("xpath_number", true),
expression[XPathFloat]("xpath_float"),
expression[XPathInt]("xpath_int"),
expression[XPathLong]("xpath_long"),
@@ -393,7 +393,7 @@ object FunctionRegistry {
expression[DateAdd]("date_add"),
expression[DateFormatClass]("date_format"),
expression[DateSub]("date_sub"),
- expression[DayOfMonth]("day"),
+ expression[DayOfMonth]("day", true),
expression[DayOfYear]("dayofyear"),
expression[DayOfMonth]("dayofmonth"),
expression[FromUnixTime]("from_unixtime"),
@@ -404,7 +404,7 @@ object FunctionRegistry {
expression[Month]("month"),
expression[MonthsBetween]("months_between"),
expression[NextDay]("next_day"),
- expression[CurrentTimestamp]("now"),
+ expression[CurrentTimestamp]("now", true),
expression[Quarter]("quarter"),
expression[Second]("second"),
expression[ParseToTimestamp]("to_timestamp"),
@@ -445,7 +445,7 @@ object FunctionRegistry {
expression[MapConcat]("map_concat"),
expression[Size]("size"),
expression[Slice]("slice"),
- expression[Size]("cardinality"),
+ expression[Size]("cardinality", true),
expression[ArraysZip]("arrays_zip"),
expression[SortArray]("sort_array"),
expression[Shuffle]("shuffle"),
@@ -478,7 +478,7 @@ object FunctionRegistry {
expression[Uuid]("uuid"),
expression[Murmur3Hash]("hash"),
expression[XxHash64]("xxhash64"),
- expression[Sha1]("sha"),
+ expression[Sha1]("sha", true),
expression[Sha1]("sha1"),
expression[Sha2]("sha2"),
expression[SparkPartitionID]("spark_partition_id"),
@@ -488,7 +488,7 @@ object FunctionRegistry {
expression[MonotonicallyIncreasingID]("monotonically_increasing_id"),
expression[CurrentDatabase]("current_database"),
expression[CallMethodViaReflection]("reflect"),
- expression[CallMethodViaReflection]("java_method"),
+ expression[CallMethodViaReflection]("java_method", true),
expression[SparkVersion]("version"),
expression[TypeOf]("typeof"),
@@ -590,7 +590,9 @@ object FunctionRegistry {
if (varargCtor.isDefined) {
// If there is an apply method that accepts Seq[Expression], use that one.
try {
- varargCtor.get.newInstance(expressions).asInstanceOf[Expression]
+ val exp = varargCtor.get.newInstance(expressions).asInstanceOf[Expression]
+ if (setAlias) exp.setTagValue(FUNC_ALIAS, name)
+ exp
} catch {
// the exception is an invocation exception. To get a meaningful message, we need the
// cause.
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
index 65bb9a8..e6a4c8f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/CallMethodViaReflection.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.expressions
import java.lang.reflect.{Method, Modifier}
import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
+import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TypeCheckResult}
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
import org.apache.spark.sql.types._
@@ -55,7 +55,7 @@ import org.apache.spark.util.Utils
case class CallMethodViaReflection(children: Seq[Expression])
extends Expression with CodegenFallback {
- override def prettyName: String = "reflect"
+ override def prettyName: String = getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse("reflect")
override def checkInputDataTypes(): TypeCheckResult = {
if (children.size < 2) {
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
index 1599321..f29ece2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Expression.scala
@@ -20,7 +20,7 @@ package org.apache.spark.sql.catalyst.expressions
import java.util.Locale
import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, TypeCoercion}
+import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TypeCheckResult, TypeCoercion}
import org.apache.spark.sql.catalyst.expressions.aggregate.DeclarativeAggregate
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
@@ -258,7 +258,8 @@ abstract class Expression extends TreeNode[Expression] {
* Returns a user-facing string representation of this expression's name.
* This should usually match the name of the function in SQL.
*/
- def prettyName: String = nodeName.toLowerCase(Locale.ROOT)
+ def prettyName: String =
+ getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse(nodeName.toLowerCase(Locale.ROOT))
protected def flatArguments: Iterator[Any] = stringArgs.flatMap {
case t: Iterable[_] => t
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/First.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/First.scala
index 210acf3..2c0060c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/First.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/First.scala
@@ -117,7 +117,5 @@ case class First(child: Expression, ignoreNullsExpr: Expression)
override lazy val evaluateExpression: AttributeReference = first
- override def prettyName: String = getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse("first")
-
override def toString: String = s"$prettyName($child)${if (ignoreNulls) " ignore nulls"}"
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Last.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Last.scala
index 2c89a4b..6793ac7 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Last.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/aggregate/Last.scala
@@ -115,7 +115,5 @@ case class Last(child: Expression, ignoreNullsExpr: Expression)
override lazy val evaluateExpression: AttributeReference = last
- override def prettyName: String = getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse("last")
-
override def toString: String = s"$prettyName($child)${if (ignoreNulls) " ignore nulls"}"
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index 215e88a..6a64819 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -18,7 +18,7 @@
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.{TypeCheckResult, TypeCoercion}
+import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TypeCheckResult, TypeCoercion}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.util.{IntervalUtils, TypeUtils}
@@ -457,6 +457,18 @@ case class Remainder(left: Expression, right: Expression) extends DivModLike {
override def symbol: String = "%"
override def decimalMethod: String = "remainder"
+ override def toString: String = {
+ getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse(sqlOperator) match {
+ case operator if operator == sqlOperator => s"($left $sqlOperator $right)"
+ case funcName => s"$funcName($left, $right)"
+ }
+ }
+ override def sql: String = {
+ getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse(sqlOperator) match {
+ case operator if operator == sqlOperator => s"(${left.sql} $sqlOperator ${right.sql})"
+ case funcName => s"$funcName(${left.sql}, ${right.sql})"
+ }
+ }
private lazy val mod: (Any, Any) => Any = dataType match {
// special cases to make float/double primitive types faster
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 1dad440..0b1b65f 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -29,6 +29,7 @@ import org.apache.commons.text.StringEscapeUtils
import org.apache.spark.SparkUpgradeException
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.analysis.FunctionRegistry
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.util.{DateTimeUtils, LegacyDateFormats, TimestampFormatter}
@@ -99,7 +100,8 @@ case class CurrentTimestamp() extends LeafExpression with CodegenFallback {
override def eval(input: InternalRow): Any = currentTimestamp()
- override def prettyName: String = "current_timestamp"
+ override def prettyName: String =
+ getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse("current_timestamp")
}
/**
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
index d5b959b..66e6334 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
@@ -21,7 +21,7 @@ import java.{lang => jl}
import java.util.Locale
import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
+import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TypeCheckResult}
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.{TypeCheckFailure, TypeCheckSuccess}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
@@ -62,8 +62,8 @@ abstract class UnaryMathExpression(val f: Double => Double, name: String)
override def inputTypes: Seq[AbstractDataType] = Seq(DoubleType)
override def dataType: DataType = DoubleType
override def nullable: Boolean = true
- override def toString: String = s"$name($child)"
- override def prettyName: String = name
+ override def toString: String = s"$prettyName($child)"
+ override def prettyName: String = getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse(name)
protected override def nullSafeEval(input: Any): Any = {
f(input.asInstanceOf[Double])
@@ -115,9 +115,9 @@ abstract class BinaryMathExpression(f: (Double, Double) => Double, name: String)
override def inputTypes: Seq[DataType] = Seq(DoubleType, DoubleType)
- override def toString: String = s"$name($left, $right)"
+ override def toString: String = s"$prettyName($left, $right)"
- override def prettyName: String = name
+ override def prettyName: String = getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse(name)
override def dataType: DataType = DoubleType
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
index cc09f60..50a90ae 100755
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
@@ -27,7 +27,7 @@ import scala.collection.mutable.ArrayBuffer
import org.apache.commons.codec.binary.{Base64 => CommonsBase64}
import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
+import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TypeCheckResult}
import org.apache.spark.sql.catalyst.expressions.codegen._
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.catalyst.util.{ArrayData, GenericArrayData, TypeUtils}
@@ -1450,7 +1450,7 @@ case class ParseUrl(children: Seq[Expression])
// scalastyle:on line.size.limit
case class FormatString(children: Expression*) extends Expression with ImplicitCastInputTypes {
- require(children.nonEmpty, "format_string() should take at least 1 argument")
+ require(children.nonEmpty, s"$prettyName() should take at least 1 argument")
override def foldable: Boolean = children.forall(_.foldable)
override def nullable: Boolean = children(0).nullable
@@ -1517,7 +1517,8 @@ case class FormatString(children: Expression*) extends Expression with ImplicitC
}""")
}
- override def prettyName: String = "format_string"
+ override def prettyName: String = getTagValue(
+ FunctionRegistry.FUNC_ALIAS).getOrElse("format_string")
}
/**
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/xml/xpath.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/xml/xpath.scala
index 073b45a..55e06cb 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/xml/xpath.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/xml/xpath.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql.catalyst.expressions.xml
-import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
+import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, TypeCheckResult}
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult.TypeCheckFailure
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
@@ -160,7 +160,8 @@ case class XPathFloat(xml: Expression, path: Expression) extends XPathExtract {
""")
// scalastyle:on line.size.limit
case class XPathDouble(xml: Expression, path: Expression) extends XPathExtract {
- override def prettyName: String = "xpath_double"
+ override def prettyName: String =
+ getTagValue(FunctionRegistry.FUNC_ALIAS).getOrElse("xpath_double")
override def dataType: DataType = DoubleType
override def nullSafeEval(xml: Any, path: Any): Any = {
diff --git a/sql/core/src/test/resources/sql-tests/results/operators.sql.out b/sql/core/src/test/resources/sql-tests/results/operators.sql.out
index 083410f..cf857cf 100644
--- a/sql/core/src/test/resources/sql-tests/results/operators.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/operators.sql.out
@@ -293,7 +293,7 @@ struct<COT(CAST(-1 AS DOUBLE)):double>
-- !query
select ceiling(0)
-- !query schema
-struct<CEIL(CAST(0 AS DOUBLE)):bigint>
+struct<ceiling(CAST(0 AS DOUBLE)):bigint>
-- !query output
0
@@ -301,7 +301,7 @@ struct<CEIL(CAST(0 AS DOUBLE)):bigint>
-- !query
select ceiling(1)
-- !query schema
-struct<CEIL(CAST(1 AS DOUBLE)):bigint>
+struct<ceiling(CAST(1 AS DOUBLE)):bigint>
-- !query output
1
@@ -317,7 +317,7 @@ struct<CEIL(1234567890123456):bigint>
-- !query
select ceiling(1234567890123456)
-- !query schema
-struct<CEIL(1234567890123456):bigint>
+struct<ceiling(1234567890123456):bigint>
-- !query output
1234567890123456
@@ -333,7 +333,7 @@ struct<CEIL(0.01):decimal(1,0)>
-- !query
select ceiling(-0.10)
-- !query schema
-struct<CEIL(-0.10):decimal(1,0)>
+struct<ceiling(-0.10):decimal(1,0)>
-- !query output
0
@@ -389,7 +389,7 @@ true
-- !query
select mod(7, 2), mod(7, 0), mod(0, 2), mod(7, null), mod(null, 2), mod(null, null)
-- !query schema
-struct<(7 % 2):int,(7 % 0):int,(0 % 2):int,(7 % CAST(NULL AS INT)):int,(CAST(NULL AS INT) % 2):int,(CAST(NULL AS DOUBLE) % CAST(NULL AS DOUBLE)):double>
+struct<mod(7, 2):int,mod(7, 0):int,mod(0, 2):int,mod(7, CAST(NULL AS INT)):int,mod(CAST(NULL AS INT), 2):int,mod(CAST(NULL AS DOUBLE), CAST(NULL AS DOUBLE)):double>
-- !query output
1 NULL 0 NULL NULL NULL
@@ -405,7 +405,7 @@ struct<bit_length(abc):int>
-- !query
select CHAR_LENGTH('abc')
-- !query schema
-struct<length(abc):int>
+struct<char_length(abc):int>
-- !query output
3
@@ -413,7 +413,7 @@ struct<length(abc):int>
-- !query
select CHARACTER_LENGTH('abc')
-- !query schema
-struct<length(abc):int>
+struct<character_length(abc):int>
-- !query output
3
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/insert.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/insert.sql.out
index 1046d0e..63ad74a 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/insert.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/insert.sql.out
@@ -64,7 +64,7 @@ struct<>
-- !query
select col1, col2, char_length(col3) from inserttest
-- !query schema
-struct<col1:int,col2:int,length(col3):int>
+struct<col1:int,col2:int,char_length(col3):int>
-- !query output
30 50 10000
NULL 3 7
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out
index 65b6641..e59b9d5 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/numeric.sql.out
@@ -4404,7 +4404,7 @@ struct<>
-- !query
SELECT a, ceil(a), ceiling(a), floor(a), round(a) FROM ceil_floor_round
-- !query schema
-struct<a:decimal(38,18),CEIL(a):decimal(21,0),CEIL(a):decimal(21,0),FLOOR(a):decimal(21,0),round(a, 0):decimal(38,0)>
+struct<a:decimal(38,18),CEIL(a):decimal(21,0),ceiling(a):decimal(21,0),FLOOR(a):decimal(21,0),round(a, 0):decimal(38,0)>
-- !query output
-0.000001000000000000 0 0 -1 0
-5.499999000000000000 -5 -5 -6 -5
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out
index 3b26d56..5f89c79 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/strings.sql.out
@@ -627,7 +627,7 @@ struct<>
-- !query
SELECT substr(f1, 99995) from toasttest
-- !query schema
-struct<substring(f1, 99995, 2147483647):string>
+struct<substr(f1, 99995, 2147483647):string>
-- !query output
567890
567890
@@ -638,7 +638,7 @@ struct<substring(f1, 99995, 2147483647):string>
-- !query
SELECT substr(f1, 99995, 10) from toasttest
-- !query schema
-struct<substring(f1, 99995, 10):string>
+struct<substr(f1, 99995, 10):string>
-- !query output
567890
567890
diff --git a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
index 708dbb4..042d332 100644
--- a/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/string-functions.sql.out
@@ -111,7 +111,7 @@ struct<split(aa1cc2ee3, [1-9]+, 2):array<string>>
-- !query
SELECT substr('Spark SQL', 5)
-- !query schema
-struct<substring(Spark SQL, 5, 2147483647):string>
+struct<substr(Spark SQL, 5, 2147483647):string>
-- !query output
k SQL
@@ -119,7 +119,7 @@ k SQL
-- !query
SELECT substr('Spark SQL', -3)
-- !query schema
-struct<substring(Spark SQL, -3, 2147483647):string>
+struct<substr(Spark SQL, -3, 2147483647):string>
-- !query output
SQL
@@ -127,7 +127,7 @@ SQL
-- !query
SELECT substr('Spark SQL', 5, 1)
-- !query schema
-struct<substring(Spark SQL, 5, 1):string>
+struct<substr(Spark SQL, 5, 1):string>
-- !query output
k
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/implicitTypeCasts.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/implicitTypeCasts.sql.out
index f841adf..e47decb 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/implicitTypeCasts.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/implicitTypeCasts.sql.out
@@ -285,7 +285,7 @@ struct<month(CAST(1996-01-10 AS DATE)):int>
-- !query
SELECT day( '1996-01-10') FROM t
-- !query schema
-struct<dayofmonth(CAST(1996-01-10 AS DATE)):int>
+struct<day(CAST(1996-01-10 AS DATE)):int>
-- !query output
10
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ExplainSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ExplainSuite.scala
index b591705..16c5802 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/ExplainSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/ExplainSuite.scala
@@ -116,8 +116,8 @@ class ExplainSuite extends QueryTest with SharedSparkSession {
// plan should show the rewritten aggregate expression.
val df = sql("SELECT k, every(v), some(v), any(v) FROM test_agg GROUP BY k")
checkKeywordsExistsInExplain(df,
- "Aggregate [k#x], [k#x, min(v#x) AS every(v)#x, max(v#x) AS some(v)#x, " +
- "max(v#x) AS any(v)#x]")
+ "Aggregate [k#x], [k#x, every(v#x) AS every(v)#x, some(v#x) AS some(v)#x, " +
+ "any(v#x) AS any(v)#x]")
}
}