You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by gu...@apache.org on 2022/08/27 05:59:21 UTC
[spark] branch master updated: [SPARK-40152][SQL][TESTS][FOLLOW-UP] Disable ANSI for out of bound test at ElementAt
This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 4b0c3bab1ab [SPARK-40152][SQL][TESTS][FOLLOW-UP] Disable ANSI for out of bound test at ElementAt
4b0c3bab1ab is described below
commit 4b0c3bab1ab082565a051990bf45774f15962deb
Author: Hyukjin Kwon <gu...@apache.org>
AuthorDate: Sat Aug 27 14:58:56 2022 +0900
[SPARK-40152][SQL][TESTS][FOLLOW-UP] Disable ANSI for out of bound test at ElementAt
### What changes were proposed in this pull request?
This PR proposes to fix the test so that it passes with ANSI mode on. Currently the `elementAt` test fails when ANSI mode is on:
```
[info] - elementAt *** FAILED *** (309 milliseconds)
[info] Exception evaluating element_at(stringsplitsql(11.12.13, .), 10, Some(), true) (ExpressionEvalHelper.scala:205)
[info] org.scalatest.exceptions.TestFailedException:
[info] at org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:472)
[info] at org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:471)
[info] at org.scalatest.funsuite.AnyFunSuite.newAssertionFailedException(AnyFunSuite.scala:1563)
[info] at org.scalatest.Assertions.fail(Assertions.scala:949)
[info] at org.scalatest.Assertions.fail$(Assertions.scala:945)
[info] at org.scalatest.funsuite.AnyFunSuite.fail(AnyFunSuite.scala:1563)
[info] at org.apache.spark.sql.catalyst.expressions.ExpressionEvalHelper.checkEvaluationWithoutCodegen(ExpressionEvalHelper.scala:205)
[info] at org.apache.spark.sql.catalyst.expressions.ExpressionEvalHelper.checkEvaluationWithoutCodegen$(ExpressionEvalHelper.scala:199)
[info] at org.apache.spark.sql.catalyst.expressions.CollectionExpressionsSuite.checkEvaluationWithoutCodegen(CollectionExpressionsSuite.scala:39)
[info] at org.apache.spark.sql.catalyst.expressions.ExpressionEvalHelper.checkEvaluation(ExpressionEvalHelper.scala:87)
[info] at org.apache.spark.sql.catalyst.expressions.ExpressionEvalHelper.checkEvaluation$(ExpressionEvalHelper.scala:82)
[info] at org.apache.spark.sql.catalyst.expressions.CollectionExpressionsSuite.checkEvaluation(CollectionExpressionsSuite.scala:39)
[info] at org.apache.spark.sql.catalyst.expressions.CollectionExpressionsSuite.$anonfun$new$333(CollectionExpressionsSuite.scala:1546)
```
https://github.com/apache/spark/runs/8046961366?check_suite_focus=true
### Why are the changes needed?
To recover the build with ANSI mode.
### Does this PR introduce _any_ user-facing change?
No, test-only.
### How was this patch tested?
Unit test fixed.
Closes #37684 from HyukjinKwon/SPARK-40152.
Authored-by: Hyukjin Kwon <gu...@apache.org>
Signed-off-by: Hyukjin Kwon <gu...@apache.org>
---
.../expressions/CollectionExpressionsSuite.scala | 40 ++++++++++++----------
1 file changed, 21 insertions(+), 19 deletions(-)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
index 229e698fb2e..9a6caea59bf 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
@@ -1483,7 +1483,7 @@ class CollectionExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper
checkEvaluation(ElementAt(a0, Literal(0)), null)
}.getMessage.contains("SQL array indices start at 1")
intercept[Exception] { checkEvaluation(ElementAt(a0, Literal(1.1)), null) }
- withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") {
+ withSQLConf(SQLConf.ANSI_ENABLED.key -> false.toString) {
checkEvaluation(ElementAt(a0, Literal(4)), null)
checkEvaluation(ElementAt(a0, Literal(-4)), null)
}
@@ -1512,7 +1512,7 @@ class CollectionExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper
assert(ElementAt(m0, Literal(1.0)).checkInputDataTypes().isFailure)
- withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") {
+ withSQLConf(SQLConf.ANSI_ENABLED.key -> false.toString) {
checkEvaluation(ElementAt(m0, Literal("d")), null)
checkEvaluation(ElementAt(m1, Literal("a")), null)
}
@@ -1529,7 +1529,7 @@ class CollectionExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper
MapType(BinaryType, StringType))
val mb1 = Literal.create(Map[Array[Byte], String](), MapType(BinaryType, StringType))
- withSQLConf(SQLConf.ANSI_ENABLED.key -> "false") {
+ withSQLConf(SQLConf.ANSI_ENABLED.key -> false.toString) {
checkEvaluation(ElementAt(mb0, Literal(Array[Byte](1, 2, 3))), null)
checkEvaluation(ElementAt(mb1, Literal(Array[Byte](1, 2))), null)
}
@@ -1537,22 +1537,24 @@ class CollectionExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper
checkEvaluation(ElementAt(mb0, Literal(Array[Byte](3, 4))), null)
// test defaultValueOutOfBound
- val delimiter = Literal.create(".", StringType)
- val str = StringSplitSQL(Literal.create("11.12.13", StringType), delimiter)
- val outOfBoundValue = Some(Literal.create("", StringType))
-
- checkEvaluation(ElementAt(str, Literal(3), outOfBoundValue), UTF8String.fromString("13"))
- checkEvaluation(ElementAt(str, Literal(1), outOfBoundValue), UTF8String.fromString("11"))
- checkEvaluation(ElementAt(str, Literal(10), outOfBoundValue), UTF8String.fromString(""))
- checkEvaluation(ElementAt(str, Literal(-10), outOfBoundValue), UTF8String.fromString(""))
-
- checkEvaluation(ElementAt(StringSplitSQL(Literal.create(null, StringType), delimiter),
- Literal(1), outOfBoundValue), null)
- checkEvaluation(ElementAt(StringSplitSQL(Literal.create("11.12.13", StringType),
- Literal.create(null, StringType)), Literal(1), outOfBoundValue), null)
-
- checkExceptionInExpression[Exception](
- ElementAt(str, Literal(0), outOfBoundValue), "The index 0 is invalid")
+ withSQLConf(SQLConf.ANSI_ENABLED.key -> false.toString) {
+ val delimiter = Literal.create(".", StringType)
+ val str = StringSplitSQL(Literal.create("11.12.13", StringType), delimiter)
+ val outOfBoundValue = Some(Literal.create("", StringType))
+
+ checkEvaluation(ElementAt(str, Literal(3), outOfBoundValue), UTF8String.fromString("13"))
+ checkEvaluation(ElementAt(str, Literal(1), outOfBoundValue), UTF8String.fromString("11"))
+ checkEvaluation(ElementAt(str, Literal(10), outOfBoundValue), UTF8String.fromString(""))
+ checkEvaluation(ElementAt(str, Literal(-10), outOfBoundValue), UTF8String.fromString(""))
+
+ checkEvaluation(ElementAt(StringSplitSQL(Literal.create(null, StringType), delimiter),
+ Literal(1), outOfBoundValue), null)
+ checkEvaluation(ElementAt(StringSplitSQL(Literal.create("11.12.13", StringType),
+ Literal.create(null, StringType)), Literal(1), outOfBoundValue), null)
+
+ checkExceptionInExpression[Exception](
+ ElementAt(str, Literal(0), outOfBoundValue), "The index 0 is invalid")
+ }
}
test("correctly handles ElementAt nullability for arrays") {
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org