You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by gu...@apache.org on 2022/08/27 06:55:34 UTC
[spark] 01/02: [SPARK-40152][SQL][TESTS] Move tests from SplitPart to elementAt
This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/spark.git
commit 167f3ff4d752c6f51b71a38378deb47c97f745f0
Author: Yuming Wang <yu...@ebay.com>
AuthorDate: Wed Aug 24 13:33:26 2022 +0900
[SPARK-40152][SQL][TESTS] Move tests from SplitPart to elementAt
Move the tests from the "SplitPart" test case into the elementAt tests in CollectionExpressionsSuite, and simplify them (the removed test discarded the result of `.getMessage.contains(...)`; the moved version asserts the error message via `checkExceptionInExpression`).
Does this PR introduce any user-facing change? No.
How was this patch tested? N/A (test-only change).
Closes #37637 from wangyum/SPARK-40152-3.
Authored-by: Yuming Wang <yu...@ebay.com>
Signed-off-by: Hyukjin Kwon <gu...@apache.org>
(cherry picked from commit 06997d6eb73f271aede5b159d86d1db80a73b89f)
Signed-off-by: Hyukjin Kwon <gu...@apache.org>
---
.../expressions/CollectionExpressionsSuite.scala | 38 ++++++++++------------
1 file changed, 18 insertions(+), 20 deletions(-)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
index 8fb04cd1ac7..27187a15f43 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
@@ -1535,6 +1535,24 @@ class CollectionExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper
}
checkEvaluation(ElementAt(mb0, Literal(Array[Byte](2, 1), BinaryType)), "2")
checkEvaluation(ElementAt(mb0, Literal(Array[Byte](3, 4))), null)
+
+ // test defaultValueOutOfBound
+ val delimiter = Literal.create(".", StringType)
+ val str = StringSplitSQL(Literal.create("11.12.13", StringType), delimiter)
+ val outOfBoundValue = Some(Literal.create("", StringType))
+
+ checkEvaluation(ElementAt(str, Literal(3), outOfBoundValue), UTF8String.fromString("13"))
+ checkEvaluation(ElementAt(str, Literal(1), outOfBoundValue), UTF8String.fromString("11"))
+ checkEvaluation(ElementAt(str, Literal(10), outOfBoundValue), UTF8String.fromString(""))
+ checkEvaluation(ElementAt(str, Literal(-10), outOfBoundValue), UTF8String.fromString(""))
+
+ checkEvaluation(ElementAt(StringSplitSQL(Literal.create(null, StringType), delimiter),
+ Literal(1), outOfBoundValue), null)
+ checkEvaluation(ElementAt(StringSplitSQL(Literal.create("11.12.13", StringType),
+ Literal.create(null, StringType)), Literal(1), outOfBoundValue), null)
+
+ checkExceptionInExpression[Exception](
+ ElementAt(str, Literal(0), outOfBoundValue), "The index 0 is invalid")
}
test("correctly handles ElementAt nullability for arrays") {
@@ -2532,24 +2550,4 @@ class CollectionExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper
Date.valueOf("2017-02-12")))
}
}
-
- test("SplitPart") {
- val delimiter = Literal.create(".", StringType)
- val str = StringSplitSQL(Literal.create("11.12.13", StringType), delimiter)
- val outOfBoundValue = Some(Literal.create("", StringType))
-
- checkEvaluation(ElementAt(str, Literal(3), outOfBoundValue), UTF8String.fromString("13"))
- checkEvaluation(ElementAt(str, Literal(1), outOfBoundValue), UTF8String.fromString("11"))
- checkEvaluation(ElementAt(str, Literal(10), outOfBoundValue), UTF8String.fromString(""))
- checkEvaluation(ElementAt(str, Literal(-10), outOfBoundValue), UTF8String.fromString(""))
-
- checkEvaluation(ElementAt(StringSplitSQL(Literal.create(null, StringType), delimiter),
- Literal(1), outOfBoundValue), null)
- checkEvaluation(ElementAt(StringSplitSQL(Literal.create("11.12.13", StringType),
- Literal.create(null, StringType)), Literal(1), outOfBoundValue), null)
-
- intercept[Exception] {
- checkEvaluation(ElementAt(str, Literal(0), outOfBoundValue), null)
- }.getMessage.contains("The index 0 is invalid")
- }
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org