Posted to commits@spark.apache.org by sr...@apache.org on 2022/08/23 13:55:53 UTC

[spark] branch master updated: [SPARK-40152][SQL][TESTS] Add tests for SplitPart

This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 4f525eed7d5 [SPARK-40152][SQL][TESTS] Add tests for SplitPart
4f525eed7d5 is described below

commit 4f525eed7d5d461498aee68c4d3e57941f9aae2c
Author: Yuming Wang <yu...@ebay.com>
AuthorDate: Tue Aug 23 08:55:27 2022 -0500

    [SPARK-40152][SQL][TESTS] Add tests for SplitPart
    
    ### What changes were proposed in this pull request?
    
    Add tests for `SplitPart`; a short sketch of the behavior they pin down follows the diffstat below.
    
    ### Why are the changes needed?
    
    Improve test coverage.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    N/A.
    
    Closes #37626 from wangyum/SPARK-40152-2.
    
    Authored-by: Yuming Wang <yu...@ebay.com>
    Signed-off-by: Sean Owen <sr...@gmail.com>
---
 .../catalyst/expressions/collectionOperations.scala  |  2 +-
 .../expressions/CollectionExpressionsSuite.scala     | 20 ++++++++++++++++++++
 2 files changed, 21 insertions(+), 1 deletion(-)
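
For context, a minimal sketch of the SQL-level behavior the new tests pin down, assuming the `split_part` function that `SplitPart` backs and a spark-shell session named `spark` (the setup is illustrative, not part of this patch):

    // Assumes a SparkSession named `spark`, as in spark-shell.
    // split_part(str, delimiter, partNum) is the SQL surface for SplitPart.
    spark.sql("SELECT split_part('11.12.13', '.', 3)").show()   // 13
    spark.sql("SELECT split_part('11.12.13', '.', 1)").show()   // 11
    spark.sql("SELECT split_part('11.12.13', '.', 10)").show()  // "" (out of bounds)
    spark.sql("SELECT split_part('11.12.13', '.', -10)").show() // "" (out of bounds)
    // split_part('11.12.13', '.', 0) raises an error: the index 0 is invalid.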

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
index 870f58b4396..78496c98dec 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/collectionOperations.scala
@@ -2270,7 +2270,7 @@ case class ElementAt(
               case Some(value) =>
                 val defaultValueEval = value.genCode(ctx)
                 s"""
-                  ${defaultValueEval.code};
+                  ${defaultValueEval.code}
                   ${ev.isNull} = ${defaultValueEval.isNull};
                   ${ev.value} = ${defaultValueEval.value};
                 """.stripMargin
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
index 2b0b9647665..94cf0a74467 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CollectionExpressionsSuite.scala
@@ -2522,4 +2522,24 @@ class CollectionExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper
           Date.valueOf("2017-02-12")))
     }
   }
+
+  test("SplitPart") {
+    val delimiter = Literal.create(".", StringType)
+    val str = StringSplitSQL(Literal.create("11.12.13", StringType), delimiter)
+    val outOfBoundValue = Some(Literal.create("", StringType))
+
+    checkEvaluation(ElementAt(str, Literal(3), outOfBoundValue), UTF8String.fromString("13"))
+    checkEvaluation(ElementAt(str, Literal(1), outOfBoundValue), UTF8String.fromString("11"))
+    checkEvaluation(ElementAt(str, Literal(10), outOfBoundValue), UTF8String.fromString(""))
+    checkEvaluation(ElementAt(str, Literal(-10), outOfBoundValue), UTF8String.fromString(""))
+
+    checkEvaluation(ElementAt(StringSplitSQL(Literal.create(null, StringType), delimiter),
+      Literal(1), outOfBoundValue), null)
+    checkEvaluation(ElementAt(StringSplitSQL(Literal.create("11.12.13", StringType),
+      Literal.create(null, StringType)), Literal(1), outOfBoundValue), null)
+
+    intercept[Exception] {
+      checkEvaluation(ElementAt(str, Literal(0), outOfBoundValue), null)
+    }.getMessage.contains("The index 0 is invalid")
+  }
 }
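
The new test targets `ElementAt` over `StringSplitSQL` rather than `SplitPart` itself. A plausible reading, hedged because the rewrite is not shown in this patch, is that `SplitPart` is a `RuntimeReplaceable` expression that lowers to exactly this combination with an empty-string out-of-bound default, so evaluating the replacement exercises `SplitPart`'s behavior. A sketch of that assumed correspondence:

    // Sketch only: the SplitPart ~> ElementAt(StringSplitSQL(...)) lowering
    // is an assumption, not shown in this patch.
    import org.apache.spark.sql.catalyst.expressions.{ElementAt, Literal, StringSplitSQL}
    import org.apache.spark.sql.types.StringType

    val delimiter = Literal.create(".", StringType)
    val parts = StringSplitSQL(Literal.create("11.12.13", StringType), delimiter)
    // SplitPart("11.12.13", ".", 2) would evaluate like this replacement,
    // yielding UTF8String.fromString("12") under the semantics tested above.
    val secondPart = ElementAt(parts, Literal(2), Some(Literal.create("", StringType)))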

