You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ma...@apache.org on 2021/04/18 08:35:50 UTC

[spark] branch master updated: [SPARK-35114][SQL][TESTS] Add checks for ANSI intervals to `LiteralExpressionSuite`

This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new d04b467  [SPARK-35114][SQL][TESTS] Add checks for ANSI intervals to `LiteralExpressionSuite`
d04b467 is described below

commit d04b467690f1684728b3cc18f033be0d29f3f9e3
Author: Max Gekk <ma...@gmail.com>
AuthorDate: Sun Apr 18 11:35:00 2021 +0300

    [SPARK-35114][SQL][TESTS] Add checks for ANSI intervals to `LiteralExpressionSuite`
    
    ### What changes were proposed in this pull request?
    In the PR, I propose to add additional checks for ANSI interval types `YearMonthIntervalType` and `DayTimeIntervalType` to `LiteralExpressionSuite`.
    
    Also, I replaced some `Long` literal values with `CalendarInterval` instances so that the tests actually exercise `CalendarIntervalType`, as they were originally intended to.
    
    ### Why are the changes needed?
    To improve test coverage and have the same checks for ANSI types as for `CalendarIntervalType`.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    By running the modified test suite:
    ```
    $ build/sbt "test:testOnly *LiteralExpressionSuite"
    ```
    
    Closes #32213 from MaxGekk/interval-literal-tests.
    
    Authored-by: Max Gekk <ma...@gmail.com>
    Signed-off-by: Max Gekk <ma...@gmail.com>
---
 .../expressions/LiteralExpressionSuite.scala       | 23 +++++++++++++++-------
 1 file changed, 16 insertions(+), 7 deletions(-)

diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
index a5f70fd..bda43aa 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/LiteralExpressionSuite.scala
@@ -50,6 +50,8 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
     checkEvaluation(Literal.create(null, DateType), null)
     checkEvaluation(Literal.create(null, TimestampType), null)
     checkEvaluation(Literal.create(null, CalendarIntervalType), null)
+    checkEvaluation(Literal.create(null, YearMonthIntervalType), null)
+    checkEvaluation(Literal.create(null, DayTimeIntervalType), null)
     checkEvaluation(Literal.create(null, ArrayType(ByteType, true)), null)
     checkEvaluation(Literal.create(null, ArrayType(StringType, true)), null)
     checkEvaluation(Literal.create(null, MapType(StringType, IntegerType)), null)
@@ -77,6 +79,8 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
       checkEvaluation(Literal.default(TimestampType), Instant.ofEpochSecond(0))
     }
     checkEvaluation(Literal.default(CalendarIntervalType), new CalendarInterval(0, 0, 0L))
+    checkEvaluation(Literal.default(YearMonthIntervalType), 0)
+    checkEvaluation(Literal.default(DayTimeIntervalType), 0L)
     checkEvaluation(Literal.default(ArrayType(StringType)), Array())
     checkEvaluation(Literal.default(MapType(IntegerType, StringType)), Map())
     checkEvaluation(Literal.default(StructType(StructField("a", StringType) :: Nil)), Row(""))
@@ -188,20 +192,21 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
     checkArrayLiteral(Array(1, 2, 3))
     checkArrayLiteral(Array("a", "b", "c"))
     checkArrayLiteral(Array(1.0, 4.0))
-    checkArrayLiteral(Array(MICROS_PER_DAY, MICROS_PER_HOUR))
+    checkArrayLiteral(Array(new CalendarInterval(1, 0, 0), new CalendarInterval(0, 1, 0)))
     val arr = collection.mutable.WrappedArray.make(Array(1.0, 4.0))
     checkEvaluation(Literal(arr), toCatalyst(arr))
   }
 
   test("seq") {
-    def checkSeqLiteral[T: TypeTag](a: Seq[T], elementType: DataType): Unit = {
+    def checkSeqLiteral[T: TypeTag](a: Seq[T]): Unit = {
       checkEvaluation(Literal.create(a), toCatalyst(a))
     }
-    checkSeqLiteral(Seq(1, 2, 3), IntegerType)
-    checkSeqLiteral(Seq("a", "b", "c"), StringType)
-    checkSeqLiteral(Seq(1.0, 4.0), DoubleType)
-    checkSeqLiteral(Seq(MICROS_PER_DAY, MICROS_PER_HOUR),
-      CalendarIntervalType)
+    checkSeqLiteral(Seq(1, 2, 3))
+    checkSeqLiteral(Seq("a", "b", "c"))
+    checkSeqLiteral(Seq(1.0, 4.0))
+    checkSeqLiteral(Seq(new CalendarInterval(1, 0, 0), new CalendarInterval(0, 1, 0)))
+    checkSeqLiteral(Seq(Period.ZERO, Period.ofMonths(1)))
+    checkSeqLiteral(Seq(Duration.ZERO, Duration.ofDays(1)))
   }
 
   test("map") {
@@ -210,6 +215,7 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
     }
     checkMapLiteral(Map("a" -> 1, "b" -> 2, "c" -> 3))
     checkMapLiteral(Map("1" -> 1.0, "2" -> 2.0, "3" -> 3.0))
+    checkMapLiteral(Map(Period.ofMonths(1) -> Duration.ZERO))
     assert(Literal.create(Map("a" -> 1)).toString === "map(keys: [a], values: [1])")
   }
 
@@ -220,6 +226,7 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
     checkStructLiteral((1, 3.0, "abcde"))
     checkStructLiteral(("de", 1, 2.0f))
     checkStructLiteral((1, ("fgh", 3.0)))
+    checkStructLiteral((Period.ZERO, ("abc", Duration.ofDays(1))))
   }
 
   test("unsupported types (map and struct) in Literal.apply") {
@@ -337,6 +344,8 @@ class LiteralExpressionSuite extends SparkFunSuite with ExpressionEvalHelper {
       Literal.create(Array(1.toByte, 2.toByte, 3.toByte), BinaryType))
     assert(Literal(Array("1", "2", "3")) ==
       Literal.create(Array("1", "2", "3"), ArrayType(StringType)))
+    assert(Literal(Array(Period.ofMonths(1))) ==
+      Literal.create(Array(Period.ofMonths(1)), ArrayType(YearMonthIntervalType)))
   }
 
   test("SPARK-34342: Date/Timestamp toString") {

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org