Posted to commits@spark.apache.org by ma...@apache.org on 2021/07/12 06:56:54 UTC

[spark] branch branch-3.2 updated: [SPARK-36044][SQL] Support TimestampNTZ in functions unix_timestamp/to_unix_timestamp

This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.2 by this push:
     new 5816482  [SPARK-36044][SQL] Support TimestampNTZ in functions unix_timestamp/to_unix_timestamp
5816482 is described below

commit 58164828683db50fb0e1698b679ffc0a773da847
Author: gengjiaan <ge...@360.cn>
AuthorDate: Mon Jul 12 09:55:43 2021 +0300

    [SPARK-36044][SQL] Support TimestampNTZ in functions unix_timestamp/to_unix_timestamp
    
    ### What changes were proposed in this pull request?
    The functions `unix_timestamp`/`to_unix_timestamp` should be able to accept input of `TimestampNTZType`.
    
    ### Why are the changes needed?
    The new `TimestampNTZType` is not currently accepted by `unix_timestamp`/`to_unix_timestamp`, which fail with a type-mismatch error for such input. Supporting it lets timestamps without a time zone be converted to Unix epoch seconds, consistent with `TimestampType` input.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes. `unix_timestamp`/`to_unix_timestamp` now accept `TimestampNTZType` input.
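
    A minimal sketch of the new behavior, assuming a Spark session with
    `spark.implicits._` in scope (the column name and sample value are
    illustrative, not taken from this patch):

        import java.time.LocalDateTime
        import org.apache.spark.sql.functions.{col, unix_timestamp}

        // In Spark 3.2, LocalDateTime values are encoded as TIMESTAMP_NTZ.
        val df = Seq(LocalDateTime.parse("2015-07-24T10:00:00")).toDF("ntz")
        // Before this patch the call below failed to resolve with a type
        // mismatch; it now returns the value as Unix epoch seconds.
        df.select(unix_timestamp(col("ntz"))).show()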
    
    ### How was this patch tested?
    New unit tests in `DateExpressionsSuite` and `DateFunctionsSuite`.
    
    Closes #33278 from beliefer/SPARK-36044.
    
    Authored-by: gengjiaan <ge...@360.cn>
    Signed-off-by: Max Gekk <ma...@gmail.com>
    (cherry picked from commit 8738682f6a36436da0e9fc332d58b2f41309e2c2)
    Signed-off-by: Max Gekk <ma...@gmail.com>
---
 .../spark/sql/catalyst/expressions/datetimeExpressions.scala  |  8 +++++---
 .../spark/sql/catalyst/expressions/DateExpressionsSuite.scala |  9 +++++++++
 .../test/scala/org/apache/spark/sql/DateFunctionsSuite.scala  | 11 ++++++++++-
 3 files changed, 24 insertions(+), 4 deletions(-)
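
A note for readers skimming the diff below: Spark stores both TimestampType
and TimestampNTZType values internally as microseconds in a Long, so
ToTimestamp only needs to admit the new type and route it through the
existing TimestampType branch, which divides by downScaleFactor
(microseconds per second for unix_timestamp) to produce seconds. A rough
model of that shared branch, as a sketch rather than the patch itself:

    // Both timestamp types carry their value as microseconds in a Long,
    // so Unix seconds fall out of an integer division. TIMESTAMP_NTZ
    // values are taken as-is, with no time zone adjustment.
    def toUnixSeconds(micros: Long): Long = micros / 1000000L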

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
index 979eeba..f0ed89e 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/datetimeExpressions.scala
@@ -1091,8 +1091,10 @@ abstract class ToTimestamp
   override protected def formatString: Expression = right
   override protected def isParsing = true
 
+  override def forTimestampNTZ: Boolean = left.dataType == TimestampNTZType
+
   override def inputTypes: Seq[AbstractDataType] =
-    Seq(TypeCollection(StringType, DateType, TimestampType), StringType)
+    Seq(TypeCollection(StringType, DateType, TimestampType, TimestampNTZType), StringType)
 
   override def dataType: DataType = LongType
   override def nullable: Boolean = if (failOnError) children.exists(_.nullable) else true
@@ -1112,7 +1114,7 @@ abstract class ToTimestamp
       left.dataType match {
         case DateType =>
           daysToMicros(t.asInstanceOf[Int], zoneId) / downScaleFactor
-        case TimestampType =>
+        case TimestampType | TimestampNTZType =>
           t.asInstanceOf[Long] / downScaleFactor
         case StringType =>
           val fmt = right.eval(input)
@@ -1192,7 +1194,7 @@ abstract class ToTimestamp
              |}
              |""".stripMargin)
       }
-      case TimestampType =>
+      case TimestampType | TimestampNTZType =>
         val eval1 = left.genCode(ctx)
         ev.copy(code = code"""
           ${eval1.code}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
index 5f071c3..02d6d95 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/DateExpressionsSuite.scala
@@ -916,6 +916,11 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
                 Literal(new Timestamp(1000000)), Literal("yyyy-MM-dd HH:mm:ss"), timeZoneId),
               1000L)
             checkEvaluation(
+              UnixTimestamp(
+                Literal(DateTimeUtils.microsToLocalDateTime(DateTimeUtils.millisToMicros(1000000))),
+                Literal("yyyy-MM-dd HH:mm:ss"), timeZoneId),
+              1000L)
+            checkEvaluation(
               UnixTimestamp(Literal(date1), Literal("yyyy-MM-dd HH:mm:ss"), timeZoneId),
               MICROSECONDS.toSeconds(
                 DateTimeUtils.daysToMicros(DateTimeUtils.fromJavaDate(date1), tz.toZoneId)))
@@ -981,6 +986,10 @@ class DateExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper {
             checkEvaluation(ToUnixTimestamp(
               Literal(new Timestamp(1000000)), Literal(fmt1)),
               1000L)
+            checkEvaluation(ToUnixTimestamp(
+              Literal(DateTimeUtils.microsToLocalDateTime(DateTimeUtils.millisToMicros(1000000))),
+              Literal(fmt1)),
+              1000L)
             checkEvaluation(
               ToUnixTimestamp(Literal(date1), Literal(fmt1), timeZoneId),
               MICROSECONDS.toSeconds(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
index 34b1654..f2eb0b4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DateFunctionsSuite.scala
@@ -587,16 +587,22 @@ class DateFunctionsSuite extends QueryTest with SharedSparkSession {
         val date2 = Date.valueOf("2015-07-25")
         val ts1 = Timestamp.valueOf("2015-07-24 10:00:00.3")
         val ts2 = Timestamp.valueOf("2015-07-25 02:02:02.2")
+        val ntzTs1 = LocalDateTime.parse("2015-07-24T10:00:00.3")
+        val ntzTs2 = LocalDateTime.parse("2015-07-25T02:02:02.2")
         val s1 = "2015/07/24 10:00:00.5"
         val s2 = "2015/07/25 02:02:02.6"
         val ss1 = "2015-07-24 10:00:00"
         val ss2 = "2015-07-25 02:02:02"
         val fmt = "yyyy/MM/dd HH:mm:ss.S"
-        val df = Seq((date1, ts1, s1, ss1), (date2, ts2, s2, ss2)).toDF("d", "ts", "s", "ss")
+        val df = Seq((date1, ts1, ntzTs1, s1, ss1), (date2, ts2, ntzTs2, s2, ss2)).toDF(
+          "d", "ts", "ntzTs", "s", "ss")
         checkAnswer(df.select(unix_timestamp(col("ts"))), Seq(
           Row(secs(ts1.getTime)), Row(secs(ts2.getTime))))
         checkAnswer(df.select(unix_timestamp(col("ss"))), Seq(
           Row(secs(ts1.getTime)), Row(secs(ts2.getTime))))
+        checkAnswer(df.select(unix_timestamp(col("ntzTs"))), Seq(
+          Row(secs(DateTimeUtils.microsToMillis(DateTimeUtils.localDateTimeToMicros(ntzTs1)))),
+          Row(secs(DateTimeUtils.microsToMillis(DateTimeUtils.localDateTimeToMicros(ntzTs2))))))
         checkAnswer(df.select(unix_timestamp(col("d"), fmt)), Seq(
           Row(secs(date1.getTime)), Row(secs(date2.getTime))))
         checkAnswer(df.select(unix_timestamp(col("s"), fmt)), Seq(
@@ -605,6 +611,9 @@ class DateFunctionsSuite extends QueryTest with SharedSparkSession {
           Row(secs(ts1.getTime)), Row(secs(ts2.getTime))))
         checkAnswer(df.selectExpr("unix_timestamp(ss)"), Seq(
           Row(secs(ts1.getTime)), Row(secs(ts2.getTime))))
+        checkAnswer(df.selectExpr("unix_timestamp(ntzTs)"), Seq(
+          Row(secs(DateTimeUtils.microsToMillis(DateTimeUtils.localDateTimeToMicros(ntzTs1)))),
+          Row(secs(DateTimeUtils.microsToMillis(DateTimeUtils.localDateTimeToMicros(ntzTs2))))))
         checkAnswer(df.selectExpr(s"unix_timestamp(d, '$fmt')"), Seq(
           Row(secs(date1.getTime)), Row(secs(date2.getTime))))
         checkAnswer(df.selectExpr(s"unix_timestamp(s, '$fmt')"), Seq(
