You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ge...@apache.org on 2021/07/06 06:34:46 UTC

[spark] branch branch-3.2 updated: [SPARK-35978][SQL] Support non-reserved keyword TIMESTAMP_LTZ

This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.2 by this push:
     new e09feda  [SPARK-35978][SQL] Support non-reserved keyword TIMESTAMP_LTZ
e09feda is described below

commit e09feda1d23a89a6f15a900f8001405f47b7e058
Author: Gengliang Wang <ge...@apache.org>
AuthorDate: Tue Jul 6 14:33:22 2021 +0800

    [SPARK-35978][SQL] Support non-reserved keyword TIMESTAMP_LTZ
    
    ### What changes were proposed in this pull request?
    
    Support new keyword `TIMESTAMP_LTZ`, which can be used for:
    
    - timestamp with local time zone data type in DDL
    - timestamp with local time zone data type in the Cast clause
    - timestamp with local time zone data type literals
    
    ### Why are the changes needed?
    
    Users can use `TIMESTAMP_LTZ` in DDL/Cast/Literals for the timestamp with local time zone type directly. The new keyword is independent of the SQL configuration `spark.sql.timestampType`.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No, the new timestamp type is not released yet.
    
    ### How was this patch tested?
    
    Unit test
    
    Closes #33224 from gengliangwang/TIMESTAMP_LTZ.
    
    Authored-by: Gengliang Wang <ge...@apache.org>
    Signed-off-by: Gengliang Wang <ge...@apache.org>
    (cherry picked from commit b0b9643cd76da48ed90e958e40717a664bc7494b)
    Signed-off-by: Gengliang Wang <ge...@apache.org>
---
 .../apache/spark/sql/catalyst/parser/AstBuilder.scala | 16 ++++++++++------
 .../sql/catalyst/parser/DataTypeParserSuite.scala     |  1 +
 .../sql/catalyst/parser/ExpressionParserSuite.scala   | 19 +++++++++++++++----
 3 files changed, 26 insertions(+), 10 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 680d781..d6363b5 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -2119,6 +2119,13 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
         throw QueryParsingErrors.cannotParseValueTypeError(valueType, value, ctx)
       }
     }
+
+    def constructTimestampLTZLiteral(value: String): Literal = {
+      val zoneId = getZoneId(conf.sessionLocalTimeZone)
+      val specialTs = convertSpecialTimestamp(value, zoneId).map(Literal(_, TimestampType))
+      specialTs.getOrElse(toLiteral(stringToTimestamp(_, zoneId), TimestampType))
+    }
+
     try {
       valueType match {
         case "DATE" =>
@@ -2128,13 +2135,9 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
         case "TIMESTAMP_NTZ" =>
           val specialTs = convertSpecialTimestampNTZ(value).map(Literal(_, TimestampNTZType))
           specialTs.getOrElse(toLiteral(stringToTimestampWithoutTimeZone, TimestampNTZType))
+        case "TIMESTAMP_LTZ" =>
+          constructTimestampLTZLiteral(value)
         case "TIMESTAMP" =>
-          def constructTimestampLTZLiteral(value: String): Literal = {
-            val zoneId = getZoneId(conf.sessionLocalTimeZone)
-            val specialTs = convertSpecialTimestamp(value, zoneId).map(Literal(_, TimestampType))
-            specialTs.getOrElse(toLiteral(stringToTimestamp(_, zoneId), TimestampType))
-          }
-
           SQLConf.get.timestampType match {
             case TimestampNTZType =>
               val specialTs = convertSpecialTimestampNTZ(value).map(Literal(_, TimestampNTZType))
@@ -2529,6 +2532,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
       case ("date", Nil) => DateType
       case ("timestamp", Nil) => SQLConf.get.timestampType
       case ("timestamp_ntz", Nil) => TimestampNTZType
+      case ("timestamp_ltz", Nil) => TimestampType
       case ("string", Nil) => StringType
       case ("character" | "char", length :: Nil) => CharType(length.getText.toInt)
       case ("varchar", length :: Nil) => VarcharType(length.getText.toInt)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DataTypeParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DataTypeParserSuite.scala
index d34cccc..97dd0db 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DataTypeParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DataTypeParserSuite.scala
@@ -59,6 +59,7 @@ class DataTypeParserSuite extends SparkFunSuite with SQLHelper {
   checkDataType("DATE", DateType)
   checkDataType("timestamp", TimestampType)
   checkDataType("timestamp_ntz", TimestampNTZType)
+  checkDataType("timestamp_ltz", TimestampType)
   checkDataType("string", StringType)
   checkDataType("ChaR(5)", CharType(5))
   checkDataType("ChaRacter(5)", CharType(5))
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
index 7b13fa9..818afc4 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
@@ -455,6 +455,17 @@ class ExpressionParserSuite extends AnalysisTest {
   }
 
   test("type constructors") {
+    def checkTimestampNTZAndLTZ(): Unit = {
+      // Timestamp with local time zone
+      assertEqual("tImEstAmp_LTZ '2016-03-11 20:54:00.000'",
+        Literal(Timestamp.valueOf("2016-03-11 20:54:00.000")))
+      intercept("timestamP_LTZ '2016-33-11 20:54:00.000'", "Cannot parse the TIMESTAMP_LTZ value")
+      // Timestamp without time zone
+      assertEqual("tImEstAmp_Ntz '2016-03-11 20:54:00.000'",
+        Literal(LocalDateTime.parse("2016-03-11T20:54:00.000")))
+      intercept("tImEstAmp_Ntz '2016-33-11 20:54:00.000'", "Cannot parse the TIMESTAMP_NTZ value")
+    }
+
     // Dates.
     assertEqual("dAte '2016-03-11'", Literal(Date.valueOf("2016-03-11")))
     intercept("DAtE 'mar 11 2016'", "Cannot parse the DATE value")
@@ -464,10 +475,7 @@ class ExpressionParserSuite extends AnalysisTest {
       Literal(Timestamp.valueOf("2016-03-11 20:54:00.000")))
     intercept("timestamP '2016-33-11 20:54:00.000'", "Cannot parse the TIMESTAMP value")
 
-    // Timestamp without time zone
-    assertEqual("tImEstAmp_Ntz '2016-03-11 20:54:00.000'",
-      Literal(LocalDateTime.parse("2016-03-11T20:54:00.000")))
-    intercept("tImEstAmp_Ntz '2016-33-11 20:54:00.000'", "Cannot parse the TIMESTAMP_NTZ value")
+    checkTimestampNTZAndLTZ()
     withSQLConf(SQLConf.TIMESTAMP_TYPE.key -> TimestampTypes.TIMESTAMP_NTZ.toString) {
       assertEqual("tImEstAmp '2016-03-11 20:54:00.000'",
         Literal(LocalDateTime.parse("2016-03-11T20:54:00.000")))
@@ -477,6 +485,9 @@ class ExpressionParserSuite extends AnalysisTest {
       // If the timestamp string contains time zone, return a timestamp with local time zone literal
       assertEqual("tImEstAmp '1970-01-01 00:00:00.000 +01:00'",
         Literal(-3600000000L, TimestampType))
+
+      // The behavior of TIMESTAMP_NTZ and TIMESTAMP_LTZ is independent of SQLConf.TIMESTAMP_TYPE
+      checkTimestampNTZAndLTZ()
     }
     // Interval.
     val intervalLiteral = Literal(IntervalUtils.stringToInterval("interval 3 month 1 hour"))

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org