Posted to commits@spark.apache.org by ma...@apache.org on 2021/07/05 19:31:20 UTC

[spark] branch master updated: [SPARK-35977][SQL] Support non-reserved keyword TIMESTAMP_NTZ

This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 5f44acf  [SPARK-35977][SQL] Support non-reserved keyword TIMESTAMP_NTZ
5f44acf is described below

commit 5f44acff3df51721fe891ea50c0a5bcf3a37a719
Author: Gengliang Wang <ge...@apache.org>
AuthorDate: Mon Jul 5 22:30:44 2021 +0300

    [SPARK-35977][SQL] Support non-reserved keyword TIMESTAMP_NTZ
    
    ### What changes were proposed in this pull request?
    
    Support the new keyword TIMESTAMP_NTZ, which can be used for (a short usage sketch follows this list):

    - the timestamp without time zone data type in DDL
    - the timestamp without time zone data type in CAST clauses
    - timestamp without time zone typed literals
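
    For illustration only (not part of the original PR description): a minimal Scala sketch of the three usages above, assuming a Spark build that already contains this change; the local SparkSession and the table name `events` are hypothetical.

    ```scala
    import org.apache.spark.sql.SparkSession

    // Illustrative only: assumes a build with TIMESTAMP_NTZ support.
    val spark = SparkSession.builder().master("local[*]").getOrCreate()

    // DDL: declare a column using the new non-reserved keyword
    // (illustrative; data source support for the type may vary).
    spark.sql("CREATE TABLE events (id INT, ts TIMESTAMP_NTZ) USING parquet")

    // CAST: convert a string to the timestamp without time zone type.
    spark.sql("SELECT CAST('2021-07-05 19:31:20' AS TIMESTAMP_NTZ)").show()

    // Typed literal: the keyword prefixes a quoted timestamp string.
    spark.sql("SELECT TIMESTAMP_NTZ '2021-07-05 19:31:20'").show()
    ```

    Because TIMESTAMP_NTZ is non-reserved, it remains usable as an ordinary identifier (for example, a column name) in existing queries.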
    
    ### Why are the changes needed?
    
    Users can refer to the timestamp without time zone type directly via `TIMESTAMP_NTZ` in DDL, CAST clauses, and typed literals.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No, the new timestamp type has not been released yet.
    
    ### How was this patch tested?
    
    Unit tests in DataTypeParserSuite and ExpressionParserSuite.
    
    Closes #33221 from gengliangwang/timstamp_ntz.
    
    Authored-by: Gengliang Wang <ge...@apache.org>
    Signed-off-by: Max Gekk <ma...@gmail.com>
---
 .../main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala  | 4 ++++
 .../org/apache/spark/sql/catalyst/parser/DataTypeParserSuite.scala    | 1 +
 .../org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala  | 3 +++
 3 files changed, 8 insertions(+)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
index 5b9107f..c650cf0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala
@@ -2125,6 +2125,9 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
           val zoneId = getZoneId(conf.sessionLocalTimeZone)
           val specialDate = convertSpecialDate(value, zoneId).map(Literal(_, DateType))
           specialDate.getOrElse(toLiteral(stringToDate, DateType))
+        case "TIMESTAMP_NTZ" =>
+          val specialTs = convertSpecialTimestampNTZ(value).map(Literal(_, TimestampNTZType))
+          specialTs.getOrElse(toLiteral(stringToTimestampWithoutTimeZone, TimestampNTZType))
         case "TIMESTAMP" =>
           def constructTimestampLTZLiteral(value: String): Literal = {
             val zoneId = getZoneId(conf.sessionLocalTimeZone)
@@ -2525,6 +2528,7 @@ class AstBuilder extends SqlBaseBaseVisitor[AnyRef] with SQLConfHelper with Logg
       case ("double", Nil) => DoubleType
       case ("date", Nil) => DateType
       case ("timestamp", Nil) => SQLConf.get.timestampType
+      case ("timestamp_ntz", Nil) => TimestampNTZType
       case ("string", Nil) => StringType
       case ("character" | "char", length :: Nil) => CharType(length.getText.toInt)
       case ("varchar", length :: Nil) => VarcharType(length.getText.toInt)
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DataTypeParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DataTypeParserSuite.scala
index a6b78e0..d34cccc 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DataTypeParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/DataTypeParserSuite.scala
@@ -58,6 +58,7 @@ class DataTypeParserSuite extends SparkFunSuite with SQLHelper {
   checkDataType("deC", DecimalType.USER_DEFAULT)
   checkDataType("DATE", DateType)
   checkDataType("timestamp", TimestampType)
+  checkDataType("timestamp_ntz", TimestampNTZType)
   checkDataType("string", StringType)
   checkDataType("ChaR(5)", CharType(5))
   checkDataType("ChaRacter(5)", CharType(5))
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
index 37e2d9b..7b13fa9 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/ExpressionParserSuite.scala
@@ -465,6 +465,9 @@ class ExpressionParserSuite extends AnalysisTest {
     intercept("timestamP '2016-33-11 20:54:00.000'", "Cannot parse the TIMESTAMP value")
 
     // Timestamp without time zone
+    assertEqual("tImEstAmp_Ntz '2016-03-11 20:54:00.000'",
+      Literal(LocalDateTime.parse("2016-03-11T20:54:00.000")))
+    intercept("tImEstAmp_Ntz '2016-33-11 20:54:00.000'", "Cannot parse the TIMESTAMP_NTZ value")
     withSQLConf(SQLConf.TIMESTAMP_TYPE.key -> TimestampTypes.TIMESTAMP_NTZ.toString) {
       assertEqual("tImEstAmp '2016-03-11 20:54:00.000'",
         Literal(LocalDateTime.parse("2016-03-11T20:54:00.000")))
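
As a quick, hedged illustration (not part of the commit), the parser paths touched above can be exercised directly with the catalyst parser; this mirrors the assertions added in DataTypeParserSuite and ExpressionParserSuite and assumes a build that includes this change:

    import org.apache.spark.sql.catalyst.parser.CatalystSqlParser
    import org.apache.spark.sql.types.TimestampNTZType

    // The new data type keyword resolves to TimestampNTZType (keywords are case-insensitive).
    assert(CatalystSqlParser.parseDataType("timestamp_ntz") == TimestampNTZType)

    // The typed literal parses into a Literal carrying TimestampNTZType.
    val lit = CatalystSqlParser.parseExpression("TIMESTAMP_NTZ '2016-03-11 20:54:00.000'")
    assert(lit.dataType == TimestampNTZType)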
