You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ge...@apache.org on 2021/07/08 11:39:44 UTC

[spark] branch master updated: [SPARK-36043][SQL][TESTS] Add end-to-end tests with default timestamp type as TIMESTAMP_NTZ

This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 57342df  [SPARK-36043][SQL][TESTS] Add end-to-end tests with default timestamp type as TIMESTAMP_NTZ
57342df is described below

commit 57342dfc1dd7deaf60209127d93d416c096645ea
Author: Gengliang Wang <ge...@apache.org>
AuthorDate: Thu Jul 8 19:38:52 2021 +0800

    [SPARK-36043][SQL][TESTS] Add end-to-end tests with default timestamp type as TIMESTAMP_NTZ
    
    ### What changes were proposed in this pull request?
    
    Run end-to-end tests with default timestamp type as TIMESTAMP_NTZ to increase test coverage.
    
    ### Why are the changes needed?
    
    Increase test coverage.
    Also, there will be more and more expressions that have different behaviors when the default timestamp type is TIMESTAMP_NTZ, for example, `to_timestamp`, `from_json`, `from_csv`, and so on. Having this new test suite helps future development.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    CI tests.
    
    Closes #33259 from gengliangwang/ntzTest.
    
    Authored-by: Gengliang Wang <ge...@apache.org>
    Signed-off-by: Gengliang Wang <ge...@apache.org>
---
 .../sql-tests/inputs/timestampNTZ/datetime.sql     |    1 +
 .../results/timestampNTZ/datetime.sql.out          | 1595 ++++++++++++++++++++
 .../org/apache/spark/sql/SQLQueryTestSuite.scala   |   15 +
 .../thriftserver/ThriftServerQueryTestSuite.scala  |    6 +
 4 files changed, 1617 insertions(+)

diff --git a/sql/core/src/test/resources/sql-tests/inputs/timestampNTZ/datetime.sql b/sql/core/src/test/resources/sql-tests/inputs/timestampNTZ/datetime.sql
new file mode 100644
index 0000000..58ecf80
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/inputs/timestampNTZ/datetime.sql
@@ -0,0 +1 @@
+--IMPORT datetime.sql
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/datetime.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/datetime.sql.out
new file mode 100644
index 0000000..131ad01
--- /dev/null
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/datetime.sql.out
@@ -0,0 +1,1595 @@
+-- Automatically generated by SQLQueryTestSuite
+-- Number of queries: 193
+
+
+-- !query
+select TIMESTAMP_SECONDS(1230219000),TIMESTAMP_SECONDS(-1230219000),TIMESTAMP_SECONDS(null)
+-- !query schema
+struct<timestamp_seconds(1230219000):timestamp,timestamp_seconds(-1230219000):timestamp,timestamp_seconds(NULL):timestamp>
+-- !query output
+2008-12-25 07:30:00	1931-01-07 00:30:00	NULL
+
+
+-- !query
+select TIMESTAMP_SECONDS(1.23), TIMESTAMP_SECONDS(1.23d), TIMESTAMP_SECONDS(FLOAT(1.23))
+-- !query schema
+struct<timestamp_seconds(1.23):timestamp,timestamp_seconds(1.23):timestamp,timestamp_seconds(1.23):timestamp>
+-- !query output
+1969-12-31 16:00:01.23	1969-12-31 16:00:01.23	1969-12-31 16:00:01.23
+
+
+-- !query
+select TIMESTAMP_MILLIS(1230219000123),TIMESTAMP_MILLIS(-1230219000123),TIMESTAMP_MILLIS(null)
+-- !query schema
+struct<timestamp_millis(1230219000123):timestamp,timestamp_millis(-1230219000123):timestamp,timestamp_millis(NULL):timestamp>
+-- !query output
+2008-12-25 07:30:00.123	1931-01-07 00:29:59.877	NULL
+
+
+-- !query
+select TIMESTAMP_MICROS(1230219000123123),TIMESTAMP_MICROS(-1230219000123123),TIMESTAMP_MICROS(null)
+-- !query schema
+struct<timestamp_micros(1230219000123123):timestamp,timestamp_micros(-1230219000123123):timestamp,timestamp_micros(NULL):timestamp>
+-- !query output
+2008-12-25 07:30:00.123123	1931-01-07 00:29:59.876877	NULL
+
+
+-- !query
+select TIMESTAMP_SECONDS(1230219000123123)
+-- !query schema
+struct<>
+-- !query output
+java.lang.ArithmeticException
+long overflow
+
+
+-- !query
+select TIMESTAMP_SECONDS(-1230219000123123)
+-- !query schema
+struct<>
+-- !query output
+java.lang.ArithmeticException
+long overflow
+
+
+-- !query
+select TIMESTAMP_MILLIS(92233720368547758)
+-- !query schema
+struct<>
+-- !query output
+java.lang.ArithmeticException
+long overflow
+
+
+-- !query
+select TIMESTAMP_MILLIS(-92233720368547758)
+-- !query schema
+struct<>
+-- !query output
+java.lang.ArithmeticException
+long overflow
+
+
+-- !query
+select TIMESTAMP_SECONDS(0.1234567)
+-- !query schema
+struct<>
+-- !query output
+java.lang.ArithmeticException
+Rounding necessary
+
+
+-- !query
+select TIMESTAMP_SECONDS(0.1234567d), TIMESTAMP_SECONDS(FLOAT(0.1234567))
+-- !query schema
+struct<timestamp_seconds(0.1234567):timestamp,timestamp_seconds(0.1234567):timestamp>
+-- !query output
+1969-12-31 16:00:00.123456	1969-12-31 16:00:00.123456
+
+
+-- !query
+select UNIX_SECONDS(TIMESTAMP('2020-12-01 14:30:08Z')), UNIX_SECONDS(TIMESTAMP('2020-12-01 14:30:08.999999Z')), UNIX_SECONDS(null)
+-- !query schema
+struct<unix_seconds(2020-12-01 14:30:08Z):bigint,unix_seconds(2020-12-01 14:30:08.999999Z):bigint,unix_seconds(NULL):bigint>
+-- !query output
+1606833008	1606833008	NULL
+
+
+-- !query
+select UNIX_MILLIS(TIMESTAMP('2020-12-01 14:30:08Z')), UNIX_MILLIS(TIMESTAMP('2020-12-01 14:30:08.999999Z')), UNIX_MILLIS(null)
+-- !query schema
+struct<unix_millis(2020-12-01 14:30:08Z):bigint,unix_millis(2020-12-01 14:30:08.999999Z):bigint,unix_millis(NULL):bigint>
+-- !query output
+1606833008000	1606833008999	NULL
+
+
+-- !query
+select UNIX_MICROS(TIMESTAMP('2020-12-01 14:30:08Z')), UNIX_MICROS(TIMESTAMP('2020-12-01 14:30:08.999999Z')), UNIX_MICROS(null)
+-- !query schema
+struct<unix_micros(2020-12-01 14:30:08Z):bigint,unix_micros(2020-12-01 14:30:08.999999Z):bigint,unix_micros(NULL):bigint>
+-- !query output
+1606833008000000	1606833008999999	NULL
+
+
+-- !query
+select DATE_FROM_UNIX_DATE(0), DATE_FROM_UNIX_DATE(1000), DATE_FROM_UNIX_DATE(null)
+-- !query schema
+struct<date_from_unix_date(0):date,date_from_unix_date(1000):date,date_from_unix_date(NULL):date>
+-- !query output
+1970-01-01	1972-09-27	NULL
+
+
+-- !query
+select UNIX_DATE(DATE('1970-01-01')), UNIX_DATE(DATE('2020-12-04')), UNIX_DATE(null)
+-- !query schema
+struct<unix_date(1970-01-01):int,unix_date(2020-12-04):int,unix_date(NULL):int>
+-- !query output
+0	18600	NULL
+
+
+-- !query
+select current_date = current_date(), current_timestamp = current_timestamp()
+-- !query schema
+struct<(current_date() = current_date()):boolean,(current_timestamp() = current_timestamp()):boolean>
+-- !query output
+true	true
+
+
+-- !query
+select to_date(null), to_date('2016-12-31'), to_date('2016-12-31', 'yyyy-MM-dd')
+-- !query schema
+struct<to_date(NULL):date,to_date(2016-12-31):date,to_date(2016-12-31, yyyy-MM-dd):date>
+-- !query output
+NULL	2016-12-31	2016-12-31
+
+
+-- !query
+select to_timestamp(null), to_timestamp('2016-12-31 00:12:00'), to_timestamp('2016-12-31', 'yyyy-MM-dd')
+-- !query schema
+struct<to_timestamp(NULL):timestamp,to_timestamp(2016-12-31 00:12:00):timestamp,to_timestamp(2016-12-31, yyyy-MM-dd):timestamp>
+-- !query output
+NULL	2016-12-31 00:12:00	2016-12-31 00:00:00
+
+
+-- !query
+select to_timestamp_ntz(null), to_timestamp_ntz('2016-12-31 00:12:00'), to_timestamp_ntz('2016-12-31', 'yyyy-MM-dd')
+-- !query schema
+struct<to_timestamp_ntz(NULL):timestamp_ntz,to_timestamp_ntz(2016-12-31 00:12:00):timestamp_ntz,to_timestamp_ntz(2016-12-31, yyyy-MM-dd):timestamp_ntz>
+-- !query output
+NULL	2016-12-31 00:12:00	2016-12-31 00:00:00
+
+
+-- !query
+select to_timestamp_ntz(to_date(null)), to_timestamp_ntz(to_date('2016-12-31')), to_timestamp_ntz(to_date('2016-12-31', 'yyyy-MM-dd'))
+-- !query schema
+struct<to_timestamp_ntz(to_date(NULL)):timestamp_ntz,to_timestamp_ntz(to_date(2016-12-31)):timestamp_ntz,to_timestamp_ntz(to_date(2016-12-31, yyyy-MM-dd)):timestamp_ntz>
+-- !query output
+NULL	2016-12-31 00:00:00	2016-12-31 00:00:00
+
+
+-- !query
+select to_timestamp_ntz(to_timestamp(null)), to_timestamp_ntz(to_timestamp('2016-12-31 00:12:00')), to_timestamp_ntz(to_timestamp('2016-12-31', 'yyyy-MM-dd'))
+-- !query schema
+struct<to_timestamp_ntz(to_timestamp(NULL)):timestamp_ntz,to_timestamp_ntz(to_timestamp(2016-12-31 00:12:00)):timestamp_ntz,to_timestamp_ntz(to_timestamp(2016-12-31, yyyy-MM-dd)):timestamp_ntz>
+-- !query output
+NULL	2016-12-31 00:12:00	2016-12-31 00:00:00
+
+
+-- !query
+select dayofweek('2007-02-03'), dayofweek('2009-07-30'), dayofweek('2017-05-27'), dayofweek(null), dayofweek('1582-10-15 13:10:15')
+-- !query schema
+struct<dayofweek(2007-02-03):int,dayofweek(2009-07-30):int,dayofweek(2017-05-27):int,dayofweek(NULL):int,dayofweek(1582-10-15 13:10:15):int>
+-- !query output
+7	5	7	NULL	6
+
+
+-- !query
+create temporary view ttf1 as select * from values
+  (1, 2),
+  (2, 3)
+  as ttf1(current_date, current_timestamp)
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+select current_date, current_timestamp from ttf1
+-- !query schema
+struct<current_date:int,current_timestamp:int>
+-- !query output
+1	2
+2	3
+
+
+-- !query
+create temporary view ttf2 as select * from values
+  (1, 2),
+  (2, 3)
+  as ttf2(a, b)
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+select current_date = current_date(), current_timestamp = current_timestamp(), a, b from ttf2
+-- !query schema
+struct<(current_date() = current_date()):boolean,(current_timestamp() = current_timestamp()):boolean,a:int,b:int>
+-- !query output
+true	true	1	2
+true	true	2	3
+
+
+-- !query
+select a, b from ttf2 order by a, current_date
+-- !query schema
+struct<a:int,b:int>
+-- !query output
+1	2
+2	3
+
+
+-- !query
+select weekday('2007-02-03'), weekday('2009-07-30'), weekday('2017-05-27'), weekday(null), weekday('1582-10-15 13:10:15')
+-- !query schema
+struct<weekday(2007-02-03):int,weekday(2009-07-30):int,weekday(2017-05-27):int,weekday(NULL):int,weekday(1582-10-15 13:10:15):int>
+-- !query output
+5	3	5	NULL	4
+
+
+-- !query
+select year('1500-01-01'), month('1500-01-01'), dayOfYear('1500-01-01')
+-- !query schema
+struct<year(1500-01-01):int,month(1500-01-01):int,dayofyear(1500-01-01):int>
+-- !query output
+1500	1	1
+
+
+-- !query
+select date '2019-01-01\t'
+-- !query schema
+struct<DATE '2019-01-01':date>
+-- !query output
+2019-01-01
+
+
+-- !query
+select timestamp '2019-01-01\t'
+-- !query schema
+struct<1546300800000000:timestamp_ntz>
+-- !query output
+2019-01-01 00:00:00
+
+
+-- !query
+select date '2020-01-01中文'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Cannot parse the DATE value: 2020-01-01中文(line 1, pos 7)
+
+== SQL ==
+select date '2020-01-01中文'
+-------^^^
+
+
+-- !query
+select timestamp '2019-01-01中文'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.catalyst.parser.ParseException
+
+Cannot parse the TIMESTAMP value: 2019-01-01中文(line 1, pos 7)
+
+== SQL ==
+select timestamp '2019-01-01中文'
+-------^^^
+
+
+-- !query
+select timestamp'2011-11-11 11:11:11' + interval '2' day
+-- !query schema
+struct<1321009871000000 + INTERVAL '2 days':timestamp_ntz>
+-- !query output
+2011-11-13 11:11:11
+
+
+-- !query
+select timestamp'2011-11-11 11:11:11' - interval '2' day
+-- !query schema
+struct<1321009871000000 - INTERVAL '2 days':timestamp_ntz>
+-- !query output
+2011-11-09 11:11:11
+
+
+-- !query
+select date'2011-11-11 11:11:11' + interval '2' second
+-- !query schema
+struct<DATE '2011-11-11' + INTERVAL '2 seconds':date>
+-- !query output
+2011-11-11
+
+
+-- !query
+select date'2011-11-11 11:11:11' - interval '2' second
+-- !query schema
+struct<DATE '2011-11-11' - INTERVAL '2 seconds':date>
+-- !query output
+2011-11-10
+
+
+-- !query
+select '2011-11-11' - interval '2' day
+-- !query schema
+struct<2011-11-11 - INTERVAL '2 days':string>
+-- !query output
+2011-11-09 00:00:00
+
+
+-- !query
+select '2011-11-11 11:11:11' - interval '2' second
+-- !query schema
+struct<2011-11-11 11:11:11 - INTERVAL '2 seconds':string>
+-- !query output
+2011-11-11 11:11:09
+
+
+-- !query
+select '1' - interval '2' second
+-- !query schema
+struct<1 - INTERVAL '2 seconds':string>
+-- !query output
+NULL
+
+
+-- !query
+select 1 - interval '2' second
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve '1 + (- INTERVAL '2 seconds')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, '1' is of int type.; line 1 pos 7
+
+
+-- !query
+select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
+-- !query schema
+struct<(DATE '2020-01-01' - 1570356672345678):interval day to second>
+-- !query output
+86 13:48:47.654322000
+
+
+-- !query
+select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01'
+-- !query schema
+struct<(1570356672345678 - DATE '2020-01-01'):interval day to second>
+-- !query output
+-86 13:48:47.654322000
+
+
+-- !query
+select timestamp'2019-10-06 10:11:12.345678' - null
+-- !query schema
+struct<(1570356672345678 - NULL):interval day to second>
+-- !query output
+NULL
+
+
+-- !query
+select null - timestamp'2019-10-06 10:11:12.345678'
+-- !query schema
+struct<(NULL - 1570356672345678):interval day to second>
+-- !query output
+NULL
+
+
+-- !query
+select date'2020-01-01' - to_timestamp_ntz('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(DATE '2020-01-01' - to_timestamp_ntz(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+86 13:48:47.654322000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - date'2020-01-01'
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345678) - DATE '2020-01-01'):interval day to second>
+-- !query output
+-86 13:48:47.654322000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345678') - null
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345678) - NULL):interval day to second>
+-- !query output
+NULL
+
+
+-- !query
+select null - to_timestamp_ntz('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(NULL - to_timestamp_ntz(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp_ntz('2019-10-06 10:11:12.345677')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-07 10:11:12.345678) - to_timestamp_ntz(2019-10-06 10:11:12.345677)):interval day to second>
+-- !query output
+1 00:00:00.000001000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.345677') - to_timestamp_ntz('2019-10-07 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-06 10:11:12.345677) - to_timestamp_ntz(2019-10-07 10:11:12.345678)):interval day to second>
+-- !query output
+-1 00:00:00.000001000
+
+
+-- !query
+select to_timestamp_ntz('2019-10-07 10:11:12.345678') - to_timestamp('2019-10-06 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp_ntz(2019-10-07 10:11:12.345678) - to_timestamp(2019-10-06 10:11:12.345678)):interval day to second>
+-- !query output
+1 00:00:00.000000000
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.345678') - to_timestamp_ntz('2019-10-07 10:11:12.345678')
+-- !query schema
+struct<(to_timestamp(2019-10-06 10:11:12.345678) - to_timestamp_ntz(2019-10-07 10:11:12.345678)):interval day to second>
+-- !query output
+-1 00:00:00.000000000
+
+
+-- !query
+select date_add('2011-11-11', 1Y)
+-- !query schema
+struct<date_add(2011-11-11, 1):date>
+-- !query output
+2011-11-12
+
+
+-- !query
+select date_add('2011-11-11', 1S)
+-- !query schema
+struct<date_add(2011-11-11, 1):date>
+-- !query output
+2011-11-12
+
+
+-- !query
+select date_add('2011-11-11', 1)
+-- !query schema
+struct<date_add(2011-11-11, 1):date>
+-- !query output
+2011-11-12
+
+
+-- !query
+select date_add('2011-11-11', 1L)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1L)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1L' is of bigint type.; line 1 pos 7
+
+
+-- !query
+select date_add('2011-11-11', 1.0)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 1.0BD)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '1.0BD' is of decimal(2,1) type.; line 1 pos 7
+
+
+-- !query
+select date_add('2011-11-11', 1E1)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_add(CAST('2011-11-11' AS DATE), 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7
+
+
+-- !query
+select date_add('2011-11-11', '1')
+-- !query schema
+struct<date_add(2011-11-11, 1):date>
+-- !query output
+2011-11-12
+
+
+-- !query
+select date_add('2011-11-11', '1.2')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+The second argument of 'date_add' function needs to be an integer.
+
+
+-- !query
+select date_add(date'2011-11-11', 1)
+-- !query schema
+struct<date_add(DATE '2011-11-11', 1):date>
+-- !query output
+2011-11-12
+
+
+-- !query
+select date_add(timestamp'2011-11-11', 1)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_add(1320969600000000, 1)' due to data type mismatch: argument 1 requires date type, however, '1320969600000000' is of timestamp_ntz type.; line 1 pos 7
+
+
+-- !query
+select date_sub(date'2011-11-11', 1)
+-- !query schema
+struct<date_sub(DATE '2011-11-11', 1):date>
+-- !query output
+2011-11-10
+
+
+-- !query
+select date_sub(date'2011-11-11', '1')
+-- !query schema
+struct<date_sub(DATE '2011-11-11', 1):date>
+-- !query output
+2011-11-10
+
+
+-- !query
+select date_sub(date'2011-11-11', '1.2')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+The second argument of 'date_sub' function needs to be an integer.
+
+
+-- !query
+select date_sub(timestamp'2011-11-11', 1)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_sub(1320969600000000, 1)' due to data type mismatch: argument 1 requires date type, however, '1320969600000000' is of timestamp_ntz type.; line 1 pos 7
+
+
+-- !query
+select date_sub(null, 1)
+-- !query schema
+struct<date_sub(NULL, 1):date>
+-- !query output
+NULL
+
+
+-- !query
+select date_sub(date'2011-11-11', null)
+-- !query schema
+struct<date_sub(DATE '2011-11-11', NULL):date>
+-- !query output
+NULL
+
+
+-- !query
+select date'2011-11-11' + 1E1
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_add(DATE '2011-11-11', 10.0D)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, '10.0D' is of double type.; line 1 pos 7
+
+
+-- !query
+select date'2011-11-11' + '1'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_add(DATE '2011-11-11', CAST('1' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('1' AS DOUBLE)' is of double type.; line 1 pos 7
+
+
+-- !query
+select null + date '2001-09-28'
+-- !query schema
+struct<date_add(DATE '2001-09-28', NULL):date>
+-- !query output
+NULL
+
+
+-- !query
+select date '2001-09-28' + 7Y
+-- !query schema
+struct<date_add(DATE '2001-09-28', 7):date>
+-- !query output
+2001-10-05
+
+
+-- !query
+select 7S + date '2001-09-28'
+-- !query schema
+struct<date_add(DATE '2001-09-28', 7):date>
+-- !query output
+2001-10-05
+
+
+-- !query
+select date '2001-10-01' - 7
+-- !query schema
+struct<date_sub(DATE '2001-10-01', 7):date>
+-- !query output
+2001-09-24
+
+
+-- !query
+select date '2001-10-01' - '7'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_sub(DATE '2001-10-01', CAST('7' AS DOUBLE))' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'CAST('7' AS DOUBLE)' is of double type.; line 1 pos 7
+
+
+-- !query
+select date '2001-09-28' + null
+-- !query schema
+struct<date_add(DATE '2001-09-28', NULL):date>
+-- !query output
+NULL
+
+
+-- !query
+select date '2001-09-28' - null
+-- !query schema
+struct<date_sub(DATE '2001-09-28', NULL):date>
+-- !query output
+NULL
+
+
+-- !query
+create temp view v as select '1' str
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+select date_add('2011-11-11', str) from v
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_add(CAST('2011-11-11' AS DATE), v.str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'v.str' is of string type.; line 1 pos 7
+
+
+-- !query
+select date_sub('2011-11-11', str) from v
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), v.str)' due to data type mismatch: argument 2 requires (int or smallint or tinyint) type, however, 'v.str' is of string type.; line 1 pos 7
+
+
+-- !query
+select null - date '2019-10-06'
+-- !query schema
+struct<(NULL - DATE '2019-10-06'):interval day>
+-- !query output
+NULL
+
+
+-- !query
+select date '2001-10-01' - date '2001-09-28'
+-- !query schema
+struct<(DATE '2001-10-01' - DATE '2001-09-28'):interval day>
+-- !query output
+3 00:00:00.000000000
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12., yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.0', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.0, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.1', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.1, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
+-- !query output
+2019-10-06 10:11:12.1
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
+-- !query output
+2019-10-06 10:11:12.12
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.123UTC, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
+-- !query output
+2019-10-06 03:11:12.123
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
+-- !query output
+2019-10-06 10:11:12.1234
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.12345CST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
+-- !query output
+2019-10-06 08:11:12.12345
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.123456PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
+-- !query output
+2019-10-06 10:11:12.123456
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.1234567PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(123456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
+-- !query output
+2019-10-06 10:11:12.123456
+
+
+-- !query
+select to_timestamp('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp(223456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.[SSSSSS]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.[SSSSSS]):timestamp>
+-- !query output
+2019-10-06 10:11:12.1234
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.123, yyyy-MM-dd HH:mm:ss[.SSSSSS]):timestamp>
+-- !query output
+2019-10-06 10:11:12.123
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12, yyyy-MM-dd HH:mm:ss[.SSSSSS]):timestamp>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm[:ss.SSSSSS]):timestamp>
+-- !query output
+2019-10-06 10:11:12.12
+
+
+-- !query
+select to_timestamp('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
+-- !query schema
+struct<to_timestamp(2019-10-06 10:11, yyyy-MM-dd HH:mm[:ss.SSSSSS]):timestamp>
+-- !query output
+2019-10-06 10:11:00
+
+
+-- !query
+select to_timestamp("2019-10-06S10:11:12.12345", "yyyy-MM-dd'S'HH:mm:ss.SSSSSS")
+-- !query schema
+struct<to_timestamp(2019-10-06S10:11:12.12345, yyyy-MM-dd'S'HH:mm:ss.SSSSSS):timestamp>
+-- !query output
+2019-10-06 10:11:12.12345
+
+
+-- !query
+select to_timestamp("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
+-- !query schema
+struct<to_timestamp(12.12342019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm):timestamp>
+-- !query output
+2019-10-06 10:11:12.1234
+
+
+-- !query
+select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
+-- !query schema
+struct<to_timestamp(12.1232019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm):timestamp>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm")
+-- !query schema
+struct<to_timestamp(12.1232019-10-06S10:11, ss.SSSSyy-MM-dd'S'HH:mm):timestamp>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm")
+-- !query schema
+struct<to_timestamp(12.1234019-10-06S10:11, ss.SSSSy-MM-dd'S'HH:mm):timestamp>
+-- !query output
+0019-10-06 10:11:12.1234
+
+
+-- !query
+select to_timestamp("2019-10-06S", "yyyy-MM-dd'S'")
+-- !query schema
+struct<to_timestamp(2019-10-06S, yyyy-MM-dd'S'):timestamp>
+-- !query output
+2019-10-06 00:00:00
+
+
+-- !query
+select to_timestamp("S2019-10-06", "'S'yyyy-MM-dd")
+-- !query schema
+struct<to_timestamp(S2019-10-06, 'S'yyyy-MM-dd):timestamp>
+-- !query output
+2019-10-06 00:00:00
+
+
+-- !query
+select to_timestamp("2019-10-06T10:11:12'12", "yyyy-MM-dd'T'HH:mm:ss''SSSS")
+-- !query schema
+struct<to_timestamp(2019-10-06T10:11:12'12, yyyy-MM-dd'T'HH:mm:ss''SSSS):timestamp>
+-- !query output
+2019-10-06 10:11:12.12
+
+
+-- !query
+select to_timestamp("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''")
+-- !query schema
+struct<to_timestamp(2019-10-06T10:11:12', yyyy-MM-dd'T'HH:mm:ss''):timestamp>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss")
+-- !query schema
+struct<to_timestamp('2019-10-06T10:11:12, ''yyyy-MM-dd'T'HH:mm:ss):timestamp>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss")
+-- !query schema
+struct<to_timestamp(P2019-10-06T10:11:12, 'P'yyyy-MM-dd'T'HH:mm:ss):timestamp>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06 10:11:12., yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.0', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06 10:11:12.0, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.1', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06 10:11:12.1, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.1
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.12
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.123UTC', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06 10:11:12.123UTC, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.123
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.1234
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.12345CST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06 10:11:12.12345CST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.12345
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.123456PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06 10:11:12.123456PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.123456
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.1234567PST', 'yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06 10:11:12.1234567PST, yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp_ntz('123456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp_ntz(123456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.123456
+
+
+-- !query
+select to_timestamp_ntz('223456 2019-10-06 10:11:12.123456PST', 'SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]')
+-- !query schema
+struct<to_timestamp_ntz(223456 2019-10-06 10:11:12.123456PST, SSSSSS yyyy-MM-dd HH:mm:ss.SSSSSS[zzz]):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.1234', 'yyyy-MM-dd HH:mm:ss.[SSSSSS]')
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06 10:11:12.1234, yyyy-MM-dd HH:mm:ss.[SSSSSS]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.1234
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.123', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06 10:11:12.123, yyyy-MM-dd HH:mm:ss[.SSSSSS]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.123
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12', 'yyyy-MM-dd HH:mm:ss[.SSSSSS]')
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06 10:11:12, yyyy-MM-dd HH:mm:ss[.SSSSSS]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11:12.12', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06 10:11:12.12, yyyy-MM-dd HH:mm[:ss.SSSSSS]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.12
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 10:11', 'yyyy-MM-dd HH:mm[:ss.SSSSSS]')
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06 10:11, yyyy-MM-dd HH:mm[:ss.SSSSSS]):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:00
+
+
+-- !query
+select to_timestamp_ntz("2019-10-06S10:11:12.12345", "yyyy-MM-dd'S'HH:mm:ss.SSSSSS")
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06S10:11:12.12345, yyyy-MM-dd'S'HH:mm:ss.SSSSSS):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.12345
+
+
+-- !query
+select to_timestamp_ntz("12.12342019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
+-- !query schema
+struct<to_timestamp_ntz(12.12342019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.1234
+
+
+-- !query
+select to_timestamp_ntz("12.1232019-10-06S10:11", "ss.SSSSyyyy-MM-dd'S'HH:mm")
+-- !query schema
+struct<to_timestamp_ntz(12.1232019-10-06S10:11, ss.SSSSyyyy-MM-dd'S'HH:mm):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp_ntz("12.1232019-10-06S10:11", "ss.SSSSyy-MM-dd'S'HH:mm")
+-- !query schema
+struct<to_timestamp_ntz(12.1232019-10-06S10:11, ss.SSSSyy-MM-dd'S'HH:mm):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp_ntz("12.1234019-10-06S10:11", "ss.SSSSy-MM-dd'S'HH:mm")
+-- !query schema
+struct<to_timestamp_ntz(12.1234019-10-06S10:11, ss.SSSSy-MM-dd'S'HH:mm):timestamp_ntz>
+-- !query output
+0019-10-06 10:11:12.1234
+
+
+-- !query
+select to_timestamp_ntz("2019-10-06S", "yyyy-MM-dd'S'")
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06S, yyyy-MM-dd'S'):timestamp_ntz>
+-- !query output
+2019-10-06 00:00:00
+
+
+-- !query
+select to_timestamp_ntz("S2019-10-06", "'S'yyyy-MM-dd")
+-- !query schema
+struct<to_timestamp_ntz(S2019-10-06, 'S'yyyy-MM-dd):timestamp_ntz>
+-- !query output
+2019-10-06 00:00:00
+
+
+-- !query
+select to_timestamp_ntz("2019-10-06T10:11:12'12", "yyyy-MM-dd'T'HH:mm:ss''SSSS")
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06T10:11:12'12, yyyy-MM-dd'T'HH:mm:ss''SSSS):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12.12
+
+
+-- !query
+select to_timestamp_ntz("2019-10-06T10:11:12'", "yyyy-MM-dd'T'HH:mm:ss''")
+-- !query schema
+struct<to_timestamp_ntz(2019-10-06T10:11:12', yyyy-MM-dd'T'HH:mm:ss''):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp_ntz("'2019-10-06T10:11:12", "''yyyy-MM-dd'T'HH:mm:ss")
+-- !query schema
+struct<to_timestamp_ntz('2019-10-06T10:11:12, ''yyyy-MM-dd'T'HH:mm:ss):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp_ntz("P2019-10-06T10:11:12", "'P'yyyy-MM-dd'T'HH:mm:ss")
+-- !query schema
+struct<to_timestamp_ntz(P2019-10-06T10:11:12, 'P'yyyy-MM-dd'T'HH:mm:ss):timestamp_ntz>
+-- !query output
+2019-10-06 10:11:12
+
+
+-- !query
+select to_timestamp("16", "dd")
+-- !query schema
+struct<to_timestamp(16, dd):timestamp>
+-- !query output
+1970-01-16 00:00:00
+
+
+-- !query
+select to_timestamp("02-29", "MM-dd")
+-- !query schema
+struct<to_timestamp(02-29, MM-dd):timestamp>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp_ntz("16", "dd")
+-- !query schema
+struct<to_timestamp_ntz(16, dd):timestamp_ntz>
+-- !query output
+1970-01-16 00:00:00
+
+
+-- !query
+select to_timestamp_ntz("02-29", "MM-dd")
+-- !query schema
+struct<to_timestamp_ntz(02-29, MM-dd):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_date("16", "dd")
+-- !query schema
+struct<to_date(16, dd):date>
+-- !query output
+1970-01-16
+
+
+-- !query
+select to_date("02-29", "MM-dd")
+-- !query schema
+struct<to_date(02-29, MM-dd):date>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp("2019 40", "yyyy mm")
+-- !query schema
+struct<to_timestamp(2019 40, yyyy mm):timestamp>
+-- !query output
+2019-01-01 00:40:00
+
+
+-- !query
+select to_timestamp("2019 10:10:10", "yyyy hh:mm:ss")
+-- !query schema
+struct<to_timestamp(2019 10:10:10, yyyy hh:mm:ss):timestamp>
+-- !query output
+2019-01-01 10:10:10
+
+
+-- !query
+select to_timestamp_ntz("2019 40", "yyyy mm")
+-- !query schema
+struct<to_timestamp_ntz(2019 40, yyyy mm):timestamp_ntz>
+-- !query output
+2019-01-01 00:40:00
+
+
+-- !query
+select to_timestamp_ntz("2019 10:10:10", "yyyy hh:mm:ss")
+-- !query schema
+struct<to_timestamp_ntz(2019 10:10:10, yyyy hh:mm:ss):timestamp_ntz>
+-- !query output
+2019-01-01 10:10:10
+
+
+-- !query
+select to_timestamp('2019-10-06 A', 'yyyy-MM-dd GGGGG')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkUpgradeException
+You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+
+
+-- !query
+select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEEE')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkUpgradeException
+You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+
+
+-- !query
+select to_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkUpgradeException
+You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+
+
+-- !query
+select to_timestamp_ntz('2019-10-06 A', 'yyyy-MM-dd GGGGG')
+-- !query schema
+struct<>
+-- !query output
+java.lang.RuntimeException
+Fail to recognize 'yyyy-MM-dd GGGGG' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+
+
+-- !query
+select to_timestamp_ntz('22 05 2020 Friday', 'dd MM yyyy EEEEEE')
+-- !query schema
+struct<>
+-- !query output
+java.lang.RuntimeException
+Fail to recognize 'dd MM yyyy EEEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+
+
+-- !query
+select to_timestamp_ntz('22 05 2020 Friday', 'dd MM yyyy EEEEE')
+-- !query schema
+struct<>
+-- !query output
+java.lang.RuntimeException
+Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+
+
+-- !query
+select unix_timestamp('22 05 2020 Friday', 'dd MM yyyy EEEEE')
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkUpgradeException
+You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd MM yyyy EEEEE' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+
+
+-- !query
+select from_json('{"t":"26/October/2015"}', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'))
+-- !query schema
+struct<from_json({"t":"26/October/2015"}):struct<t:timestamp_ntz>>
+-- !query output
+{"t":null}
+
+
+-- !query
+select from_json('{"d":"26/October/2015"}', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy'))
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkUpgradeException
+You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+
+
+-- !query
+select from_csv('26/October/2015', 't Timestamp', map('timestampFormat', 'dd/MMMMM/yyyy'))
+-- !query schema
+struct<>
+-- !query output
+java.lang.Exception
+Unsupported type: timestamp_ntz
+
+
+-- !query
+select from_csv('26/October/2015', 'd Date', map('dateFormat', 'dd/MMMMM/yyyy'))
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.SparkUpgradeException
+You may get a different result due to the upgrading of Spark 3.0: Fail to recognize 'dd/MMMMM/yyyy' pattern in the DateTimeFormatter. 1) You can set spark.sql.legacy.timeParserPolicy to LEGACY to restore the behavior before Spark 3.0. 2) You can form a valid datetime pattern with the guide from https://spark.apache.org/docs/latest/sql-ref-datetime-pattern.html
+
+
+-- !query
+select to_date("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS")
+-- !query schema
+struct<to_date(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS):date>
+-- !query output
+NULL
+
+
+-- !query
+select to_date("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS")
+-- !query schema
+struct<to_date(Unparseable, yyyy-MM-dd HH:mm:ss.SSS):date>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS")
+-- !query schema
+struct<to_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS):timestamp>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS")
+-- !query schema
+struct<to_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS):timestamp>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp_ntz("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS")
+-- !query schema
+struct<to_timestamp_ntz(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp_ntz("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS")
+-- !query schema
+struct<to_timestamp_ntz(Unparseable, yyyy-MM-dd HH:mm:ss.SSS):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS")
+-- !query schema
+struct<unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS):bigint>
+-- !query output
+NULL
+
+
+-- !query
+select unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS")
+-- !query schema
+struct<unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS):bigint>
+-- !query output
+NULL
+
+
+-- !query
+select to_unix_timestamp("2020-01-27T20:06:11.847", "yyyy-MM-dd HH:mm:ss.SSS")
+-- !query schema
+struct<to_unix_timestamp(2020-01-27T20:06:11.847, yyyy-MM-dd HH:mm:ss.SSS):bigint>
+-- !query output
+NULL
+
+
+-- !query
+select to_unix_timestamp("Unparseable", "yyyy-MM-dd HH:mm:ss.SSS")
+-- !query schema
+struct<to_unix_timestamp(Unparseable, yyyy-MM-dd HH:mm:ss.SSS):bigint>
+-- !query output
+NULL
+
+
+-- !query
+select cast("Unparseable" as timestamp)
+-- !query schema
+struct<CAST(Unparseable AS TIMESTAMP_NTZ):timestamp_ntz>
+-- !query output
+NULL
+
+
+-- !query
+select cast("Unparseable" as date)
+-- !query schema
+struct<CAST(Unparseable AS DATE):date>
+-- !query output
+NULL
+
+
+-- !query
+select next_day("2015-07-23", "Mon")
+-- !query schema
+struct<next_day(2015-07-23, Mon):date>
+-- !query output
+2015-07-27
+
+
+-- !query
+select next_day("2015-07-23", "xx")
+-- !query schema
+struct<next_day(2015-07-23, xx):date>
+-- !query output
+NULL
+
+
+-- !query
+select next_day("xx", "Mon")
+-- !query schema
+struct<next_day(xx, Mon):date>
+-- !query output
+NULL
+
+
+-- !query
+select next_day(null, "Mon")
+-- !query schema
+struct<next_day(NULL, Mon):date>
+-- !query output
+NULL
+
+
+-- !query
+select next_day(null, "xx")
+-- !query schema
+struct<next_day(NULL, xx):date>
+-- !query output
+NULL
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') + interval 2 day
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) + INTERVAL '2 days':timestamp_ntz>
+-- !query output
+2021-06-27 10:11:12
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') + interval '0-0' year to month
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) + INTERVAL '0-0' YEAR TO MONTH:timestamp_ntz>
+-- !query output
+2021-06-25 10:11:12
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') + interval '1-2' year to month
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) + INTERVAL '1-2' YEAR TO MONTH:timestamp_ntz>
+-- !query output
+2022-08-25 10:11:12
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') + interval '0 0:0:0' day to second
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) + INTERVAL '0 00:00:00' DAY TO SECOND:timestamp_ntz>
+-- !query output
+2021-06-25 10:11:12
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') + interval '0 0:0:0.1' day to second
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) + INTERVAL '0 00:00:00.1' DAY TO SECOND:timestamp_ntz>
+-- !query output
+2021-06-25 10:11:12.1
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') + interval '10-9' year to month
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) + INTERVAL '10-9' YEAR TO MONTH:timestamp_ntz>
+-- !query output
+2032-03-25 10:11:12
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') + interval '20 15' day to hour
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) + INTERVAL '20 15' DAY TO HOUR:timestamp_ntz>
+-- !query output
+2021-07-16 01:11:12
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') + interval '20 15:40' day to minute
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) + INTERVAL '20 15:40' DAY TO MINUTE:timestamp_ntz>
+-- !query output
+2021-07-16 01:51:12
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') + interval '20 15:40:32.99899999' day to second
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) + INTERVAL '20 15:40:32.998999' DAY TO SECOND:timestamp_ntz>
+-- !query output
+2021-07-16 01:51:44.998999
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') - interval 2 day
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) - INTERVAL '2 days':timestamp_ntz>
+-- !query output
+2021-06-23 10:11:12
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') - interval '0-0' year to month
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) - INTERVAL '0-0' YEAR TO MONTH:timestamp_ntz>
+-- !query output
+2021-06-25 10:11:12
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') - interval '1-2' year to month
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) - INTERVAL '1-2' YEAR TO MONTH:timestamp_ntz>
+-- !query output
+2020-04-25 10:11:12
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') - interval '0 0:0:0' day to second
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) - INTERVAL '0 00:00:00' DAY TO SECOND:timestamp_ntz>
+-- !query output
+2021-06-25 10:11:12
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') - interval '0 0:0:0.1' day to second
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) - INTERVAL '0 00:00:00.1' DAY TO SECOND:timestamp_ntz>
+-- !query output
+2021-06-25 10:11:11.9
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') - interval '10-9' year to month
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) - INTERVAL '10-9' YEAR TO MONTH:timestamp_ntz>
+-- !query output
+2010-09-25 10:11:12
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') - interval '20 15' day to hour
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) - INTERVAL '20 15' DAY TO HOUR:timestamp_ntz>
+-- !query output
+2021-06-04 19:11:12
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') - interval '20 15:40' day to minute
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) - INTERVAL '20 15:40' DAY TO MINUTE:timestamp_ntz>
+-- !query output
+2021-06-04 18:31:12
+
+
+-- !query
+select to_timestamp_ntz('2021-06-25 10:11:12') - interval '20 15:40:32.99899999' day to second
+-- !query schema
+struct<to_timestamp_ntz(2021-06-25 10:11:12) - INTERVAL '20 15:40:32.998999' DAY TO SECOND:timestamp_ntz>
+-- !query output
+2021-06-04 18:30:39.001001
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
index 609e0094..d194293 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQueryTestSuite.scala
@@ -31,6 +31,7 @@ import org.apache.spark.sql.catalyst.util.{fileToString, stringToFile}
 import org.apache.spark.sql.catalyst.util.DateTimeConstants.NANOS_PER_SECOND
 import org.apache.spark.sql.execution.WholeStageCodegenExec
 import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.sql.internal.SQLConf.TimestampTypes
 import org.apache.spark.sql.test.SharedSparkSession
 import org.apache.spark.tags.ExtendedSQLTest
 import org.apache.spark.util.Utils
@@ -186,6 +187,11 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
    */
   protected trait AnsiTest
 
+  /**
+   * Trait that indicates the default timestamp type is TimestampNTZType.
+   */
+  protected trait TimestampNTZTest
+
   protected trait UDFTest {
     val udf: TestUDF
   }
@@ -216,6 +222,10 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
   protected case class AnsiTestCase(
       name: String, inputFile: String, resultFile: String) extends TestCase with AnsiTest
 
+  /** A datetime test case with the default timestamp type as TimestampNTZType */
+  protected case class TimestampNTZTestCase(
+      name: String, inputFile: String, resultFile: String) extends TestCase with TimestampNTZTest
+
   protected def createScalaTestCase(testCase: TestCase): Unit = {
     if (ignoreList.exists(t =>
         testCase.name.toLowerCase(Locale.ROOT).contains(t.toLowerCase(Locale.ROOT)))) {
@@ -370,6 +380,9 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
         localSparkSession.conf.set(SQLConf.LEGACY_INTERVAL_ENABLED.key, true)
       case _: AnsiTest =>
         localSparkSession.conf.set(SQLConf.ANSI_ENABLED.key, true)
+      case _: TimestampNTZTest =>
+        localSparkSession.conf.set(SQLConf.TIMESTAMP_TYPE.key,
+          TimestampTypes.TIMESTAMP_NTZ.toString)
       case _ =>
     }
 
@@ -481,6 +494,8 @@ class SQLQueryTestSuite extends QueryTest with SharedSparkSession with SQLHelper
         PgSQLTestCase(testCaseName, absPath, resultFile) :: Nil
       } else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}ansi")) {
         AnsiTestCase(testCaseName, absPath, resultFile) :: Nil
+      } else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}timestampNTZ")) {
+        TimestampNTZTestCase(testCaseName, absPath, resultFile) :: Nil
       } else {
         RegularTestCase(testCaseName, absPath, resultFile) :: Nil
       }
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala
index 1d03c90..f405461 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerQueryTestSuite.scala
@@ -31,6 +31,7 @@ import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
 import org.apache.spark.sql.catalyst.util.fileToString
 import org.apache.spark.sql.execution.HiveResult.{getTimeFormatters, toHiveString, TimeFormatters}
 import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.sql.internal.SQLConf.TimestampTypes
 import org.apache.spark.sql.types._
 
 // scalastyle:off line.size.limit
@@ -112,6 +113,9 @@ class ThriftServerQueryTestSuite extends SQLQueryTestSuite with SharedThriftServ
           statement.execute(s"SET ${SQLConf.LEGACY_INTERVAL_ENABLED.key} = true")
         case _: AnsiTest =>
           statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = true")
+        case _: TimestampNTZTest =>
+          statement.execute(s"SET ${SQLConf.TIMESTAMP_TYPE.key} = " +
+            s"${TimestampTypes.TIMESTAMP_NTZ.toString}")
         case _ =>
           statement.execute(s"SET ${SQLConf.ANSI_ENABLED.key} = false")
       }
@@ -244,6 +248,8 @@ class ThriftServerQueryTestSuite extends SQLQueryTestSuite with SharedThriftServ
         PgSQLTestCase(testCaseName, absPath, resultFile) :: Nil
       } else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}ansi")) {
         AnsiTestCase(testCaseName, absPath, resultFile) :: Nil
+      } else if (file.getAbsolutePath.startsWith(s"$inputFilePath${File.separator}timestampNTZ")) {
+        TimestampNTZTestCase(testCaseName, absPath, resultFile) :: Nil
       } else {
         RegularTestCase(testCaseName, absPath, resultFile) :: Nil
       }

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org