You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ge...@apache.org on 2021/08/11 03:57:01 UTC
[spark] branch branch-3.2 updated: [SPARK-36445][SQL] ANSI type
coercion rule for date time operations
This is an automated email from the ASF dual-hosted git repository.
gengliang pushed a commit to branch branch-3.2
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.2 by this push:
new 293a6cb [SPARK-36445][SQL] ANSI type coercion rule for date time operations
293a6cb is described below
commit 293a6cb1ab26fa8a56bd7f2622a5641d98eac0c2
Author: Gengliang Wang <ge...@apache.org>
AuthorDate: Wed Aug 11 11:55:45 2021 +0800
[SPARK-36445][SQL] ANSI type coercion rule for date time operations
### What changes were proposed in this pull request?
Implement a new rule for the date-time operations in the ANSI type coercion system:
1. Date will be converted to Timestamp when it is in the subtraction with Timestamp.
2. Promote string literals in date_add/date_sub/time_add
### Why are the changes needed?
Currently the type coercion rule `DateTimeOperations` doesn't match the design of the ANSI type coercion system:
1. For date_add/date_sub, if the input is timestamp type, Spark should not convert it into date type since date type is narrower than the timestamp type.
2. For date_add/date_sub/time_add, string value can be implicit cast to date/timestamp only when it is literal.
Thus, we need to have a new rule for the date-time operations in the ANSI type coercion system.
### Does this PR introduce _any_ user-facing change?
No, the ANSI type coercion rules are not released.
### How was this patch tested?
New UT
Closes #33666 from gengliangwang/datetimeOp.
Authored-by: Gengliang Wang <ge...@apache.org>
Signed-off-by: Gengliang Wang <ge...@apache.org>
(cherry picked from commit 3029e62a828e33a6f45828dec57f6c7709cb32f7)
Signed-off-by: Gengliang Wang <ge...@apache.org>
---
.../sql/catalyst/analysis/AnsiTypeCoercion.scala | 28 ++++++-
.../spark/sql/catalyst/analysis/TypeCoercion.scala | 49 +++++------
.../src/test/resources/sql-tests/inputs/date.sql | 10 ++-
.../test/resources/sql-tests/inputs/timestamp.sql | 6 ++
.../resources/sql-tests/results/ansi/date.sql.out | 60 +++++++++++++-
.../sql-tests/results/ansi/interval.sql.out | 5 +-
.../sql-tests/results/ansi/timestamp.sql.out | 45 +++++++++-
.../test/resources/sql-tests/results/date.sql.out | 54 +++++++++++-
.../sql-tests/results/datetime-legacy.sql.out | 96 +++++++++++++++++++++-
.../resources/sql-tests/results/timestamp.sql.out | 44 +++++++++-
.../results/timestampNTZ/timestamp-ansi.sql.out | 45 +++++++++-
.../results/timestampNTZ/timestamp.sql.out | 44 +++++++++-
12 files changed, 444 insertions(+), 42 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AnsiTypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AnsiTypeCoercion.scala
index f03296f..c5f0845 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AnsiTypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/AnsiTypeCoercion.scala
@@ -260,7 +260,7 @@ object AnsiTypeCoercion extends TypeCoercionBase {
override def canCast(from: DataType, to: DataType): Boolean = AnsiCast.canCast(from, to)
/**
- * Promotes string literals that appear in arithmetic and comparison expressions.
+ * Promotes string literals that appear in arithmetic, comparison, and datetime expressions.
*/
object PromoteStringLiterals extends TypeCoercionRule {
private def castExpr(expr: Expression, targetType: DataType): Expression = {
@@ -294,6 +294,15 @@ object AnsiTypeCoercion extends TypeCoercionBase {
case other => other
}
p.makeCopy(Array(a, newList))
+
+ case d @ DateAdd(left @ StringType(), _) if left.foldable =>
+ d.copy(startDate = Cast(d.startDate, DateType))
+ case d @ DateSub(left @ StringType(), _) if left.foldable =>
+ d.copy(startDate = Cast(d.startDate, DateType))
+ case t @ TimeAdd(left @ StringType(), _, _) if left.foldable =>
+ t.copy(start = Cast(t.start, TimestampType))
+ case t @ SubtractTimestamps(left @ StringType(), _, _, _) if left.foldable =>
+ t.copy(left = Cast(t.left, TimestampType))
}
}
@@ -311,4 +320,21 @@ object AnsiTypeCoercion extends TypeCoercionBase {
g.withNewChildren(Seq(Cast(g.child, DateType)))
}
}
+
+ object DateTimeOperations extends TypeCoercionRule {
+ override val transform: PartialFunction[Expression, Expression] = {
+ // Skip nodes who's children have not been resolved yet.
+ case e if !e.childrenResolved => e
+
+ case s @ SubtractTimestamps(DateType(), AnyTimestampType(), _, _) =>
+ s.copy(left = Cast(s.left, s.right.dataType))
+ case s @ SubtractTimestamps(AnyTimestampType(), DateType(), _, _) =>
+ s.copy(right = Cast(s.right, s.left.dataType))
+ case s @ SubtractTimestamps(AnyTimestampType(), AnyTimestampType(), _, _)
+ if s.left.dataType != s.right.dataType =>
+ val newLeft = castIfNotSameType(s.left, TimestampNTZType)
+ val newRight = castIfNotSameType(s.right, TimestampNTZType)
+ s.copy(left = newLeft, right = newRight)
+ }
+ }
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
index db6f499..23654af 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
@@ -158,7 +158,7 @@ abstract class TypeCoercionBase {
}
}
- private def castIfNotSameType(expr: Expression, dt: DataType): Expression = {
+ protected def castIfNotSameType(expr: Expression, dt: DataType): Expression = {
if (!expr.dataType.sameType(dt)) {
Cast(expr, dt)
} else {
@@ -624,29 +624,6 @@ abstract class TypeCoercionBase {
}
}
- object DateTimeOperations extends TypeCoercionRule {
- override val transform: PartialFunction[Expression, Expression] = {
- // Skip nodes who's children have not been resolved yet.
- case e if !e.childrenResolved => e
- case d @ DateAdd(TimestampType(), _) => d.copy(startDate = Cast(d.startDate, DateType))
- case d @ DateAdd(StringType(), _) => d.copy(startDate = Cast(d.startDate, DateType))
- case d @ DateSub(TimestampType(), _) => d.copy(startDate = Cast(d.startDate, DateType))
- case d @ DateSub(StringType(), _) => d.copy(startDate = Cast(d.startDate, DateType))
-
- case s @ SubtractTimestamps(DateType(), AnyTimestampType(), _, _) =>
- s.copy(left = Cast(s.left, s.right.dataType))
- case s @ SubtractTimestamps(AnyTimestampType(), DateType(), _, _) =>
- s.copy(right = Cast(s.right, s.left.dataType))
- case s @ SubtractTimestamps(AnyTimestampType(), AnyTimestampType(), _, _)
- if s.left.dataType != s.right.dataType =>
- val newLeft = castIfNotSameType(s.left, TimestampNTZType)
- val newRight = castIfNotSameType(s.right, TimestampNTZType)
- s.copy(left = newLeft, right = newRight)
-
- case t @ TimeAdd(StringType(), _, _) => t.copy(start = Cast(t.start, TimestampType))
- }
- }
-
/**
* Casts types according to the expected input types for [[Expression]]s.
*/
@@ -1161,6 +1138,30 @@ object TypeCoercion extends TypeCoercionBase {
EqualNullSafe(left, Cast(right, left.dataType))
}
}
+
+ object DateTimeOperations extends TypeCoercionRule {
+ override val transform: PartialFunction[Expression, Expression] = {
+ // Skip nodes who's children have not been resolved yet.
+ case e if !e.childrenResolved => e
+ case d @ DateAdd(TimestampType(), _) => d.copy(startDate = Cast(d.startDate, DateType))
+ case d @ DateAdd(StringType(), _) => d.copy(startDate = Cast(d.startDate, DateType))
+ case d @ DateSub(TimestampType(), _) => d.copy(startDate = Cast(d.startDate, DateType))
+ case d @ DateSub(StringType(), _) => d.copy(startDate = Cast(d.startDate, DateType))
+
+ case s @ SubtractTimestamps(DateType(), AnyTimestampType(), _, _) =>
+ s.copy(left = Cast(s.left, s.right.dataType))
+ case s @ SubtractTimestamps(AnyTimestampType(), DateType(), _, _) =>
+ s.copy(right = Cast(s.right, s.left.dataType))
+ case s @ SubtractTimestamps(AnyTimestampType(), AnyTimestampType(), _, _)
+ if s.left.dataType != s.right.dataType =>
+ val newLeft = castIfNotSameType(s.left, TimestampNTZType)
+ val newRight = castIfNotSameType(s.right, TimestampNTZType)
+ s.copy(left = newLeft, right = newRight)
+
+ case t @ TimeAdd(StringType(), _, _) => t.copy(start = Cast(t.start, TimestampType))
+ }
+ }
+
}
trait TypeCoercionRule extends Rule[LogicalPlan] with Logging {
diff --git a/sql/core/src/test/resources/sql-tests/inputs/date.sql b/sql/core/src/test/resources/sql-tests/inputs/date.sql
index 53cb3eb..b32bb6a 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/date.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/date.sql
@@ -73,7 +73,8 @@ select date_add(null, 1);
select date_add(date'2011-11-11', null);
-- `date_add` accepts both date and timestamp ltz/ntz inputs.
select date_add(date'2011-11-11', 1);
-select date_add(timestamp_ltz'2011-11-11 12:12:12', 1), date_add(timestamp_ntz'2011-11-11 12:12:12', 1);
+select date_add(timestamp_ltz'2011-11-11 12:12:12', 1);
+select date_add(timestamp_ntz'2011-11-11 12:12:12', 1);
-- date sub
select date_sub(date'2011-11-11', 1);
@@ -91,6 +92,11 @@ create temp view v as select '1' str;
select date_add('2011-11-11', str) from v;
select date_sub('2011-11-11', str) from v;
+-- non-literal string column add/sub with integer
+create temp view v2 as select '2011-11-11' str;
+select date_add(str, 1) from v2;
+select date_sub(str, 1) from v2;
+
-- date add/sub operations
select date'2011-11-11' + 1E1;
select date'2011-11-11' + '1';
@@ -104,6 +110,8 @@ select date '2001-09-28' - null;
select '2011-11-11' - interval '2' day;
select null - date '2019-10-06';
select date '2001-10-01' - date '2001-09-28';
+select '2011-11-11 11:11:11' - date'2011-11-11';
+select str - date'2011-11-11' from v2;
-- Unsupported narrow text style
select to_date('26/October/2015', 'dd/MMMMM/yyyy');
diff --git a/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql b/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql
index ae5bd8f..180c71e 100644
--- a/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql
+++ b/sql/core/src/test/resources/sql-tests/inputs/timestamp.sql
@@ -102,6 +102,12 @@ select timestamp'2011-11-11 11:11:11' - interval '2' second;
select '2011-11-11 11:11:11' - interval '2' second;
select '1' - interval '2' second;
select 1 - interval '2' second;
+select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10';
+-- non-literal string column subtract interval
+create temporary view v3 as select '2011-11-11 11:11:11' str;
+select str - interval '2' second from v3;
+select str - date'2011-11-11' from v3;
+select str - timestamp'2011-11-11 11:11:10' from v3;
-- analyzer will cast date to timestamp automatically
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678';
select timestamp'2019-10-06 10:11:12.345678' - date'2020-01-01';
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
index e09ef65..ab9c779 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/date.sql.out
@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
--- Number of queries: 63
+-- Number of queries: 69
-- !query
@@ -344,12 +344,21 @@ struct<date_add(DATE '2011-11-11', 1):date>
-- !query
-select date_add(timestamp_ltz'2011-11-11 12:12:12', 1), date_add(timestamp_ntz'2011-11-11 12:12:12', 1)
+select date_add(timestamp_ltz'2011-11-11 12:12:12', 1)
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'date_add(TIMESTAMP_NTZ '2011-11-11 12:12:12', 1)' due to data type mismatch: argument 1 requires date type, however, 'TIMESTAMP_NTZ '2011-11-11 12:12:12'' is of timestamp_ntz type.; line 1 pos 56
+cannot resolve 'date_add(TIMESTAMP '2011-11-11 12:12:12', 1)' due to data type mismatch: argument 1 requires date type, however, 'TIMESTAMP '2011-11-11 12:12:12'' is of timestamp type.; line 1 pos 7
+
+
+-- !query
+select date_add(timestamp_ntz'2011-11-11 12:12:12', 1)
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_add(TIMESTAMP_NTZ '2011-11-11 12:12:12', 1)' due to data type mismatch: argument 1 requires date type, however, 'TIMESTAMP_NTZ '2011-11-11 12:12:12'' is of timestamp_ntz type.; line 1 pos 7
-- !query
@@ -383,7 +392,7 @@ select date_sub(timestamp_ltz'2011-11-11 12:12:12', 1), date_sub(timestamp_ntz'2
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'date_sub(TIMESTAMP_NTZ '2011-11-11 12:12:12', 1)' due to data type mismatch: argument 1 requires date type, however, 'TIMESTAMP_NTZ '2011-11-11 12:12:12'' is of timestamp_ntz type.; line 1 pos 56
+cannot resolve 'date_sub(TIMESTAMP '2011-11-11 12:12:12', 1)' due to data type mismatch: argument 1 requires date type, however, 'TIMESTAMP '2011-11-11 12:12:12'' is of timestamp type.; line 1 pos 7
-- !query
@@ -429,6 +438,32 @@ cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), v.str)' due to data type mi
-- !query
+create temp view v2 as select '2011-11-11' str
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+select date_add(str, 1) from v2
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_add(v2.str, 1)' due to data type mismatch: argument 1 requires date type, however, 'v2.str' is of string type.; line 1 pos 7
+
+
+-- !query
+select date_sub(str, 1) from v2
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'date_sub(v2.str, 1)' due to data type mismatch: argument 1 requires date type, however, 'v2.str' is of string type.; line 1 pos 7
+
+
+-- !query
select date'2011-11-11' + 1E1
-- !query schema
struct<>
@@ -528,6 +563,23 @@ struct<(DATE '2001-10-01' - DATE '2001-09-28'):interval day>
-- !query
+select '2011-11-11 11:11:11' - date'2011-11-11'
+-- !query schema
+struct<(2011-11-11 11:11:11 - DATE '2011-11-11'):interval day>
+-- !query output
+0 00:00:00.000000000
+
+
+-- !query
+select str - date'2011-11-11' from v2
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve '(v2.str - DATE '2011-11-11')' due to data type mismatch: argument 1 requires date type, however, 'v2.str' is of string type.; line 1 pos 7
+
+
+-- !query
select to_date('26/October/2015', 'dd/MMMMM/yyyy')
-- !query schema
struct<>
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index 9ba5da3..8595498 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -1232,9 +1232,10 @@ select
interval '99 11:22:33.123456789' day to second + strval
from interval_arithmetic
-- !query schema
-struct<strval:string,strval - INTERVAL '99 11:22:33.123456' DAY TO SECOND:string,strval - INTERVAL '-99 11:22:33.123456' DAY TO SECOND:string,strval + INTERVAL '99 11:22:33.123456' DAY TO SECOND:string,strval + INTERVAL '-99 11:22:33.123456' DAY TO SECOND:string,strval + (- INTERVAL '99 11:22:33.123456' DAY TO SECOND):string,strval + INTERVAL '99 11:22:33.123456' DAY TO SECOND:string>
+struct<>
-- !query output
-2012-01-01 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456 2012-04-09 11:22:33.123456 2011-09-23 12:37:26.876544 2011-09-23 12:37:26.876544 2012-04-09 11:22:33.123456
+org.apache.spark.sql.AnalysisException
+cannot resolve 'interval_arithmetic.strval + (- INTERVAL '99 11:22:33.123456' DAY TO SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'interval_arithmetic.strval' is of string type.; line 3 pos 2
-- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
index 5da0dfd..2bbd197 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/timestamp.sql.out
@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
--- Number of queries: 77
+-- Number of queries: 82
-- !query
@@ -591,6 +591,49 @@ cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argumen
-- !query
+select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10'
+-- !query schema
+struct<(2011-11-11 11:11:11 - TIMESTAMP '2011-11-11 11:11:10'):interval day to second>
+-- !query output
+0 00:00:01.000000000
+
+
+-- !query
+create temporary view v3 as select '2011-11-11 11:11:11' str
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+select str - interval '2' second from v3
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'v3.str + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'v3.str' is of string type.; line 1 pos 7
+
+
+-- !query
+select str - date'2011-11-11' from v3
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve '(v3.str - DATE '2011-11-11')' due to data type mismatch: argument 1 requires date type, however, 'v3.str' is of string type.; line 1 pos 7
+
+
+-- !query
+select str - timestamp'2011-11-11 11:11:10' from v3
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve '(v3.str - TIMESTAMP '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'v3.str' is of string type.; line 1 pos 7
+
+
+-- !query
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
struct<(DATE '2020-01-01' - TIMESTAMP '2019-10-06 10:11:12.345678'):interval day to second>
diff --git a/sql/core/src/test/resources/sql-tests/results/date.sql.out b/sql/core/src/test/resources/sql-tests/results/date.sql.out
index bf9f9c9..457e84c 100644
--- a/sql/core/src/test/resources/sql-tests/results/date.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/date.sql.out
@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
--- Number of queries: 63
+-- Number of queries: 69
-- !query
@@ -331,12 +331,20 @@ struct<date_add(DATE '2011-11-11', 1):date>
-- !query
-select date_add(timestamp_ltz'2011-11-11 12:12:12', 1), date_add(timestamp_ntz'2011-11-11 12:12:12', 1)
+select date_add(timestamp_ltz'2011-11-11 12:12:12', 1)
+-- !query schema
+struct<date_add(TIMESTAMP '2011-11-11 12:12:12', 1):date>
+-- !query output
+2011-11-12
+
+
+-- !query
+select date_add(timestamp_ntz'2011-11-11 12:12:12', 1)
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'date_add(TIMESTAMP_NTZ '2011-11-11 12:12:12', 1)' due to data type mismatch: argument 1 requires date type, however, 'TIMESTAMP_NTZ '2011-11-11 12:12:12'' is of timestamp_ntz type.; line 1 pos 56
+cannot resolve 'date_add(TIMESTAMP_NTZ '2011-11-11 12:12:12', 1)' due to data type mismatch: argument 1 requires date type, however, 'TIMESTAMP_NTZ '2011-11-11 12:12:12'' is of timestamp_ntz type.; line 1 pos 7
-- !query
@@ -416,6 +424,30 @@ cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), v.str)' due to data type mi
-- !query
+create temp view v2 as select '2011-11-11' str
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+select date_add(str, 1) from v2
+-- !query schema
+struct<date_add(str, 1):date>
+-- !query output
+2011-11-12
+
+
+-- !query
+select date_sub(str, 1) from v2
+-- !query schema
+struct<date_sub(str, 1):date>
+-- !query output
+2011-11-10
+
+
+-- !query
select date'2011-11-11' + 1E1
-- !query schema
struct<>
@@ -515,6 +547,22 @@ struct<(DATE '2001-10-01' - DATE '2001-09-28'):interval day>
-- !query
+select '2011-11-11 11:11:11' - date'2011-11-11'
+-- !query schema
+struct<(2011-11-11 11:11:11 - DATE '2011-11-11'):interval day>
+-- !query output
+0 00:00:00.000000000
+
+
+-- !query
+select str - date'2011-11-11' from v2
+-- !query schema
+struct<(str - DATE '2011-11-11'):interval day>
+-- !query output
+0 00:00:00.000000000
+
+
+-- !query
select to_date('26/October/2015', 'dd/MMMMM/yyyy')
-- !query schema
struct<>
diff --git a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
index d1fb06b..785dc55 100644
--- a/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/datetime-legacy.sql.out
@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
--- Number of queries: 140
+-- Number of queries: 151
-- !query
@@ -331,12 +331,20 @@ struct<date_add(DATE '2011-11-11', 1):date>
-- !query
-select date_add(timestamp_ltz'2011-11-11 12:12:12', 1), date_add(timestamp_ntz'2011-11-11 12:12:12', 1)
+select date_add(timestamp_ltz'2011-11-11 12:12:12', 1)
+-- !query schema
+struct<date_add(TIMESTAMP '2011-11-11 12:12:12', 1):date>
+-- !query output
+2011-11-12
+
+
+-- !query
+select date_add(timestamp_ntz'2011-11-11 12:12:12', 1)
-- !query schema
struct<>
-- !query output
org.apache.spark.sql.AnalysisException
-cannot resolve 'date_add(TIMESTAMP_NTZ '2011-11-11 12:12:12', 1)' due to data type mismatch: argument 1 requires date type, however, 'TIMESTAMP_NTZ '2011-11-11 12:12:12'' is of timestamp_ntz type.; line 1 pos 56
+cannot resolve 'date_add(TIMESTAMP_NTZ '2011-11-11 12:12:12', 1)' due to data type mismatch: argument 1 requires date type, however, 'TIMESTAMP_NTZ '2011-11-11 12:12:12'' is of timestamp_ntz type.; line 1 pos 7
-- !query
@@ -416,6 +424,30 @@ cannot resolve 'date_sub(CAST('2011-11-11' AS DATE), v.str)' due to data type mi
-- !query
+create temp view v2 as select '2011-11-11' str
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+select date_add(str, 1) from v2
+-- !query schema
+struct<date_add(str, 1):date>
+-- !query output
+2011-11-12
+
+
+-- !query
+select date_sub(str, 1) from v2
+-- !query schema
+struct<date_sub(str, 1):date>
+-- !query output
+2011-11-10
+
+
+-- !query
select date'2011-11-11' + 1E1
-- !query schema
struct<>
@@ -515,6 +547,22 @@ struct<(DATE '2001-10-01' - DATE '2001-09-28'):interval day>
-- !query
+select '2011-11-11 11:11:11' - date'2011-11-11'
+-- !query schema
+struct<(2011-11-11 11:11:11 - DATE '2011-11-11'):interval day>
+-- !query output
+0 00:00:00.000000000
+
+
+-- !query
+select str - date'2011-11-11' from v2
+-- !query schema
+struct<(str - DATE '2011-11-11'):interval day>
+-- !query output
+0 00:00:00.000000000
+
+
+-- !query
select to_date('26/October/2015', 'dd/MMMMM/yyyy')
-- !query schema
struct<to_date(26/October/2015, dd/MMMMM/yyyy):date>
@@ -1108,6 +1156,48 @@ cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argumen
-- !query
+select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7
+
+
+-- !query
+create temporary view v3 as select '2011-11-11 11:11:11' str
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+select str - interval '2' second from v3
+-- !query schema
+struct<str - INTERVAL '02' SECOND:string>
+-- !query output
+2011-11-11 11:11:09
+
+
+-- !query
+select str - date'2011-11-11' from v3
+-- !query schema
+struct<(str - DATE '2011-11-11'):interval day>
+-- !query output
+0 00:00:00.000000000
+
+
+-- !query
+select str - timestamp'2011-11-11 11:11:10' from v3
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve '(v3.str - TIMESTAMP '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'v3.str' is of string type.; line 1 pos 7
+
+
+-- !query
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
struct<(DATE '2020-01-01' - TIMESTAMP '2019-10-06 10:11:12.345678'):interval day to second>
diff --git a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
index 1e93430..e5d2f66 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestamp.sql.out
@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
--- Number of queries: 77
+-- Number of queries: 82
-- !query
@@ -572,6 +572,48 @@ cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argumen
-- !query
+select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7
+
+
+-- !query
+create temporary view v3 as select '2011-11-11 11:11:11' str
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+select str - interval '2' second from v3
+-- !query schema
+struct<str - INTERVAL '02' SECOND:string>
+-- !query output
+2011-11-11 11:11:09
+
+
+-- !query
+select str - date'2011-11-11' from v3
+-- !query schema
+struct<(str - DATE '2011-11-11'):interval day>
+-- !query output
+0 00:00:00.000000000
+
+
+-- !query
+select str - timestamp'2011-11-11 11:11:10' from v3
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve '(v3.str - TIMESTAMP '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'v3.str' is of string type.; line 1 pos 7
+
+
+-- !query
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
struct<(DATE '2020-01-01' - TIMESTAMP '2019-10-06 10:11:12.345678'):interval day to second>
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
index fae7721..3a13486 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp-ansi.sql.out
@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
--- Number of queries: 77
+-- Number of queries: 82
-- !query
@@ -591,6 +591,49 @@ cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argumen
-- !query
+select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10'
+-- !query schema
+struct<(2011-11-11 11:11:11 - TIMESTAMP_NTZ '2011-11-11 11:11:10'):interval day to second>
+-- !query output
+0 00:00:01.000000000
+
+
+-- !query
+create temporary view v3 as select '2011-11-11 11:11:11' str
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+select str - interval '2' second from v3
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve 'v3.str + (- INTERVAL '02' SECOND)' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'v3.str' is of string type.; line 1 pos 7
+
+
+-- !query
+select str - date'2011-11-11' from v3
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve '(v3.str - DATE '2011-11-11')' due to data type mismatch: argument 1 requires date type, however, 'v3.str' is of string type.; line 1 pos 7
+
+
+-- !query
+select str - timestamp'2011-11-11 11:11:10' from v3
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve '(v3.str - TIMESTAMP_NTZ '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'v3.str' is of string type.; line 1 pos 7
+
+
+-- !query
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
struct<(DATE '2020-01-01' - TIMESTAMP_NTZ '2019-10-06 10:11:12.345678'):interval day to second>
diff --git a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
index c6de535..c51ce87 100644
--- a/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/timestampNTZ/timestamp.sql.out
@@ -1,5 +1,5 @@
-- Automatically generated by SQLQueryTestSuite
--- Number of queries: 77
+-- Number of queries: 82
-- !query
@@ -572,6 +572,48 @@ cannot resolve '1 + (- INTERVAL '02' SECOND)' due to data type mismatch: argumen
-- !query
+select '2011-11-11 11:11:11' - timestamp'2011-11-11 11:11:10'
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve '('2011-11-11 11:11:11' - TIMESTAMP_NTZ '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, ''2011-11-11 11:11:11'' is of string type.; line 1 pos 7
+
+
+-- !query
+create temporary view v3 as select '2011-11-11 11:11:11' str
+-- !query schema
+struct<>
+-- !query output
+
+
+
+-- !query
+select str - interval '2' second from v3
+-- !query schema
+struct<str - INTERVAL '02' SECOND:string>
+-- !query output
+2011-11-11 11:11:09
+
+
+-- !query
+select str - date'2011-11-11' from v3
+-- !query schema
+struct<(str - DATE '2011-11-11'):interval day>
+-- !query output
+0 00:00:00.000000000
+
+
+-- !query
+select str - timestamp'2011-11-11 11:11:10' from v3
+-- !query schema
+struct<>
+-- !query output
+org.apache.spark.sql.AnalysisException
+cannot resolve '(v3.str - TIMESTAMP_NTZ '2011-11-11 11:11:10')' due to data type mismatch: argument 1 requires (timestamp or timestamp without time zone) type, however, 'v3.str' is of string type.; line 1 pos 7
+
+
+-- !query
select date'2020-01-01' - timestamp'2019-10-06 10:11:12.345678'
-- !query schema
struct<(DATE '2020-01-01' - TIMESTAMP_NTZ '2019-10-06 10:11:12.345678'):interval day to second>
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org