You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by gu...@apache.org on 2022/08/05 09:32:37 UTC
[spark] branch master updated: [SPARK-39981][SQL] Throw the exception QueryExecutionErrors.castingCauseOverflowErrorInTableInsert in Cast
This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new e6b9c6166a0 [SPARK-39981][SQL] Throw the exception QueryExecutionErrors.castingCauseOverflowErrorInTableInsert in Cast
e6b9c6166a0 is described below
commit e6b9c6166a08ad4dca2550bbbb151fa575b730a8
Author: Hyukjin Kwon <gu...@apache.org>
AuthorDate: Fri Aug 5 18:31:46 2022 +0900
[SPARK-39981][SQL] Throw the exception QueryExecutionErrors.castingCauseOverflowErrorInTableInsert in Cast
### What changes were proposed in this pull request?
This PR is a follow-up of https://github.com/apache/spark/pull/37283. It missed the `throw` keyword in the interpreted path.
### Why are the changes needed?
To throw an exception as intended instead of returning an exception itself.
### Does this PR introduce _any_ user-facing change?
Yes, it will throw an exception as expected in the interpreted path.
### How was this patch tested?
Haven't tested because it's too straightforward.
Closes #37414 from HyukjinKwon/SPARK-39981.
Authored-by: Hyukjin Kwon <gu...@apache.org>
Signed-off-by: Hyukjin Kwon <gu...@apache.org>
---
.../org/apache/spark/sql/catalyst/expressions/Cast.scala | 2 +-
.../spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala | 16 ++++++++++++++++
2 files changed, 17 insertions(+), 1 deletion(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
index 37e141ab091..424b82533fc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
@@ -2371,7 +2371,7 @@ case class CheckOverflowInTableInsert(child: Cast, columnName: String) extends U
child.eval(input)
} catch {
case e: SparkArithmeticException =>
- QueryExecutionErrors.castingCauseOverflowErrorInTableInsert(
+ throw QueryExecutionErrors.castingCauseOverflowErrorInTableInsert(
child.child.dataType,
child.dataType,
columnName)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
index 362c1153b0b..02492d5619c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionAnsiErrorsSuite.scala
@@ -18,7 +18,9 @@ package org.apache.spark.sql.errors
import org.apache.spark._
import org.apache.spark.sql.QueryTest
+import org.apache.spark.sql.catalyst.expressions.{Cast, CheckOverflowInTableInsert, Literal}
import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.sql.types.ByteType
// Test suite for all the execution errors that requires enable ANSI SQL mode.
class QueryExecutionAnsiErrorsSuite extends QueryTest with QueryErrorsSuiteBase {
@@ -174,4 +176,18 @@ class QueryExecutionAnsiErrorsSuite extends QueryTest with QueryErrorsSuiteBase
}
}
}
+
+ test("SPARK-39981: interpreted CheckOverflowInTableInsert should throw an exception") {
+ checkError(
+ exception = intercept[SparkArithmeticException] {
+ CheckOverflowInTableInsert(
+ Cast(Literal.apply(12345678901234567890D), ByteType), "col").eval(null)
+ }.asInstanceOf[SparkThrowable],
+ errorClass = "CAST_OVERFLOW_IN_TABLE_INSERT",
+ parameters = Map(
+ "sourceType" -> "\"DOUBLE\"",
+ "targetType" -> ("\"TINYINT\""),
+ "columnName" -> "`col`")
+ )
+ }
}
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org