Posted to commits@spark.apache.org by ma...@apache.org on 2022/07/21 10:07:40 UTC

[spark] branch master updated: [SPARK-39827][SQL] Use the error class `ARITHMETIC_OVERFLOW` on int overflow in `add_months()`

This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 90af6b73903 [SPARK-39827][SQL] Use the error class `ARITHMETIC_OVERFLOW` on int overflow in `add_months()`
90af6b73903 is described below

commit 90af6b73903e4615448a7523b4dff9d4e10df2eb
Author: Max Gekk <ma...@gmail.com>
AuthorDate: Thu Jul 21 13:07:26 2022 +0300

    [SPARK-39827][SQL] Use the error class `ARITHMETIC_OVERFLOW` on int overflow in `add_months()`
    
    ### What changes were proposed in this pull request?
    In this PR, I propose to use `toIntExact()` from Spark's `MathUtils` instead of the standard `Math.toIntExact` in the `AddMonths` expression. As a consequence, the `add_months()` function will raise `SparkArithmeticException` with the error class `ARITHMETIC_OVERFLOW` when an integer overflow occurs.
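    
    The change follows a common wrap-and-rethrow pattern: perform the exact conversion and turn any `java.lang.ArithmeticException` into a Spark error carrying the `ARITHMETIC_OVERFLOW` error class. Below is a minimal, self-contained sketch of that pattern, for illustration only; `MathUtilsSketch`, `toIntExactSketch`, and `OverflowException` are made-up names, not Spark's actual `MathUtils`/`QueryExecutionErrors` API.
    
    ```scala
    // Illustrative sketch of the overflow wrap-and-rethrow pattern.
    // OverflowException and toIntExactSketch are hypothetical names,
    // not the real MathUtils / QueryExecutionErrors API.
    object MathUtilsSketch {
      final class OverflowException(message: String)
        extends ArithmeticException(s"[ARITHMETIC_OVERFLOW] $message")
    
      def toIntExactSketch(a: Long): Int = {
        try {
          Math.toIntExact(a) // throws java.lang.ArithmeticException on int overflow
        } catch {
          case _: ArithmeticException =>
            // Rethrow as an error tagged with the unified error class.
            throw new OverflowException("integer overflow")
        }
      }
    }
    ```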
    
    ### Why are the changes needed?
    This should improve the user experience with Spark SQL by reporting unified errors with error classes.
    
    ### Does this PR introduce _any_ user-facing change?
    No, it shouldn't, because `SparkArithmeticException` is a sub-class of `ArithmeticException`; only the error message will look different.
    
    Before:
    ```sql
    spark-sql> SELECT add_months('5500000-12-31', 10000000);
    22/07/20 19:21:10 ERROR SparkSQLDriver: Failed in [SELECT add_months('5500000-12-31', 10000000)]
    java.lang.ArithmeticException: integer overflow
            at java.lang.Math.toIntExact(Math.java:1011)
    ```
    
    After:
    ```sql
    spark-sql> SELECT add_months('5500000-12-31', 10000000);
    org.apache.spark.SparkArithmeticException: [ARITHMETIC_OVERFLOW] integer overflow. If necessary set spark.sql.ansi.enabled to "false" (except for ANSI interval type) to bypass this error.
            at org.apache.spark.sql.errors.QueryExecutionErrors$.arithmeticOverflowError(QueryExecutionErrors.scala:484)
    ```
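    
    Because the new exception participates in Spark's error-class framework, callers can branch on the error class instead of parsing the message text. A hedged usage sketch follows, assuming a `SparkSession` named `spark` (as in `spark-shell`) and that `SparkArithmeticException#getErrorClass` is available in this Spark version:
    
    ```scala
    // Illustrative only: match on the error class rather than the message text.
    import org.apache.spark.SparkArithmeticException
    
    try {
      spark.sql("SELECT add_months('5500000-12-31', 10000000)").collect()
    } catch {
      case e: SparkArithmeticException if e.getErrorClass == "ARITHMETIC_OVERFLOW" =>
        println(s"overflow detected: ${e.getMessage}")
    }
    ```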
    
    ### How was this patch tested?
    By running the new test:
    ```
    $ build/sbt "test:testOnly *QueryExecutionErrorsSuite"
    ```
    
    Closes #37240 from MaxGekk/use-error-class-in-add_months.
    
    Authored-by: Max Gekk <ma...@gmail.com>
    Signed-off-by: Max Gekk <ma...@gmail.com>
---
 .../org/apache/spark/sql/catalyst/util/DateTimeUtils.scala   |  2 +-
 .../apache/spark/sql/errors/QueryExecutionErrorsSuite.scala  | 12 ++++++++++++
 2 files changed, 13 insertions(+), 1 deletion(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
index 5045d1479f2..d206585ea53 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/DateTimeUtils.scala
@@ -563,7 +563,7 @@ object DateTimeUtils {
   /**
    * Converts the local date to the number of days since 1970-01-01.
    */
-  def localDateToDays(localDate: LocalDate): Int = Math.toIntExact(localDate.toEpochDay)
+  def localDateToDays(localDate: LocalDate): Int = MathUtils.toIntExact(localDate.toEpochDay)
 
   /**
    * Obtains an instance of `java.time.LocalDate` from the epoch day count.
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
index 8bf90d43565..c540f90a9f5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryExecutionErrorsSuite.scala
@@ -649,6 +649,18 @@ class QueryExecutionErrorsSuite
       Map.empty
     )
   }
+
+  test("ARITHMETIC_OVERFLOW: overflow on adding months") {
+    checkError(
+      exception = intercept[SparkArithmeticException](
+        sql("select add_months('5500000-12-31', 10000000)").collect()
+      ),
+      errorClass = "ARITHMETIC_OVERFLOW",
+      parameters = Map(
+        "message" -> "integer overflow",
+        "alternative" -> "",
+        "config" -> SQLConf.ANSI_ENABLED.key))
+  }
 }
 
 class FakeFileSystemSetPermission extends LocalFileSystem {


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org