Posted to commits@spark.apache.org by ge...@apache.org on 2022/03/31 05:21:18 UTC

[spark] branch branch-3.3 updated: [SPARK-38698][SQL] Provide query context in runtime error of Divide/Div/Remainder/Pmod

This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch branch-3.3
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.3 by this push:
     new 20d545c  [SPARK-38698][SQL] Provide query context in runtime error of Divide/Div/Remainder/Pmod
20d545c is described below

commit 20d545c01594b03e0815823e8dca600fd2c1de55
Author: Gengliang Wang <ge...@apache.org>
AuthorDate: Thu Mar 31 13:18:45 2022 +0800

    [SPARK-38698][SQL] Provide query context in runtime error of Divide/Div/Remainder/Pmod
    
    ### What changes were proposed in this pull request?
    
    Provide the SQL query context in the following runtime errors:
    
    - Divide: divide by 0 error, including numeric types and ANSI interval types
    - Integral Divide: divide by 0 error and overflow error
    - Remainder: divide by 0 error
    - Pmod: divide by 0 error
    
    Example 1:
    ```
    == SQL(line 1, position 7) ==
    select smallint('100') / bigint('0')
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    ```
    
    Example 2:
    ```
    == SQL(line 1, position 7) ==
    select interval '2' year / 0
           ^^^^^^^^^^^^^^^^^^^^^
    ```
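    
    With ANSI mode on, the first example can be reproduced from `spark-shell` (a minimal sketch; the stack trace is trimmed to the relevant error, whose full text also appears in the updated golden files below):
    ```
    scala> spark.conf.set("spark.sql.ansi.enabled", "true")
    
    scala> spark.sql("select smallint('100') / bigint('0')").show()
    org.apache.spark.SparkArithmeticException: divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
    == SQL(line 1, position 7) ==
    select smallint('100') / bigint('0')
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    ```
    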
    ### Why are the changes needed?
    
    Provide the SQL query context of runtime errors to users, so that they can understand them better.
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, this PR improves the runtime error messages of Divide/Div/Remainder/Pmod.
    
    ### How was this patch tested?
    
    UT: new unit tests in `ArithmeticExpressionSuite`, plus regenerated SQL golden files.
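    
    The new tests install an explicit `Origin` carrying the SQL text and assert that the thrown `ArithmeticException` message contains the query fragment. Condensed from the `Divide` test added in `ArithmeticExpressionSuite` (see the diff below):
    ```
    val query = "1234.5D / 0"
    val o = Origin(
      line = Some(1),
      startPosition = Some(7),
      startIndex = Some(7),
      sqlText = Some(s"select $query"))
    withOrigin(o) {
      val expr = Divide(Literal(1234.5, DoubleType), Literal(0.0, DoubleType), failOnError = true)
      checkExceptionInExpression[ArithmeticException](expr, EmptyRow, query)
    }
    ```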
    
    Closes #36013 from gengliangwang/divideError.
    
    Authored-by: Gengliang Wang <ge...@apache.org>
    Signed-off-by: Gengliang Wang <ge...@apache.org>
    (cherry picked from commit e96883d98c32cef04d6015d9937979d663c1e754)
    Signed-off-by: Gengliang Wang <ge...@apache.org>
---
 core/src/main/resources/error/error-classes.json   |   2 +-
 .../org/apache/spark/SparkThrowableSuite.scala     |   4 +-
 .../sql/catalyst/expressions/arithmetic.scala      |  19 +--
 .../catalyst/expressions/intervalExpressions.scala | 172 +++++++++++----------
 .../spark/sql/catalyst/util/IntervalUtils.scala    |   2 +-
 .../spark/sql/errors/QueryExecutionErrors.scala    |   8 +-
 .../expressions/ArithmeticExpressionSuite.scala    |  46 ++++++
 .../sql-tests/results/ansi/interval.sql.out        |  18 +++
 .../resources/sql-tests/results/interval.sql.out   |  18 +++
 .../sql-tests/results/postgreSQL/case.sql.out      |   9 ++
 .../sql-tests/results/postgreSQL/int8.sql.out      |   9 ++
 .../results/postgreSQL/select_having.sql.out       |   3 +
 .../results/udf/postgreSQL/udf-case.sql.out        |   9 ++
 .../udf/postgreSQL/udf-select_having.sql.out       |   3 +
 14 files changed, 227 insertions(+), 95 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index cd47d50..e159e7c 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -33,7 +33,7 @@
     "sqlState" : "22008"
   },
   "DIVIDE_BY_ZERO" : {
-    "message" : [ "divide by zero. To return NULL instead, use 'try_divide'. If necessary set %s to false (except for ANSI interval type) to bypass this error." ],
+    "message" : [ "divide by zero. To return NULL instead, use 'try_divide'. If necessary set %s to false (except for ANSI interval type) to bypass this error.%s" ],
     "sqlState" : "22012"
   },
   "DUPLICATE_KEY" : {
diff --git a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
index 47df19f..f1eb27c 100644
--- a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
@@ -124,9 +124,9 @@ class SparkThrowableSuite extends SparkFunSuite {
     }
 
     // Does not fail with too many args (expects 0 args)
-    assert(getMessage("DIVIDE_BY_ZERO", Array("foo", "bar")) ==
+    assert(getMessage("DIVIDE_BY_ZERO", Array("foo", "bar", "baz")) ==
       "divide by zero. To return NULL instead, use 'try_divide'. If necessary set foo to false " +
-        "(except for ANSI interval type) to bypass this error.")
+        "(except for ANSI interval type) to bypass this error.bar")
   }
 
   test("Error message is formatted") {
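 
Note: as the test above shows, the new trailing `%s` in the DIVIDE_BY_ZERO template is filled by the (possibly empty) query context. A minimal sketch of the formatting (illustrative only, not Spark's internals; the leading newline in the context string is inferred from the golden-file outputs further below):
```
val template =
  "divide by zero. To return NULL instead, use 'try_divide'. If necessary set %s " +
    "to false (except for ANSI interval type) to bypass this error.%s"
val queryContext =
  "\n== SQL(line 1, position 7) ==\nselect interval '2' year / 0\n       ^^^^^^^^^^^^^^^^^^^^^"
// The second parameter may be "" when no SQL text is available (see IntervalUtils below).
println(template.format("spark.sql.ansi.enabled", queryContext))
```
 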
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
index 7251e47..c6d66d8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/arithmetic.scala
@@ -457,10 +457,10 @@ trait DivModLike extends BinaryArithmetic {
       } else {
         if (isZero(input2)) {
           // when we reach here, failOnError must be true.
-          throw QueryExecutionErrors.divideByZeroError()
+          throw QueryExecutionErrors.divideByZeroError(origin.context)
         }
         if (checkDivideOverflow && input1 == Long.MinValue && input2 == -1) {
-          throw QueryExecutionErrors.overflowInIntegralDivideError()
+          throw QueryExecutionErrors.overflowInIntegralDivideError(origin.context)
         }
         evalOperation(input1, input2)
       }
@@ -487,10 +487,11 @@ trait DivModLike extends BinaryArithmetic {
     } else {
       s"($javaType)(${eval1.value} $symbol ${eval2.value})"
     }
+    lazy val errorContext = ctx.addReferenceObj("errCtx", origin.context)
     val checkIntegralDivideOverflow = if (checkDivideOverflow) {
       s"""
         |if (${eval1.value} == ${Long.MinValue}L && ${eval2.value} == -1)
-        |  throw QueryExecutionErrors.overflowInIntegralDivideError();
+        |  throw QueryExecutionErrors.overflowInIntegralDivideError($errorContext);
         |""".stripMargin
     } else {
       ""
@@ -499,7 +500,7 @@ trait DivModLike extends BinaryArithmetic {
     // evaluate right first as we have a chance to skip left if right is 0
     if (!left.nullable && !right.nullable) {
       val divByZero = if (failOnError) {
-        s"throw QueryExecutionErrors.divideByZeroError();"
+        s"throw QueryExecutionErrors.divideByZeroError($errorContext);"
       } else {
         s"${ev.isNull} = true;"
       }
@@ -517,7 +518,7 @@ trait DivModLike extends BinaryArithmetic {
     } else {
       val nullOnErrorCondition = if (failOnError) "" else s" || $isZero"
       val failOnErrorBranch = if (failOnError) {
-        s"if ($isZero) throw QueryExecutionErrors.divideByZeroError();"
+        s"if ($isZero) throw QueryExecutionErrors.divideByZeroError($errorContext);"
       } else {
         ""
       }
@@ -742,7 +743,7 @@ case class Pmod(
       } else {
         if (isZero(input2)) {
          // when we reach here, failOnError must be true.
-          throw QueryExecutionErrors.divideByZeroError
+          throw QueryExecutionErrors.divideByZeroError(origin.context)
         }
         input1 match {
           case i: Integer => pmod(i, input2.asInstanceOf[java.lang.Integer])
@@ -767,7 +768,7 @@ case class Pmod(
     }
     val remainder = ctx.freshName("remainder")
     val javaType = CodeGenerator.javaType(dataType)
-
+    lazy val errorContext = ctx.addReferenceObj("errCtx", origin.context)
     val result = dataType match {
       case DecimalType.Fixed(_, _) =>
         val decimalAdd = "$plus"
@@ -803,7 +804,7 @@ case class Pmod(
     // evaluate right first as we have a chance to skip left if right is 0
     if (!left.nullable && !right.nullable) {
       val divByZero = if (failOnError) {
-        s"throw QueryExecutionErrors.divideByZeroError();"
+        s"throw QueryExecutionErrors.divideByZeroError($errorContext);"
       } else {
         s"${ev.isNull} = true;"
       }
@@ -820,7 +821,7 @@ case class Pmod(
     } else {
       val nullOnErrorCondition = if (failOnError) "" else s" || $isZero"
       val failOnErrorBranch = if (failOnError) {
-        s"if ($isZero) throw QueryExecutionErrors.divideByZeroError();"
+        s"if ($isZero) throw QueryExecutionErrors.divideByZeroError($errorContext);"
       } else {
         ""
       }
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala
index c461b8f..dbaff16d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala
@@ -598,23 +598,33 @@ case class MultiplyDTInterval(
 }
 
 trait IntervalDivide {
-  def checkDivideOverflow(value: Any, minValue: Any, num: Expression, numValue: Any): Unit = {
+  def checkDivideOverflow(
+      value: Any,
+      minValue: Any,
+      num: Expression,
+      numValue: Any,
+      context: String): Unit = {
     if (value == minValue && num.dataType.isInstanceOf[IntegralType]) {
       if (numValue.asInstanceOf[Number].longValue() == -1) {
-        throw QueryExecutionErrors.overflowInIntegralDivideError()
+        throw QueryExecutionErrors.overflowInIntegralDivideError(context)
       }
     }
   }
 
-  def divideByZeroCheck(dataType: DataType, num: Any): Unit = dataType match {
+  def divideByZeroCheck(dataType: DataType, num: Any, context: String): Unit = dataType match {
     case _: DecimalType =>
-      if (num.asInstanceOf[Decimal].isZero) throw QueryExecutionErrors.divideByZeroError()
-    case _ => if (num == 0) throw QueryExecutionErrors.divideByZeroError()
+      if (num.asInstanceOf[Decimal].isZero) throw QueryExecutionErrors.divideByZeroError(context)
+    case _ => if (num == 0) throw QueryExecutionErrors.divideByZeroError(context)
   }
 
-  def divideByZeroCheckCodegen(dataType: DataType, value: String): String = dataType match {
-    case _: DecimalType => s"if ($value.isZero()) throw QueryExecutionErrors.divideByZeroError();"
-    case _ => s"if ($value == 0) throw QueryExecutionErrors.divideByZeroError();"
+  def divideByZeroCheckCodegen(
+      dataType: DataType,
+      value: String,
+      errorContextReference: String): String = dataType match {
+    case _: DecimalType =>
+      s"if ($value.isZero()) throw QueryExecutionErrors.divideByZeroError($errorContextReference);"
+    case _ =>
+      s"if ($value == 0) throw QueryExecutionErrors.divideByZeroError($errorContextReference);"
   }
 }
 
@@ -646,47 +656,50 @@ case class DivideYMInterval(
   }
 
   override def nullSafeEval(interval: Any, num: Any): Any = {
-    checkDivideOverflow(interval.asInstanceOf[Int], Int.MinValue, right, num)
-    divideByZeroCheck(right.dataType, num)
+    checkDivideOverflow(interval.asInstanceOf[Int], Int.MinValue, right, num, origin.context)
+    divideByZeroCheck(right.dataType, num, origin.context)
     evalFunc(interval.asInstanceOf[Int], num)
   }
 
-  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = right.dataType match {
-    case t: IntegralType =>
-      val math = t match {
-        case LongType => classOf[LongMath].getName
-        case _ => classOf[IntMath].getName
-      }
-      val javaType = CodeGenerator.javaType(dataType)
-      val months = left.genCode(ctx)
-      val num = right.genCode(ctx)
-      val checkIntegralDivideOverflow =
-        s"""
-           |if (${months.value} == ${Int.MinValue} && ${num.value} == -1)
-           |  throw QueryExecutionErrors.overflowInIntegralDivideError();
-           |""".stripMargin
-      nullSafeCodeGen(ctx, ev, (m, n) =>
-        // Similarly to non-codegen code. The result of `divide(Int, Long, ...)` must fit to `Int`.
-        // Casting to `Int` is safe here.
-        s"""
-           |${divideByZeroCheckCodegen(right.dataType, n)}
-           |$checkIntegralDivideOverflow
-           |${ev.value} = ($javaType)$math.divide($m, $n, java.math.RoundingMode.HALF_UP);
-        """.stripMargin)
-    case _: DecimalType =>
-      nullSafeCodeGen(ctx, ev, (m, n) =>
-        s"""
-           |${divideByZeroCheckCodegen(right.dataType, n)}
-           |${ev.value} = ((new Decimal()).set($m).$$div($n)).toJavaBigDecimal()
-           |  .setScale(0, java.math.RoundingMode.HALF_UP).intValueExact();
-         """.stripMargin)
-    case _: FractionalType =>
-      val math = classOf[DoubleMath].getName
-      nullSafeCodeGen(ctx, ev, (m, n) =>
-        s"""
-           |${divideByZeroCheckCodegen(right.dataType, n)}
-           |${ev.value} = $math.roundToInt($m / (double)$n, java.math.RoundingMode.HALF_UP);
-         """.stripMargin)
+  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
+    val errorContext = ctx.addReferenceObj("errCtx", origin.context)
+    right.dataType match {
+      case t: IntegralType =>
+        val math = t match {
+          case LongType => classOf[LongMath].getName
+          case _ => classOf[IntMath].getName
+        }
+        val javaType = CodeGenerator.javaType(dataType)
+        val months = left.genCode(ctx)
+        val num = right.genCode(ctx)
+        val checkIntegralDivideOverflow =
+          s"""
+             |if (${months.value} == ${Int.MinValue} && ${num.value} == -1)
+             |  throw QueryExecutionErrors.overflowInIntegralDivideError($errorContext);
+             |""".stripMargin
+        nullSafeCodeGen(ctx, ev, (m, n) =>
+          // As in the non-codegen path, the result of `divide(Int, Long, ...)` must fit
+          // into `Int`, so casting to `Int` is safe here.
+          s"""
+             |${divideByZeroCheckCodegen(right.dataType, n, errorContext)}
+             |$checkIntegralDivideOverflow
+             |${ev.value} = ($javaType)$math.divide($m, $n, java.math.RoundingMode.HALF_UP);
+          """.stripMargin)
+      case _: DecimalType =>
+        nullSafeCodeGen(ctx, ev, (m, n) =>
+          s"""
+             |${divideByZeroCheckCodegen(right.dataType, n, errorContext)}
+             |${ev.value} = ((new Decimal()).set($m).$$div($n)).toJavaBigDecimal()
+             |  .setScale(0, java.math.RoundingMode.HALF_UP).intValueExact();
+          """.stripMargin)
+      case _: FractionalType =>
+        val math = classOf[DoubleMath].getName
+        nullSafeCodeGen(ctx, ev, (m, n) =>
+          s"""
+             |${divideByZeroCheckCodegen(right.dataType, n, errorContext)}
+             |${ev.value} = $math.roundToInt($m / (double)$n, java.math.RoundingMode.HALF_UP);
+          """.stripMargin)
+    }
   }
 
   override def toString: String = s"($left / $right)"
@@ -721,41 +734,44 @@ case class DivideDTInterval(
   }
 
   override def nullSafeEval(interval: Any, num: Any): Any = {
-    checkDivideOverflow(interval.asInstanceOf[Long], Long.MinValue, right, num)
-    divideByZeroCheck(right.dataType, num)
+    checkDivideOverflow(interval.asInstanceOf[Long], Long.MinValue, right, num, origin.context)
+    divideByZeroCheck(right.dataType, num, origin.context)
     evalFunc(interval.asInstanceOf[Long], num)
   }
 
-  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = right.dataType match {
-    case _: IntegralType =>
-      val math = classOf[LongMath].getName
-      val micros = left.genCode(ctx)
-      val num = right.genCode(ctx)
-      val checkIntegralDivideOverflow =
-        s"""
-           |if (${micros.value} == ${Long.MinValue}L && ${num.value} == -1L)
-           |  throw QueryExecutionErrors.overflowInIntegralDivideError();
-           |""".stripMargin
-      nullSafeCodeGen(ctx, ev, (m, n) =>
-        s"""
-           |${divideByZeroCheckCodegen(right.dataType, n)}
-           |$checkIntegralDivideOverflow
-           |${ev.value} = $math.divide($m, $n, java.math.RoundingMode.HALF_UP);
-        """.stripMargin)
-    case _: DecimalType =>
-      nullSafeCodeGen(ctx, ev, (m, n) =>
-        s"""
-           |${divideByZeroCheckCodegen(right.dataType, n)}
-           |${ev.value} = ((new Decimal()).set($m).$$div($n)).toJavaBigDecimal()
-           |  .setScale(0, java.math.RoundingMode.HALF_UP).longValueExact();
-         """.stripMargin)
-    case _: FractionalType =>
-      val math = classOf[DoubleMath].getName
-      nullSafeCodeGen(ctx, ev, (m, n) =>
-        s"""
-           |${divideByZeroCheckCodegen(right.dataType, n)}
-           |${ev.value} = $math.roundToLong($m / (double)$n, java.math.RoundingMode.HALF_UP);
-         """.stripMargin)
+  override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
+    val errorContext = ctx.addReferenceObj("errCtx", origin.context)
+    right.dataType match {
+      case _: IntegralType =>
+        val math = classOf[LongMath].getName
+        val micros = left.genCode(ctx)
+        val num = right.genCode(ctx)
+        val checkIntegralDivideOverflow =
+          s"""
+             |if (${micros.value} == ${Long.MinValue}L && ${num.value} == -1L)
+             |  throw QueryExecutionErrors.overflowInIntegralDivideError($errorContext);
+             |""".stripMargin
+        nullSafeCodeGen(ctx, ev, (m, n) =>
+          s"""
+             |${divideByZeroCheckCodegen(right.dataType, n, errorContext)}
+             |$checkIntegralDivideOverflow
+             |${ev.value} = $math.divide($m, $n, java.math.RoundingMode.HALF_UP);
+          """.stripMargin)
+      case _: DecimalType =>
+        nullSafeCodeGen(ctx, ev, (m, n) =>
+          s"""
+             |${divideByZeroCheckCodegen(right.dataType, n, errorContext)}
+             |${ev.value} = ((new Decimal()).set($m).$$div($n)).toJavaBigDecimal()
+             |  .setScale(0, java.math.RoundingMode.HALF_UP).longValueExact();
+          """.stripMargin)
+      case _: FractionalType =>
+        val math = classOf[DoubleMath].getName
+        nullSafeCodeGen(ctx, ev, (m, n) =>
+          s"""
+             |${divideByZeroCheckCodegen(right.dataType, n, errorContext)}
+             |${ev.value} = $math.roundToLong($m / (double)$n, java.math.RoundingMode.HALF_UP);
+          """.stripMargin)
+    }
   }
 
   override def toString: String = s"($left / $right)"
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
index ceed8df..f05e320 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/IntervalUtils.scala
@@ -733,7 +733,7 @@ object IntervalUtils {
    * @throws ArithmeticException if the result overflows any field value or divided by zero
    */
   def divideExact(interval: CalendarInterval, num: Double): CalendarInterval = {
-    if (num == 0) throw QueryExecutionErrors.divideByZeroError()
+    if (num == 0) throw QueryExecutionErrors.divideByZeroError("")
     fromDoubles(interval.months / num, interval.days / num, interval.microseconds / num)
   }
 
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 8372150..a83b7b2 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -137,9 +137,9 @@ object QueryExecutionErrors {
       messageParameters = Array(funcCls, inputTypes, outputType), e)
   }
 
-  def divideByZeroError(): ArithmeticException = {
+  def divideByZeroError(context: String): ArithmeticException = {
     new SparkArithmeticException(
-      errorClass = "DIVIDE_BY_ZERO", messageParameters = Array(SQLConf.ANSI_ENABLED.key))
+      errorClass = "DIVIDE_BY_ZERO", messageParameters = Array(SQLConf.ANSI_ENABLED.key, context))
   }
 
   def invalidArrayIndexError(index: Int, numElements: Int): ArrayIndexOutOfBoundsException = {
@@ -216,8 +216,8 @@ object QueryExecutionErrors {
     arithmeticOverflowError("Overflow in sum of decimals")
   }
 
-  def overflowInIntegralDivideError(): ArithmeticException = {
-    arithmeticOverflowError("Overflow in integral divide", "try_divide")
+  def overflowInIntegralDivideError(context: String): ArithmeticException = {
+    arithmeticOverflowError("Overflow in integral divide", "try_divide", context)
   }
 
   def mapSizeExceedArraySizeWhenZipMapError(size: Int): RuntimeException = {
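 
Note: on the codegen paths above (`arithmetic.scala`, `intervalExpressions.scala`), the context reaches these methods via `ctx.addReferenceObj("errCtx", origin.context)`, which stores the planning-time context String in the generated class's references array and returns a Java expression (something like `((java.lang.String) references[0] /* errCtx */)`) to splice into the generated throw site. A hedged sketch of the pattern, condensed from the diff:
```
val errorContext = ctx.addReferenceObj("errCtx", origin.context)
// The returned Java expression is embedded directly in the generated source:
val failOnErrorBranch =
  s"if ($isZero) throw QueryExecutionErrors.divideByZeroError($errorContext);"
```
 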
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
index c992b4d..7624828 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/ArithmeticExpressionSuite.scala
@@ -253,6 +253,19 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper
     }
   }
 
+  test("Divide: divide by 0 exception should contain SQL text context") {
+    val query = "1234.5D / 0"
+    val o = Origin(
+      line = Some(1),
+      startPosition = Some(7),
+      startIndex = Some(7),
+      sqlText = Some(s"select $query"))
+    withOrigin(o) {
+      val expr = Divide(Literal(1234.5, DoubleType), Literal(0.0, DoubleType), failOnError = true)
+      checkExceptionInExpression[ArithmeticException](expr, EmptyRow, query)
+    }
+  }
+
   private def testDecimalAndLongType(testFunc: (Int => Any) => Unit): Unit = {
     testFunc(_.toLong)
     testFunc(Decimal(_))
@@ -292,6 +305,23 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper
     }
   }
 
+  test("IntegralDivide: exception should contain SQL text context") {
+    Seq(-1L, 0L).foreach { right =>
+      val query = s"${Long.MinValue} div $right"
+      val o = Origin(
+        line = Some(1),
+        startPosition = Some(7),
+        startIndex = Some(7),
+        sqlText = Some(s"select $query"))
+      withOrigin(o) {
+        val expr =
+          IntegralDivide(
+            Literal(Long.MinValue, LongType), Literal(right, LongType), failOnError = true)
+        checkExceptionInExpression[ArithmeticException](expr, EmptyRow, query)
+      }
+    }
+  }
+
   test("% (Remainder)") {
     testNumericDataTypes { convert =>
       val left = Literal(convert(1))
@@ -323,6 +353,22 @@ class ArithmeticExpressionSuite extends SparkFunSuite with ExpressionEvalHelper
     }
   }
 
+  test("Remainder/Pmod: exception should contain SQL text context") {
+    Seq(
+      Remainder(Literal(1L, LongType), Literal(0L, LongType), failOnError = true),
+      Pmod(Literal(1L, LongType), Literal(0L, LongType), failOnError = true)).foreach { expr =>
+        val query = s"1L ${expr.symbol} 0L"
+        val o = Origin(
+          line = Some(1),
+          startPosition = Some(7),
+          startIndex = Some(7),
+          sqlText = Some(s"select $query"))
+        withOrigin(o) {
+          checkExceptionInExpression[ArithmeticException](expr, EmptyRow, query)
+        }
+    }
+  }
+
   test("SPARK-17617: % (Remainder) double % double on super big double") {
     val leftDouble = Literal(-5083676433652386516D)
     val rightDouble = Literal(10D)
diff --git a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
index 2f46111..e468612 100644
--- a/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/ansi/interval.sql.out
@@ -211,6 +211,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 7) ==
+select interval '2 seconds' / 0
+       ^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -244,6 +247,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 7) ==
+select interval '2' year / 0
+       ^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -1998,6 +2004,9 @@ struct<>
 -- !query output
 java.lang.ArithmeticException
 Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -2007,6 +2016,9 @@ struct<>
 -- !query output
 java.lang.ArithmeticException
 Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1L
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -2050,6 +2062,9 @@ struct<>
 -- !query output
 java.lang.ArithmeticException
 Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -2059,6 +2074,9 @@ struct<>
 -- !query output
 java.lang.ArithmeticException
 Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1L
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/interval.sql.out b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
index 7aa0c69..df1db77 100644
--- a/sql/core/src/test/resources/sql-tests/results/interval.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/interval.sql.out
@@ -205,6 +205,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 7) ==
+select interval '2 seconds' / 0
+       ^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -238,6 +241,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 7) ==
+select interval '2' year / 0
+       ^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -1987,6 +1993,9 @@ struct<>
 -- !query output
 java.lang.ArithmeticException
 Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -1996,6 +2005,9 @@ struct<>
 -- !query output
 java.lang.ArithmeticException
 Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT (INTERVAL '-178956970-8' YEAR TO MONTH) / -1L
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -2039,6 +2051,9 @@ struct<>
 -- !query output
 java.lang.ArithmeticException
 Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -2048,6 +2063,9 @@ struct<>
 -- !query output
 java.lang.ArithmeticException
 Overflow in integral divide. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 7) ==
+SELECT (INTERVAL '-106751991 04:00:54.775808' DAY TO SECOND) / -1L
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/case.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/case.sql.out
index b6e952a..3d91e42 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/case.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/case.sql.out
@@ -180,6 +180,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 26) ==
+SELECT CASE WHEN 1=0 THEN 1/0 WHEN 1=1 THEN 1 ELSE 2/0 END
+                          ^^^
 
 
 -- !query
@@ -189,6 +192,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 26) ==
+SELECT CASE 1 WHEN 0 THEN 1/0 WHEN 1 THEN 1 ELSE 2/0 END
+                          ^^^
 
 
 -- !query
@@ -198,6 +204,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 30) ==
+SELECT CASE WHEN i > 100 THEN 1/0 ELSE 0 END FROM case_tbl
+                              ^^^
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
index af30653c..bb4a770 100755
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/int8.sql.out
@@ -576,6 +576,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 7) ==
+select bigint('9223372036854775800') / bigint('0')
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -585,6 +588,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 7) ==
+select bigint('-9223372036854775808') / smallint('0')
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
@@ -594,6 +600,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 7) ==
+select smallint('100') / bigint('0')
+       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out
index 3382c86..0f6492d 100644
--- a/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/postgreSQL/select_having.sql.out
@@ -178,6 +178,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 39) ==
+...1 AS one FROM test_having WHERE 1/a = 1 HAVING 1 < 2
+                                   ^^^
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-case.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-case.sql.out
index 6073fef..8d4f152 100755
--- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-case.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-case.sql.out
@@ -180,6 +180,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 31) ==
+SELECT CASE WHEN udf(1=0) THEN 1/0 WHEN 1=1 THEN 1 ELSE 2/0 END
+                               ^^^
 
 
 -- !query
@@ -189,6 +192,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 54) ==
+...HEN 1/udf(0) WHEN 1 THEN 1 ELSE 2/0 END
+                                   ^^^
 
 
 -- !query
@@ -198,6 +204,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 34) ==
+...LECT CASE WHEN i > 100 THEN udf(1/0) ELSE udf(0) END FROM case_tbl
+                                   ^^^
 
 
 -- !query
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out
index 05c8103..9d30973 100644
--- a/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/postgreSQL/udf-select_having.sql.out
@@ -178,6 +178,9 @@ struct<>
 -- !query output
 org.apache.spark.SparkArithmeticException
 divide by zero. To return NULL instead, use 'try_divide'. If necessary set spark.sql.ansi.enabled to false (except for ANSI interval type) to bypass this error.
+== SQL(line 1, position 39) ==
+...1 AS one FROM test_having WHERE 1/udf(a) = 1 HAVING 1 < 2
+                                   ^^^^^^^^
 
 
 -- !query

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org