Posted to commits@spark.apache.org by ma...@apache.org on 2023/10/10 12:36:34 UTC

[spark] branch master updated: [SPARK-45213][SQL] Assign name to the error _LEGACY_ERROR_TEMP_2151

This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 6373f19f537 [SPARK-45213][SQL] Assign name to the error _LEGACY_ERROR_TEMP_2151
6373f19f537 is described below

commit 6373f19f537f69c6460b2e4097f19903c01a608f
Author: dengziming <de...@gmail.com>
AuthorDate: Tue Oct 10 15:36:18 2023 +0300

    [SPARK-45213][SQL] Assign name to the error _LEGACY_ERROR_TEMP_2151
    
    ### What changes were proposed in this pull request?
    Assign the name `EXPRESSION_DECODING_FAILED` to the legacy error class `_LEGACY_ERROR_TEMP_2151`.
    
    ### Why are the changes needed?
    To assign a proper name as part of the activity in SPARK-37935.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes. The error message now includes the error class name `EXPRESSION_DECODING_FAILED` instead of the legacy identifier `_LEGACY_ERROR_TEMP_2151`.
    
    ### How was this patch tested?
    Via existing unit tests that produce the error from user code (`EncoderResolutionSuite` and `DatasetSuite`, updated below); a sketch of such a trigger follows the diff.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #43029 from dengziming/SPARK-45213.
    
    Authored-by: dengziming <de...@gmail.com>
    Signed-off-by: Max Gekk <ma...@gmail.com>
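
The practical effect of the rename, illustrated with a hypothetical handler (the message rendering shown in the comments is approximate and not taken from this patch): the thrown SparkRuntimeException now reports the named error class, and the exception that actually broke decoding travels as the cause rather than being inlined into the message text.

    import org.apache.spark.SparkRuntimeException

    // Hypothetical handler, not part of this patch.
    def describeDecodingFailure(ex: SparkRuntimeException): Unit = {
      // The error class is now a stable, documented name.
      assert(ex.getErrorClass == "EXPRESSION_DECODING_FAILED")
      // getMessage renders roughly as:
      //   [EXPRESSION_DECODING_FAILED] Failed to decode a row to a value of
      //   the expressions: <decoder expressions, one per line>.
      println(ex.getMessage)
      // The exception that caused the failure is no longer part of the message
      // text; it is attached as the cause.
      println(ex.getCause)
    }
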
---
 common/utils/src/main/resources/error/error-classes.json      | 11 +++++------
 docs/sql-error-conditions.md                                  |  6 ++++++
 .../org/apache/spark/sql/errors/QueryExecutionErrors.scala    |  3 +--
 .../spark/sql/catalyst/encoders/EncoderResolutionSuite.scala  |  2 +-
 .../src/test/scala/org/apache/spark/sql/DatasetSuite.scala    |  5 ++---
 5 files changed, 15 insertions(+), 12 deletions(-)

diff --git a/common/utils/src/main/resources/error/error-classes.json b/common/utils/src/main/resources/error/error-classes.json
index 690d1ae1a14..1239793b3f9 100644
--- a/common/utils/src/main/resources/error/error-classes.json
+++ b/common/utils/src/main/resources/error/error-classes.json
@@ -921,6 +921,11 @@
       }
     }
   },
+  "EXPRESSION_DECODING_FAILED" : {
+    "message" : [
+      "Failed to decode a row to a value of the expressions: <expressions>."
+    ]
+  },
   "EXPRESSION_TYPE_IS_NOT_ORDERABLE" : {
     "message" : [
       "Column expression <expr> cannot be sorted because its type <exprType> is not orderable."
@@ -5524,12 +5529,6 @@
       "Due to Scala's limited support of tuple, tuple with more than 22 elements are not supported."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2151" : {
-    "message" : [
-      "Error while decoding: <e>",
-      "<expressions>."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2152" : {
     "message" : [
       "Error while encoding: <e>",
diff --git a/docs/sql-error-conditions.md b/docs/sql-error-conditions.md
index fda10eceb97..b4ee7358b52 100644
--- a/docs/sql-error-conditions.md
+++ b/docs/sql-error-conditions.md
@@ -551,6 +551,12 @@ The table `<tableName>` does not support `<operation>`.
 
 For more details see [EXPECT_VIEW_NOT_TABLE](sql-error-conditions-expect-view-not-table-error-class.html)
 
+### EXPRESSION_DECODING_FAILED
+
+SQLSTATE: none assigned
+
+Failed to decode a row to a value of the expressions: `<expressions>`.
+
 ### EXPRESSION_TYPE_IS_NOT_ORDERABLE
 
 SQLSTATE: none assigned
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index bd4d7a3be7f..5396ae5ff70 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1342,9 +1342,8 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase with ExecutionE
 
   def expressionDecodingError(e: Exception, expressions: Seq[Expression]): SparkRuntimeException = {
     new SparkRuntimeException(
-      errorClass = "_LEGACY_ERROR_TEMP_2151",
+      errorClass = "EXPRESSION_DECODING_FAILED",
       messageParameters = Map(
-        "e" -> e.toString(),
         "expressions" -> expressions.map(
           _.simpleString(SQLConf.get.maxToStringFields)).mkString("\n")),
       cause = e)
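
Since the `e` parameter is dropped here, the wrapped exception's text no longer appears in the outer message; code that previously matched on the message must unwrap the cause instead, which is exactly the adjustment the test updates below make. A minimal sketch of such a caller (hypothetical, not from this patch):

    import org.apache.spark.SparkRuntimeException

    // Before: ex.getMessage contained the wrapped exception's text (the "e" parameter).
    // After this change: read it from the cause.
    def underlyingErrorText(ex: SparkRuntimeException): String =
      Option(ex.getCause).map(_.getMessage).getOrElse(ex.getMessage)
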
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderResolutionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderResolutionSuite.scala
index f4106e65e7c..7f54987ee7e 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderResolutionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/EncoderResolutionSuite.scala
@@ -172,7 +172,7 @@ class EncoderResolutionSuite extends PlanTest {
     val e = intercept[RuntimeException] {
       fromRow(InternalRow(new GenericArrayData(Array(1, null))))
     }
-    assert(e.getMessage.contains("Null value appeared in non-nullable field"))
+    assert(e.getCause.getMessage.contains("Null value appeared in non-nullable field"))
   }
 
   test("the real number of fields doesn't match encoder schema: tuple encoder") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
index ca06e0f2bf2..2579b52bf72 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala
@@ -1220,7 +1220,7 @@ class DatasetSuite extends QueryTest
 
     val message = intercept[RuntimeException] {
       buildDataset(Row(Row("hello", null))).collect()
-    }.getMessage
+    }.getCause.getMessage
 
     assert(message.contains("Null value appeared in non-nullable field"))
   }
@@ -2592,9 +2592,8 @@ class DatasetSuite extends QueryTest
       // Expression decoding error
       checkError(
         exception = exception,
-        errorClass = "_LEGACY_ERROR_TEMP_2151",
+        errorClass = "EXPRESSION_DECODING_FAILED",
         parameters = Map(
-          "e" -> exception.getCause.toString(),
           "expressions" -> expressions.map(
             _.simpleString(SQLConf.get.maxToStringFields)).mkString("\n"))
       )
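
For context, here is a minimal sketch of user code that can reach this error path, in the style of the DatasetSuite test above. It assumes a SparkSession named `spark`, `import spark.implicits._`, and ScalaTest's `intercept`; the exact trigger is illustrative and not taken from this patch.

    import org.apache.spark.SparkRuntimeException
    import org.apache.spark.sql.Row
    import org.apache.spark.sql.types.{IntegerType, StringType, StructType}

    // Defined at suite/object level so Spark can derive an encoder for it.
    case class ClassData(a: String, b: Int)  // b maps to a non-nullable Int

    val schema = new StructType().add("a", StringType).add("b", IntegerType)
    val ds = spark
      .createDataFrame(java.util.Arrays.asList(Row("hello", null)), schema)
      .as[ClassData]

    val ex = intercept[SparkRuntimeException] {
      ds.collect()  // decoding the null into the non-nullable Int field fails here
    }
    assert(ex.getErrorClass == "EXPRESSION_DECODING_FAILED")
    assert(ex.getCause.getMessage.contains("Null value appeared in non-nullable field"))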

