Posted to commits@spark.apache.org by ma...@apache.org on 2022/09/26 11:33:10 UTC

[spark] branch master updated: [SPARK-40560][SQL] Rename `message` to `messageTemplate` in the `STANDARD` format of errors

This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 8fdaf548bcc [SPARK-40560][SQL] Rename `message` to `messageTemplate` in the `STANDARD` format of errors
8fdaf548bcc is described below

commit 8fdaf548bcc51630f7cfae8a17930c987b29fbd3
Author: Max Gekk <ma...@gmail.com>
AuthorDate: Mon Sep 26 14:31:55 2022 +0300

    [SPARK-40560][SQL] Rename `message` to `messageTemplate` in the `STANDARD` format of errors
    
    ### What changes were proposed in this pull request?
    In the `STANDARD` format of error messages, rename the `message` field to `messageTemplate`.
    
    ### Why are the changes needed?
    Because the field actually contains an error message template with unresolved
    placeholders such as `<config>`, not the final, rendered message.
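    
    The `<...>` placeholders are filled in from `messageParameters`. As a rough
    illustration (a minimal, hypothetical sketch with made-up names, not Spark's
    actual rendering code), a client could expand such a template like this:
    ```
    object MessageTemplateSketch {
      // Substitute each <name> placeholder with its value from messageParameters.
      def render(messageTemplate: String, messageParameters: Map[String, String]): String =
        messageParameters.foldLeft(messageTemplate) {
          case (msg, (name, value)) => msg.replace(s"<$name>", value)
        }

      def main(args: Array[String]): Unit = {
        val template = "Division by zero. Use `try_divide` to tolerate divisor being 0 " +
          "and return NULL instead. If necessary set <config> to \"false\" to bypass this error."
        val params = Map("config" -> "\"spark.sql.ansi.enabled\"")
        println(render(template, params))
      }
    }
    ```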
    
    ### Does this PR introduce _any_ user-facing change?
    Yes. With the `STANDARD` error message format, the JSON output now contains a
    `messageTemplate` field instead of `message`.
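    
    For illustration, the expected `STANDARD` output for `DIVIDE_BY_ZERO` in the
    updated `SparkThrowableSuite` now begins with:
    ```
    {
      "errorClass" : "DIVIDE_BY_ZERO",
      "messageTemplate" : "Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set <config> to \"false\" to bypass this error.",
      "sqlState" : "22012",
      "messageParameters" : {
        "config" : "CONFIG"
      }
      ...
    ```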
    
    ### How was this patch tested?
    By running the affected test suites:
    ```
    $ build/sbt "core/testOnly *SparkThrowableSuite"
    $ build/sbt -Phive-2.3 -Phive-thriftserver "test:testOnly org.apache.spark.sql.hive.thriftserver.CliSuite"
    $ build/sbt -Phive -Phive-thriftserver "test:testOnly *ThriftServerWithSparkContextInBinarySuite"
    ```
    
    Closes #37997 from MaxGekk/messageTemplate.
    
    Authored-by: Max Gekk <ma...@gmail.com>
    Signed-off-by: Max Gekk <ma...@gmail.com>
---
 core/src/main/scala/org/apache/spark/ErrorClassesJSONReader.scala | 8 ++++----
 core/src/main/scala/org/apache/spark/SparkThrowableHelper.scala   | 2 +-
 core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala    | 6 +++---
 .../scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala   | 2 +-
 .../sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala | 2 +-
 5 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/ErrorClassesJSONReader.scala b/core/src/main/scala/org/apache/spark/ErrorClassesJSONReader.scala
index 8d4ae3a877d..9d6dd9dde07 100644
--- a/core/src/main/scala/org/apache/spark/ErrorClassesJSONReader.scala
+++ b/core/src/main/scala/org/apache/spark/ErrorClassesJSONReader.scala
@@ -74,12 +74,12 @@ class ErrorClassesJsonReader(jsonFileURLs: Seq[URL]) {
     assert(errorInfo.subClass.isDefined == subErrorClass.isDefined)
 
     if (subErrorClass.isEmpty) {
-      errorInfo.messageFormat
+      errorInfo.messageTemplate
     } else {
       val errorSubInfo = errorInfo.subClass.get.getOrElse(
         subErrorClass.get,
         throw SparkException.internalError(s"Cannot find sub error class '$errorClass'"))
-      errorInfo.messageFormat + " " + errorSubInfo.messageFormat
+      errorInfo.messageTemplate + " " + errorSubInfo.messageTemplate
     }
   }
 
@@ -102,7 +102,7 @@ private case class ErrorInfo(
     sqlState: Option[String]) {
   // For compatibility with multi-line error messages
   @JsonIgnore
-  val messageFormat: String = message.mkString("\n")
+  val messageTemplate: String = message.mkString("\n")
 }
 
 /**
@@ -114,5 +114,5 @@ private case class ErrorInfo(
 private case class ErrorSubInfo(message: Seq[String]) {
   // For compatibility with multi-line error messages
   @JsonIgnore
-  val messageFormat: String = message.mkString("\n")
+  val messageTemplate: String = message.mkString("\n")
 }
diff --git a/core/src/main/scala/org/apache/spark/SparkThrowableHelper.scala b/core/src/main/scala/org/apache/spark/SparkThrowableHelper.scala
index d503f400d00..9073a73dec4 100644
--- a/core/src/main/scala/org/apache/spark/SparkThrowableHelper.scala
+++ b/core/src/main/scala/org/apache/spark/SparkThrowableHelper.scala
@@ -92,7 +92,7 @@ private[spark] object SparkThrowableHelper {
           if (errorSubClass != null) g.writeStringField("errorSubClass", errorSubClass)
           if (format == STANDARD) {
             val finalClass = errorClass + Option(errorSubClass).map("." + _).getOrElse("")
-            g.writeStringField("message", errorReader.getMessageTemplate(finalClass))
+            g.writeStringField("messageTemplate", errorReader.getMessageTemplate(finalClass))
           }
           val sqlState = e.getSqlState
           if (sqlState != null) g.writeStringField("sqlState", sqlState)
diff --git a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
index 266683b1eca..191304bc353 100644
--- a/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
+++ b/core/src/test/scala/org/apache/spark/SparkThrowableSuite.scala
@@ -128,7 +128,7 @@ class SparkThrowableSuite extends SparkFunSuite {
   test("Message format invariants") {
     val messageFormats = errorReader.errorInfoMap
       .filterKeys(!_.startsWith("_LEGACY_ERROR_TEMP_"))
-      .values.toSeq.flatMap { i => Seq(i.messageFormat) }
+      .values.toSeq.flatMap { i => Seq(i.messageTemplate) }
     checkCondition(messageFormats, s => s != null)
     checkIfUnique(messageFormats)
   }
@@ -282,7 +282,7 @@ class SparkThrowableSuite extends SparkFunSuite {
     assert(SparkThrowableHelper.getMessage(e, STANDARD) ===
       """{
         |  "errorClass" : "DIVIDE_BY_ZERO",
-        |  "message" : "Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set <config> to \"false\" to bypass this error.",
+        |  "messageTemplate" : "Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set <config> to \"false\" to bypass this error.",
         |  "sqlState" : "22012",
         |  "messageParameters" : {
         |    "config" : "CONFIG"
@@ -304,7 +304,7 @@ class SparkThrowableSuite extends SparkFunSuite {
       """{
         |  "errorClass" : "UNSUPPORTED_SAVE_MODE",
         |  "errorSubClass" : "EXISTENT_PATH",
-        |  "message" : "The save mode <saveMode> is not supported for: an existent path.",
+        |  "messageTemplate" : "The save mode <saveMode> is not supported for: an existent path.",
         |  "messageParameters" : {
         |    "saveMode" : "UNSUPPORTED_MODE"
         |  }
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
index 6bbc26bc8ca..aa7b8876486 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
@@ -755,7 +755,7 @@ class CliSuite extends SparkFunSuite {
         errorMessage =
           """{
             |  "errorClass" : "DIVIDE_BY_ZERO",
-            |  "message" : "Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set <config> to \"false\" to bypass this error.",
+            |  "messageTemplate" : "Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set <config> to \"false\" to bypass this error.",
             |  "sqlState" : "22012",
             |  "messageParameters" : {
             |    "config" : "\"spark.sql.ansi.enabled\""
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
index b0db14b4228..0228f0ac6d2 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/ThriftServerWithSparkContextSuite.scala
@@ -191,7 +191,7 @@ trait ThriftServerWithSparkContextSuite extends SharedThriftServer {
       assert(e3.getMessage ===
         """{
           |  "errorClass" : "DIVIDE_BY_ZERO",
-          |  "message" : "Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set <config> to \"false\" to bypass this error.",
+          |  "messageTemplate" : "Division by zero. Use `try_divide` to tolerate divisor being 0 and return NULL instead. If necessary set <config> to \"false\" to bypass this error.",
           |  "sqlState" : "22012",
           |  "messageParameters" : {
           |    "config" : "\"spark.sql.ansi.enabled\""


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org