You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ma...@apache.org on 2023/02/08 14:06:25 UTC
[spark] branch branch-3.4 updated: [SPARK-42303][SQL] Assign name to _LEGACY_ERROR_TEMP_1326
This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch branch-3.4
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.4 by this push:
new a381068e147 [SPARK-42303][SQL] Assign name to _LEGACY_ERROR_TEMP_1326
a381068e147 is described below
commit a381068e147c7425ee788a999d94064be9c8b47f
Author: itholic <ha...@databricks.com>
AuthorDate: Wed Feb 8 19:05:29 2023 +0500
[SPARK-42303][SQL] Assign name to _LEGACY_ERROR_TEMP_1326
### What changes were proposed in this pull request?
This PR proposes to assign a name to _LEGACY_ERROR_TEMP_1326: "CANNOT_MODIFY_CONFIG".
### Why are the changes needed?
We should assign a proper name to each _LEGACY_ERROR_TEMP_* error class.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
`./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite*"`
Closes #39873 from itholic/LEGACY_1326.
Authored-by: itholic <ha...@databricks.com>
Signed-off-by: Max Gekk <ma...@gmail.com>
(cherry picked from commit d4e5df827de9eab91cfe563a729f2f46c5830351)
Signed-off-by: Max Gekk <ma...@gmail.com>
---
core/src/main/resources/error/error-classes.json | 13 +++++++------
.../apache/spark/sql/errors/QueryCompilationErrors.scala | 4 ++--
.../scala/org/apache/spark/sql/internal/SQLConfSuite.scala | 9 ++++++---
3 files changed, 15 insertions(+), 11 deletions(-)
diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index 522cdd6095a..7126c0bc047 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -76,6 +76,13 @@
"Could not load Protobuf class with name <protobufClassName>. <explanation>."
]
},
+ "CANNOT_MODIFY_CONFIG" : {
+ "message" : [
+ "Cannot modify the value of the Spark config: <key>.",
+ "See also '<docroot>/sql-migration-guide.html#ddl-statements'."
+ ],
+ "sqlState" : "46110"
+ },
"CANNOT_PARSE_DECIMAL" : {
"message" : [
"Cannot parse decimal."
@@ -3510,12 +3517,6 @@
"Cannot modify the value of a static config: <key>."
]
},
- "_LEGACY_ERROR_TEMP_1326" : {
- "message" : [
- "Cannot modify the value of a Spark config: <key>.",
- "See also '<docroot>/sql-migration-guide.html#ddl-statements'."
- ]
- },
"_LEGACY_ERROR_TEMP_1327" : {
"message" : [
"Command execution is not supported in runner <runner>."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 9da7c7bccd7..a08f72e8313 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -2989,8 +2989,8 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
def cannotModifyValueOfSparkConfigError(key: String, docroot: String): Throwable = {
new AnalysisException(
- errorClass = "_LEGACY_ERROR_TEMP_1326",
- messageParameters = Map("key" -> key, "docroot" -> docroot))
+ errorClass = "CANNOT_MODIFY_CONFIG",
+ messageParameters = Map("key" -> toSQLConf(key), "docroot" -> docroot))
}
def commandExecutionInRunnerUnsupportedError(runner: String): Throwable = {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index b3b2912cd6c..30f4fdfbbcf 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -22,6 +22,7 @@ import java.util.TimeZone
import org.apache.hadoop.fs.Path
import org.apache.logging.log4j.Level
+import org.apache.spark.SPARK_DOC_ROOT
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.catalyst.util.DateTimeTestUtils.MIT
@@ -204,9 +205,11 @@ class SQLConfSuite extends QueryTest with SharedSparkSession {
sql("RESET spark.app.id")
assert(spark.conf.get("spark.app.id") === appId, "Should not change spark core ones")
// spark core conf w/ entry registered
- val e1 = intercept[AnalysisException](sql("RESET spark.executor.cores"))
- val str_match = "Cannot modify the value of a Spark config: spark.executor.cores"
- assert(e1.getMessage.contains(str_match))
+ checkError(
+ exception = intercept[AnalysisException](sql("RESET spark.executor.cores")),
+ errorClass = "CANNOT_MODIFY_CONFIG",
+ parameters = Map("key" -> "\"spark.executor.cores\"", "docroot" -> SPARK_DOC_ROOT)
+ )
// user defined settings
sql("SET spark.abc=xyz")
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org