You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ma...@apache.org on 2023/02/06 10:48:32 UTC
[spark] branch master updated: [SPARK-42320][SQL] Assign name to _LEGACY_ERROR_TEMP_2188
This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
new 17e3ee03268 [SPARK-42320][SQL] Assign name to _LEGACY_ERROR_TEMP_2188
17e3ee03268 is described below
commit 17e3ee03268f1e1a9f453900065871f4639f0a1d
Author: itholic <ha...@databricks.com>
AuthorDate: Mon Feb 6 13:48:15 2023 +0300
[SPARK-42320][SQL] Assign name to _LEGACY_ERROR_TEMP_2188
### What changes were proposed in this pull request?
This PR proposes to assign a name, "CANNOT_RECOGNIZE_HIVE_TYPE", to the error class _LEGACY_ERROR_TEMP_2188.
### Why are the changes needed?
We should assign a proper name to each _LEGACY_ERROR_TEMP_* error class.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
`./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite*"`
Closes #39888 from itholic/LEGACY_2188.
Authored-by: itholic <ha...@databricks.com>
Signed-off-by: Max Gekk <ma...@gmail.com>
---
core/src/main/resources/error/error-classes.json | 11 ++++++-----
.../spark/sql/errors/QueryExecutionErrors.scala | 6 +++---
.../spark/sql/hive/execution/HiveDDLSuite.scala | 23 +++++++++++++---------
3 files changed, 23 insertions(+), 17 deletions(-)
diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index afabc56a431..88565eb9f1a 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -99,6 +99,12 @@
],
"sqlState" : "22007"
},
+ "CANNOT_RECOGNIZE_HIVE_TYPE" : {
+ "message" : [
+ "Cannot recognize hive type string: <fieldType>, column: <fieldName>."
+ ],
+ "sqlState" : "429BB"
+ },
"CANNOT_RESTORE_PERMISSIONS_FOR_PATH" : {
"message" : [
"Failed to set permissions on created path <path> back to <permission>."
@@ -4470,11 +4476,6 @@
"<message>, db: <dbName>, table: <tableName>."
]
},
- "_LEGACY_ERROR_TEMP_2188" : {
- "message" : [
- "Cannot recognize hive type string: <fieldType>, column: <fieldName>."
- ]
- },
"_LEGACY_ERROR_TEMP_2189" : {
"message" : [
"Hive 2.2 and lower versions don't support getTablesByType. Please use Hive 2.3 or higher version."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index b3bd7b727bf..54ae497a114 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1844,10 +1844,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
def cannotRecognizeHiveTypeError(
e: ParseException, fieldType: String, fieldName: String): Throwable = {
new SparkException(
- errorClass = "_LEGACY_ERROR_TEMP_2188",
+ errorClass = "CANNOT_RECOGNIZE_HIVE_TYPE",
messageParameters = Map(
- "fieldType" -> fieldType,
- "fieldName" -> fieldName),
+ "fieldType" -> toSQLType(fieldType),
+ "fieldName" -> toSQLId(fieldName)),
cause = e)
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 445477dc024..e09b923ee51 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -156,16 +156,21 @@ class HiveCatalogedDDLSuite extends DDLSuite with TestHiveSingleton with BeforeA
}
test("SPARK-22431: illegal nested type") {
- val queries = Seq(
- "CREATE TABLE t USING hive AS SELECT STRUCT('a' AS `$a`, 1 AS b) q",
- "CREATE TABLE t(q STRUCT<`$a`:INT, col2:STRING>, i1 INT) USING hive")
+ checkError(
+ exception = intercept[SparkException] {
+ spark.sql("CREATE TABLE t USING hive AS SELECT STRUCT('a' AS `$a`, 1 AS b) q")
+ },
+ errorClass = "CANNOT_RECOGNIZE_HIVE_TYPE",
+ parameters = Map("fieldType" -> "\"STRUCT<$A:STRING,B:INT>\"", "fieldName" -> "`q`")
+ )
- queries.foreach(query => {
- val err = intercept[SparkException] {
- spark.sql(query)
- }.getMessage
- assert(err.contains("Cannot recognize hive type string"))
- })
+ checkError(
+ exception = intercept[SparkException] {
+ spark.sql("CREATE TABLE t(q STRUCT<`$a`:INT, col2:STRING>, i1 INT) USING hive")
+ },
+ errorClass = "CANNOT_RECOGNIZE_HIVE_TYPE",
+ parameters = Map("fieldType" -> "\"STRUCT<$A:INT,COL2:STRING>\"", "fieldName" -> "`q`")
+ )
withView("v") {
spark.sql("CREATE VIEW v AS SELECT STRUCT('a' AS `a`, 1 AS b) q")
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org