You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ma...@apache.org on 2023/02/08 19:18:52 UTC
[spark] branch branch-3.4 updated: [SPARK-42318][SPARK-42319][SQL] Assign name to _LEGACY_ERROR_TEMP_(2123|2125)
This is an automated email from the ASF dual-hosted git repository.
maxgekk pushed a commit to branch branch-3.4
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.4 by this push:
new 4c4585b29ef [SPARK-42318][SPARK-42319][SQL] Assign name to _LEGACY_ERROR_TEMP_(2123|2125)
4c4585b29ef is described below
commit 4c4585b29efe5eb02f3ec40fbcd8bbfa0e3d2b12
Author: itholic <ha...@databricks.com>
AuthorDate: Thu Feb 9 00:17:53 2023 +0500
[SPARK-42318][SPARK-42319][SQL] Assign name to _LEGACY_ERROR_TEMP_(2123|2125)
### What changes were proposed in this pull request?
This PR proposes to assign the name "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE" to _LEGACY_ERROR_TEMP_2123 and _LEGACY_ERROR_TEMP_2125.
### Why are the changes needed?
We should assign a proper name to each _LEGACY_ERROR_TEMP_* error class.
### Does this PR introduce _any_ user-facing change?
No
### How was this patch tested?
`./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite*"`
Closes #39891 from itholic/LEGACY_2125.
Authored-by: itholic <ha...@databricks.com>
Signed-off-by: Max Gekk <ma...@gmail.com>
(cherry picked from commit b11fba0b8402905c0d682f90939e64882f0fd7f5)
Signed-off-by: Max Gekk <ma...@gmail.com>
---
core/src/main/resources/error/error-classes.json | 16 ++++++----------
.../spark/sql/errors/QueryExecutionErrors.scala | 16 +++-------------
.../org/apache/spark/sql/types/StructType.scala | 3 ++-
.../org/apache/spark/sql/types/DataTypeSuite.scala | 8 +++-----
.../apache/spark/sql/types/StructTypeSuite.scala | 22 ++++++++++++++--------
.../execution/datasources/orc/OrcSourceSuite.scala | 12 ++++++++----
6 files changed, 36 insertions(+), 41 deletions(-)
diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index e95988a260a..65e7fb70bfe 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -76,6 +76,12 @@
"Could not load Protobuf class with name <protobufClassName>. <explanation>."
]
},
+ "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE" : {
+ "message" : [
+ "Failed to merge incompatible data types <left> and <right>."
+ ],
+ "sqlState" : "42825"
+ },
"CANNOT_MODIFY_CONFIG" : {
"message" : [
"Cannot modify the value of the Spark config: <key>.",
@@ -4183,21 +4189,11 @@
"Failed parsing <simpleString>: <raw>."
]
},
- "_LEGACY_ERROR_TEMP_2123" : {
- "message" : [
- "Failed to merge fields '<leftName>' and '<rightName>'. <message>"
- ]
- },
"_LEGACY_ERROR_TEMP_2124" : {
"message" : [
"Failed to merge decimal types with incompatible scale <leftScale> and <rightScale>."
]
},
- "_LEGACY_ERROR_TEMP_2125" : {
- "message" : [
- "Failed to merge incompatible data types <leftCatalogString> and <rightCatalogString>."
- ]
- },
"_LEGACY_ERROR_TEMP_2126" : {
"message" : [
"Unsuccessful attempt to build maps with <size> elements due to exceeding the map size limit <maxRoundedArrayLength>."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index 4abeeef0fc6..4134da135e3 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -1291,16 +1291,6 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
messageParameters = Map("simpleString" -> StructType.simpleString, "raw" -> raw))
}
- def failedMergingFieldsError(leftName: String, rightName: String, e: Throwable): Throwable = {
- new SparkException(
- errorClass = "_LEGACY_ERROR_TEMP_2123",
- messageParameters = Map(
- "leftName" -> leftName,
- "rightName" -> rightName,
- "message" -> e.getMessage),
- cause = null)
- }
-
def cannotMergeDecimalTypesWithIncompatibleScaleError(
leftScale: Int, rightScale: Int): Throwable = {
new SparkException(
@@ -1313,10 +1303,10 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
def cannotMergeIncompatibleDataTypesError(left: DataType, right: DataType): Throwable = {
new SparkException(
- errorClass = "_LEGACY_ERROR_TEMP_2125",
+ errorClass = "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE",
messageParameters = Map(
- "leftCatalogString" -> left.catalogString,
- "rightCatalogString" -> right.catalogString),
+ "left" -> toSQLType(left),
+ "right" -> toSQLType(right)),
cause = null)
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
index c4d1f167a35..a9c3829a721 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala
@@ -612,7 +612,8 @@ object StructType extends AbstractDataType {
nullable = leftNullable || rightNullable)
} catch {
case NonFatal(e) =>
- throw QueryExecutionErrors.failedMergingFieldsError(leftName, rightName, e)
+ throw QueryExecutionErrors.cannotMergeIncompatibleDataTypesError(
+ leftType, rightType)
}
}
.orElse {
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
index 8208c1f7fb6..5d71732f7b0 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DataTypeSuite.scala
@@ -157,11 +157,9 @@ class DataTypeSuite extends SparkFunSuite {
exception = intercept[SparkException] {
left.merge(right)
},
- errorClass = "_LEGACY_ERROR_TEMP_2123",
- parameters = Map(
- "leftName" -> "b",
- "rightName" -> "b",
- "message" -> "Failed to merge incompatible data types float and bigint.")
+ errorClass = "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE",
+ parameters = Map("left" -> "\"FLOAT\"", "right" -> "\"BIGINT\""
+ )
)
}
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala
index dd5bed3b30c..d9eb0892d13 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/StructTypeSuite.scala
@@ -434,15 +434,21 @@ class StructTypeSuite extends SparkFunSuite with SQLHelper {
// Invalid merge cases:
- var e = intercept[SparkException] {
- StructType.fromDDL("c1 DECIMAL(10, 5)").merge(StructType.fromDDL("c1 DECIMAL(12, 2)"))
- }
- assert(e.getMessage.contains("Failed to merge decimal types"))
+ checkError(
+ exception = intercept[SparkException] {
+ StructType.fromDDL("c1 DECIMAL(10, 5)").merge(StructType.fromDDL("c1 DECIMAL(12, 2)"))
+ },
+ errorClass = "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE",
+ parameters = Map("left" -> "\"DECIMAL(10,5)\"", "right" -> "\"DECIMAL(12,2)\"")
+ )
- e = intercept[SparkException] {
- StructType.fromDDL("c1 DECIMAL(12, 5)").merge(StructType.fromDDL("c1 DECIMAL(12, 2)"))
- }
- assert(e.getMessage.contains("Failed to merge decimal types"))
+ checkError(
+ exception = intercept[SparkException] {
+ StructType.fromDDL("c1 DECIMAL(12, 5)").merge(StructType.fromDDL("c1 DECIMAL(12, 2)"))
+ },
+ errorClass = "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE",
+ parameters = Map("left" -> "\"DECIMAL(12,5)\"", "right" -> "\"DECIMAL(12,2)\"")
+ )
}
test("SPARK-39143: Test parsing default column values out of struct types") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcSourceSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcSourceSuite.scala
index 0fc7cd33bef..c821276431e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcSourceSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/orc/OrcSourceSuite.scala
@@ -448,14 +448,18 @@ abstract class OrcSuite
spark.read.orc(basePath).columns.length
}.getCause
- val innerMessage = orcImp match {
- case "native" => exception.getMessage
- case "hive" => exception.getCause.getMessage
+ val innerException = orcImp match {
+ case "native" => exception
+ case "hive" => exception.getCause
case impl =>
throw new UnsupportedOperationException(s"Unknown ORC implementation: $impl")
}
- assert(innerMessage.contains("Failed to merge incompatible data types"))
+ checkError(
+ exception = innerException.asInstanceOf[SparkException],
+ errorClass = "CANNOT_MERGE_INCOMPATIBLE_DATA_TYPE",
+ parameters = Map("left" -> "\"BIGINT\"", "right" -> "\"STRING\"")
+ )
}
// it is ok if no schema merging
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org