Posted to commits@spark.apache.org by ma...@apache.org on 2023/02/06 15:44:27 UTC

[spark] branch master updated: [SPARK-42255][SQL] Assign name to _LEGACY_ERROR_TEMP_2430

This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 3b9d1c6f3c5 [SPARK-42255][SQL] Assign name to _LEGACY_ERROR_TEMP_2430
3b9d1c6f3c5 is described below

commit 3b9d1c6f3c5dcff764f6c846bade5048a93ded07
Author: itholic <ha...@databricks.com>
AuthorDate: Mon Feb 6 18:44:11 2023 +0300

    [SPARK-42255][SQL] Assign name to _LEGACY_ERROR_TEMP_2430
    
    ### What changes were proposed in this pull request?
    
    This PR proposes to assign the name "INCOMPATIBLE_COLUMN_TYPE" to the legacy error class _LEGACY_ERROR_TEMP_2430.
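    
    As a minimal illustration (a hypothetical query, not taken from this patch's golden files), a set operation whose branches have incompatible column types now reports the named error class, assuming a running `SparkSession` named `spark`:
    
    ```scala
    // Hypothetical example: the two branches have incompatible column types
    // (INT vs ARRAY<INT>), so analysis fails with the renamed error class.
    spark.sql("SELECT 1 AS c EXCEPT ALL SELECT array(1) AS c").collect()
    // org.apache.spark.sql.AnalysisException: [INCOMPATIBLE_COLUMN_TYPE] EXCEPT ALL can only
    // be performed on tables with compatible column types. The first column of the second
    // table is "ARRAY<INT>" type which is not compatible with "INT" at the same column of
    // the first table.
    ```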
    
    ### Why are the changes needed?
    
    We should assign a proper name to each _LEGACY_ERROR_TEMP_* error class.
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    `./build/sbt "sql/testOnly org.apache.spark.sql.SQLQueryTestSuite*"`
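    
    The regenerated golden files and the updated suites assert the new error class and its renamed message parameters; a simplified sketch of the `checkError` pattern (mirroring the `SQLQuerySuite` change in this patch, with the query-context check omitted) looks like:
    
    ```scala
    checkError(
      exception = intercept[AnalysisException] {
        sql("SELECT struct(1 a) EXCEPT (SELECT struct(2 A))")
      },
      errorClass = "INCOMPATIBLE_COLUMN_TYPE",
      parameters = Map(
        "operator" -> "EXCEPT",
        "columnOrdinalNumber" -> "first",
        "tableOrdinalNumber" -> "second",
        "dataType1" -> "\"STRUCT<A: INT>\"",
        "dataType2" -> "\"STRUCT<a: INT>\"",
        "hint" -> ""))
    ```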
    
    Closes #39839 from itholic/SPARK-2430.
    
    Lead-authored-by: itholic <ha...@databricks.com>
    Co-authored-by: Haejoon Lee <44...@users.noreply.github.com>
    Signed-off-by: Max Gekk <ma...@gmail.com>
---
 core/src/main/resources/error/error-classes.json   |  11 +-
 .../sql/catalyst/analysis/CheckAnalysis.scala      |  10 +-
 .../sql/catalyst/analysis/AnalysisSuite.scala      |  89 ++-
 .../resources/sql-tests/results/except-all.sql.out |  11 +-
 .../sql-tests/results/intersect-all.sql.out        |  11 +-
 .../native/widenSetOperationTypes.sql.out          | 770 +++++++++++----------
 .../sql-tests/results/udf/udf-except-all.sql.out   |  11 +-
 .../results/udf/udf-intersect-all.sql.out          |  11 +-
 .../spark/sql/DataFrameSetOperationsSuite.scala    |  21 +-
 .../scala/org/apache/spark/sql/SQLQuerySuite.scala |  11 +-
 10 files changed, 532 insertions(+), 424 deletions(-)

diff --git a/core/src/main/resources/error/error-classes.json b/core/src/main/resources/error/error-classes.json
index ed9f8a63347..806a1159ac0 100644
--- a/core/src/main/resources/error/error-classes.json
+++ b/core/src/main/resources/error/error-classes.json
@@ -605,6 +605,12 @@
     ],
     "sqlState" : "42818"
   },
+  "INCOMPATIBLE_COLUMN_TYPE" : {
+    "message" : [
+      "<operator> can only be performed on tables with compatible column types. The <columnOrdinalNumber> column of the <tableOrdinalNumber> table is <dataType1> type which is not compatible with <dataType2> at the same column of the first table.<hint>."
+    ],
+    "sqlState" : "42825"
+  },
   "INCOMPATIBLE_DATASOURCE_REGISTER" : {
     "message" : [
       "Detected an incompatible DataSourceRegister. Please remove the incompatible library from classpath or upgrade it. Error: <message>"
@@ -5240,11 +5246,6 @@
       "The sum of the LIMIT clause and the OFFSET clause must not be greater than the maximum 32-bit integer value (2,147,483,647) but found limit = <limit>, offset = <offset>."
     ]
   },
-  "_LEGACY_ERROR_TEMP_2430" : {
-    "message" : [
-      "<operator> can only be performed on tables with compatible column types. The <ci> column of the <ti> table is <dt1> type which is not compatible with <dt2> at the same column of the first table.<hint>."
-    ]
-  },
   "_LEGACY_ERROR_TEMP_2431" : {
     "message" : [
       "Invalid partitioning: <cols> is missing or is in a map or array."
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
index cca54a8742d..e95c21ad985 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/CheckAnalysis.scala
@@ -577,13 +577,13 @@ trait CheckAnalysis extends PredicateHelper with LookupCatalog with QueryErrorsB
                 // SPARK-18058: we shall not care about the nullability of columns
                 if (!dataTypesAreCompatibleFn(dt1, dt2)) {
                   e.failAnalysis(
-                    errorClass = "_LEGACY_ERROR_TEMP_2430",
+                    errorClass = "INCOMPATIBLE_COLUMN_TYPE",
                     messageParameters = Map(
                       "operator" -> toSQLStmt(operator.nodeName),
-                      "ci" -> ordinalNumber(ci),
-                      "ti" -> ordinalNumber(ti + 1),
-                      "dt1" -> dt1.catalogString,
-                      "dt2" -> dt2.catalogString,
+                      "columnOrdinalNumber" -> ordinalNumber(ci),
+                      "tableOrdinalNumber" -> ordinalNumber(ti + 1),
+                      "dataType1" -> toSQLType(dt1),
+                      "dataType2" -> toSQLType(dt2),
                       "hint" -> extraHintForAnsiTypeCoercionPlan(operator)))
                 }
               }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
index 5c2878be69f..69f2147714a 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
@@ -949,36 +949,65 @@ class AnalysisSuite extends AnalysisTest with Matchers {
       AttributeReference("c", IntegerType)(),
       AttributeReference("d", TimestampType)())
 
-    val r1 = Union(firstTable, secondTable)
-    val r2 = Union(firstTable, thirdTable)
-    val r3 = Union(firstTable, fourthTable)
-    val r4 = Except(firstTable, secondTable, isAll = false)
-    val r5 = Intersect(firstTable, secondTable, isAll = false)
-
-    assertAnalysisError(r1,
-      Seq("Union can only be performed on tables with compatible column types. " +
-        "The second column of the second table is timestamp type which is not compatible " +
-        "with double at the same column of the first table"))
-
-    assertAnalysisError(r2,
-      Seq("Union can only be performed on tables with compatible column types. " +
-        "The third column of the second table is timestamp type which is not compatible " +
-        "with int at the same column of the first table"))
-
-    assertAnalysisError(r3,
-      Seq("Union can only be performed on tables with compatible column types. " +
-        "The 4th column of the second table is timestamp type which is not compatible " +
-        "with float at the same column of the first table"))
-
-    assertAnalysisError(r4,
-      Seq("Except can only be performed on tables with compatible column types. " +
-        "The second column of the second table is timestamp type which is not compatible " +
-        "with double at the same column of the first table"))
-
-    assertAnalysisError(r5,
-      Seq("Intersect can only be performed on tables with compatible column types. " +
-        "The second column of the second table is timestamp type which is not compatible " +
-        "with double at the same column of the first table"))
+    assertAnalysisErrorClass(
+      Union(firstTable, secondTable),
+      expectedErrorClass = "INCOMPATIBLE_COLUMN_TYPE",
+      expectedMessageParameters = Map(
+        "tableOrdinalNumber" -> "second",
+        "columnOrdinalNumber" -> "second",
+        "dataType2" -> "\"DOUBLE\"",
+        "operator" -> "UNION",
+        "hint" -> "",
+        "dataType1" -> "\"TIMESTAMP\"")
+    )
+
+    assertAnalysisErrorClass(
+      Union(firstTable, thirdTable),
+      expectedErrorClass = "INCOMPATIBLE_COLUMN_TYPE",
+      expectedMessageParameters = Map(
+        "tableOrdinalNumber" -> "second",
+        "columnOrdinalNumber" -> "third",
+        "dataType2" -> "\"INT\"",
+        "operator" -> "UNION",
+        "hint" -> "",
+        "dataType1" -> "\"TIMESTAMP\"")
+    )
+
+    assertAnalysisErrorClass(
+      Union(firstTable, fourthTable),
+      expectedErrorClass = "INCOMPATIBLE_COLUMN_TYPE",
+      expectedMessageParameters = Map(
+        "tableOrdinalNumber" -> "second",
+        "columnOrdinalNumber" -> "4th",
+        "dataType2" -> "\"FLOAT\"",
+        "operator" -> "UNION",
+        "hint" -> "",
+        "dataType1" -> "\"TIMESTAMP\"")
+    )
+
+    assertAnalysisErrorClass(
+      Except(firstTable, secondTable, isAll = false),
+      expectedErrorClass = "INCOMPATIBLE_COLUMN_TYPE",
+      expectedMessageParameters = Map(
+        "tableOrdinalNumber" -> "second",
+        "columnOrdinalNumber" -> "second",
+        "dataType2" -> "\"DOUBLE\"",
+        "operator" -> "EXCEPT",
+        "hint" -> "",
+        "dataType1" -> "\"TIMESTAMP\"")
+    )
+
+    assertAnalysisErrorClass(
+      Intersect(firstTable, secondTable, isAll = false),
+      expectedErrorClass = "INCOMPATIBLE_COLUMN_TYPE",
+      expectedMessageParameters = Map(
+        "tableOrdinalNumber" -> "second",
+        "columnOrdinalNumber" -> "second",
+        "dataType2" -> "\"DOUBLE\"",
+        "operator" -> "INTERSECT",
+        "hint" -> "",
+        "dataType1" -> "\"TIMESTAMP\"")
+    )
   }
 
   test("SPARK-31975: Throw user facing error when use WindowFunction directly") {
diff --git a/sql/core/src/test/resources/sql-tests/results/except-all.sql.out b/sql/core/src/test/resources/sql-tests/results/except-all.sql.out
index ad741f85812..f487a92e678 100644
--- a/sql/core/src/test/resources/sql-tests/results/except-all.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/except-all.sql.out
@@ -139,14 +139,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "array<int>",
-    "dt2" : "int",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"ARRAY<INT>\"",
+    "dataType2" : "\"INT\"",
     "hint" : "",
     "operator" : "EXCEPT ALL",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out b/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out
index 4e1172d9b52..10dfb51358f 100644
--- a/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/intersect-all.sql.out
@@ -96,14 +96,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "array<int>",
-    "dt2" : "int",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"ARRAY<INT>\"",
+    "dataType2" : "\"INT\"",
     "hint" : "",
     "operator" : "INTERSECT ALL",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out
index 3f40a18181a..e474a494d48 100644
--- a/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/typeCoercion/native/widenSetOperationTypes.sql.out
@@ -86,14 +86,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "binary",
-    "dt2" : "tinyint",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BINARY\"",
+    "dataType2" : "\"TINYINT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -112,14 +113,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "boolean",
-    "dt2" : "tinyint",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BOOLEAN\"",
+    "dataType2" : "\"TINYINT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -138,14 +140,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "timestamp",
-    "dt2" : "tinyint",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"TIMESTAMP\"",
+    "dataType2" : "\"TINYINT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -164,14 +167,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "date",
-    "dt2" : "tinyint",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"DATE\"",
+    "dataType2" : "\"TINYINT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -262,14 +266,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "binary",
-    "dt2" : "smallint",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BINARY\"",
+    "dataType2" : "\"SMALLINT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -288,14 +293,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "boolean",
-    "dt2" : "smallint",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BOOLEAN\"",
+    "dataType2" : "\"SMALLINT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -314,14 +320,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "timestamp",
-    "dt2" : "smallint",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"TIMESTAMP\"",
+    "dataType2" : "\"SMALLINT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -340,14 +347,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "date",
-    "dt2" : "smallint",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"DATE\"",
+    "dataType2" : "\"SMALLINT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -438,14 +446,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "binary",
-    "dt2" : "int",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BINARY\"",
+    "dataType2" : "\"INT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -464,14 +473,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "boolean",
-    "dt2" : "int",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BOOLEAN\"",
+    "dataType2" : "\"INT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -490,14 +500,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "timestamp",
-    "dt2" : "int",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"TIMESTAMP\"",
+    "dataType2" : "\"INT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -516,14 +527,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "date",
-    "dt2" : "int",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"DATE\"",
+    "dataType2" : "\"INT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -614,14 +626,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "binary",
-    "dt2" : "bigint",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BINARY\"",
+    "dataType2" : "\"BIGINT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -640,14 +653,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "boolean",
-    "dt2" : "bigint",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BOOLEAN\"",
+    "dataType2" : "\"BIGINT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -666,14 +680,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "timestamp",
-    "dt2" : "bigint",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"TIMESTAMP\"",
+    "dataType2" : "\"BIGINT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -692,14 +707,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "date",
-    "dt2" : "bigint",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"DATE\"",
+    "dataType2" : "\"BIGINT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -790,14 +806,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "binary",
-    "dt2" : "float",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BINARY\"",
+    "dataType2" : "\"FLOAT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -816,14 +833,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "boolean",
-    "dt2" : "float",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BOOLEAN\"",
+    "dataType2" : "\"FLOAT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -842,14 +860,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "timestamp",
-    "dt2" : "float",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"TIMESTAMP\"",
+    "dataType2" : "\"FLOAT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -868,14 +887,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "date",
-    "dt2" : "float",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"DATE\"",
+    "dataType2" : "\"FLOAT\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -966,14 +986,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "binary",
-    "dt2" : "double",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BINARY\"",
+    "dataType2" : "\"DOUBLE\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -992,14 +1013,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "boolean",
-    "dt2" : "double",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BOOLEAN\"",
+    "dataType2" : "\"DOUBLE\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1018,14 +1040,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "timestamp",
-    "dt2" : "double",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"TIMESTAMP\"",
+    "dataType2" : "\"DOUBLE\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1044,14 +1067,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "date",
-    "dt2" : "double",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"DATE\"",
+    "dataType2" : "\"DOUBLE\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1142,14 +1166,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "binary",
-    "dt2" : "decimal(10,0)",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BINARY\"",
+    "dataType2" : "\"DECIMAL(10,0)\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1168,14 +1193,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "boolean",
-    "dt2" : "decimal(10,0)",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BOOLEAN\"",
+    "dataType2" : "\"DECIMAL(10,0)\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1194,14 +1220,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "timestamp",
-    "dt2" : "decimal(10,0)",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"TIMESTAMP\"",
+    "dataType2" : "\"DECIMAL(10,0)\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1220,14 +1247,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "date",
-    "dt2" : "decimal(10,0)",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"DATE\"",
+    "dataType2" : "\"DECIMAL(10,0)\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1318,14 +1346,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "binary",
-    "dt2" : "string",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BINARY\"",
+    "dataType2" : "\"STRING\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1344,14 +1373,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "boolean",
-    "dt2" : "string",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BOOLEAN\"",
+    "dataType2" : "\"STRING\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1388,14 +1418,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "tinyint",
-    "dt2" : "binary",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"TINYINT\"",
+    "dataType2" : "\"BINARY\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1414,14 +1445,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "smallint",
-    "dt2" : "binary",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"SMALLINT\"",
+    "dataType2" : "\"BINARY\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1440,14 +1472,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "int",
-    "dt2" : "binary",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"INT\"",
+    "dataType2" : "\"BINARY\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1466,14 +1499,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "bigint",
-    "dt2" : "binary",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BIGINT\"",
+    "dataType2" : "\"BINARY\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1492,14 +1526,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "float",
-    "dt2" : "binary",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"FLOAT\"",
+    "dataType2" : "\"BINARY\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1518,14 +1553,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "double",
-    "dt2" : "binary",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"DOUBLE\"",
+    "dataType2" : "\"BINARY\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1544,14 +1580,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "decimal(10,0)",
-    "dt2" : "binary",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"DECIMAL(10,0)\"",
+    "dataType2" : "\"BINARY\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1570,14 +1607,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "string",
-    "dt2" : "binary",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"STRING\"",
+    "dataType2" : "\"BINARY\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1605,14 +1643,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "boolean",
-    "dt2" : "binary",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BOOLEAN\"",
+    "dataType2" : "\"BINARY\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1631,14 +1670,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "timestamp",
-    "dt2" : "binary",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"TIMESTAMP\"",
+    "dataType2" : "\"BINARY\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1657,14 +1697,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "date",
-    "dt2" : "binary",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"DATE\"",
+    "dataType2" : "\"BINARY\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1683,14 +1724,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "tinyint",
-    "dt2" : "boolean",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"TINYINT\"",
+    "dataType2" : "\"BOOLEAN\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1709,14 +1751,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "smallint",
-    "dt2" : "boolean",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"SMALLINT\"",
+    "dataType2" : "\"BOOLEAN\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1735,14 +1778,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "int",
-    "dt2" : "boolean",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"INT\"",
+    "dataType2" : "\"BOOLEAN\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1761,14 +1805,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "bigint",
-    "dt2" : "boolean",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BIGINT\"",
+    "dataType2" : "\"BOOLEAN\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1787,14 +1832,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "float",
-    "dt2" : "boolean",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"FLOAT\"",
+    "dataType2" : "\"BOOLEAN\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1813,14 +1859,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "double",
-    "dt2" : "boolean",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"DOUBLE\"",
+    "dataType2" : "\"BOOLEAN\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1839,14 +1886,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "decimal(10,0)",
-    "dt2" : "boolean",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"DECIMAL(10,0)\"",
+    "dataType2" : "\"BOOLEAN\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1865,14 +1913,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "string",
-    "dt2" : "boolean",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"STRING\"",
+    "dataType2" : "\"BOOLEAN\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1891,14 +1940,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "binary",
-    "dt2" : "boolean",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BINARY\"",
+    "dataType2" : "\"BOOLEAN\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1925,14 +1975,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "timestamp",
-    "dt2" : "boolean",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"TIMESTAMP\"",
+    "dataType2" : "\"BOOLEAN\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1951,14 +2002,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "date",
-    "dt2" : "boolean",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"DATE\"",
+    "dataType2" : "\"BOOLEAN\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -1977,14 +2029,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "tinyint",
-    "dt2" : "timestamp",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"TINYINT\"",
+    "dataType2" : "\"TIMESTAMP\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2003,14 +2056,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "smallint",
-    "dt2" : "timestamp",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"SMALLINT\"",
+    "dataType2" : "\"TIMESTAMP\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2029,14 +2083,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "int",
-    "dt2" : "timestamp",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"INT\"",
+    "dataType2" : "\"TIMESTAMP\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2055,14 +2110,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "bigint",
-    "dt2" : "timestamp",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BIGINT\"",
+    "dataType2" : "\"TIMESTAMP\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2081,14 +2137,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "float",
-    "dt2" : "timestamp",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"FLOAT\"",
+    "dataType2" : "\"TIMESTAMP\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2107,14 +2164,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "double",
-    "dt2" : "timestamp",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"DOUBLE\"",
+    "dataType2" : "\"TIMESTAMP\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2133,14 +2191,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "decimal(10,0)",
-    "dt2" : "timestamp",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"DECIMAL(10,0)\"",
+    "dataType2" : "\"TIMESTAMP\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2168,14 +2227,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "binary",
-    "dt2" : "timestamp",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BINARY\"",
+    "dataType2" : "\"TIMESTAMP\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2194,14 +2254,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "boolean",
-    "dt2" : "timestamp",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BOOLEAN\"",
+    "dataType2" : "\"TIMESTAMP\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2238,14 +2299,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "tinyint",
-    "dt2" : "date",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"TINYINT\"",
+    "dataType2" : "\"DATE\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2264,14 +2326,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "smallint",
-    "dt2" : "date",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"SMALLINT\"",
+    "dataType2" : "\"DATE\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2290,14 +2353,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "int",
-    "dt2" : "date",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"INT\"",
+    "dataType2" : "\"DATE\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2316,14 +2380,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "bigint",
-    "dt2" : "date",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BIGINT\"",
+    "dataType2" : "\"DATE\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2342,14 +2407,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "float",
-    "dt2" : "date",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"FLOAT\"",
+    "dataType2" : "\"DATE\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2368,14 +2434,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "double",
-    "dt2" : "date",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"DOUBLE\"",
+    "dataType2" : "\"DATE\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2394,14 +2461,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "decimal(10,0)",
-    "dt2" : "date",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"DECIMAL(10,0)\"",
+    "dataType2" : "\"DATE\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2429,14 +2497,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "binary",
-    "dt2" : "date",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BINARY\"",
+    "dataType2" : "\"DATE\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
@@ -2455,14 +2524,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "boolean",
-    "dt2" : "date",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"BOOLEAN\"",
+    "dataType2" : "\"DATE\"",
     "hint" : "",
     "operator" : "UNION",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out
index b5c95f90fdf..24e77ca3e07 100644
--- a/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-except-all.sql.out
@@ -139,14 +139,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "array<int>",
-    "dt2" : "int",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"ARRAY<INT>\"",
+    "dataType2" : "\"INT\"",
     "hint" : "",
     "operator" : "EXCEPT ALL",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out b/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out
index da800c9db69..05d089744da 100644
--- a/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out
+++ b/sql/core/src/test/resources/sql-tests/results/udf/udf-intersect-all.sql.out
@@ -96,14 +96,15 @@ struct<>
 -- !query output
 org.apache.spark.sql.AnalysisException
 {
-  "errorClass" : "_LEGACY_ERROR_TEMP_2430",
+  "errorClass" : "INCOMPATIBLE_COLUMN_TYPE",
+  "sqlState" : "42825",
   "messageParameters" : {
-    "ci" : "first",
-    "dt1" : "array<int>",
-    "dt2" : "int",
+    "columnOrdinalNumber" : "first",
+    "dataType1" : "\"ARRAY<INT>\"",
+    "dataType2" : "\"INT\"",
     "hint" : "",
     "operator" : "INTERSECT ALL",
-    "ti" : "second"
+    "tableOrdinalNumber" : "second"
   },
   "queryContext" : [ {
     "objectType" : "",
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSetOperationsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSetOperationsSuite.scala
index 994dfb4d114..61724a39dfa 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSetOperationsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSetOperationsSuite.scala
@@ -1009,14 +1009,19 @@ class DataFrameSetOperationsSuite extends QueryTest with SharedSparkSession {
     // If right side of the nested struct has extra col.
     df1 = Seq((1, 2, UnionClass1d(1, 2, Struct3(1)))).toDF("a", "b", "c")
     df2 = Seq((1, 2, UnionClass1e(1, 2, Struct4(1, 5)))).toDF("a", "b", "c")
-    val errMsg = intercept[AnalysisException] {
-      df1.unionByName(df2)
-    }.getMessage
-    assert(errMsg.contains("UNION can only be performed on tables with" +
-      " compatible column types." +
-      " The third column of the second table is struct<c1:int,c2:int,c3:struct<c3:int,c5:int>>" +
-      " type which is not compatible with struct<c1:int,c2:int,c3:struct<c3:int>> at the same" +
-      " column of the first table"))
+    checkError(
+      exception = intercept[AnalysisException] {
+        df1.unionByName(df2)
+      },
+      errorClass = "INCOMPATIBLE_COLUMN_TYPE",
+      parameters = Map(
+        "tableOrdinalNumber" -> "second",
+        "columnOrdinalNumber" -> "third",
+        "dataType2" -> "\"STRUCT<c1: INT, c2: INT, c3: STRUCT<c3: INT>>\"",
+        "operator" -> "UNION",
+        "hint" -> "",
+        "dataType1" -> "\"STRUCT<c1: INT, c2: INT, c3: STRUCT<c3: INT, c5: INT>>\"")
+    )
 
     // diff Case sensitive attributes names and diff sequence scenario for unionByName
     df1 = Seq((1, 2, UnionClass1d(1, 2, Struct3(1)))).toDF("a", "b", "c")
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index ab9584a99ef..06a5b799279 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -2702,15 +2702,14 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark
         exception = intercept[AnalysisException] {
           sql("SELECT struct(1 a) EXCEPT (SELECT struct(2 A))")
         },
-        errorClass = "_LEGACY_ERROR_TEMP_2430",
+        errorClass = "INCOMPATIBLE_COLUMN_TYPE",
         parameters = Map(
+          "tableOrdinalNumber" -> "second",
+          "columnOrdinalNumber" -> "first",
+          "dataType2" -> "\"STRUCT<a: INT>\"",
           "operator" -> "EXCEPT",
-          "dt1" -> "struct<A:int>",
-          "dt2" -> "struct<a:int>",
           "hint" -> "",
-          "ci" -> "first",
-          "ti" -> "second"
-        ),
+          "dataType1" -> "\"STRUCT<A: INT>\""),
         context = ExpectedContext(
           fragment = "SELECT struct(1 a) EXCEPT (SELECT struct(2 A))",
           start = 0,

