You are viewing a plain text version of this content. The canonical link for it is here.
Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2021/11/29 17:58:27 UTC

[GitHub] [spark] entong commented on a change in pull request #34747: [SPARK-37490][SQL] Show extra hint if analyzer fails due to ANSI type coercion

entong commented on a change in pull request #34747:
URL: https://github.com/apache/spark/pull/34747#discussion_r758595236



##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
##########
@@ -198,21 +205,39 @@ class Analyzer(override val catalogManager: CatalogManager)
   }
 
   def executeAndCheck(plan: LogicalPlan, tracker: QueryPlanningTracker): LogicalPlan = {
-    if (plan.analyzed) return plan
     AnalysisHelper.markInAnalyzer {
       val analyzed = executeAndTrack(plan, tracker)
       try {
         checkAnalysis(analyzed)
         analyzed
       } catch {
         case e: AnalysisException =>
-          val ae = e.copy(plan = Option(analyzed))
+          val ae = e.copy(plan = Option(analyzed),
+            message = e.message + extraHintForAnsiTypeCoercion(plan))
           ae.setStackTrace(e.getStackTrace)
           throw ae
       }
     }
   }
 
+  private def extraHintForAnsiTypeCoercion(plan: LogicalPlan): String = {
+    if (!conf.ansiEnabled) {
+      ""
+    } else {
+      val nonAnsiPlan = AnalysisContext.withDefaultTypeCoercionAnalysisContext {
+        executeSameContext(plan)
+      }
+      try {
+        checkAnalysis(nonAnsiPlan)
+        "\nTo fix the error, you might need to add explicit type casts.\n" +
+          "To bypass the error with lenient type coercion rules, " +

Review comment:
      Suggest wording it as `If necessary set ansi_mode to false to bypass this error.` to be consistent with other ANSI-related error messages.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org