Posted to commits@spark.apache.org by ma...@apache.org on 2022/03/29 18:44:14 UTC

[spark] branch master updated: [SPARK-37982][SQL] Replace the exception by IllegalStateException in csvExpressions

This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 42a9114  [SPARK-37982][SQL] Replace the exception by IllegalStateException in csvExpressions
42a9114 is described below

commit 42a9114f4c509f1f369bfb1499ea99c187518c48
Author: leesf <le...@apache.org>
AuthorDate: Tue Mar 29 21:43:06 2022 +0300

    [SPARK-37982][SQL] Replace the exception by IllegalStateException in csvExpressions
    
    ### What changes were proposed in this pull request?
    Replace the exception by IllegalStateException and remove inputTypeUnsupportedError from QueryExecutionErrors
    
    ### Why are the changes needed?
    Make the code clearer
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Existing Tests
    
    Closes #35274 from leesf/SPARK-37982.
    
    Authored-by: leesf <le...@apache.org>
    Signed-off-by: Max Gekk <ma...@gmail.com>
---
 .../org/apache/spark/sql/catalyst/expressions/csvExpressions.scala    | 4 ++--
 .../main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala | 4 ----
 2 files changed, 2 insertions(+), 6 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/csvExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/csvExpressions.scala
index 6e08ad3..5162a07 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/csvExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/csvExpressions.scala
@@ -26,7 +26,7 @@ import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
 import org.apache.spark.sql.catalyst.csv._
 import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
 import org.apache.spark.sql.catalyst.util._
-import org.apache.spark.sql.errors.{QueryCompilationErrors, QueryExecutionErrors}
+import org.apache.spark.sql.errors.QueryCompilationErrors
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 import org.apache.spark.unsafe.types.UTF8String
@@ -247,7 +247,7 @@ case class StructsToCsv(
   lazy val inputSchema: StructType = child.dataType match {
     case st: StructType => st
     case other =>
-      throw QueryExecutionErrors.inputTypeUnsupportedError(other)
+      throw new IllegalArgumentException(s"Unsupported input type ${other.catalogString}")
   }
 
   @transient
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index c6a69e4..6b1fc23 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -175,10 +175,6 @@ object QueryExecutionErrors {
     }
   }
 
-  def inputTypeUnsupportedError(dataType: DataType): Throwable = {
-    new IllegalArgumentException(s"Unsupported input type ${dataType.catalogString}")
-  }
-
   def invalidFractionOfSecondError(): DateTimeException = {
     new SparkDateTimeException(errorClass = "INVALID_FRACTION_OF_SECOND",
       Array(SQLConf.ANSI_ENABLED.key))
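
For illustration, a minimal standalone sketch of the pattern this commit changes in StructsToCsv.inputSchema (assuming a Spark SQL dependency on the classpath; the method below is a simplified stand-in for the Catalyst member, not the actual class):

    import org.apache.spark.sql.types.{DataType, StructType}

    // Simplified stand-in: as in the patched csvExpressions.scala, a non-struct
    // input type now raises IllegalArgumentException directly instead of going
    // through QueryExecutionErrors.inputTypeUnsupportedError (which is removed).
    def inputSchema(childDataType: DataType): StructType = childDataType match {
      case st: StructType => st
      case other =>
        throw new IllegalArgumentException(s"Unsupported input type ${other.catalogString}")
    }

As the removed helper in QueryExecutionErrors.scala shows, the exception type (IllegalArgumentException) and message are preserved; only the indirection through QueryExecutionErrors is dropped.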

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org