Posted to commits@spark.apache.org by ma...@apache.org on 2022/08/29 05:49:37 UTC

[spark] branch master updated: [MINOR] Remove unnecessary method in QueryCompilationErrors & QueryExecutionErrors

This is an automated email from the ASF dual-hosted git repository.

maxgekk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 765d8deefd4 [MINOR] Remove unnecessary method in QueryCompilationErrors & QueryExecutionErrors
765d8deefd4 is described below

commit 765d8deefd444531d0734fcf628a2e406e8aea5b
Author: panbingkun <pb...@gmail.com>
AuthorDate: Mon Aug 29 08:49:14 2022 +0300

    [MINOR] Remove unnecessary method in QueryCompilationErrors & QueryExecutionErrors
    
    ### What changes were proposed in this pull request?
    This PR aims to remove unnecessary methods in QueryCompilationErrors & QueryExecutionErrors.
    
    ### Why are the changes needed?
    Eliminate redundant methods and make the code cleaner.
    
    ### Does this PR introduce _any_ user-facing change?
    No.
    
    ### How was this patch tested?
    Pass GA (GitHub Actions CI).
    
    Closes #37698 from panbingkun/remove_unnecessary_method.
    
    Authored-by: panbingkun <pb...@gmail.com>
    Signed-off-by: Max Gekk <ma...@gmail.com>
---
 .../spark/sql/errors/QueryCompilationErrors.scala  | 45 ----------------------
 .../spark/sql/errors/QueryExecutionErrors.scala    | 23 -----------
 2 files changed, 68 deletions(-)
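
Both files collect error-construction factories: each helper formats a message and returns a Throwable for the caller to throw. Below is a minimal sketch of that pattern, and of what a call site can do once a single-use helper is deleted. Names like ErrorFactorySketch, lookupDatabase, and catalogContains are illustrative, not Spark API; AnalysisException is the real class used throughout the diff.

    import org.apache.spark.sql.AnalysisException

    object ErrorFactorySketch {
      // The factory pattern used by QueryCompilationErrors: one small
      // method per error message.
      def databaseDoesNotExistError(dbName: String): Throwable =
        new AnalysisException(s"Database '$dbName' does not exist.")

      // Stand-in for a real catalog lookup.
      def catalogContains(dbName: String): Boolean = false

      def lookupDatabase(dbName: String): Unit = {
        if (!catalogContains(dbName)) {
          // Before: throw QueryCompilationErrors.databaseDoesNotExistError(dbName)
          // Once an unused helper is removed, the equivalent inline construction is:
          throw new AnalysisException(s"Database '$dbName' does not exist.")
        }
      }
    }
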

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index 8b9663f1734..834e0e6b214 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -34,7 +34,6 @@ import org.apache.spark.sql.catalyst.util.{FailFastMode, ParseMode, PermissiveMo
 import org.apache.spark.sql.connector.catalog._
 import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
 import org.apache.spark.sql.connector.catalog.functions.{BoundFunction, UnboundFunction}
-import org.apache.spark.sql.connector.expressions.NamedReference
 import org.apache.spark.sql.connector.expressions.filter.Predicate
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.internal.SQLConf.{LEGACY_ALLOW_NEGATIVE_SCALE_OF_DECIMAL_ENABLED, LEGACY_CTE_PRECEDENCE_POLICY}
@@ -476,20 +475,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
       s"CalendarIntervalType, but got ${dt}")
   }
 
-  def viewOutputNumberMismatchQueryColumnNamesError(
-      output: Seq[Attribute], queryColumnNames: Seq[String]): Throwable = {
-    new AnalysisException(
-      s"The view output ${output.mkString("[", ",", "]")} doesn't have the same" +
-        "number of columns with the query column names " +
-        s"${queryColumnNames.mkString("[", ",", "]")}")
-  }
-
-  def attributeNotFoundError(colName: String, child: LogicalPlan): Throwable = {
-    new AnalysisException(
-      s"Attribute with name '$colName' is not found in " +
-        s"'${child.output.map(_.name).mkString("(", ",", ")")}'")
-  }
-
   def functionUndefinedError(name: FunctionIdentifier): Throwable = {
     new AnalysisException(s"undefined function $name")
   }
@@ -583,10 +568,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
         s"'${db.head}' != '${v1TableName.database.get}'")
   }
 
-  def sqlOnlySupportedWithV1TablesError(sql: String): Throwable = {
-    new AnalysisException(s"$sql is only supported with v1 tables.")
-  }
-
   def cannotCreateTableWithBothProviderAndSerdeError(
       provider: Option[String], maybeSerdeInfo: Option[SerdeInfo]): Throwable = {
     new AnalysisException(
@@ -1453,10 +1434,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
     new AnalysisException("Cannot use interval type in the table schema.")
   }
 
-  def cannotPartitionByNestedColumnError(reference: NamedReference): Throwable = {
-    new AnalysisException(s"Cannot partition by nested column: $reference")
-  }
-
   def missingCatalogAbilityError(plugin: CatalogPlugin, ability: String): Throwable = {
     new AnalysisException(s"Catalog ${plugin.name} does not support $ability")
   }
@@ -1530,12 +1507,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
     new AnalysisException(msg)
   }
 
-  def lookupFunctionInNonFunctionCatalogError(
-      ident: Identifier, catalog: CatalogPlugin): Throwable = {
-    new AnalysisException(s"Trying to lookup function '$ident' in " +
-      s"catalog '${catalog.name()}', but it is not a FunctionCatalog.")
-  }
-
   def functionCannotProcessInputError(
       unbound: UnboundFunction,
       arguments: Seq[Expression],
@@ -1724,10 +1695,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
       s"Supported interval fields: ${supportedIds.mkString(", ")}.")
   }
 
-  def invalidYearMonthIntervalType(startFieldName: String, endFieldName: String): Throwable = {
-    new AnalysisException(s"'interval $startFieldName to $endFieldName' is invalid.")
-  }
-
   def configRemovedInVersionError(
       configName: String,
       version: String,
@@ -2235,18 +2202,6 @@ private[sql] object QueryCompilationErrors extends QueryErrorsBase {
     new AnalysisException(s"Boundary end is not a valid integer: $end")
   }
 
-  def databaseDoesNotExistError(dbName: String): Throwable = {
-    new AnalysisException(s"Database '$dbName' does not exist.")
-  }
-
-  def tableDoesNotExistInDatabaseError(tableName: String, dbName: String): Throwable = {
-    new AnalysisException(s"Table '$tableName' does not exist in database '$dbName'.")
-  }
-
-  def tableOrViewNotFoundInDatabaseError(tableName: String, dbName: String): Throwable = {
-    new AnalysisException(s"Table or view '$tableName' not found in database '$dbName'")
-  }
-
   def tableOrViewNotFound(ident: Seq[String]): Throwable = {
     new AnalysisException(s"Table or view '${ident.quoted}' not found")
   }
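
One detail visible in the hunks above: deleting cannotPartitionByNestedColumnError is what lets the NamedReference import at the top of the file go as well, since (judging from this diff) that method was the file's only use of the type. A simplified sketch of the pairing follows; the wrapping object is illustrative.

    import org.apache.spark.sql.AnalysisException
    import org.apache.spark.sql.connector.expressions.NamedReference // dead once the method below goes

    object PartitionErrorSketch {
      // Removed in this commit; reproduced only to make the import
      // dependency visible.
      def cannotPartitionByNestedColumnError(reference: NamedReference): Throwable =
        new AnalysisException(s"Cannot partition by nested column: $reference")
    }
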
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
index b52a197595c..e81696621ce 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
@@ -21,7 +21,6 @@ import java.io.{FileNotFoundException, IOException}
 import java.lang.reflect.InvocationTargetException
 import java.net.{URISyntaxException, URL}
 import java.sql.{SQLException, SQLFeatureNotSupportedException}
-import java.text.{ParseException => JavaParseException}
 import java.time.{DateTimeException, LocalDate}
 import java.time.temporal.ChronoField
 import java.util.ConcurrentModificationException
@@ -265,12 +264,6 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
     new DateTimeException(newMessage, e.getCause)
   }
 
-  def ansiParseError(e: JavaParseException): JavaParseException = {
-    val newMessage = s"${e.getMessage}. " +
-      s"If necessary set ${SQLConf.ANSI_ENABLED.key} to false to bypass this error."
-    new JavaParseException(newMessage, e.getErrorOffset)
-  }
-
   def ansiIllegalArgumentError(message: String): IllegalArgumentException = {
     val newMessage = s"$message. If necessary set ${SQLConf.ANSI_ENABLED.key} " +
       s"to false to bypass this error."
@@ -362,10 +355,6 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
       s"Cannot generate $codeType code for incomparable type: ${dataType.catalogString}")
   }
 
-  def cannotGenerateCodeForUnsupportedTypeError(dataType: DataType): Throwable = {
-    new IllegalArgumentException(s"cannot generate code for unsupported type: $dataType")
-  }
-
   def cannotInterpolateClassIntoCodeBlockError(arg: Any): Throwable = {
     new IllegalArgumentException(
       s"Can not interpolate ${arg.getClass.getName} into code block.")
@@ -1016,18 +1005,6 @@ private[sql] object QueryExecutionErrors extends QueryErrorsBase {
     new SparkException(s"Failed to merge fields '$leftName' and '$rightName'. ${e.getMessage}")
   }
 
-  def cannotMergeDecimalTypesWithIncompatiblePrecisionAndScaleError(
-      leftPrecision: Int, rightPrecision: Int, leftScale: Int, rightScale: Int): Throwable = {
-    new SparkException("Failed to merge decimal types with incompatible " +
-      s"precision $leftPrecision and $rightPrecision & scale $leftScale and $rightScale")
-  }
-
-  def cannotMergeDecimalTypesWithIncompatiblePrecisionError(
-      leftPrecision: Int, rightPrecision: Int): Throwable = {
-    new SparkException("Failed to merge decimal types with incompatible " +
-      s"precision $leftPrecision and $rightPrecision")
-  }
-
   def cannotMergeDecimalTypesWithIncompatibleScaleError(
       leftScale: Int, rightScale: Int): Throwable = {
     new SparkException("Failed to merge decimal types with incompatible " +
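
The final hunk keeps cannotMergeDecimalTypesWithIncompatibleScaleError while its two decimal-precision siblings are deleted. The string continuation at the end of that hunk is truncated in this mail; the sketch below completes the message by analogy to the deleted siblings (an inference, not verbatim source) and shows a hypothetical call site.

    import org.apache.spark.SparkException

    object DecimalMergeErrorSketch {
      def cannotMergeDecimalTypesWithIncompatibleScaleError(
          leftScale: Int, rightScale: Int): Throwable = {
        new SparkException("Failed to merge decimal types with incompatible " +
          s"scale $leftScale and $rightScale")
      }

      // Hypothetical call site:
      // throw cannotMergeDecimalTypesWithIncompatibleScaleError(2, 4)
    }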

