You are viewing a plain text version of this content. The canonical link for it is here.
Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2021/10/13 22:28:55 UTC

[GitHub] [spark] karenfeng commented on a change in pull request #34190: [SPARK-36293][SQL] Refactor fourth set of 20 query execution errors to use error classes

karenfeng commented on a change in pull request #34190:
URL: https://github.com/apache/spark/pull/34190#discussion_r728480745



##########
File path: core/src/main/resources/error/error-classes.json
##########
@@ -11,17 +15,51 @@
     "message" : [ "%s cannot be represented as Decimal(%s, %s)." ],
     "sqlState" : "22005"
   },
+  "CANNOT_CREATE_OBJECT_DUE_TO_FAILED_DIRECTORY_CREATION" : {
+    "message" : [ "Unable to CREATE %s %s as failed to create its directory %s." ]
+  },
+  "CANNOT_CREATE_PARTITION_PATH" : {
+    "message" : [ "Unable to create partition path %s" ]
+  },
+  "CANNOT_DELETE_PARTITION_PATH" : {
+    "message" : [ "Unable to delete partition path %s" ]
+  },
+  "CANNOT_DROP_OBJECT_DUE_TO_FAILED_DIRECTORY_DELETION" : {
+    "message" : [ "Unable to DROP %s %s as failed to delete its directory %s." ]
+  },
+  "CANNOT_EVALUATE_EXPRESSION" : {

Review comment:
       These aren't from this PR, right?

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -344,94 +345,139 @@ object QueryExecutionErrors {
 
   def unableToCreateDatabaseAsFailedToCreateDirectoryError(
       dbDefinition: CatalogDatabase, e: IOException): Throwable = {
-    new SparkException(s"Unable to create database ${dbDefinition.name} as failed " +
-      s"to create its directory ${dbDefinition.locationUri}", e)
+    new SparkException(
+      errorClass = "CANNOT_CREATE_OBJECT_DUE_TO_FAILED_DIRECTORY_CREATION",
+      messageParameters = Array("DATABASE", dbDefinition.name, dbDefinition.locationUri.toString),
+      cause = e)
   }
 
   def unableToDropDatabaseAsFailedToDeleteDirectoryError(
       dbDefinition: CatalogDatabase, e: IOException): Throwable = {
-    new SparkException(s"Unable to drop database ${dbDefinition.name} as failed " +
-      s"to delete its directory ${dbDefinition.locationUri}", e)
+    new SparkException(
+      errorClass = "CANNOT_DROP_OBJECT_DUE_TO_FAILED_DIRECTORY_DELETION",
+      messageParameters = Array("DATABASE", dbDefinition.name, dbDefinition.locationUri.toString),
+      cause = e)
   }
 
   def unableToCreateTableAsFailedToCreateDirectoryError(
       table: String, defaultTableLocation: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to create table $table as failed " +
-      s"to create its directory $defaultTableLocation", e)
+    new SparkException(
+      errorClass = "CANNOT_CREATE_OBJECT_DUE_TO_FAILED_DIRECTORY_CREATION",
+      messageParameters = Array("TABLE", table, defaultTableLocation.toString),
+      cause = e)
   }
 
   def unableToDeletePartitionPathError(partitionPath: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to delete partition path $partitionPath", e)
+    new SparkException(
+      errorClass = "CANNOT_DELETE_PARTITION_PATH",
+      messageParameters = Array(partitionPath.toString),
+      cause = e)
   }
 
   def unableToDropTableAsFailedToDeleteDirectoryError(
       table: String, dir: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to drop table $table as failed " +
-      s"to delete its directory $dir", e)
+    new SparkException(
+      errorClass = "CANNOT_DROP_OBJECT_DUE_TO_FAILED_DIRECTORY_DELETION",
+      messageParameters = Array("TABLE", table, dir.toString),
+      cause = e)
   }
 
   def unableToRenameTableAsFailedToRenameDirectoryError(
       oldName: String, newName: String, oldDir: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to rename table $oldName to $newName as failed " +
-      s"to rename its directory $oldDir", e)
+    new SparkException(
+      errorClass = "CANNOT_RENAME_TABLE_DUE_TO_FAILED_DIRECTORY_RENAMING",
+      messageParameters = Array(oldName, newName, oldDir.toString),
+      cause = e)
   }
 
   def unableToCreatePartitionPathError(partitionPath: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to create partition path $partitionPath", e)
+    new SparkException(
+      errorClass = "CANNOT_CREATE_PARTITION_PATH",
+      messageParameters = Array(partitionPath.toString),
+      cause = e)
   }
 
   def unableToRenamePartitionPathError(oldPartPath: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to rename partition path $oldPartPath", e)
+    new SparkException(
+      errorClass = "CANNOT_RENAME_PARTITION_PATH",
+      messageParameters = Array(oldPartPath.toString),
+      cause = e)
   }
 
   def methodNotImplementedError(methodName: String): Throwable = {
-    new UnsupportedOperationException(s"$methodName is not implemented")
+    new SparkUnsupportedOperationException(

Review comment:
       It looks like this is actually only used by a dummy implementation and should not be exposed to users. Can you make this an internal error?

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -344,94 +345,139 @@ object QueryExecutionErrors {
 
   def unableToCreateDatabaseAsFailedToCreateDirectoryError(
       dbDefinition: CatalogDatabase, e: IOException): Throwable = {
-    new SparkException(s"Unable to create database ${dbDefinition.name} as failed " +
-      s"to create its directory ${dbDefinition.locationUri}", e)
+    new SparkException(
+      errorClass = "CANNOT_CREATE_OBJECT_DUE_TO_FAILED_DIRECTORY_CREATION",
+      messageParameters = Array("DATABASE", dbDefinition.name, dbDefinition.locationUri.toString),
+      cause = e)
   }
 
   def unableToDropDatabaseAsFailedToDeleteDirectoryError(
       dbDefinition: CatalogDatabase, e: IOException): Throwable = {
-    new SparkException(s"Unable to drop database ${dbDefinition.name} as failed " +
-      s"to delete its directory ${dbDefinition.locationUri}", e)
+    new SparkException(
+      errorClass = "CANNOT_DROP_OBJECT_DUE_TO_FAILED_DIRECTORY_DELETION",
+      messageParameters = Array("DATABASE", dbDefinition.name, dbDefinition.locationUri.toString),
+      cause = e)
   }
 
   def unableToCreateTableAsFailedToCreateDirectoryError(
       table: String, defaultTableLocation: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to create table $table as failed " +
-      s"to create its directory $defaultTableLocation", e)
+    new SparkException(
+      errorClass = "CANNOT_CREATE_OBJECT_DUE_TO_FAILED_DIRECTORY_CREATION",
+      messageParameters = Array("TABLE", table, defaultTableLocation.toString),
+      cause = e)
   }
 
   def unableToDeletePartitionPathError(partitionPath: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to delete partition path $partitionPath", e)
+    new SparkException(
+      errorClass = "CANNOT_DELETE_PARTITION_PATH",
+      messageParameters = Array(partitionPath.toString),
+      cause = e)
   }
 
   def unableToDropTableAsFailedToDeleteDirectoryError(
       table: String, dir: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to drop table $table as failed " +
-      s"to delete its directory $dir", e)
+    new SparkException(
+      errorClass = "CANNOT_DROP_OBJECT_DUE_TO_FAILED_DIRECTORY_DELETION",
+      messageParameters = Array("TABLE", table, dir.toString),
+      cause = e)
   }
 
   def unableToRenameTableAsFailedToRenameDirectoryError(
       oldName: String, newName: String, oldDir: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to rename table $oldName to $newName as failed " +
-      s"to rename its directory $oldDir", e)
+    new SparkException(
+      errorClass = "CANNOT_RENAME_TABLE_DUE_TO_FAILED_DIRECTORY_RENAMING",
+      messageParameters = Array(oldName, newName, oldDir.toString),
+      cause = e)
   }
 
   def unableToCreatePartitionPathError(partitionPath: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to create partition path $partitionPath", e)
+    new SparkException(
+      errorClass = "CANNOT_CREATE_PARTITION_PATH",
+      messageParameters = Array(partitionPath.toString),
+      cause = e)
   }
 
   def unableToRenamePartitionPathError(oldPartPath: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to rename partition path $oldPartPath", e)
+    new SparkException(
+      errorClass = "CANNOT_RENAME_PARTITION_PATH",
+      messageParameters = Array(oldPartPath.toString),
+      cause = e)
   }
 
   def methodNotImplementedError(methodName: String): Throwable = {
-    new UnsupportedOperationException(s"$methodName is not implemented")
+    new SparkUnsupportedOperationException(
+      errorClass = "METHOD_NOT_IMPLEMENTED",
+      messageParameters = Array(methodName))
   }
 
   def tableStatsNotSpecifiedError(): Throwable = {
-    new IllegalStateException("table stats must be specified.")
+    new SparkIllegalStateException(
+      errorClass = "TABLE_STATISTICS_NOT_SPECIFIED",
+      messageParameters = Array.empty)
   }
 
   def unaryMinusCauseOverflowError(originValue: AnyVal): ArithmeticException = {
-    new ArithmeticException(s"- $originValue caused overflow.")
+    new SparkArithmeticException(
+      errorClass = "UNARY_MINUS_OVERFLOW",
+      messageParameters = Array(originValue.toString))
   }
 
   def binaryArithmeticCauseOverflowError(
       eval1: Short, symbol: String, eval2: Short): ArithmeticException = {
-    new ArithmeticException(s"$eval1 $symbol $eval2 caused overflow.")
+    new SparkArithmeticException(
+      errorClass = "BINARY_ARITHMETIC_OVERFLOW",
+      messageParameters = Array(eval1.toString, symbol, eval2.toString))
   }
 
   def failedSplitSubExpressionMsg(length: Int): String = {
-    "Failed to split subexpression code into small functions because " +
-      s"the parameter length of at least one split function went over the JVM limit: $length"
+    SparkThrowableHelper.getMessage(
+      errorClass = "FAILED_SPLIT_SUBEXPRESSION",
+      messageParameters = Array(length.toString))
   }
 
   def failedSplitSubExpressionError(length: Int): Throwable = {
-    new IllegalStateException(failedSplitSubExpressionMsg(length))
+    new SparkIllegalStateException(
+      errorClass = "FAILED_SPLIT_SUBEXPRESSION",
+      messageParameters = Array(length.toString))
   }
 
   def failedToCompileMsg(e: Exception): String = {
-    s"failed to compile: $e"
+    SparkThrowableHelper.getMessage(

Review comment:
       @cloud-fan, are all of these compilation failures internal errors?

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -344,94 +345,139 @@ object QueryExecutionErrors {
 
   def unableToCreateDatabaseAsFailedToCreateDirectoryError(
       dbDefinition: CatalogDatabase, e: IOException): Throwable = {
-    new SparkException(s"Unable to create database ${dbDefinition.name} as failed " +
-      s"to create its directory ${dbDefinition.locationUri}", e)
+    new SparkException(
+      errorClass = "CANNOT_CREATE_OBJECT_DUE_TO_FAILED_DIRECTORY_CREATION",
+      messageParameters = Array("DATABASE", dbDefinition.name, dbDefinition.locationUri.toString),
+      cause = e)
   }
 
   def unableToDropDatabaseAsFailedToDeleteDirectoryError(
       dbDefinition: CatalogDatabase, e: IOException): Throwable = {
-    new SparkException(s"Unable to drop database ${dbDefinition.name} as failed " +
-      s"to delete its directory ${dbDefinition.locationUri}", e)
+    new SparkException(
+      errorClass = "CANNOT_DROP_OBJECT_DUE_TO_FAILED_DIRECTORY_DELETION",
+      messageParameters = Array("DATABASE", dbDefinition.name, dbDefinition.locationUri.toString),
+      cause = e)
   }
 
   def unableToCreateTableAsFailedToCreateDirectoryError(
       table: String, defaultTableLocation: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to create table $table as failed " +
-      s"to create its directory $defaultTableLocation", e)
+    new SparkException(
+      errorClass = "CANNOT_CREATE_OBJECT_DUE_TO_FAILED_DIRECTORY_CREATION",
+      messageParameters = Array("TABLE", table, defaultTableLocation.toString),
+      cause = e)
   }
 
   def unableToDeletePartitionPathError(partitionPath: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to delete partition path $partitionPath", e)
+    new SparkException(
+      errorClass = "CANNOT_DELETE_PARTITION_PATH",
+      messageParameters = Array(partitionPath.toString),
+      cause = e)
   }
 
   def unableToDropTableAsFailedToDeleteDirectoryError(
       table: String, dir: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to drop table $table as failed " +
-      s"to delete its directory $dir", e)
+    new SparkException(
+      errorClass = "CANNOT_DROP_OBJECT_DUE_TO_FAILED_DIRECTORY_DELETION",
+      messageParameters = Array("TABLE", table, dir.toString),
+      cause = e)
   }
 
   def unableToRenameTableAsFailedToRenameDirectoryError(
       oldName: String, newName: String, oldDir: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to rename table $oldName to $newName as failed " +
-      s"to rename its directory $oldDir", e)
+    new SparkException(
+      errorClass = "CANNOT_RENAME_TABLE_DUE_TO_FAILED_DIRECTORY_RENAMING",
+      messageParameters = Array(oldName, newName, oldDir.toString),
+      cause = e)
   }
 
   def unableToCreatePartitionPathError(partitionPath: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to create partition path $partitionPath", e)
+    new SparkException(
+      errorClass = "CANNOT_CREATE_PARTITION_PATH",
+      messageParameters = Array(partitionPath.toString),
+      cause = e)
   }
 
   def unableToRenamePartitionPathError(oldPartPath: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to rename partition path $oldPartPath", e)
+    new SparkException(
+      errorClass = "CANNOT_RENAME_PARTITION_PATH",
+      messageParameters = Array(oldPartPath.toString),
+      cause = e)
   }
 
   def methodNotImplementedError(methodName: String): Throwable = {
-    new UnsupportedOperationException(s"$methodName is not implemented")
+    new SparkUnsupportedOperationException(
+      errorClass = "METHOD_NOT_IMPLEMENTED",
+      messageParameters = Array(methodName))
   }
 
   def tableStatsNotSpecifiedError(): Throwable = {
-    new IllegalStateException("table stats must be specified.")
+    new SparkIllegalStateException(
+      errorClass = "TABLE_STATISTICS_NOT_SPECIFIED",
+      messageParameters = Array.empty)
   }
 
   def unaryMinusCauseOverflowError(originValue: AnyVal): ArithmeticException = {
-    new ArithmeticException(s"- $originValue caused overflow.")
+    new SparkArithmeticException(
+      errorClass = "UNARY_MINUS_OVERFLOW",
+      messageParameters = Array(originValue.toString))
   }
 
   def binaryArithmeticCauseOverflowError(
       eval1: Short, symbol: String, eval2: Short): ArithmeticException = {
-    new ArithmeticException(s"$eval1 $symbol $eval2 caused overflow.")
+    new SparkArithmeticException(
+      errorClass = "BINARY_ARITHMETIC_OVERFLOW",
+      messageParameters = Array(eval1.toString, symbol, eval2.toString))
   }
 
   def failedSplitSubExpressionMsg(length: Int): String = {
-    "Failed to split subexpression code into small functions because " +
-      s"the parameter length of at least one split function went over the JVM limit: $length"
+    SparkThrowableHelper.getMessage(
+      errorClass = "FAILED_SPLIT_SUBEXPRESSION",
+      messageParameters = Array(length.toString))
   }
 
   def failedSplitSubExpressionError(length: Int): Throwable = {
-    new IllegalStateException(failedSplitSubExpressionMsg(length))
+    new SparkIllegalStateException(
+      errorClass = "FAILED_SPLIT_SUBEXPRESSION",
+      messageParameters = Array(length.toString))
   }
 
   def failedToCompileMsg(e: Exception): String = {
-    s"failed to compile: $e"
+    SparkThrowableHelper.getMessage(
+      errorClass = "COMPILATION_FAILED",
+      messageParameters = Array(e.toString))
   }
 
   def internalCompilerError(e: InternalCompilerException): Throwable = {
-    new InternalCompilerException(failedToCompileMsg(e), e)
+    new SparkInternalCompilerException(
+      errorClass = "COMPILATION_FAILED",
+      messageParameters = Array(e.toString))
   }
 
   def compilerError(e: CompileException): Throwable = {
-    new CompileException(failedToCompileMsg(e), e.getLocation)
+    new SparkCompileException(
+      errorClass = "COMPILATION_FAILED",
+      messageParameters = Array(e.toString),
+      location = e.getLocation)
   }
 
   def unsupportedTableChangeError(e: IllegalArgumentException): Throwable = {
-    new SparkException(s"Unsupported table change: ${e.getMessage}", e)
+    new SparkException(
+      errorClass = "UNSUPPORTED_TABLE_CHANGE",
+      messageParameters = Array(e.getMessage),
+      cause = e)
   }
 
   def notADatasourceRDDPartitionError(split: Partition): Throwable = {
-    new SparkException(s"[BUG] Not a DataSourceRDDPartition: $split")
+    new SparkException(
+      errorClass = "NOT_A_DATA_SOURCE_RDD_PARTITION",

Review comment:
       This is an internal error; let's use `INTERNAL_ERROR`

##########
File path: core/src/main/resources/error/error-classes.json
##########
@@ -11,17 +15,51 @@
     "message" : [ "%s cannot be represented as Decimal(%s, %s)." ],
     "sqlState" : "22005"
   },
+  "CANNOT_CREATE_OBJECT_DUE_TO_FAILED_DIRECTORY_CREATION" : {
+    "message" : [ "Unable to CREATE %s %s as failed to create its directory %s." ]
+  },
+  "CANNOT_CREATE_PARTITION_PATH" : {
+    "message" : [ "Unable to create partition path %s" ]
+  },
+  "CANNOT_DELETE_PARTITION_PATH" : {
+    "message" : [ "Unable to delete partition path %s" ]
+  },
+  "CANNOT_DROP_OBJECT_DUE_TO_FAILED_DIRECTORY_DELETION" : {
+    "message" : [ "Unable to DROP %s %s as failed to delete its directory %s." ]
+  },
+  "CANNOT_EVALUATE_EXPRESSION" : {
+    "message" : [ "Cannot evaluate expression: %s" ]
+  },
+  "CANNOT_GENERATE_CODE_FOR_EXPRESSION" : {

Review comment:
       These aren't from this PR, right?

##########
File path: core/src/main/resources/error/error-classes.json
##########
@@ -11,17 +15,51 @@
     "message" : [ "%s cannot be represented as Decimal(%s, %s)." ],
     "sqlState" : "22005"
   },
+  "CANNOT_CREATE_OBJECT_DUE_TO_FAILED_DIRECTORY_CREATION" : {
+    "message" : [ "Unable to CREATE %s %s as failed to create its directory %s." ]
+  },
+  "CANNOT_CREATE_PARTITION_PATH" : {
+    "message" : [ "Unable to create partition path %s" ]
+  },
+  "CANNOT_DELETE_PARTITION_PATH" : {
+    "message" : [ "Unable to delete partition path %s" ]
+  },
+  "CANNOT_DROP_OBJECT_DUE_TO_FAILED_DIRECTORY_DELETION" : {
+    "message" : [ "Unable to DROP %s %s as failed to delete its directory %s." ]
+  },
+  "CANNOT_EVALUATE_EXPRESSION" : {
+    "message" : [ "Cannot evaluate expression: %s" ]
+  },
+  "CANNOT_GENERATE_CODE_FOR_EXPRESSION" : {
+    "message" : [ "Cannot generate code for expression: %s" ]
+  },
   "CANNOT_PARSE_DECIMAL" : {
     "message" : [ "Cannot parse decimal" ],
     "sqlState" : "42000"
   },
+  "CANNOT_RENAME_PARTITION_PATH" : {
+    "message" : [ "Unable to rename partition path %s" ]
+  },
+  "CANNOT_RENAME_TABLE_DUE_TO_FAILED_DIRECTORY_RENAMING" : {
+    "message" : [ "Unable to rename table %s to %s as failed to rename its directory %s" ]
+  },
+  "CANNOT_TERMINATE_GENERATOR" : {
+    "message" : [ "Cannot terminate expression: %s" ]
+  },
   "CAST_CAUSES_OVERFLOW" : {
     "message" : [ "Casting %s to %s causes overflow" ],
     "sqlState" : "22005"
   },
+  "COMPILATION_FAILED" : {
+    "message" : [ "failed to compile: %s" ]
+  },
   "CONCURRENT_QUERY" : {
     "message" : [ "Another instance of this query was just started by a concurrent session." ]
   },
+  "DATA_PATH_NOT_SPECIFIED" : {

Review comment:
       Let's call this `MISSING_DATA_PATH`?

##########
File path: core/src/main/resources/error/error-classes.json
##########
@@ -11,17 +15,51 @@
     "message" : [ "%s cannot be represented as Decimal(%s, %s)." ],
     "sqlState" : "22005"
   },
+  "CANNOT_CREATE_OBJECT_DUE_TO_FAILED_DIRECTORY_CREATION" : {

Review comment:
       Actually, what if we just have three error classes: `CANNOT_CREATE_DIRECTORY`, `CANNOT_DELETE_DIRECTORY` and `CANNOT_RENAME_DIRECTORY`? This would be more concise and can be used more generally as well. Then we can make the error message like `Cannot %s as failed to create directory %s`.

##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -344,94 +345,139 @@ object QueryExecutionErrors {
 
   def unableToCreateDatabaseAsFailedToCreateDirectoryError(
       dbDefinition: CatalogDatabase, e: IOException): Throwable = {
-    new SparkException(s"Unable to create database ${dbDefinition.name} as failed " +
-      s"to create its directory ${dbDefinition.locationUri}", e)
+    new SparkException(
+      errorClass = "CANNOT_CREATE_OBJECT_DUE_TO_FAILED_DIRECTORY_CREATION",
+      messageParameters = Array("DATABASE", dbDefinition.name, dbDefinition.locationUri.toString),
+      cause = e)
   }
 
   def unableToDropDatabaseAsFailedToDeleteDirectoryError(
       dbDefinition: CatalogDatabase, e: IOException): Throwable = {
-    new SparkException(s"Unable to drop database ${dbDefinition.name} as failed " +
-      s"to delete its directory ${dbDefinition.locationUri}", e)
+    new SparkException(
+      errorClass = "CANNOT_DROP_OBJECT_DUE_TO_FAILED_DIRECTORY_DELETION",
+      messageParameters = Array("DATABASE", dbDefinition.name, dbDefinition.locationUri.toString),
+      cause = e)
   }
 
   def unableToCreateTableAsFailedToCreateDirectoryError(
       table: String, defaultTableLocation: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to create table $table as failed " +
-      s"to create its directory $defaultTableLocation", e)
+    new SparkException(
+      errorClass = "CANNOT_CREATE_OBJECT_DUE_TO_FAILED_DIRECTORY_CREATION",
+      messageParameters = Array("TABLE", table, defaultTableLocation.toString),
+      cause = e)
   }
 
   def unableToDeletePartitionPathError(partitionPath: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to delete partition path $partitionPath", e)
+    new SparkException(
+      errorClass = "CANNOT_DELETE_PARTITION_PATH",
+      messageParameters = Array(partitionPath.toString),
+      cause = e)
   }
 
   def unableToDropTableAsFailedToDeleteDirectoryError(
       table: String, dir: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to drop table $table as failed " +
-      s"to delete its directory $dir", e)
+    new SparkException(
+      errorClass = "CANNOT_DROP_OBJECT_DUE_TO_FAILED_DIRECTORY_DELETION",
+      messageParameters = Array("TABLE", table, dir.toString),
+      cause = e)
   }
 
   def unableToRenameTableAsFailedToRenameDirectoryError(
       oldName: String, newName: String, oldDir: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to rename table $oldName to $newName as failed " +
-      s"to rename its directory $oldDir", e)
+    new SparkException(
+      errorClass = "CANNOT_RENAME_TABLE_DUE_TO_FAILED_DIRECTORY_RENAMING",
+      messageParameters = Array(oldName, newName, oldDir.toString),
+      cause = e)
   }
 
   def unableToCreatePartitionPathError(partitionPath: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to create partition path $partitionPath", e)
+    new SparkException(
+      errorClass = "CANNOT_CREATE_PARTITION_PATH",
+      messageParameters = Array(partitionPath.toString),
+      cause = e)
   }
 
   def unableToRenamePartitionPathError(oldPartPath: Path, e: IOException): Throwable = {
-    new SparkException(s"Unable to rename partition path $oldPartPath", e)
+    new SparkException(
+      errorClass = "CANNOT_RENAME_PARTITION_PATH",
+      messageParameters = Array(oldPartPath.toString),
+      cause = e)
   }
 
   def methodNotImplementedError(methodName: String): Throwable = {
-    new UnsupportedOperationException(s"$methodName is not implemented")
+    new SparkUnsupportedOperationException(
+      errorClass = "METHOD_NOT_IMPLEMENTED",
+      messageParameters = Array(methodName))
   }
 
   def tableStatsNotSpecifiedError(): Throwable = {
-    new IllegalStateException("table stats must be specified.")
+    new SparkIllegalStateException(
+      errorClass = "TABLE_STATISTICS_NOT_SPECIFIED",

Review comment:
       @cloud-fan, is this an internal error? I can't think of a use case where the user would encounter this.




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org



---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org