Posted to reviews@spark.apache.org by GitBox <gi...@apache.org> on 2021/10/07 01:40:28 UTC

[GitHub] [spark] dchvn commented on a change in pull request #34168: [SPARK-36302][SQL] Refactor thirteenth set of 20 query execution errors to use error classes

dchvn commented on a change in pull request #34168:
URL: https://github.com/apache/spark/pull/34168#discussion_r723787788



##########
File path: sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryExecutionErrors.scala
##########
@@ -1297,74 +1297,87 @@ object QueryExecutionErrors {
   }
 
   def serDeInterfaceNotFoundError(e: NoClassDefFoundError): Throwable = {
-    new ClassNotFoundException("The SerDe interface removed since Hive 2.3(HIVE-15167)." +
-      " Please migrate your custom SerDes to Hive 2.3. See HIVE-15167 for more details.", e)
+    new SparkClassNotFoundException(
+      errorClass = "SERDE_INTERFACE_NOT_FOUND",
+      messageParameters = Array.empty, e)
   }
 
   def convertHiveTableToCatalogTableError(
       e: SparkException, dbName: String, tableName: String): Throwable = {
-    new SparkException(s"${e.getMessage}, db: $dbName, table: $tableName", e)
+    new SparkException(
+      errorClass = "CANNOT_CONVERT_HIVE_TABLE_TO_CATALOG_TABLE",
+      messageParameters = Array(e.getMessage, dbName, tableName), e)
   }
 
   def cannotRecognizeHiveTypeError(
       e: ParseException, fieldType: String, fieldName: String): Throwable = {
     new SparkException(
-      s"Cannot recognize hive type string: $fieldType, column: $fieldName", e)
+      errorClass = "CANNOT_RECOGNIZE_HIVE_TYPE",
+      messageParameters = Array(fieldType, fieldName), e)
   }
 
   def getTablesByTypeUnsupportedByHiveVersionError(): Throwable = {
-    new UnsupportedOperationException("Hive 2.2 and lower versions don't support " +
-      "getTablesByType. Please use Hive 2.3 or higher version.")
+    new SparkUnsupportedOperationException(
+      errorClass = "GET_TABLES_BY_TYPE_UNSUPPORTED_BY_HIVE_VERSION",
+      messageParameters = Array.empty
+    )
   }
 
   def dropTableWithPurgeUnsupportedError(): Throwable = {
-    new UnsupportedOperationException("DROP TABLE ... PURGE")
+    new SparkUnsupportedOperationException(
+      errorClass = "DROP_TABLE_WITH_PURGE_UNSUPPORTED",
+      messageParameters = Array.empty
+    )
   }
 
   def alterTableWithDropPartitionAndPurgeUnsupportedError(): Throwable = {
-    new UnsupportedOperationException("ALTER TABLE ... DROP PARTITION ... PURGE")
+    new SparkUnsupportedOperationException(
+      errorClass = "ALTER_TABLE_WITH_DROP_PARTITION_AND_PURGE_UNSUPPORTED",
+      messageParameters = Array.empty
+    )
   }
 
   def invalidPartitionFilterError(): Throwable = {
-    new UnsupportedOperationException(
-      """Partition filter cannot have both `"` and `'` characters""")
+    new SparkUnsupportedOperationException(
+      errorClass = "INVALID_PARTITION_FILTER",
+      messageParameters = Array.empty
+    )
   }
 
   def getPartitionMetadataByFilterError(e: InvocationTargetException): Throwable = {
-    new RuntimeException(
-      s"""
-         |Caught Hive MetaException attempting to get partition metadata by filter
-         |from Hive. You can set the Spark configuration setting
-         |${SQLConf.HIVE_METASTORE_PARTITION_PRUNING_FALLBACK_ON_EXCEPTION} to true to work around
-         |this problem, however this will result in degraded performance. Please
-         |report a bug: https://issues.apache.org/jira/browse/SPARK
-       """.stripMargin.replaceAll("\n", " "), e)
+    new SparkRuntimeException(
+      errorClass = "CANNOT_GET_PARTITION_METADATA_BY_FILTER",

Review comment:
       Thanks, updated it! 
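
For anyone following the error-class migration pattern in the diff above: each errorClass string passed to the new constructors must map to a message template registered in Spark's error-classes.json resource (core/src/main/resources/error/error-classes.json), and messageParameters fills that template's placeholders. Below is a minimal, self-contained Scala sketch of that lookup; the error class names are taken from the diff, but the template strings and the in-memory Map are illustrative assumptions, not the exact entries merged in this PR.

  // Stand-in for error-classes.json; real entries look roughly like:
  //   "CANNOT_RECOGNIZE_HIVE_TYPE": { "message": ["..."] }
  object ErrorClassSketch {
    private val templates: Map[String, String] = Map(
      // Assumed template text; see the PR's error-classes.json for the real strings.
      "CANNOT_RECOGNIZE_HIVE_TYPE" ->
        "Cannot recognize hive type string: %s, column: %s",
      "DROP_TABLE_WITH_PURGE_UNSUPPORTED" ->
        "DROP TABLE ... PURGE is not supported."
    )

    // Mirrors how a SparkThrowable-style exception renders its message:
    // the error class selects a template, and messageParameters fill its %s slots.
    def getMessage(errorClass: String, messageParameters: Array[String]): String =
      templates(errorClass).format(messageParameters: _*)
  }

  // Example:
  //   ErrorClassSketch.getMessage("CANNOT_RECOGNIZE_HIVE_TYPE", Array("map<int>", "col1"))
  //   => "Cannot recognize hive type string: map<int>, column: col1"

This also shows why the parameterless cases in the diff pass Array.empty: their templates have no placeholders to fill.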




-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org