Posted to commits@spark.apache.org by da...@apache.org on 2016/08/23 16:45:16 UTC

spark git commit: [SPARK-13286] [SQL] add the next exception of SQLException as cause

Repository: spark
Updated Branches:
  refs/heads/master 97d461b75 -> 9afdfc94f


[SPARK-13286] [SQL] add the next exception of SQLException as cause

## What changes were proposed in this pull request?

Some JDBC drivers (for example PostgreSQL) do not set the underlying exception as the cause, but expose it through another API (getNextException). As a result it is not included in the error logging, which makes it hard to find the root cause, especially in batch mode.
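
For context, here is a minimal sketch (not part of this patch; the helper name is hypothetical) of what a caller would otherwise have to do to see the real failure from such a driver: the batch errors are only reachable by walking getNextException, so a plain stack trace of the outer SQLException does not show them.

```scala
import java.sql.SQLException

// Hypothetical helper: walk the getNextException chain that drivers such as
// the PostgreSQL JDBC driver populate instead of setting getCause.
def printBatchErrors(e: SQLException): Unit = {
  var next = e.getNextException          // standard java.sql.SQLException API
  while (next != null) {
    // Without this walk, these messages never appear in the logged stack trace.
    println(s"next exception: ${next.getMessage}")
    next = next.getNextException
  }
}
```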

This PR pulls out the next exception and adds it as the cause (if no cause is set yet) or as a suppressed exception (if a different cause is already present).
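
As a rough sketch of that rule, assuming a standalone helper (the name is hypothetical; the actual change is inline in JdbcUtils, see the diff below):

```scala
import java.sql.SQLException

// Sketch of the chaining rule: attach getNextException as the cause when no
// cause is set yet, otherwise record it as a suppressed exception.
def chainNextException(e: SQLException): SQLException = {
  val next = e.getNextException
  if (next != null && next != e.getCause) {
    if (e.getCause == null) e.initCause(next)
    else e.addSuppressed(next)
  }
  e
}
```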

## How was this patch tested?

This can't be reproduced with the default JDBC driver, so no regression test was added.

Author: Davies Liu <da...@databricks.com>

Closes #14722 from davies/keep_cause.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/9afdfc94
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/9afdfc94
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/9afdfc94

Branch: refs/heads/master
Commit: 9afdfc94f49395e69a7959e881c19d787ce00c3e
Parents: 97d461b
Author: Davies Liu <da...@databricks.com>
Authored: Tue Aug 23 09:45:13 2016 -0700
Committer: Davies Liu <da...@gmail.com>
Committed: Tue Aug 23 09:45:13 2016 -0700

----------------------------------------------------------------------
 .../sql/execution/datasources/jdbc/JdbcUtils.scala   | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/9afdfc94/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
index a33c26d..cbd5046 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/jdbc/JdbcUtils.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.sql.execution.datasources.jdbc
 
-import java.sql.{Connection, Driver, DriverManager, PreparedStatement}
+import java.sql.{Connection, Driver, DriverManager, PreparedStatement, SQLException}
 import java.util.Properties
 
 import scala.collection.JavaConverters._
@@ -289,7 +289,7 @@ object JdbcUtils extends Logging {
       }
       val stmt = insertStatement(conn, table, rddSchema, dialect)
       val setters: Array[JDBCValueSetter] = rddSchema.fields.map(_.dataType)
-          .map(makeSetter(conn, dialect, _)).toArray
+        .map(makeSetter(conn, dialect, _)).toArray
 
       try {
         var rowCount = 0
@@ -322,6 +322,17 @@ object JdbcUtils extends Logging {
         conn.commit()
       }
       committed = true
+    } catch {
+      case e: SQLException =>
+        val cause = e.getNextException
+        if (e.getCause != cause) {
+          if (e.getCause == null) {
+            e.initCause(cause)
+          } else {
+            e.addSuppressed(cause)
+          }
+        }
+        throw e
     } finally {
       if (!committed) {
         // The stage must fail.  We got here through an exception path, so


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org