Posted to commits@spark.apache.org by rx...@apache.org on 2016/03/12 20:25:44 UTC

spark git commit: [SPARK-13828][SQL] Bring back stack trace of AnalysisException thrown from QueryExecution.assertAnalyzed

Repository: spark
Updated Branches:
  refs/heads/master ba8c86d06 -> 4eace4d38


[SPARK-13828][SQL] Bring back stack trace of AnalysisException thrown from QueryExecution.assertAnalyzed

PR #11443 added an extra `plan: Option[LogicalPlan]` argument to `AnalysisException` and attached the partially analyzed plan to the `AnalysisException` thrown in `QueryExecution.assertAnalyzed()`. However, the original stack trace wasn't properly inherited. This PR fixes the issue by inheriting the stack trace.

A test case is added to verify that the first entry of the `AnalysisException` stack trace isn't from `QueryExecution`.
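
For reference, below is a minimal, self-contained Scala sketch of the pattern this change relies on; the class and object names are illustrative only, not Spark code. The original exception's frames are copied onto the wrapping exception with `Throwable.setStackTrace`, so the top frame still points at the original failure site rather than at the re-throwing class, which is the same property the new test asserts.

object StackTracePreservationSketch {
  // Hypothetical wrapper standing in for AnalysisException-with-plan.
  final class WrappedException(message: String) extends Exception(message)

  final class Rethrower {
    def rethrow(e: Exception): Nothing = {
      val wrapped = new WrappedException(e.getMessage)
      // Inherit the original stack trace instead of keeping the one captured here.
      wrapped.setStackTrace(e.getStackTrace)
      throw wrapped
    }
  }

  def main(args: Array[String]): Unit = {
    val rethrower = new Rethrower
    try rethrower.rethrow(new Exception("boom")) catch {
      case e: WrappedException =>
        // The first frame belongs to the original creation site (this main method),
        // not to Rethrower -- analogous to the DataFrameSuite assertion in the diff below.
        assert(e.getStackTrace.head.getClassName != classOf[Rethrower].getName)
        println("top frame: " + e.getStackTrace.head)
    }
  }
}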

Author: Cheng Lian <li...@databricks.com>

Closes #11677 from liancheng/analysis-exception-stacktrace.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/4eace4d3
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/4eace4d3
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/4eace4d3

Branch: refs/heads/master
Commit: 4eace4d384f0e12b4934019d8654b5e3886ddaef
Parents: ba8c86d
Author: Cheng Lian <li...@databricks.com>
Authored: Sat Mar 12 11:24:50 2016 -0800
Committer: Reynold Xin <rx...@databricks.com>
Committed: Sat Mar 12 11:25:15 2016 -0800

----------------------------------------------------------------------
 .../org/apache/spark/sql/execution/QueryExecution.scala  |  4 +++-
 .../test/scala/org/apache/spark/sql/DataFrameSuite.scala | 11 ++++++++++-
 2 files changed, 13 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/4eace4d3/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
index 19ab3ea..9e60c1c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
@@ -33,7 +33,9 @@ class QueryExecution(val sqlContext: SQLContext, val logical: LogicalPlan) {
 
   def assertAnalyzed(): Unit = try sqlContext.analyzer.checkAnalysis(analyzed) catch {
     case e: AnalysisException =>
-      throw new AnalysisException(e.message, e.line, e.startPosition, Some(analyzed))
+      val ae = new AnalysisException(e.message, e.line, e.startPosition, Some(analyzed))
+      ae.setStackTrace(e.getStackTrace)
+      throw ae
   }
 
   lazy val analyzed: LogicalPlan = sqlContext.analyzer.execute(logical)

http://git-wip-us.apache.org/repos/asf/spark/blob/4eace4d3/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index 46cd380..e6e27ec 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -25,7 +25,8 @@ import scala.util.Random
 import org.scalatest.Matchers._
 
 import org.apache.spark.SparkException
-import org.apache.spark.sql.catalyst.plans.logical.{BroadcastHint, OneRowRelation, Union}
+import org.apache.spark.sql.catalyst.plans.logical.{OneRowRelation, Union}
+import org.apache.spark.sql.execution.QueryExecution
 import org.apache.spark.sql.execution.aggregate.TungstenAggregate
 import org.apache.spark.sql.execution.exchange.{BroadcastExchange, ReusedExchange, ShuffleExchange}
 import org.apache.spark.sql.functions._
@@ -1366,4 +1367,12 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
     // another invalid table name test as below
     intercept[AnalysisException](df.registerTempTable("table!#"))
   }
+
+  test("assertAnalyzed shouldn't replace original stack trace") {
+    val e = intercept[AnalysisException] {
+      sqlContext.range(1).select('id as 'a, 'id as 'b).groupBy('a).agg('b)
+    }
+
+    assert(e.getStackTrace.head.getClassName != classOf[QueryExecution].getName)
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org