about summary refs log tree commit diff
path: root/sql
diff options
context:
space:
mode:
authorCheng Lian <lian@databricks.com>2016-03-12 11:24:50 -0800
committerReynold Xin <rxin@databricks.com>2016-03-12 11:25:15 -0800
commit4eace4d384f0e12b4934019d8654b5e3886ddaef (patch)
tree3b7ff9e4fee0abcac163d7429e8efd4e6095e821 /sql
parentba8c86d06f5968c1af4db8dd9a458005bc5f214c (diff)
downloadspark-4eace4d384f0e12b4934019d8654b5e3886ddaef.tar.gz
spark-4eace4d384f0e12b4934019d8654b5e3886ddaef.tar.bz2
spark-4eace4d384f0e12b4934019d8654b5e3886ddaef.zip
[SPARK-13828][SQL] Bring back stack trace of AnalysisException thrown from QueryExecution.assertAnalyzed
PR #11443 added an extra `plan: Option[LogicalPlan]` argument to `AnalysisException` and attached partially analyzed plan to thrown `AnalysisException` in `QueryExecution.assertAnalyzed()`. However, the original stack trace wasn't properly inherited. This PR fixes this issue by inheriting the stack trace. A test case is added to verify that the first entry of `AnalysisException` stack trace isn't from `QueryExecution`. Author: Cheng Lian <lian@databricks.com> Closes #11677 from liancheng/analysis-exception-stacktrace.
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala | 4
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala | 11
2 files changed, 13 insertions, 2 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
index 19ab3ea132..9e60c1cd61 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/QueryExecution.scala
@@ -33,7 +33,9 @@ class QueryExecution(val sqlContext: SQLContext, val logical: LogicalPlan) {
def assertAnalyzed(): Unit = try sqlContext.analyzer.checkAnalysis(analyzed) catch {
case e: AnalysisException =>
- throw new AnalysisException(e.message, e.line, e.startPosition, Some(analyzed))
+ val ae = new AnalysisException(e.message, e.line, e.startPosition, Some(analyzed))
+ ae.setStackTrace(e.getStackTrace)
+ throw ae
}
lazy val analyzed: LogicalPlan = sqlContext.analyzer.execute(logical)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
index 46cd380a79..e6e27ec413 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala
@@ -25,7 +25,8 @@ import scala.util.Random
import org.scalatest.Matchers._
import org.apache.spark.SparkException
-import org.apache.spark.sql.catalyst.plans.logical.{BroadcastHint, OneRowRelation, Union}
+import org.apache.spark.sql.catalyst.plans.logical.{OneRowRelation, Union}
+import org.apache.spark.sql.execution.QueryExecution
import org.apache.spark.sql.execution.aggregate.TungstenAggregate
import org.apache.spark.sql.execution.exchange.{BroadcastExchange, ReusedExchange, ShuffleExchange}
import org.apache.spark.sql.functions._
@@ -1366,4 +1367,12 @@ class DataFrameSuite extends QueryTest with SharedSQLContext {
// another invalid table name test as below
intercept[AnalysisException](df.registerTempTable("table!#"))
}
+
+ test("assertAnalyzed shouldn't replace original stack trace") {
+ val e = intercept[AnalysisException] {
+ sqlContext.range(1).select('id as 'a, 'id as 'b).groupBy('a).agg('b)
+ }
+
+ assert(e.getStackTrace.head.getClassName != classOf[QueryExecution].getName)
+ }
}