about summary refs log tree commit diff
diff options
context:
space:
mode:
authorMichael Armbrust <michael@databricks.com>2015-03-24 13:22:46 -0700
committerMichael Armbrust <michael@databricks.com>2015-03-24 13:22:46 -0700
commit046c1e2aa459147bf592371bb9fb7a65edb182e7 (patch)
tree9891f84ac18ce4ca15d2f1b448c53ee2e338174c
parent3fa3d121dfec60f9768d3859e8450ee482b2d4e8 (diff)
downloadspark-046c1e2aa459147bf592371bb9fb7a65edb182e7.tar.gz
spark-046c1e2aa459147bf592371bb9fb7a65edb182e7.tar.bz2
spark-046c1e2aa459147bf592371bb9fb7a65edb182e7.zip
[SPARK-6375][SQL] Fix formatting of error messages.
Author: Michael Armbrust <michael@databricks.com>
Closes #5155 from marmbrus/errorMessages and squashes the following commits:
b898188 [Michael Armbrust] Fix formatting of error messages.
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala6
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala5
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala8
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala4
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala7
-rw-r--r--sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala3
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala25
7 files changed, 53 insertions, 5 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
index 15add84878..34fedead44 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/AnalysisException.scala
@@ -30,6 +30,12 @@ class AnalysisException protected[sql] (
val startPosition: Option[Int] = None)
extends Exception with Serializable {
+ def withPosition(line: Option[Int], startPosition: Option[Int]) = {
+ val newException = new AnalysisException(message, line, startPosition)
+ newException.setStackTrace(getStackTrace)
+ newException
+ }
+
override def getMessage: String = {
val lineAnnotation = line.map(l => s" line $l").getOrElse("")
val positionAnnotation = startPosition.map(p => s" pos $p").getOrElse("")
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
index 92d3db077c..c93af79795 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala
@@ -175,7 +175,7 @@ class Analyzer(catalog: Catalog,
catalog.lookupRelation(u.tableIdentifier, u.alias)
} catch {
case _: NoSuchTableException =>
- u.failAnalysis(s"no such table ${u.tableIdentifier}")
+ u.failAnalysis(s"no such table ${u.tableName}")
}
}
@@ -275,7 +275,8 @@ class Analyzer(catalog: Catalog,
q.asInstanceOf[GroupingAnalytics].gid
case u @ UnresolvedAttribute(name) =>
// Leave unchanged if resolution fails. Hopefully will be resolved next round.
- val result = q.resolveChildren(name, resolver).getOrElse(u)
+ val result =
+ withPosition(u) { q.resolveChildren(name, resolver).getOrElse(u) }
logDebug(s"Resolving $u to $result")
result
case UnresolvedGetField(child, fieldName) if child.resolved =>
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala
index e95f19e69e..a7d3a8ee7d 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/package.scala
@@ -42,4 +42,12 @@ package object analysis {
throw new AnalysisException(msg, t.origin.line, t.origin.startPosition)
}
}
+
+ /** Catches any AnalysisExceptions thrown by `f` and attaches `t`'s position if any. */
+ def withPosition[A](t: TreeNode[_])(f: => A) = {
+ try f catch {
+ case a: AnalysisException =>
+ throw a.withPosition(t.origin.line, t.origin.startPosition)
+ }
+ }
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
index a7cd4124e5..ad5172c034 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/unresolved.scala
@@ -36,6 +36,10 @@ class UnresolvedException[TreeType <: TreeNode[_]](tree: TreeType, function: Str
case class UnresolvedRelation(
tableIdentifier: Seq[String],
alias: Option[String] = None) extends LeafNode {
+
+ /** Returns a `.` separated name for this relation. */
+ def tableName = tableIdentifier.mkString(".")
+
override def output = Nil
override lazy val resolved = false
}
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
index 3dd7d38847..08361d043b 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
@@ -42,6 +42,13 @@ abstract class NamedExpression extends Expression {
def exprId: ExprId
/**
* Returns a dot separated fully qualified name for this attribute. Given that there can be
* multiple qualifiers, it is possible that there are other possible ways to refer to this
* attribute.
+ */
+ def qualifiedName: String = (qualifiers.headOption.toSeq :+ name).mkString(".")
+
+ /**
* All possible qualifiers for the expression.
*
* For now, since we do not allow using original table name to qualify a column name once the
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
index 8c4f09b58a..0f8b144ccc 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/LogicalPlan.scala
@@ -208,8 +208,9 @@ abstract class LogicalPlan extends QueryPlan[LogicalPlan] with Logging {
// More than one match.
case ambiguousReferences =>
+ val referenceNames = ambiguousReferences.map(_._1.qualifiedName).mkString(", ")
throw new AnalysisException(
- s"Ambiguous references to $name: ${ambiguousReferences.mkString(",")}")
+ s"Reference '$name' is ambiguous, could be: $referenceNames.")
}
}
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala
index f04437c595..968557c9c4 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/ErrorPositionSuite.scala
@@ -19,12 +19,29 @@ package org.apache.spark.sql.hive
import java.io.{OutputStream, PrintStream}
+import scala.util.Try
+
+import org.scalatest.BeforeAndAfter
+
import org.apache.spark.sql.hive.test.TestHive._
+import org.apache.spark.sql.hive.test.TestHive.implicits._
import org.apache.spark.sql.{AnalysisException, QueryTest}
-import scala.util.Try
-class ErrorPositionSuite extends QueryTest {
+class ErrorPositionSuite extends QueryTest with BeforeAndAfter {
+
+ before {
+ Seq((1, 1, 1)).toDF("a", "a", "b").registerTempTable("dupAttributes")
+ }
+
+ positionTest("ambiguous attribute reference 1",
+ "SELECT a from dupAttributes", "a")
+
+ positionTest("ambiguous attribute reference 2",
+ "SELECT a, b from dupAttributes", "a")
+
+ positionTest("ambiguous attribute reference 3",
+ "SELECT b, a from dupAttributes", "a")
positionTest("unresolved attribute 1",
"SELECT x FROM src", "x")
@@ -127,6 +144,10 @@ class ErrorPositionSuite extends QueryTest {
val error = intercept[AnalysisException] {
quietly(sql(query))
}
+
+ assert(!error.getMessage.contains("Seq("))
+ assert(!error.getMessage.contains("List("))
+
val (line, expectedLineNum) = query.split("\n").zipWithIndex.collect {
case (l, i) if l.contains(token) => (l, i + 1)
}.headOption.getOrElse(sys.error(s"Invalid test. Token $token not in $query"))