diff options
author | bomeng <bmeng@us.ibm.com> | 2016-04-12 13:43:39 -0700 |
---|---|---|
committer | Reynold Xin <rxin@databricks.com> | 2016-04-12 13:43:39 -0700 |
commit | bcd2076274b1a95f74616d0ceacb0696e38b5f4c (patch) | |
tree | caefc19411872ab1a6106cd7c75994a26f4259e4 /sql/catalyst | |
parent | 85e68b4bea3e4ad2e4063334dbf5b11af197d2ce (diff) | |
download | spark-bcd2076274b1a95f74616d0ceacb0696e38b5f4c.tar.gz spark-bcd2076274b1a95f74616d0ceacb0696e38b5f4c.tar.bz2 spark-bcd2076274b1a95f74616d0ceacb0696e38b5f4c.zip |
[SPARK-14414][SQL] improve the error message class hierarchy
## What changes were proposed in this pull request?
Previously we were using `AnalysisException`, `ParseException`, `NoSuchFunctionException`, etc. when a parsing error was encountered. This change makes their usage consistent — with **minimal** code impact on the current implementation — by adjusting the class hierarchy.
1. `NoSuchItemException` is removed, since it is an abstract class that simply carries a message string.
2. `NoSuchDatabaseException`, `NoSuchTableException`, `NoSuchPartitionException` and `NoSuchFunctionException` now extend `AnalysisException`, as does `ParseException`; they all fall under the `AnalysisException` umbrella, but you can still determine how to handle each of them in a granular way.
## How was this patch tested?
The existing test cases should cover this patch.
Author: bomeng <bmeng@us.ibm.com>
Closes #12314 from bomeng/SPARK-14414.
Diffstat (limited to 'sql/catalyst')
-rw-r--r-- | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala | 31 |
1 files changed, 8 insertions, 23 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala index 96fd1a027e..5e18316c94 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala @@ -17,6 +17,7 @@ package org.apache.spark.sql.catalyst.analysis +import org.apache.spark.sql.AnalysisException import org.apache.spark.sql.catalyst.catalog.ExternalCatalog.TablePartitionSpec @@ -24,29 +25,13 @@ import org.apache.spark.sql.catalyst.catalog.ExternalCatalog.TablePartitionSpec * Thrown by a catalog when an item cannot be found. The analyzer will rethrow the exception * as an [[org.apache.spark.sql.AnalysisException]] with the correct position information. */ -abstract class NoSuchItemException extends Exception { - override def getMessage: String -} +class NoSuchDatabaseException(db: String) extends AnalysisException(s"Database $db not found") -class NoSuchDatabaseException(db: String) extends NoSuchItemException { - override def getMessage: String = s"Database $db not found" -} +class NoSuchTableException(db: String, table: String) + extends AnalysisException(s"Table or View $table not found in database $db") -class NoSuchTableException(db: String, table: String) extends NoSuchItemException { - override def getMessage: String = s"Table or View $table not found in database $db" -} +class NoSuchPartitionException(db: String, table: String, spec: TablePartitionSpec) extends + AnalysisException(s"Partition not found in table $table database $db:\n" + spec.mkString("\n")) -class NoSuchPartitionException( - db: String, - table: String, - spec: TablePartitionSpec) - extends NoSuchItemException { - - override def getMessage: String = { - s"Partition not found in table $table database $db:\n" + spec.mkString("\n") - } -} - -class 
NoSuchFunctionException(db: String, func: String) extends NoSuchItemException { - override def getMessage: String = s"Function $func not found in database $db" -} +class NoSuchFunctionException(db: String, func: String) + extends AnalysisException(s"Function $func not found in database $db") |