about summary refs log tree commit diff
path: root/sql
diff options
context:
space:
mode:
Diffstat (limited to 'sql')
-rw-r--r-- sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala | 31
-rw-r--r-- sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala | 1
-rw-r--r-- sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala | 3
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala | 1
4 files changed, 9 insertions, 27 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
index 96fd1a027e..5e18316c94 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/NoSuchItemException.scala
@@ -17,6 +17,7 @@
package org.apache.spark.sql.catalyst.analysis
+import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.catalog.ExternalCatalog.TablePartitionSpec
@@ -24,29 +25,13 @@ import org.apache.spark.sql.catalyst.catalog.ExternalCatalog.TablePartitionSpec
* Thrown by a catalog when an item cannot be found. The analyzer will rethrow the exception
* as an [[org.apache.spark.sql.AnalysisException]] with the correct position information.
*/
-abstract class NoSuchItemException extends Exception {
- override def getMessage: String
-}
+class NoSuchDatabaseException(db: String) extends AnalysisException(s"Database $db not found")
-class NoSuchDatabaseException(db: String) extends NoSuchItemException {
- override def getMessage: String = s"Database $db not found"
-}
+class NoSuchTableException(db: String, table: String)
+ extends AnalysisException(s"Table or View $table not found in database $db")
-class NoSuchTableException(db: String, table: String) extends NoSuchItemException {
- override def getMessage: String = s"Table or View $table not found in database $db"
-}
+class NoSuchPartitionException(db: String, table: String, spec: TablePartitionSpec) extends
+ AnalysisException(s"Partition not found in table $table database $db:\n" + spec.mkString("\n"))
-class NoSuchPartitionException(
- db: String,
- table: String,
- spec: TablePartitionSpec)
- extends NoSuchItemException {
-
- override def getMessage: String = {
- s"Partition not found in table $table database $db:\n" + spec.mkString("\n")
- }
-}
-
-class NoSuchFunctionException(db: String, func: String) extends NoSuchItemException {
- override def getMessage: String = s"Function $func not found in database $db"
-}
+class NoSuchFunctionException(db: String, func: String)
+ extends AnalysisException(s"Function $func not found in database $db")
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index 758a7e45d2..5137bd11d8 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -29,6 +29,7 @@ import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
import org.apache.spark.sql.types._
+
// Note: The definition of these commands are based on the ones described in
// https://cwiki.apache.org/confluence/display/Hive/LanguageManual+DDL
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
index 482f47428d..f627384253 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
@@ -25,7 +25,6 @@ import org.apache.thrift.TException
import org.apache.spark.internal.Logging
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.analysis.NoSuchItemException
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.hive.client.HiveClient
@@ -66,8 +65,6 @@ private[spark] class HiveExternalCatalog(client: HiveClient) extends ExternalCat
try {
body
} catch {
- case e: NoSuchItemException =>
- throw new AnalysisException(e.getMessage)
case NonFatal(e) if isClientException(e) =>
throw new AnalysisException(e.getClass.getCanonicalName + ": " + e.getMessage)
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index 0c57ede9ed..af73baa1f3 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -28,7 +28,6 @@ import org.scalatest.BeforeAndAfter
import org.apache.spark.{SparkException, SparkFiles}
import org.apache.spark.sql.{AnalysisException, DataFrame, Row}
-import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
import org.apache.spark.sql.catalyst.expressions.Cast
import org.apache.spark.sql.catalyst.plans.logical.Project
import org.apache.spark.sql.execution.joins.BroadcastNestedLoopJoin