author     thomastechs <thomas.sebastian@tcs.com>    2016-01-03 11:09:30 -0800
committer  Reynold Xin <rxin@databricks.com>         2016-01-03 11:09:30 -0800
commit     c82924d564c07e6e6f635b9e263994dedf06268a (patch)
tree       c474195a61b24dc50dfcda7a9305504127939cad /sql
parent     c3d505602de2fd2361633f90e4fff7e041849e28 (diff)
[SPARK-12533][SQL] hiveContext.table() throws the wrong exception
Avoid throwing NoSuchTableException and throw an AnalysisException instead, as described in SPARK-12533.

Author: thomastechs <thomas.sebastian@tcs.com>

Closes #10529 from thomastechs/topic-branch.
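For context, a minimal, hypothetical sketch of the user-visible change (not part of this patch), assuming a Spark 1.6-era SQLContext created locally; the object name MissingTableDemo and the table name no_such_table are illustrative only:

// Looking up a non-existent table now surfaces AnalysisException with a
// "Table not found" message instead of NoSuchTableException.
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{AnalysisException, SQLContext}

object MissingTableDemo extends App {
  val sc = new SparkContext(new SparkConf().setMaster("local[1]").setAppName("missing-table-demo"))
  val sqlContext = new SQLContext(sc)
  try {
    sqlContext.table("no_such_table")
  } catch {
    case e: AnalysisException => println(e.getMessage)  // e.g. "Table not found: no_such_table"
  } finally {
    sc.stop()
  }
}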
Diffstat (limited to 'sql')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala | 2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala | 6
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
index 8f4ce74a2e..3b775c3ca8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Catalog.scala
@@ -104,7 +104,7 @@ class SimpleCatalog(val conf: CatalystConf) extends Catalog {
val tableName = getTableName(tableIdent)
val table = tables.get(tableName)
if (table == null) {
- throw new NoSuchTableException
+ throw new AnalysisException("Table not found: " + tableName)
}
val tableWithQualifiers = Subquery(tableName, table)
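To illustrate the pattern in isolation, here is a self-contained toy sketch in plain Scala (not Spark code): a map-backed catalog whose lookup reports a missing table through a single message-bearing exception, mirroring the switch from NoSuchTableException to AnalysisException above. TinyAnalysisException, TinyCatalog, and TinyCatalogDemo are hypothetical stand-ins:

// Toy stand-in for the catalog-lookup pattern changed in Catalog.scala.
import scala.collection.concurrent.TrieMap

final class TinyAnalysisException(message: String) extends Exception(message)

final class TinyCatalog {
  private val tables = new TrieMap[String, String]()   // table name -> plan (stubbed as a String)

  def register(name: String, plan: String): Unit = tables.put(name, plan)

  def lookup(name: String): String =
    tables.getOrElse(name, throw new TinyAnalysisException("Table not found: " + name))
}

object TinyCatalogDemo extends App {
  val catalog = new TinyCatalog
  catalog.register("t1", "plan-for-t1")
  println(catalog.lookup("t1"))                        // prints plan-for-t1
  try catalog.lookup("t2")
  catch { case e: TinyAnalysisException => println(e.getMessage) }  // Table not found: t2
}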
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
index d86df4cfb9..6b735bcf16 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala
@@ -17,7 +17,7 @@
package org.apache.spark.sql
-import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
+
import org.apache.spark.sql.execution.Exchange
import org.apache.spark.sql.execution.PhysicalRDD
@@ -289,7 +289,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
testData.select('key).registerTempTable("t1")
sqlContext.table("t1")
sqlContext.dropTempTable("t1")
- intercept[NoSuchTableException](sqlContext.table("t1"))
+ intercept[AnalysisException](sqlContext.table("t1"))
}
test("Drops cached temporary table") {
@@ -301,7 +301,7 @@ class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSQLContext
assert(sqlContext.isCached("t2"))
sqlContext.dropTempTable("t1")
- intercept[NoSuchTableException](sqlContext.table("t1"))
+ intercept[AnalysisException](sqlContext.table("t1"))
assert(!sqlContext.isCached("t2"))
}
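For reference, a minimal, hypothetical ScalaTest sketch (not the real CachedTableSuite) of the intercept pattern the updated tests use, assuming Spark 1.6-era APIs and a local SparkContext; the suite and test names are illustrative only:

// After dropTempTable, table() should raise AnalysisException rather than NoSuchTableException.
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{AnalysisException, SQLContext}
import org.scalatest.FunSuite

class DroppedTempTableSketchSuite extends FunSuite {
  test("table() on a dropped temp table throws AnalysisException") {
    val sc = new SparkContext(new SparkConf().setMaster("local[1]").setAppName("sketch"))
    try {
      val sqlContext = new SQLContext(sc)
      sqlContext.range(3).registerTempTable("t1")
      sqlContext.table("t1")                       // resolves while the temp table exists
      sqlContext.dropTempTable("t1")
      intercept[AnalysisException] {               // previously NoSuchTableException
        sqlContext.table("t1")
      }
    } finally {
      sc.stop()
    }
  }
}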