author    Andrew Or <andrew@databricks.com>    2016-04-05 21:23:20 -0700
committer Yin Huai <yhuai@databricks.com>      2016-04-05 21:23:20 -0700
commit    adbfdb878dd1029738db3d1955d08b33de1aa8a9 (patch)
tree      327fbd5dd4a425bad96447367429095b8ec713b2 /sql
parent    f6456fa80ba442bfd7ce069fc23b7dbd993e6cb9 (diff)
[SPARK-14128][SQL] Alter table DDL followup
## What changes were proposed in this pull request?

This is just a followup to #12121, which implemented the alter table DDLs using the `SessionCatalog`. Specifically, this corrects the behavior of setting the location of a datasource table. For datasource tables, we need to set the `locationUri` in addition to the `path` entry in the serde properties. Additionally, changing the location of a datasource table partition is not allowed.

## How was this patch tested?

`DDLSuite`

Author: Andrew Or <andrew@databricks.com>

Closes #12186 from andrewor14/alter-table-ddl-followup.
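To make the behavior change concrete, here is a minimal sketch of the affected DDL statements. It is not part of the commit; the SQL entry point named `spark` and the partitioned datasource table `dbx.tab1` with partition column `a` are assumed for illustration only.

```scala
// Sketch only, assuming a SQL entry point `spark` and an existing
// partitioned datasource table `dbx.tab1` (both hypothetical).

// Setting the table location now updates both the catalog's `locationUri`
// and the "path" entry in the table's serde properties.
spark.sql("ALTER TABLE dbx.tab1 SET LOCATION '/path/to/new/location'")

// Setting the location of an individual partition of a datasource table
// is no longer allowed and raises an AnalysisException.
spark.sql("ALTER TABLE dbx.tab1 PARTITION (a='1') SET LOCATION '/path/to/part'")
```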
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala      |  6
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala | 20
2 files changed, 21 insertions(+), 5 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index 0d38c41a3f..6d56a6fec8 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -383,8 +383,9 @@ case class AlterTableSetLocation(
val part = catalog.getPartition(tableName, spec)
val newPart =
if (DDLUtils.isDatasourceTable(table)) {
- part.copy(storage = part.storage.copy(
- serdeProperties = part.storage.serdeProperties ++ Map("path" -> location)))
+ throw new AnalysisException(
+ "alter table set location for partition is not allowed for tables defined " +
+ "using the datasource API")
} else {
part.copy(storage = part.storage.copy(locationUri = Some(location)))
}
@@ -394,6 +395,7 @@ case class AlterTableSetLocation(
val newTable =
if (DDLUtils.isDatasourceTable(table)) {
table.withNewStorage(
+ locationUri = Some(location),
serdeProperties = table.storage.serdeProperties ++ Map("path" -> location))
} else {
table.withNewStorage(locationUri = Some(location))
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index d8e2c94a8a..a8db4e9923 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -417,23 +417,37 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
.map { s => catalog.getPartition(tableIdent, s).storage }
.getOrElse { catalog.getTable(tableIdent).storage }
if (isDatasourceTable) {
- assert(storageFormat.serdeProperties.get("path") === Some(expected))
+ if (spec.isDefined) {
+ assert(storageFormat.serdeProperties.isEmpty)
+ assert(storageFormat.locationUri.isEmpty)
+ } else {
+ assert(storageFormat.serdeProperties.get("path") === Some(expected))
+ assert(storageFormat.locationUri === Some(expected))
+ }
} else {
assert(storageFormat.locationUri === Some(expected))
}
}
+ // Optionally expect AnalysisException
+ def maybeWrapException[T](expectException: Boolean)(body: => T): Unit = {
+ if (expectException) intercept[AnalysisException] { body } else body
+ }
// set table location
sql("ALTER TABLE dbx.tab1 SET LOCATION '/path/to/your/lovely/heart'")
verifyLocation("/path/to/your/lovely/heart")
// set table partition location
- sql("ALTER TABLE dbx.tab1 PARTITION (a='1') SET LOCATION '/path/to/part/ways'")
+ maybeWrapException(isDatasourceTable) {
+ sql("ALTER TABLE dbx.tab1 PARTITION (a='1') SET LOCATION '/path/to/part/ways'")
+ }
verifyLocation("/path/to/part/ways", Some(partSpec))
// set table location without explicitly specifying database
catalog.setCurrentDatabase("dbx")
sql("ALTER TABLE tab1 SET LOCATION '/swanky/steak/place'")
verifyLocation("/swanky/steak/place")
// set table partition location without explicitly specifying database
- sql("ALTER TABLE tab1 PARTITION (a='1') SET LOCATION 'vienna'")
+ maybeWrapException(isDatasourceTable) {
+ sql("ALTER TABLE tab1 PARTITION (a='1') SET LOCATION 'vienna'")
+ }
verifyLocation("vienna", Some(partSpec))
// table to alter does not exist
intercept[AnalysisException] {