diff options
-rw-r--r-- | sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala | 6 | ||||
-rw-r--r-- | sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala | 20 |
2 files changed, 21 insertions(+), 5 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala index 0d38c41a3f..6d56a6fec8 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala @@ -383,8 +383,9 @@ case class AlterTableSetLocation( val part = catalog.getPartition(tableName, spec) val newPart = if (DDLUtils.isDatasourceTable(table)) { - part.copy(storage = part.storage.copy( - serdeProperties = part.storage.serdeProperties ++ Map("path" -> location))) + throw new AnalysisException( + "alter table set location for partition is not allowed for tables defined " + + "using the datasource API") } else { part.copy(storage = part.storage.copy(locationUri = Some(location))) } @@ -394,6 +395,7 @@ case class AlterTableSetLocation( val newTable = if (DDLUtils.isDatasourceTable(table)) { table.withNewStorage( + locationUri = Some(location), serdeProperties = table.storage.serdeProperties ++ Map("path" -> location)) } else { table.withNewStorage(locationUri = Some(location)) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala index d8e2c94a8a..a8db4e9923 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala @@ -417,23 +417,37 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { .map { s => catalog.getPartition(tableIdent, s).storage } .getOrElse { catalog.getTable(tableIdent).storage } if (isDatasourceTable) { - assert(storageFormat.serdeProperties.get("path") === Some(expected)) + if (spec.isDefined) { + assert(storageFormat.serdeProperties.isEmpty) + assert(storageFormat.locationUri.isEmpty) + } else { + 
assert(storageFormat.serdeProperties.get("path") === Some(expected)) + assert(storageFormat.locationUri === Some(expected)) + } } else { assert(storageFormat.locationUri === Some(expected)) } } + // Optionally expect AnalysisException + def maybeWrapException[T](expectException: Boolean)(body: => T): Unit = { + if (expectException) intercept[AnalysisException] { body } else body + } // set table location sql("ALTER TABLE dbx.tab1 SET LOCATION '/path/to/your/lovely/heart'") verifyLocation("/path/to/your/lovely/heart") // set table partition location - sql("ALTER TABLE dbx.tab1 PARTITION (a='1') SET LOCATION '/path/to/part/ways'") + maybeWrapException(isDatasourceTable) { + sql("ALTER TABLE dbx.tab1 PARTITION (a='1') SET LOCATION '/path/to/part/ways'") + } verifyLocation("/path/to/part/ways", Some(partSpec)) // set table location without explicitly specifying database catalog.setCurrentDatabase("dbx") sql("ALTER TABLE tab1 SET LOCATION '/swanky/steak/place'") verifyLocation("/swanky/steak/place") // set table partition location without explicitly specifying database - sql("ALTER TABLE tab1 PARTITION (a='1') SET LOCATION 'vienna'") + maybeWrapException(isDatasourceTable) { + sql("ALTER TABLE tab1 PARTITION (a='1') SET LOCATION 'vienna'") + } verifyLocation("vienna", Some(partSpec)) // table to alter does not exist intercept[AnalysisException] { |