path: root/sql/core
author     Wenchen Fan <wenchen@databricks.com>    2017-01-17 19:42:02 +0800
committer  Wenchen Fan <wenchen@databricks.com>    2017-01-17 19:42:02 +0800
commit     a774bca05ec6dd0deec638048dc8672a84427f49 (patch)
tree       d7c2bddc080b9b8adc1dd693b6f379263e079133 /sql/core
parent     84f0b645b424eabb429c9eb38092841f44be1310 (diff)
[SPARK-19240][SQL][TEST] add test for setting location for managed table
## What changes were proposed in this pull request?

SET LOCATION also works on a managed table (or a table created without a custom path). The behavior is a little weird, but since we already support it, we should add a test that explicitly shows the behavior.

## How was this patch tested?

N/A

Author: Wenchen Fan <wenchen@databricks.com>

Closes #16597 from cloud-fan/set-location.
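For context, a minimal SQL-level sketch of the behavior the new test pins down (assumes a Spark SQL session; the /tmp path is illustrative and not part of this patch):

CREATE TABLE tbl(i INT) USING parquet;             -- managed table, stored under the default warehouse location
INSERT INTO tbl SELECT 1;
ALTER TABLE tbl SET LOCATION '/tmp/new_tbl_path';  -- only the table's location metadata changes
SELECT * FROM tbl;                                 -- returns no rows: existing data is not moved to the new path
INSERT INTO tbl SELECT 2;                          -- new writes land in the new path
SELECT * FROM tbl;                                 -- returns 2
DROP TABLE tbl;                                    -- removes the data under the new path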
Diffstat (limited to 'sql/core')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala  |  28
1 file changed, 28 insertions, 0 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index ac3878e849..97990a6d9b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -18,6 +18,7 @@
 package org.apache.spark.sql.execution.command
 
 import java.io.File
+import java.net.URI
 
 import org.apache.hadoop.fs.Path
 import org.scalatest.BeforeAndAfterEach
@@ -1787,4 +1788,31 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
     val rows: Seq[Row] = df.toLocalIterator().asScala.toSeq
     assert(rows.length > 0)
   }
+
+  test("SET LOCATION for managed table") {
+    withTable("tbl") {
+      withTempDir { dir =>
+        sql("CREATE TABLE tbl(i INT) USING parquet")
+        sql("INSERT INTO tbl SELECT 1")
+        checkAnswer(spark.table("tbl"), Row(1))
+        val defaultTablePath = spark.sessionState.catalog
+          .getTableMetadata(TableIdentifier("tbl")).storage.locationUri.get
+
+        sql(s"ALTER TABLE tbl SET LOCATION '${dir.getCanonicalPath}'")
+        // SET LOCATION won't move data from the old table path to the new one.
+        assert(spark.table("tbl").count() == 0)
+        // the old table path should still be there.
+        assert(new File(new URI(defaultTablePath)).exists())
+
+ sql("INSERT INTO tbl SELECT 2")
+ checkAnswer(spark.table("tbl"), Row(2))
+ // newly inserted data will go to the new table path.
+ assert(dir.listFiles().nonEmpty)
+
+ sql("DROP TABLE tbl")
+ // the new table path will be removed after DROP TABLE.
+ assert(!dir.exists())
+ }
+ }
+ }
 }