path: root/sql
author    gatorsmile <gatorsmile@gmail.com>    2016-05-27 09:54:31 -0700
committer Yin Huai <yhuai@databricks.com>      2016-05-27 09:54:31 -0700
commit    c17272902c95290beca274ee6316a8a98fd7a725 (patch)
tree      8db5590bb3c1bfb4e85894cf252ab12978a3fa39 /sql
parent    6f95c6c030db0057de213733c2bd3453463bc6f2 (diff)
[SPARK-15565][SQL] Add the File Scheme to the Default Value of WAREHOUSE_PATH
#### What changes were proposed in this pull request?

The default value of `spark.sql.warehouse.dir` is `System.getProperty("user.dir")/spark-warehouse`. Since `System.getProperty("user.dir")` is a local directory, we should explicitly set the scheme to the local filesystem.

cc yhuai

#### How was this patch tested?

Added two test cases.

Author: gatorsmile <gatorsmile@gmail.com>

Closes #13348 from gatorsmile/addSchemeToDefaultWarehousePath.
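The motivation is easiest to see with Hadoop's `Path` resolution. Below is a minimal sketch (not part of the patch; it assumes only the standard `org.apache.hadoop.fs.Path` API) showing that a scheme-less path inherits whatever filesystem `fs.defaultFS` points to, while a `file:`-prefixed path always means the local disk:

```scala
import org.apache.hadoop.fs.Path

// A scheme-less path: the filesystem is decided later by fs.defaultFS,
// which on a cluster is typically HDFS rather than the local disk.
val raw = new Path(System.getProperty("user.dir") + "/spark-warehouse")
assert(raw.toUri.getScheme == null)

// With an explicit "file:" scheme the path always refers to the local
// filesystem, regardless of the cluster's fs.defaultFS setting.
val local = new Path("file:" + System.getProperty("user.dir") + "/spark-warehouse")
assert(local.toUri.getScheme == "file")
```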
Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala            2
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala  25
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala       12
3 files changed, 38 insertions(+), 1 deletion(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
index 4efefdacab..d1db0dd800 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala
@@ -55,7 +55,7 @@ object SQLConf {
   val WAREHOUSE_PATH = SQLConfigBuilder("spark.sql.warehouse.dir")
     .doc("The default location for managed databases and tables.")
     .stringConf
-    .createWithDefault("${system:user.dir}/spark-warehouse")
+    .createWithDefault("file:${system:user.dir}/spark-warehouse")
 
   val OPTIMIZER_MAX_ITERATIONS = SQLConfigBuilder("spark.sql.optimizer.maxIterations")
     .internal()
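For readers unfamiliar with the `${system:...}` placeholder: SQLConf expands such tokens with JVM system properties when the value is read. A minimal sketch of that expansion follows (an illustration only; the real substitution logic lives inside SQLConf, and `substituteSystemVars` is a hypothetical name):

```scala
// Hypothetical helper illustrating "${system:...}" expansion; the real
// logic is internal to SQLConf, not a public API.
def substituteSystemVars(raw: String): String = {
  val pattern = """\$\{system:([^}]+)\}""".r
  pattern.replaceAllIn(raw, m =>
    // quoteReplacement keeps any '$' or '\' in the property value literal
    java.util.regex.Matcher.quoteReplacement(sys.props.getOrElse(m.group(1), "")))
}

substituteSystemVars("file:${system:user.dir}/spark-warehouse")
// e.g. "file:/home/alice/spark/spark-warehouse" (path illustrative)
```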
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index e32521aaaf..e975756685 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -171,6 +171,31 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
     }
   }
 
+  test("Create Database using Default Warehouse Path") {
+    withSQLConf(SQLConf.WAREHOUSE_PATH.key -> "") {
+      // Will use the default location if and only if we unset the conf
+      spark.conf.unset(SQLConf.WAREHOUSE_PATH.key)
+      val catalog = spark.sessionState.catalog
+      val dbName = "db1"
+      try {
+        sql(s"CREATE DATABASE $dbName")
+        val db1 = catalog.getDatabaseMetadata(dbName)
+        val expectedLocation =
+          "file:" + appendTrailingSlash(System.getProperty("user.dir")) +
+            s"spark-warehouse/$dbName.db"
+        assert(db1 == CatalogDatabase(
+          dbName,
+          "",
+          expectedLocation,
+          Map.empty))
+        sql(s"DROP DATABASE $dbName CASCADE")
+        assert(!catalog.databaseExists(dbName))
+      } finally {
+        catalog.reset()
+      }
+    }
+  }
+
   test("Create/Drop Database - location") {
     val catalog = spark.sessionState.catalog
     val databaseNames = Seq("db1", "`database`")
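The added test relies on an `appendTrailingSlash` helper defined in the test sources. A minimal sketch of what such a helper does (the name comes from the diff; this body is an assumption, not the committed implementation):

```scala
// Assumed behavior: ensure a path ends with exactly one '/' so that
// "spark-warehouse/db1.db" can be appended without a double slash.
private def appendTrailingSlash(path: String): String =
  if (path.endsWith("/")) path else path + "/"
```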
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
index ad5365a35e..3d4fc75e83 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/SQLConfSuite.scala
@@ -207,4 +207,16 @@ class SQLConfSuite extends QueryTest with SharedSQLContext {
     }
   }
 
+  test("default value of WAREHOUSE_PATH") {
+    val original = spark.conf.get(SQLConf.WAREHOUSE_PATH)
+    try {
+      // to get the default value, always unset it
+      spark.conf.unset(SQLConf.WAREHOUSE_PATH.key)
+      assert(spark.sessionState.conf.warehousePath
+        === s"file:${System.getProperty("user.dir")}/spark-warehouse")
+    } finally {
+      sql(s"set ${SQLConf.WAREHOUSE_PATH.key}=$original")
+    }
+  }
+
 }
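Both tests follow the same save-unset-restore pattern around `spark.conf`. A reusable sketch of that pattern (a hypothetical test utility, not part of the patch; it assumes only the public `RuntimeConfig` methods `getOption`, `unset`, and `set`):

```scala
// Run `body` against the default value of `key`, then restore whatever
// was set before. Hypothetical helper, not committed code.
def withDefaultConf[T](key: String)(body: => T): T = {
  val original = spark.conf.getOption(key) // remember the current setting
  spark.conf.unset(key)                    // fall back to the built-in default
  try body
  finally original.foreach(v => spark.conf.set(key, v))
}
```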