Diffstat (limited to 'sql')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala  | 2
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala   | 6
2 files changed, 7 insertions(+), 1 deletion(-)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 05f826a11b..95672e01f5 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -43,6 +43,8 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
// drop all databases, tables and functions after each test
spark.sessionState.catalog.reset()
} finally {
+ val path = System.getProperty("user.dir") + "/spark-warehouse"
+ Utils.deleteRecursively(new File(path))
super.afterEach()
}
}
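The DDLSuite hunk above removes the default spark-warehouse directory that DDL tests can leave under the current working directory, so later suites start from a clean state. Below is a minimal sketch of that afterEach cleanup pattern, assuming Spark's internal test helpers (QueryTest, SharedSQLContext, Utils) are on the classpath, i.e. the code lives inside Spark's own test tree; the suite name is hypothetical, and the java.io.File and org.apache.spark.util.Utils imports (not visible in the hunk) are what the added lines need to compile.

import java.io.File

import org.scalatest.BeforeAndAfterEach

import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.util.Utils

// Hypothetical suite name; mirrors the structure of the patched DDLSuite.
class ExampleDDLCleanupSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {

  override def afterEach(): Unit = {
    try {
      // Drop all databases, tables and functions after each test.
      spark.sessionState.catalog.reset()
    } finally {
      // Delete the default warehouse directory that tests may have created
      // under the current working directory.
      val path = System.getProperty("user.dir") + "/spark-warehouse"
      Utils.deleteRecursively(new File(path))
      super.afterEach()
    }
  }
}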
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
index af28286666..29317e2887 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveSparkSubmitSuite.scala
@@ -590,7 +590,9 @@ object SparkSubmitClassLoaderTest extends Logging {
def main(args: Array[String]) {
Utils.configTestLog4j("INFO")
val conf = new SparkConf()
+ val hiveWarehouseLocation = Utils.createTempDir()
conf.set("spark.ui.enabled", "false")
+ conf.set("spark.sql.warehouse.dir", hiveWarehouseLocation.toString)
val sc = new SparkContext(conf)
val hiveContext = new TestHiveContext(sc)
val df = hiveContext.createDataFrame((1 to 100).map(i => (i, i))).toDF("i", "j")
@@ -699,11 +701,13 @@ object SPARK_9757 extends QueryTest {
def main(args: Array[String]): Unit = {
Utils.configTestLog4j("INFO")
+ val hiveWarehouseLocation = Utils.createTempDir()
val sparkContext = new SparkContext(
new SparkConf()
.set("spark.sql.hive.metastore.version", "0.13.1")
.set("spark.sql.hive.metastore.jars", "maven")
- .set("spark.ui.enabled", "false"))
+ .set("spark.ui.enabled", "false")
+ .set("spark.sql.warehouse.dir", hiveWarehouseLocation.toString))
val hiveContext = new TestHiveContext(sparkContext)
spark = hiveContext.sparkSession
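Both HiveSparkSubmitSuite hunks apply the same fix: they point spark.sql.warehouse.dir at a temporary directory obtained from Utils.createTempDir(), so the spark-submit test applications no longer write a spark-warehouse folder into the working directory. The following is a hedged standalone sketch of that configuration with a plain SparkSession rather than the test-only TestHiveContext; the object name, the local[2] master, and the use of java.nio.file.Files in place of the Spark-internal Utils.createTempDir() are illustrative assumptions, not part of the patch.

import java.nio.file.Files

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

// Hypothetical example application; not part of the patched test suites.
object WarehouseDirExample {
  def main(args: Array[String]): Unit = {
    // Create a disposable directory to serve as the SQL warehouse location.
    val warehouseLocation = Files.createTempDirectory("spark-warehouse-").toFile

    val conf = new SparkConf()
      .set("spark.ui.enabled", "false")
      .set("spark.sql.warehouse.dir", warehouseLocation.toString)

    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("WarehouseDirExample")
      .config(conf)
      .getOrCreate()

    // Managed tables created by this session are stored under the temp
    // directory instead of ./spark-warehouse in the working directory.
    println(spark.conf.get("spark.sql.warehouse.dir"))

    spark.stop()
  }
}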