author    Sean Owen <sowen@cloudera.com>    2015-03-20 14:16:21 +0000
committer Sean Owen <sowen@cloudera.com>    2015-03-20 14:16:21 +0000
commit    6f80c3e8880340597f161f87e64697bec86cc586 (patch)
tree      c019ca07ed3b4dd178c102aac00418485da5e679 /sql/core
parent    d08e3eb3dc455970b685a7b8b7e00c537c89a8e4 (diff)
SPARK-6338 [CORE] Use standard temp dir mechanisms in tests to avoid orphaned temp files
Use `Utils.createTempDir()` to replace the other temp file mechanisms used in some tests, to further ensure they are cleaned up, and to simplify the code.

Author: Sean Owen <sowen@cloudera.com>

Closes #5029 from srowen/SPARK-6338 and squashes the following commits:

27b740a [Sean Owen] Fix hive-thriftserver tests that don't expect an existing dir
4a212fa [Sean Owen] Standardize a bit more temp dir management
9004081 [Sean Owen] Revert some added recursive-delete calls
57609e4 [Sean Owen] Use Utils.createTempDir() to replace other temp file mechanisms used in some tests, to further ensure they are cleaned up, and simplify
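The recurring change, in sketch form (a minimal illustration of the pattern the hunks below apply, shown via the withTempPath helper from ParquetTest.scala; the trait context is omitted):

    import java.io.File
    import org.apache.spark.util.Utils

    // Old mechanism: an ad-hoc temp path that is only deleted if the test
    // remembers to, and only if it still exists at teardown.
    //   val file = util.getTempFilePath("parquetTest").getCanonicalFile
    //   try f(file) finally if (file.exists()) Utils.deleteRecursively(file)

    // New mechanism: Utils.createTempDir() returns a directory that Spark
    // also registers for deletion on JVM exit, so a failing test cannot
    // orphan it; the finally block still cleans up eagerly.
    def withTempPath(f: File => Unit): Unit = {
      val path = Utils.createTempDir()
      path.delete() // callers want a path that does not yet exist
      try f(path) finally Utils.deleteRecursively(path)
    }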
Diffstat (limited to 'sql/core')
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTest.scala              |  6
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala             |  6
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala                   | 22
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala |  5
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala              |  5
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/sources/SaveLoadSuite.scala            |  6
6 files changed, 27 insertions, 23 deletions
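One detail worth noting before the hunks: several call sites delete the directory immediately after creating it (path.delete() / dir.delete()). Utils.createTempDir() returns a directory that already exists, but Hadoop-backed writers such as saveAsTextFile and saveAsParquetFile fail when the output path already exists, so the tests reserve a name, remove it, and still get recursive cleanup afterward (the commit's fix to hive-thriftserver tests "that don't expect an existing dir" is the same issue). A minimal sketch of the idiom, with the writer call illustrative:

    import org.apache.spark.util.Utils

    val dir = Utils.createTempDir()  // exists now, tracked for deletion on exit
    dir.delete()                     // make the path nonexistent for the writer
    val path = dir.getCanonicalPath
    // e.g. rdd.saveAsTextFile(path) -- the output format requires a fresh path
    Utils.deleteRecursively(dir)     // eager cleanup once the test is done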
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTest.scala b/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTest.scala
index d6ea6679c5..9d17516e0e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTest.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTest.scala
@@ -23,7 +23,6 @@ import scala.reflect.ClassTag
import scala.reflect.runtime.universe.TypeTag
import scala.util.Try
-import org.apache.spark.sql.catalyst.util
import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}
import org.apache.spark.util.Utils
@@ -67,8 +66,9 @@ private[sql] trait ParquetTest {
* @todo Probably this method should be moved to a more general place
*/
protected def withTempPath(f: File => Unit): Unit = {
- val file = util.getTempFilePath("parquetTest").getCanonicalFile
- try f(file) finally if (file.exists()) Utils.deleteRecursively(file)
+ val path = Utils.createTempDir()
+ path.delete()
+ try f(path) finally Utils.deleteRecursively(path)
}
/**
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
index 23f424c0bf..fe618e0e8e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/UserDefinedTypeSuite.scala
@@ -19,6 +19,8 @@ package org.apache.spark.sql
import java.io.File
+import org.apache.spark.util.Utils
+
import scala.beans.{BeanInfo, BeanProperty}
import org.apache.spark.rdd.RDD
@@ -98,13 +100,13 @@ class UserDefinedTypeSuite extends QueryTest {
test("UDTs with Parquet") {
- val tempDir = File.createTempFile("parquet", "test")
+ val tempDir = Utils.createTempDir()
tempDir.delete()
pointsRDD.saveAsParquetFile(tempDir.getCanonicalPath)
}
test("Repartition UDTs with Parquet") {
- val tempDir = File.createTempFile("parquet", "test")
+ val tempDir = Utils.createTempDir()
tempDir.delete()
pointsRDD.repartition(1).saveAsParquetFile(tempDir.getCanonicalPath)
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
index 320b80d80e..706c966ee0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/json/JsonSuite.scala
@@ -22,7 +22,6 @@ import java.sql.{Date, Timestamp}
import org.scalactic.Tolerance._
import org.apache.spark.sql.TestData._
-import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.functions._
import org.apache.spark.sql.json.JsonRDD.{compatibleType, enforceCorrectType}
import org.apache.spark.sql.sources.LogicalRelation
@@ -31,6 +30,7 @@ import org.apache.spark.sql.test.TestSQLContext._
import org.apache.spark.sql.test.TestSQLContext.implicits._
import org.apache.spark.sql.types._
import org.apache.spark.sql.{QueryTest, Row, SQLConf}
+import org.apache.spark.util.Utils
class JsonSuite extends QueryTest {
import org.apache.spark.sql.json.TestJsonData._
@@ -554,8 +554,9 @@ class JsonSuite extends QueryTest {
}
test("jsonFile should be based on JSONRelation") {
- val file = getTempFilePath("json")
- val path = file.toString
+ val dir = Utils.createTempDir()
+ dir.delete()
+ val path = dir.getCanonicalPath
sparkContext.parallelize(1 to 100).map(i => s"""{"a": 1, "b": "str$i"}""").saveAsTextFile(path)
val jsonDF = jsonFile(path, 0.49)
@@ -580,8 +581,9 @@ class JsonSuite extends QueryTest {
}
test("Loading a JSON dataset from a text file") {
- val file = getTempFilePath("json")
- val path = file.toString
+ val dir = Utils.createTempDir()
+ dir.delete()
+ val path = dir.getCanonicalPath
primitiveFieldAndType.map(record => record.replaceAll("\n", " ")).saveAsTextFile(path)
val jsonDF = jsonFile(path)
@@ -611,8 +613,9 @@ class JsonSuite extends QueryTest {
}
test("Loading a JSON dataset from a text file with SQL") {
- val file = getTempFilePath("json")
- val path = file.toString
+ val dir = Utils.createTempDir()
+ dir.delete()
+ val path = dir.getCanonicalPath
primitiveFieldAndType.map(record => record.replaceAll("\n", " ")).saveAsTextFile(path)
sql(
@@ -637,8 +640,9 @@ class JsonSuite extends QueryTest {
}
test("Applying schemas") {
- val file = getTempFilePath("json")
- val path = file.toString
+ val dir = Utils.createTempDir()
+ dir.delete()
+ val path = dir.getCanonicalPath
primitiveFieldAndType.map(record => record.replaceAll("\n", " ")).saveAsTextFile(path)
val schema = StructType(
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
index 60355414a4..2975a7fee4 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
@@ -22,7 +22,6 @@ import java.io.File
import org.apache.spark.sql.AnalysisException
import org.scalatest.BeforeAndAfterAll
-import org.apache.spark.sql.catalyst.util
import org.apache.spark.util.Utils
class CreateTableAsSelectSuite extends DataSourceTest with BeforeAndAfterAll {
@@ -32,7 +31,7 @@ class CreateTableAsSelectSuite extends DataSourceTest with BeforeAndAfterAll {
var path: File = null
override def beforeAll(): Unit = {
- path = util.getTempFilePath("jsonCTAS").getCanonicalFile
+ path = Utils.createTempDir()
val rdd = sparkContext.parallelize((1 to 10).map(i => s"""{"a":$i, "b":"str${i}"}"""))
jsonRDD(rdd).registerTempTable("jt")
}
@@ -42,7 +41,7 @@ class CreateTableAsSelectSuite extends DataSourceTest with BeforeAndAfterAll {
}
after {
- if (path.exists()) Utils.deleteRecursively(path)
+ Utils.deleteRecursively(path)
}
test("CREATE TEMPORARY TABLE AS SELECT") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
index b5b16f9546..80efe9728f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/InsertSuite.scala
@@ -22,7 +22,6 @@ import java.io.File
import org.scalatest.BeforeAndAfterAll
import org.apache.spark.sql.{AnalysisException, Row}
-import org.apache.spark.sql.catalyst.util
import org.apache.spark.util.Utils
class InsertSuite extends DataSourceTest with BeforeAndAfterAll {
@@ -32,7 +31,7 @@ class InsertSuite extends DataSourceTest with BeforeAndAfterAll {
var path: File = null
override def beforeAll: Unit = {
- path = util.getTempFilePath("jsonCTAS").getCanonicalFile
+ path = Utils.createTempDir()
val rdd = sparkContext.parallelize((1 to 10).map(i => s"""{"a":$i, "b":"str${i}"}"""))
jsonRDD(rdd).registerTempTable("jt")
sql(
@@ -48,7 +47,7 @@ class InsertSuite extends DataSourceTest with BeforeAndAfterAll {
override def afterAll: Unit = {
dropTempTable("jsonTable")
dropTempTable("jt")
- if (path.exists()) Utils.deleteRecursively(path)
+ Utils.deleteRecursively(path)
}
test("Simple INSERT OVERWRITE a JSONRelation") {
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/SaveLoadSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/SaveLoadSuite.scala
index 607488ccfd..43bc8eb2d1 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/SaveLoadSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/SaveLoadSuite.scala
@@ -21,7 +21,6 @@ import java.io.File
import org.scalatest.BeforeAndAfterAll
-import org.apache.spark.sql.catalyst.util
import org.apache.spark.sql.{SaveMode, SQLConf, DataFrame}
import org.apache.spark.sql.types._
import org.apache.spark.util.Utils
@@ -39,7 +38,8 @@ class SaveLoadSuite extends DataSourceTest with BeforeAndAfterAll {
override def beforeAll(): Unit = {
originalDefaultSource = conf.defaultDataSourceName
- path = util.getTempFilePath("datasource").getCanonicalFile
+ path = Utils.createTempDir()
+ path.delete()
val rdd = sparkContext.parallelize((1 to 10).map(i => s"""{"a":$i, "b":"str${i}"}"""))
df = jsonRDD(rdd)
@@ -52,7 +52,7 @@ class SaveLoadSuite extends DataSourceTest with BeforeAndAfterAll {
after {
conf.setConf(SQLConf.DEFAULT_DATA_SOURCE_NAME, originalDefaultSource)
- if (path.exists()) Utils.deleteRecursively(path)
+ Utils.deleteRecursively(path)
}
def checkLoad(): Unit = {