about summary refs log tree commit diff
path: root/core
diff options
context:
space:
mode:
authorAndrew Or <andrew@databricks.com>2015-06-05 10:53:32 -0700
committerAndrew Or <andrew@databricks.com>2015-06-05 10:53:32 -0700
commit4036d05ceeec77ebfa9c683cbc699250df3e3895 (patch)
tree6669a0371779bdcbf044b98c86207f7d93e81b33 /core
parent12f5eaeee1235850a076ce5716d069bd2f1205a5 (diff)
downloadspark-4036d05ceeec77ebfa9c683cbc699250df3e3895.tar.gz
spark-4036d05ceeec77ebfa9c683cbc699250df3e3895.tar.bz2
spark-4036d05ceeec77ebfa9c683cbc699250df3e3895.zip
Revert "[MINOR] [BUILD] Use custom temp directory during build."
This reverts commit b16b5434ff44c42e4b3a337f9af147669ba44896.
Diffstat (limited to 'core')
-rw-r--r-- core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala | 22
1 file changed, 10 insertions, 12 deletions
diff --git a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
index 07d261cc42..8fda5c8b47 100644
--- a/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/SparkSubmitUtilsSuite.scala
@@ -28,12 +28,9 @@ import org.apache.ivy.plugins.resolver.IBiblioResolver
import org.apache.spark.SparkFunSuite
import org.apache.spark.deploy.SparkSubmitUtils.MavenCoordinate
-import org.apache.spark.util.Utils
class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
- private var tempIvyPath: String = _
-
private val noOpOutputStream = new OutputStream {
def write(b: Int) = {}
}
@@ -50,7 +47,6 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
super.beforeAll()
// We don't want to write logs during testing
SparkSubmitUtils.printStream = new BufferPrintStream
- tempIvyPath = Utils.createTempDir(namePrefix = "ivy").getAbsolutePath()
}
test("incorrect maven coordinate throws error") {
@@ -94,20 +90,21 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
}
test("ivy path works correctly") {
+ val ivyPath = "dummy" + File.separator + "ivy"
val md = SparkSubmitUtils.getModuleDescriptor
val artifacts = for (i <- 0 until 3) yield new MDArtifact(md, s"jar-$i", "jar", "jar")
- var jPaths = SparkSubmitUtils.resolveDependencyPaths(artifacts.toArray, new File(tempIvyPath))
+ var jPaths = SparkSubmitUtils.resolveDependencyPaths(artifacts.toArray, new File(ivyPath))
for (i <- 0 until 3) {
- val index = jPaths.indexOf(tempIvyPath)
+ val index = jPaths.indexOf(ivyPath)
assert(index >= 0)
- jPaths = jPaths.substring(index + tempIvyPath.length)
+ jPaths = jPaths.substring(index + ivyPath.length)
}
val main = MavenCoordinate("my.awesome.lib", "mylib", "0.1")
IvyTestUtils.withRepository(main, None, None) { repo =>
// end to end
val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, Option(repo),
- Option(tempIvyPath), true)
- assert(jarPath.indexOf(tempIvyPath) >= 0, "should use non-default ivy path")
+ Option(ivyPath), true)
+ assert(jarPath.indexOf(ivyPath) >= 0, "should use non-default ivy path")
}
}
@@ -126,12 +123,13 @@ class SparkSubmitUtilsSuite extends SparkFunSuite with BeforeAndAfterAll {
assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
}
// Local ivy repository with modified home
- val dummyIvyLocal = new File(tempIvyPath, "local" + File.separator)
+ val dummyIvyPath = "dummy" + File.separator + "ivy"
+ val dummyIvyLocal = new File(dummyIvyPath, "local" + File.separator)
IvyTestUtils.withRepository(main, None, Some(dummyIvyLocal), true) { repo =>
val jarPath = SparkSubmitUtils.resolveMavenCoordinates(main.toString, None,
- Some(tempIvyPath), true)
+ Some(dummyIvyPath), true)
assert(jarPath.indexOf("mylib") >= 0, "should find artifact")
- assert(jarPath.indexOf(tempIvyPath) >= 0, "should be in new ivy path")
+ assert(jarPath.indexOf(dummyIvyPath) >= 0, "should be in new ivy path")
}
}