From bdabfd43f6e4900b48010dd00ffa48ed5fd15997 Mon Sep 17 00:00:00 2001
From: Marcelo Vanzin
Date: Wed, 30 Mar 2016 13:59:10 -0700
Subject: [SPARK-13955][YARN] Also look for Spark jars in the build directory.

Move the logic to find Spark jars to CommandBuilderUtils and make it
available for YARN code, so that it's possible to easily launch Spark on
YARN from a build directory.

Tested by running SparkPi from the build directory on YARN.

Author: Marcelo Vanzin

Closes #11970 from vanzin/SPARK-13955.
---
 yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

(limited to 'yarn/src/test/scala/org')

diff --git a/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala b/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
index 24472e006b..e3613a93ed 100644
--- a/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
+++ b/yarn/src/test/scala/org/apache/spark/deploy/yarn/ClientSuite.scala
@@ -17,7 +17,7 @@
 
 package org.apache.spark.deploy.yarn
 
-import java.io.File
+import java.io.{File, FileOutputStream}
 import java.net.URI
 import java.util.Properties
@@ -274,6 +274,7 @@ class ClientSuite extends SparkFunSuite with Matchers with BeforeAndAfterAll
     val jarsDir = new File(temp, "lib")
     assert(jarsDir.mkdir())
     val jar = TestUtils.createJarWithFiles(Map(), jarsDir)
+    new FileOutputStream(new File(temp, "RELEASE")).close()
     val sparkConf = new SparkConfWithEnv(Map("SPARK_HOME" -> temp.getAbsolutePath()))
     val client = createClient(sparkConf)
-- 
cgit v1.2.3