From b77c19be053125fde99b098ec1e1162f25b5433c Mon Sep 17 00:00:00 2001
From: Marcelo Vanzin
Date: Wed, 4 Jun 2014 22:56:49 -0700
Subject: Fix issue in ReplSuite with hadoop-provided profile.

When building the assembly with the maven "hadoop-provided" profile, the
executors were failing to come up because Hadoop classes were not found in
the classpath anymore; so add them explicitly to the classpath using
spark.executor.extraClassPath. This is only needed for the local-cluster
mode, but doesn't affect other tests, so it's added for all of them to keep
the code simpler.

Author: Marcelo Vanzin

Closes #781 from vanzin/repl-test-fix and squashes the following commits:

4f0a3b0 [Marcelo Vanzin] Fix issue in ReplSuite with hadoop-provided profile.
---
 repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala | 14 +++++++++++++-
 1 file changed, 13 insertions(+), 1 deletion(-)

(limited to 'repl/src/test/scala/org')

diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 98cdfd0054..7c765edd55 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -32,6 +32,8 @@ import org.apache.spark.util.Utils
 class ReplSuite extends FunSuite {
 
   def runInterpreter(master: String, input: String): String = {
+    val CONF_EXECUTOR_CLASSPATH = "spark.executor.extraClassPath"
+
     val in = new BufferedReader(new StringReader(input + "\n"))
     val out = new StringWriter()
     val cl = getClass.getClassLoader
@@ -44,13 +46,23 @@ class ReplSuite extends FunSuite {
         }
       }
     }
+    val classpath = paths.mkString(File.pathSeparator)
+
+    val oldExecutorClasspath = System.getProperty(CONF_EXECUTOR_CLASSPATH)
+    System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
+
     val interp = new SparkILoop(in, new PrintWriter(out), master)
     org.apache.spark.repl.Main.interp = interp
-    interp.process(Array("-classpath", paths.mkString(File.pathSeparator)))
+    interp.process(Array("-classpath", classpath))
     org.apache.spark.repl.Main.interp = null
     if (interp.sparkContext != null) {
       interp.sparkContext.stop()
     }
+    if (oldExecutorClasspath != null) {
+      System.setProperty(CONF_EXECUTOR_CLASSPATH, oldExecutorClasspath)
+    } else {
+      System.clearProperty(CONF_EXECUTOR_CLASSPATH)
+    }
     return out.toString
   }
 
--
cgit v1.2.3
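
Note (not part of the patch): the change above manually saves, sets, and restores the
spark.executor.extraClassPath system property around the interpreter run. Below is a minimal
Scala sketch of the same save/set/restore pattern, using a hypothetical withSystemProperty
helper (an assumption, not an existing Spark utility) so the previous value is restored even
if the body throws:

    // Hypothetical helper (assumption): run `body` with a system property
    // temporarily set, restoring or clearing the previous value afterwards.
    object SysPropUtil {
      def withSystemProperty[T](key: String, value: String)(body: => T): T = {
        val old = Option(System.getProperty(key))   // remember the old value, if any
        System.setProperty(key, value)
        try {
          body
        } finally {
          old match {
            case Some(v) => System.setProperty(key, v)  // restore the previous value
            case None    => System.clearProperty(key)   // property was unset before
          }
        }
      }
    }

    // Usage, mirroring the test's classpath setup:
    // SysPropUtil.withSystemProperty("spark.executor.extraClassPath", classpath) {
    //   interp.process(Array("-classpath", classpath))
    // }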