diff options
author    | Marcelo Vanzin <vanzin@cloudera.com>      | 2014-06-04 22:56:49 -0700
committer | Patrick Wendell <pwendell@gmail.com>      | 2014-06-04 22:56:49 -0700
commit    | b77c19be053125fde99b098ec1e1162f25b5433c (patch)
tree      | c6dcfc077d0add1c8fcf49e16ab492366e742b8a /repl
parent    | abea2d4ff099036c67fc73136d0e61d0d0e22123 (diff)
download  | spark-b77c19be053125fde99b098ec1e1162f25b5433c.tar.gz
          | spark-b77c19be053125fde99b098ec1e1162f25b5433c.tar.bz2
          | spark-b77c19be053125fde99b098ec1e1162f25b5433c.zip
Fix issue in ReplSuite with hadoop-provided profile.
When building the assembly with the maven "hadoop-provided"
profile, the executors were failing to come up because Hadoop classes
were not found in the classpath anymore; so add them explicitly to
the classpath using spark.executor.extraClassPath. This is only
needed for the local-cluster mode, but doesn't affect other tests,
so it's added for all of them to keep the code simpler.
Author: Marcelo Vanzin <vanzin@cloudera.com>
Closes #781 from vanzin/repl-test-fix and squashes the following commits:
4f0a3b0 [Marcelo Vanzin] Fix issue in ReplSuite with hadoop-provided profile.
Diffstat (limited to 'repl')
-rw-r--r-- | repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala | 14 |
1 file changed, 13 insertions(+), 1 deletion(-)
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 98cdfd0054..7c765edd55 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -32,6 +32,8 @@ import org.apache.spark.util.Utils
 class ReplSuite extends FunSuite {

   def runInterpreter(master: String, input: String): String = {
+    val CONF_EXECUTOR_CLASSPATH = "spark.executor.extraClassPath"
+
     val in = new BufferedReader(new StringReader(input + "\n"))
     val out = new StringWriter()
     val cl = getClass.getClassLoader
@@ -44,13 +46,23 @@ class ReplSuite extends FunSuite {
       }
     }
+    val classpath = paths.mkString(File.pathSeparator)
+
+    val oldExecutorClasspath = System.getProperty(CONF_EXECUTOR_CLASSPATH)
+    System.setProperty(CONF_EXECUTOR_CLASSPATH, classpath)
+
     val interp = new SparkILoop(in, new PrintWriter(out), master)
     org.apache.spark.repl.Main.interp = interp
-    interp.process(Array("-classpath", paths.mkString(File.pathSeparator)))
+    interp.process(Array("-classpath", classpath))
     org.apache.spark.repl.Main.interp = null
     if (interp.sparkContext != null) {
       interp.sparkContext.stop()
     }
+    if (oldExecutorClasspath != null) {
+      System.setProperty(CONF_EXECUTOR_CLASSPATH, oldExecutorClasspath)
+    } else {
+      System.clearProperty(CONF_EXECUTOR_CLASSPATH)
+    }
     return out.toString
   }