diff options
author | Reynold Xin <rxin@apache.org> | 2013-11-09 00:32:14 -0800 |
---|---|---|
committer | Reynold Xin <rxin@apache.org> | 2013-11-09 00:32:14 -0800 |
commit | 319299941dbf4bfa2aaa8b5078e313ca45cb5207 (patch) | |
tree | 6859a84c7455e741a3455467fef2945ecee02ea0 /repl/src/test/scala/org/apache | |
parent | 3d4ad84b63e440fd3f4b3edb1b120ff7c14a42d1 (diff) | |
download | spark-319299941dbf4bfa2aaa8b5078e313ca45cb5207.tar.gz spark-319299941dbf4bfa2aaa8b5078e313ca45cb5207.tar.bz2 spark-319299941dbf4bfa2aaa8b5078e313ca45cb5207.zip |
Propagate the SparkContext local property from the thread that calls the spark-repl to the actual execution thread.
Diffstat (limited to 'repl/src/test/scala/org/apache')
-rw-r--r-- | repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala | 35 |
1 file changed, 33 insertions(+), 2 deletions(-)
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala index 8f9b632c0e..6e4504d4d5 100644 --- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala +++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala @@ -21,12 +21,14 @@ import java.io._ import java.net.URLClassLoader import scala.collection.mutable.ArrayBuffer -import scala.collection.JavaConversions._ -import org.scalatest.FunSuite import com.google.common.io.Files +import org.scalatest.FunSuite +import org.apache.spark.SparkContext + class ReplSuite extends FunSuite { + def runInterpreter(master: String, input: String): String = { val in = new BufferedReader(new StringReader(input + "\n")) val out = new StringWriter() @@ -64,6 +66,35 @@ class ReplSuite extends FunSuite { "Interpreter output contained '" + message + "':\n" + output) } + test("propagation of local properties") { + // A mock ILoop that doesn't install the SIGINT handler. + class ILoop(out: PrintWriter) extends SparkILoop(None, out, None) { + settings = new scala.tools.nsc.Settings + settings.usejavacp.value = true + org.apache.spark.repl.Main.interp = this + override def createInterpreter() { + intp = new SparkILoopInterpreter + intp.setContextClassLoader() + } + } + + val out = new StringWriter() + val interp = new ILoop(new PrintWriter(out)) + interp.sparkContext = new SparkContext("local", "repl-test") + interp.createInterpreter() + interp.intp.initialize() + interp.sparkContext.setLocalProperty("someKey", "someValue") + + // Make sure the value we set in the caller to interpret is propagated in the thread that + // interprets the command. 
+ interp.interpret("org.apache.spark.repl.Main.interp.sparkContext.getLocalProperty(\"someKey\")") + assert(out.toString.contains("someValue")) + + interp.sparkContext.stop() + System.clearProperty("spark.driver.port") + System.clearProperty("spark.hostPort") + } + test ("simple foreach with accumulator") { val output = runInterpreter("local", """ val accum = sc.accumulator(0) |