diff options
author | Raymond Liu <raymond.liu@intel.com> | 2013-11-14 12:44:19 +0800 |
---|---|---|
committer | Raymond Liu <raymond.liu@intel.com> | 2013-11-14 12:44:19 +0800 |
commit | a60620b76a98e236f1e4ffda7a2f289e7917b957 (patch) | |
tree | f66630007c201074af74ba13dca24ab9894ae543 /repl/src | |
parent | 0f2e3c6e31d56c627ff81cdc93289a7c7cb2ec16 (diff) | |
parent | 2054c61a18c277c00661b89bbae365470c297031 (diff) | |
download | spark-a60620b76a98e236f1e4ffda7a2f289e7917b957.tar.gz spark-a60620b76a98e236f1e4ffda7a2f289e7917b957.tar.bz2 spark-a60620b76a98e236f1e4ffda7a2f289e7917b957.zip |
Merge branch 'master' into scala-2.10
Diffstat (limited to 'repl/src')
-rw-r--r-- | repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala | 36 |
1 file changed, 34 insertions, 2 deletions
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala index fccb6e652c..418c31e24b 100644 --- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala +++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala @@ -5,10 +5,13 @@ import java.net.URLClassLoader import scala.collection.mutable.ArrayBuffer -import org.scalatest.FunSuite import com.google.common.io.Files +import org.scalatest.FunSuite +import org.apache.spark.SparkContext + class ReplSuite extends FunSuite { + def runInterpreter(master: String, input: String): String = { val in = new BufferedReader(new StringReader(input + "\n")) val out = new StringWriter() @@ -46,7 +49,36 @@ class ReplSuite extends FunSuite { "Interpreter output contained '" + message + "':\n" + output) } - test("simple foreach with accumulator") { + test("propagation of local properties") { + // A mock ILoop that doesn't install the SIGINT handler. + class ILoop(out: PrintWriter) extends SparkILoop(None, out, None) { + settings = new scala.tools.nsc.Settings + settings.usejavacp.value = true + org.apache.spark.repl.Main.interp = this + override def createInterpreter() { + intp = new SparkILoopInterpreter + intp.setContextClassLoader() + } + } + + val out = new StringWriter() + val interp = new ILoop(new PrintWriter(out)) + interp.sparkContext = new SparkContext("local", "repl-test") + interp.createInterpreter() + interp.intp.initialize() + interp.sparkContext.setLocalProperty("someKey", "someValue") + + // Make sure the value we set in the caller to interpret is propagated in the thread that + // interprets the command. 
+ interp.interpret("org.apache.spark.repl.Main.interp.sparkContext.getLocalProperty(\"someKey\")") + assert(out.toString.contains("someValue")) + + interp.sparkContext.stop() + System.clearProperty("spark.driver.port") + System.clearProperty("spark.hostPort") + } + + test ("simple foreach with accumulator") { val output = runInterpreter("local", """ |val accum = sc.accumulator(0) |sc.parallelize(1 to 10).foreach(x => accum += x) |