From a3bb887e0200cf47a1fa2382a18948b3c553cf26 Mon Sep 17 00:00:00 2001
From: Prashant Sharma
Date: Wed, 6 May 2015 16:26:03 +0530
Subject: SI-7747 Make REPL wrappers serialization friendly

Spark has been shipping a forked version of our REPL for some time. We have
been trying to fold the patches back into the mainline so they can defork.
This is the last outstanding issue.

Consider this REPL session:

```
scala> val x = StdIn.readInt
scala> class A(a: Int)
scala> serializedAndExecuteRemotely { () => new A(x) }
```

As shown by the enclosed test, the REPL, even with the Spark-friendly option
`-Yrepl-class-based`, will re-initialize `x` on the remote system.

This test simulates this by running a REPL session, and then deserializing
the resulting closure into a fresh classloader based on the class files
generated by that session. Before this patch, it printed "evaluating x"
twice.

This is based on the Spark change described:

  https://github.com/mesos/spark/pull/535#discussion_r3541925

A follow-up commit will avoid the `val lineN$read = ` part if we import
classes or type aliases only.
[Original commit from Prashant Sharma, test case from Jason Zaugg]
---
 test/files/run/repl-serialization.scala | 65 +++++++++++++++++++++++++++++++++
 1 file changed, 65 insertions(+)
 create mode 100644 test/files/run/repl-serialization.scala

(limited to 'test/files/run/repl-serialization.scala')

diff --git a/test/files/run/repl-serialization.scala b/test/files/run/repl-serialization.scala
new file mode 100644
index 0000000000..64915ce51e
--- /dev/null
+++ b/test/files/run/repl-serialization.scala
@@ -0,0 +1,65 @@
+import java.io._
+
+import scala.reflect.io.AbstractFile
+import scala.tools.nsc.Settings
+import scala.tools.nsc.interpreter.IMain
+import scala.tools.nsc.util._
+import scala.reflect.internal.util.AbstractFileClassLoader
+
+object Test {
+  def main(args: Array[String]) {
+    run()
+  }
+
+  def run(): Unit = {
+    val settings = new Settings()
+    settings.Yreplclassbased.value = true
+    settings.usejavacp.value = true
+
+    var imain: IMain = null
+    object extract extends ((AnyRef) => Unit) with Serializable {
+      var value: AnyRef = null
+
+      def apply(a: AnyRef) = value = a
+    }
+
+    val code =
+      """val x = {println(" evaluating x"); 0 }
+        |lazy val y = {println(" evaluating y"); 0 }
+        |class D; val z = {println(" evaluating z"); 0}; val zz = {println(" evaluating zz"); 0}
+        |object O extends Serializable { val apply = {println(" evaluating O"); 0} }
+        |class A(i: Int) { println(" constructing A") }
+        |type AA = A
+        |extract(() => new AA(x + y + z + zz + O.apply))
+      """.stripMargin
+
+    imain = new IMain(settings)
+    println("== evaluating lines")
+    imain.directBind("extract", "(AnyRef => Unit)", extract)
+    code.lines.foreach(imain.interpret)
+
+    val virtualFile: AbstractFile = extract.value.getClass.getClassLoader.asInstanceOf[AbstractFileClassLoader].root
+    val newLoader = new AbstractFileClassLoader(virtualFile, getClass.getClassLoader)
+
+    def deserializeInNewLoader(string: Array[Byte]): AnyRef = {
+      val bis = new ByteArrayInputStream(string)
+      val in = new ObjectInputStream(bis) {
+        override def resolveClass(desc: ObjectStreamClass) = Class.forName(desc.getName, false, newLoader)
+      }
+      in.readObject()
+    }
+    def serialize(o: AnyRef): Array[Byte] = {
+      val bos = new ByteArrayOutputStream()
+      val out = new ObjectOutputStream(bos)
+      out.writeObject(o)
+      out.close()
+      bos.toByteArray
+    }
+    println("== evaluating lambda")
+    extract.value.asInstanceOf[() => Any].apply()
+    println("== reconstituting into a fresh classloader")
+    val reconstituted = deserializeInNewLoader(serialize(extract.value)).asInstanceOf[() => Any]
+    println("== evaluating reconstituted lambda")
+    reconstituted.apply() // should not print("evaluating x") a second time
+  }
+}
--
cgit v1.2.3