Diffstat (limited to 'sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/env/rhino')
-rw-r--r--  sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/env/rhino/LazyScalaJSScope.scala  |  96
-rw-r--r--  sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/env/rhino/RhinoJSEnv.scala        | 303
-rw-r--r--  sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/env/rhino/ScalaJSCoreLib.scala    | 173
-rw-r--r--  sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/env/rhino/package.scala           |  42
4 files changed, 614 insertions, 0 deletions
diff --git a/sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/env/rhino/LazyScalaJSScope.scala b/sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/env/rhino/LazyScalaJSScope.scala
new file mode 100644
index 0000000..d4cdaee
--- /dev/null
+++ b/sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/env/rhino/LazyScalaJSScope.scala
@@ -0,0 +1,96 @@
+/* __ *\
+** ________ ___ / / ___ __ ____ Scala.js sbt plugin **
+** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \ http://scala-js.org/ **
+** /____/\___/_/ |_/____/_/ | |__/ /____/ **
+** |/____/ **
+\* */
+
+
+package scala.scalajs.sbtplugin.env.rhino
+
+import scala.collection.mutable
+
+import org.mozilla.javascript.Scriptable
+
+/** A proxy for a ScalaJS "scope" field that loads scripts lazily
+ *
+ * E.g., ScalaJS.c, which is a scope containing the Scala.js classes, can be
+ * turned into a LazyScalaJSScope. Upon first access to a field of ScalaJS.c,
+ * say ScalaJS.c.scala_Option, the script defining that particular
+ * field is loaded.
+ * This is possible because the relative path to the script can be derived
+ * from the name of the property being accessed.
+ *
+ * This is immensely useful because only the scripts that are actually
+ * needed are ever loaded.
+ */
+class LazyScalaJSScope(
+ coreLib: ScalaJSCoreLib,
+ globalScope: Scriptable,
+ base: Scriptable,
+ isModule: Boolean = false,
+ isTraitImpl: Boolean = false) extends Scriptable {
+
+ private val fields = mutable.HashMap.empty[String, Any]
+ private var prototype: Scriptable = _
+ private var parentScope: Scriptable = _
+
+ {
+ // Pre-fill fields with the properties of `base`
+ for (id <- base.getIds()) {
+ (id.asInstanceOf[Any]: @unchecked) match {
+ case name: String => put(name, this, base.get(name, base))
+ case index: Int => put(index, this, base.get(index, base))
+ }
+ }
+ }
+
+ private def load(name: String): Unit =
+ coreLib.load(globalScope, propNameToEncodedName(name))
+
+ private def propNameToEncodedName(name: String): String = {
+ if (isTraitImpl) name.split("__")(0)
+ else if (isModule) name + "$"
+ else name
+ }
+
+ override def getClassName() = "LazyScalaJSScope"
+
+ override def get(name: String, start: Scriptable) = {
+ fields.getOrElse(name, {
+ load(name)
+ fields.getOrElse(name, Scriptable.NOT_FOUND)
+ }).asInstanceOf[AnyRef]
+ }
+ override def get(index: Int, start: Scriptable) =
+ get(index.toString, start)
+
+ override def has(name: String, start: Scriptable) =
+ fields.contains(name)
+ override def has(index: Int, start: Scriptable) =
+ has(index.toString, start)
+
+ override def put(name: String, start: Scriptable, value: Any) = {
+ fields(name) = value
+ }
+ override def put(index: Int, start: Scriptable, value: Any) =
+ put(index.toString, start, value)
+
+ override def delete(name: String) = ()
+ override def delete(index: Int) = ()
+
+ override def getPrototype() = prototype
+ override def setPrototype(value: Scriptable) = prototype = value
+
+ override def getParentScope() = parentScope
+ override def setParentScope(value: Scriptable) = parentScope = value
+
+ override def getIds() = fields.keys.toArray
+
+ override def getDefaultValue(hint: java.lang.Class[_]) = {
+ base.getDefaultValue(hint)
+ }
+
+ override def hasInstance(instance: Scriptable) = false
+}
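
A rough sketch of how such a proxy gets installed, mirroring ScalaJSCoreLib.lazifyScalaJSFields further down in this changeset; `coreLib: ScalaJSCoreLib` and a Rhino `scope` that already holds the core libs are assumed:

    import org.mozilla.javascript.{Context, Scriptable}

    val ScalaJS = Context.toObject(scope.get("ScalaJS", scope), scope)
    val eagerC  = ScalaJS.get("c", ScalaJS).asInstanceOf[Scriptable]

    // Replace ScalaJS.c by a lazy proxy: the properties copied from `eagerC`
    // stay available, anything else is loaded on first read via coreLib.load.
    val lazyC = new LazyScalaJSScope(coreLib, scope, eagerC)
    ScalaJS.put("c", ScalaJS, lazyC)
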
diff --git a/sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/env/rhino/RhinoJSEnv.scala b/sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/env/rhino/RhinoJSEnv.scala
new file mode 100644
index 0000000..cd35ff6
--- /dev/null
+++ b/sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/env/rhino/RhinoJSEnv.scala
@@ -0,0 +1,303 @@
+/* __ *\
+** ________ ___ / / ___ __ ____ Scala.js sbt plugin **
+** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \ http://scala-js.org/ **
+** /____/\___/_/ |_/____/_/ | |__/ /____/ **
+** |/____/ **
+\* */
+
+
+package scala.scalajs.sbtplugin.env.rhino
+
+import scala.scalajs.tools.sem.Semantics
+import scala.scalajs.tools.io._
+import scala.scalajs.tools.classpath._
+import scala.scalajs.tools.env._
+import scala.scalajs.tools.logging._
+
+import scala.io.Source
+
+import scala.collection.mutable
+
+import scala.concurrent.{Future, Promise, Await}
+import scala.concurrent.duration.Duration
+
+import org.mozilla.javascript._
+
+class RhinoJSEnv(semantics: Semantics,
+ withDOM: Boolean = false) extends ComJSEnv {
+
+ import RhinoJSEnv._
+
+ /** Executes code in an environment where the Scala.js library is set up to
+ * load its classes lazily.
+ *
+ * Other .js scripts in the inputs are executed eagerly before the provided
+ * `code` is evaluated.
+ */
+ override def jsRunner(classpath: CompleteClasspath, code: VirtualJSFile,
+ logger: Logger, console: JSConsole): JSRunner = {
+ new Runner(classpath, code, logger, console)
+ }
+
+ private class Runner(classpath: CompleteClasspath, code: VirtualJSFile,
+ logger: Logger, console: JSConsole) extends JSRunner {
+ def run(): Unit = internalRunJS(classpath, code, logger, console, None)
+ }
+
+ override def asyncRunner(classpath: CompleteClasspath, code: VirtualJSFile,
+ logger: Logger, console: JSConsole): AsyncJSRunner = {
+ new AsyncRunner(classpath, code, logger, console)
+ }
+
+ private class AsyncRunner(classpath: CompleteClasspath, code: VirtualJSFile,
+ logger: Logger, console: JSConsole) extends AsyncJSRunner {
+
+ private[this] val promise = Promise[Unit]
+
+ private[this] val thread = new Thread {
+ override def run(): Unit = {
+ try {
+ internalRunJS(classpath, code, logger, console, optChannel)
+ promise.success(())
+ } catch {
+ case t: Throwable =>
+ promise.failure(t)
+ }
+ }
+ }
+
+ def start(): Future[Unit] = {
+ thread.start()
+ promise.future
+ }
+
+ def stop(): Unit = thread.interrupt()
+
+ def isRunning(): Boolean = !promise.isCompleted
+
+ def await(): Unit = Await.result(promise.future, Duration.Inf)
+
+ protected def optChannel(): Option[Channel] = None
+ }
+
+ override def comRunner(classpath: CompleteClasspath, code: VirtualJSFile,
+ logger: Logger, console: JSConsole): ComJSRunner = {
+ new ComRunner(classpath, code, logger, console)
+ }
+
+ private class ComRunner(classpath: CompleteClasspath, code: VirtualJSFile,
+ logger: Logger, console: JSConsole)
+ extends AsyncRunner(classpath, code, logger, console) with ComJSRunner {
+
+ private[this] val channel = new Channel
+
+ override protected def optChannel(): Option[Channel] = Some(channel)
+
+ def send(msg: String): Unit = {
+ try {
+ channel.sendToJS(msg)
+ } catch {
+ case _: ChannelClosedException =>
+ throw new ComJSEnv.ComClosedException
+ }
+ }
+
+ def receive(): String = {
+ try {
+ channel.recvJVM()
+ } catch {
+ case _: ChannelClosedException =>
+ throw new ComJSEnv.ComClosedException
+ }
+ }
+
+ def close(): Unit = channel.close()
+
+ override def stop(): Unit = {
+ close()
+ super.stop()
+ }
+
+ }
+
+ private def internalRunJS(classpath: CompleteClasspath, code: VirtualJSFile,
+ logger: Logger, console: JSConsole, optChannel: Option[Channel]): Unit = {
+
+ val context = Context.enter()
+ try {
+ val scope = context.initStandardObjects()
+
+ if (withDOM) {
+ // Fetch env.rhino.js from webjar
+ val name = "env.rhino.js"
+ val path = "/META-INF/resources/webjars/envjs/1.2/" + name
+ val resource = getClass.getResource(path)
+ assert(resource != null, s"need $name as resource")
+
+ // Rhino can't optimize envjs
+ context.setOptimizationLevel(-1)
+
+ // Don't print envjs header
+ scope.addFunction("print", args => ())
+
+ // Pipe file to Rhino
+ val reader = Source.fromURL(resource).bufferedReader
+ context.evaluateReader(scope, reader, name, 1, null);
+
+ // No need to actually define print here: It is captured by envjs to
+ // implement console.log, which we'll override in the next statement
+ }
+
+ // Make sure Rhino does not do its magic for JVM top-level packages (#364)
+ val PackagesObject =
+ ScriptableObject.getProperty(scope, "Packages").asInstanceOf[Scriptable]
+ val topLevelPackageIds = ScriptableObject.getPropertyIds(PackagesObject)
+ for (id <- topLevelPackageIds) (id: Any) match {
+ case name: String => ScriptableObject.deleteProperty(scope, name)
+ case index: Int => ScriptableObject.deleteProperty(scope, index)
+ case _ => // should not happen, I think, but with Rhino you never know
+ }
+
+ // Setup console.log
+ val jsconsole = context.newObject(scope)
+ jsconsole.addFunction("log", _.foreach(console.log _))
+ ScriptableObject.putProperty(scope, "console", jsconsole)
+
+ // Optionally setup scalaJSCom
+ var recvCallback: Option[String => Unit] = None
+ for (channel <- optChannel) {
+ val comObj = context.newObject(scope)
+
+ comObj.addFunction("send", s =>
+ channel.sendToJVM(Context.toString(s(0))))
+
+ comObj.addFunction("init", s => s(0) match {
+ case f: Function =>
+ val cb: String => Unit =
+ msg => f.call(context, scope, scope, Array(msg))
+ recvCallback = Some(cb)
+ case _ =>
+ sys.error("First argument to init must be a function")
+ })
+
+ comObj.addFunction("close", _ => {
+ // Tell JVM side we won't send anything
+ channel.close()
+ // Internally register that we're done
+ recvCallback = None
+ })
+
+ ScriptableObject.putProperty(scope, "scalajsCom", comObj)
+ }
+
+ try {
+ // Make the classpath available, either through lazy loading or by
+ // simply inserting it into the scope
+ classpath match {
+ case cp: IRClasspath =>
+ // Setup lazy loading classpath and source mapper
+ val optLoader = if (cp.scalaJSIR.nonEmpty) {
+ val loader = new ScalaJSCoreLib(semantics, cp)
+
+ // Setup sourceMapper
+ val scalaJSenv = context.newObject(scope)
+
+ scalaJSenv.addFunction("sourceMapper", args => {
+ val trace = Context.toObject(args(0), scope)
+ loader.mapStackTrace(trace, context, scope)
+ })
+
+ ScriptableObject.putProperty(scope, "__ScalaJSEnv", scalaJSenv)
+
+ Some(loader)
+ } else {
+ None
+ }
+
+ // Load JS libraries
+ cp.jsLibs.foreach(dep => context.evaluateFile(scope, dep.lib))
+
+ optLoader.foreach(_.insertInto(context, scope))
+ case cp =>
+ cp.allCode.foreach(context.evaluateFile(scope, _))
+ }
+
+ context.evaluateFile(scope, code)
+
+ // Pump the com channel if necessary (if recvCallback is None, the
+ // channel was never initialized on the client side)
+ for ((channel, callback) <- optChannel zip recvCallback) {
+ try {
+ while (recvCallback.isDefined)
+ callback(channel.recvJS())
+ } catch {
+ case _: ChannelClosedException =>
+ // the JVM side closed the connection
+ }
+ }
+
+ // Ensure the channel is closed to release the JVM side
+ optChannel.foreach(_.close)
+
+ } catch {
+ case e: RhinoException =>
+ // Trace here, since we want to be in the context to trace.
+ logger.trace(e)
+ sys.error(s"Exception while running JS code: ${e.getMessage}")
+ }
+ } finally {
+ Context.exit()
+ }
+ }
+
+}
+
+object RhinoJSEnv {
+
+ /** Communication channel between the Rhino thread and the rest of the JVM */
+ private class Channel {
+ private[this] var _closed = false
+ private[this] val js2jvm = mutable.Queue.empty[String]
+ private[this] val jvm2js = mutable.Queue.empty[String]
+
+ def sendToJS(msg: String): Unit = synchronized {
+ jvm2js.enqueue(msg)
+ notify()
+ }
+
+ def sendToJVM(msg: String): Unit = synchronized {
+ js2jvm.enqueue(msg)
+ notify()
+ }
+
+ def recvJVM(): String = synchronized {
+ while (js2jvm.isEmpty && ensureOpen())
+ wait()
+
+ js2jvm.dequeue()
+ }
+
+ def recvJS(): String = synchronized {
+ while (jvm2js.isEmpty && ensureOpen())
+ wait()
+
+ jvm2js.dequeue()
+ }
+
+ def close(): Unit = synchronized {
+ _closed = true
+ notify()
+ }
+
+ /** Throws if the channel is closed, otherwise returns true */
+ private def ensureOpen(): Boolean = {
+ if (_closed)
+ throw new ChannelClosedException
+ true
+ }
+ }
+
+ private class ChannelClosedException extends Exception
+
+}
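
A minimal sketch of driving the com channel from the JVM side, assuming `env: RhinoJSEnv`, `cp: CompleteClasspath`, a `logger`, a `console`, and a `jsCode: VirtualJSFile` whose content talks to the `scalajsCom` object installed by internalRunJS:

    // jsCode content (JavaScript), for example:
    //   scalajsCom.init(function(msg) { scalajsCom.send("echo: " + msg); });
    val runner = env.comRunner(cp, jsCode, logger, console)
    runner.start()
    runner.send("hello")
    println(runner.receive())   // "echo: hello", once the JS callback has run
    runner.close()              // lets the Rhino thread leave its receive loop
    runner.await()
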
diff --git a/sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/env/rhino/ScalaJSCoreLib.scala b/sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/env/rhino/ScalaJSCoreLib.scala
new file mode 100644
index 0000000..e937e5b
--- /dev/null
+++ b/sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/env/rhino/ScalaJSCoreLib.scala
@@ -0,0 +1,173 @@
+/* __ *\
+** ________ ___ / / ___ __ ____ Scala.js sbt plugin **
+** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \ http://scala-js.org/ **
+** /____/\___/_/ |_/____/_/ | |__/ /____/ **
+** |/____/ **
+\* */
+
+
+package scala.scalajs.sbtplugin.env.rhino
+
+import scala.collection.mutable
+
+import org.mozilla.javascript.{Context, Scriptable}
+
+import scala.scalajs.ir
+
+import scala.scalajs.tools.sem.Semantics
+import scala.scalajs.tools.javascript.{Printers, ScalaJSClassEmitter}
+import scala.scalajs.tools.io._
+import scala.scalajs.tools.classpath._
+import scala.scalajs.tools.corelib._
+
+class ScalaJSCoreLib(semantics: Semantics, classpath: IRClasspath) {
+ import ScalaJSCoreLib._
+
+ private val (providers, exportedSymbols) = {
+ val providers = mutable.Map.empty[String, VirtualScalaJSIRFile]
+ val exportedSymbols = mutable.ListBuffer.empty[String]
+
+ for (irFile <- classpath.scalaJSIR) {
+ val info = irFile.roughInfo
+ providers += info.encodedName -> irFile
+ if (info.isExported)
+ exportedSymbols += info.encodedName
+ }
+
+ (providers, exportedSymbols)
+ }
+
+ def insertInto(context: Context, scope: Scriptable) = {
+ CoreJSLibs.libs(semantics).foreach(context.evaluateFile(scope, _))
+ lazifyScalaJSFields(scope)
+
+ // Make sure exported symbols are loaded
+ val ScalaJS = Context.toObject(scope.get("ScalaJS", scope), scope)
+ val c = Context.toObject(ScalaJS.get("c", ScalaJS), scope)
+ for (encodedName <- exportedSymbols)
+ c.get(encodedName, c)
+ }
+
+ /** Source maps the given stack trace (where possible) */
+ def mapStackTrace(stackTrace: Scriptable,
+ context: Context, scope: Scriptable): Scriptable = {
+ val count = Context.toNumber(stackTrace.get("length", stackTrace)).toInt
+
+ // Maps file -> max line (0-based)
+ val neededMaps = mutable.Map.empty[String, Int]
+
+ // Collect required line counts
+ for (i <- 0 until count) {
+ val elem = Context.toObject(stackTrace.get(i, stackTrace), scope)
+ val fileName = Context.toString(elem.get("fileName", elem))
+
+ if (fileName.endsWith(PseudoFileSuffix) &&
+ providers.contains(fileName.stripSuffix(PseudoFileSuffix))) {
+
+ val curMaxLine = neededMaps.getOrElse(fileName, -1)
+ val reqLine = Context.toNumber(elem.get("lineNumber", elem)).toInt - 1
+
+ if (reqLine > curMaxLine)
+ neededMaps.put(fileName, reqLine)
+ }
+ }
+
+ // Map required files
+ val maps =
+ for ((fileName, maxLine) <- neededMaps)
+ yield (fileName, getSourceMapper(fileName, maxLine))
+
+ // Create new stack trace to return
+ val res = context.newArray(scope, count)
+
+ for (i <- 0 until count) {
+ val elem = Context.toObject(stackTrace.get(i, stackTrace), scope)
+ val fileName = Context.toString(elem.get("fileName", elem))
+ val line = Context.toNumber(elem.get("lineNumber", elem)).toInt - 1
+
+ val pos = maps.get(fileName).fold(ir.Position.NoPosition)(_(line))
+
+ val newElem =
+ if (pos.isDefined) newPosElem(scope, context, elem, pos)
+ else elem
+
+ res.put(i, res, newElem)
+ }
+
+ res
+ }
+
+ private def getSourceMapper(fileName: String, untilLine: Int) = {
+ val irFile = providers(fileName.stripSuffix(PseudoFileSuffix))
+ val mapper = new Printers.ReverseSourceMapPrinter(untilLine)
+ val classDef = irFile.tree
+ val desugared = new ScalaJSClassEmitter(semantics).genClassDef(classDef)
+ mapper.reverseSourceMap(desugared)
+ mapper
+ }
+
+ private def newPosElem(scope: Scriptable, context: Context,
+ origElem: Scriptable, pos: ir.Position): Scriptable = {
+ assert(pos.isDefined)
+
+ val elem = context.newObject(scope)
+
+ elem.put("declaringClass", elem, origElem.get("declaringClass", origElem))
+ elem.put("methodName", elem, origElem.get("methodName", origElem))
+ elem.put("fileName", elem, pos.source.toString)
+ elem.put("lineNumber", elem, pos.line + 1)
+ elem.put("columnNumber", elem, pos.column + 1)
+
+ elem
+ }
+
+ private val scalaJSLazyFields = Seq(
+ Info("d"),
+ Info("c"),
+ Info("h"),
+ Info("i", isTraitImpl = true),
+ Info("n", isModule = true),
+ Info("m", isModule = true),
+ Info("is"),
+ Info("as"),
+ Info("isArrayOf"),
+ Info("asArrayOf"))
+
+ private def lazifyScalaJSFields(scope: Scriptable) = {
+ val ScalaJS = Context.toObject(scope.get("ScalaJS", scope), scope)
+
+ def makeLazyScalaJSScope(base: Scriptable, isModule: Boolean, isTraitImpl: Boolean) =
+ new LazyScalaJSScope(this, scope, base, isModule, isTraitImpl)
+
+ for (Info(name, isModule, isTraitImpl) <- scalaJSLazyFields) {
+ val base = ScalaJS.get(name, ScalaJS).asInstanceOf[Scriptable]
+ val lazified = makeLazyScalaJSScope(base, isModule, isTraitImpl)
+ ScalaJS.put(name, ScalaJS, lazified)
+ }
+ }
+
+ private[rhino] def load(scope: Scriptable, encodedName: String): Unit = {
+ providers.get(encodedName) foreach { irFile =>
+ val codeWriter = new java.io.StringWriter
+ val printer = new Printers.JSTreePrinter(codeWriter)
+ val classDef = irFile.tree
+ val desugared = new ScalaJSClassEmitter(semantics).genClassDef(classDef)
+ printer.printTopLevelTree(desugared)
+ printer.complete()
+ val ctx = Context.getCurrentContext()
+ val fakeFileName = encodedName + PseudoFileSuffix
+ ctx.evaluateString(scope, codeWriter.toString(),
+ fakeFileName, 1, null)
+ }
+ }
+}
+
+object ScalaJSCoreLib {
+ private case class Info(name: String,
+ isModule: Boolean = false, isTraitImpl: Boolean = false)
+
+ private val EncodedNameLine = raw""""encodedName": *"([^"]+)"""".r.unanchored
+
+ private final val PseudoFileSuffix = ".sjsir"
+}
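
A minimal sketch of wiring ScalaJSCoreLib into a fresh Rhino scope by hand, mirroring the IRClasspath branch of RhinoJSEnv.internalRunJS above; `semantics: Semantics` and `cp: IRClasspath` are assumed, and the encoded name in the probe is only an example:

    import org.mozilla.javascript.Context

    val context = Context.enter()
    try {
      val scope = context.initStandardObjects()
      val lib = new ScalaJSCoreLib(semantics, cp)

      // Evaluates the core JS libs, swaps ScalaJS.c / ScalaJS.m / ... for
      // LazyScalaJSScope proxies and forces the exported symbols to load.
      lib.insertInto(context, scope)

      // Scripts evaluated in `scope` now see the whole classpath; each class
      // is compiled from its IR and loaded on first property access.
      context.evaluateString(scope, "typeof ScalaJS.c.sci_List",
          "probe.js", 1, null)
    } finally {
      Context.exit()
    }
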
diff --git a/sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/env/rhino/package.scala b/sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/env/rhino/package.scala
new file mode 100644
index 0000000..926fbb2
--- /dev/null
+++ b/sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/env/rhino/package.scala
@@ -0,0 +1,42 @@
+/* __ *\
+** ________ ___ / / ___ __ ____ Scala.js sbt plugin **
+** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \ http://scala-js.org/ **
+** /____/\___/_/ |_/____/_/ | |__/ /____/ **
+** |/____/ **
+\* */
+
+
+package scala.scalajs.sbtplugin.env
+
+import org.mozilla.javascript._
+
+import scala.scalajs.tools.io._
+
+package object rhino {
+
+ implicit class ContextOps(val self: Context) extends AnyVal {
+ def evaluateFile(scope: Scriptable, file: VirtualJSFile,
+ securityDomain: AnyRef = null): Any = {
+ self.evaluateString(scope, file.content, file.path, 1, securityDomain)
+ }
+ }
+
+ implicit class ScriptableObjectOps(val self: Scriptable) {
+ def addFunction(name: String, function: Array[AnyRef] => Any) = {
+ val rhinoFunction =
+ new BaseFunction {
+ ScriptRuntime.setFunctionProtoAndParent(this, self)
+ override def call(context: Context, scope: Scriptable,
+ thisObj: Scriptable, args: Array[AnyRef]): AnyRef = {
+ function(args) match {
+ case () => Undefined.instance
+ case r => r.asInstanceOf[AnyRef]
+ }
+ }
+ }
+
+ ScriptableObject.putProperty(self, name, rhinoFunction)
+ }
+ }
+}
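
A short sketch of how these extensions are used, mirroring RhinoJSEnv above; `context`, `scope`, a `console: JSConsole` and a `file: VirtualJSFile` are assumed to be in scope:

    // Expose a JS-callable `log` function backed by the JVM-side console
    val jsconsole = context.newObject(scope)
    jsconsole.addFunction("log", args => args.foreach(console.log _))
    ScriptableObject.putProperty(scope, "console", jsconsole)

    // Evaluate a VirtualJSFile, reporting its path as the Rhino file name
    context.evaluateFile(scope, file)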