Diffstat (limited to 'src')
-rw-r--r--  src/actors/scala/actors/Actor.scala | 484
-rw-r--r--  src/actors/scala/actors/ActorCanReply.scala | 2
-rw-r--r--  src/actors/scala/actors/ActorTask.scala | 9
-rw-r--r--  src/actors/scala/actors/Channel.scala | 2
-rw-r--r--  src/actors/scala/actors/Combinators.scala | 2
-rw-r--r--  src/actors/scala/actors/InternalActor.scala | 509
-rw-r--r--  src/actors/scala/actors/InternalReplyReactor.scala | 161
-rw-r--r--  src/actors/scala/actors/OutputChannel.scala | 2
-rw-r--r--  src/actors/scala/actors/ReactChannel.scala | 2
-rw-r--r--  src/actors/scala/actors/Reactor.scala | 2
-rw-r--r--  src/actors/scala/actors/ReactorCanReply.scala | 2
-rw-r--r--  src/actors/scala/actors/ReplyReactor.scala | 165
-rw-r--r--  src/actors/scala/actors/ReplyReactorTask.scala | 4
-rw-r--r--  src/actors/scala/actors/UncaughtException.scala | 2
-rw-r--r--  src/build/maven/maven-deploy.xml | 23
-rw-r--r--  src/build/maven/scala-actors-pom.xml | 62
-rw-r--r--  src/build/pack.xml | 9
-rw-r--r--  src/compiler/scala/reflect/internal/Definitions.scala | 18
-rw-r--r--  src/compiler/scala/reflect/internal/Flags.scala | 2
-rw-r--r--  src/compiler/scala/reflect/internal/Importers.scala | 2
-rw-r--r--  src/compiler/scala/reflect/internal/Kinds.scala | 23
-rw-r--r--  src/compiler/scala/reflect/internal/Symbols.scala | 83
-rw-r--r--  src/compiler/scala/reflect/internal/TreeInfo.scala | 20
-rw-r--r--  src/compiler/scala/reflect/internal/TreePrinters.scala | 2
-rw-r--r--  src/compiler/scala/reflect/internal/Trees.scala | 5
-rw-r--r--  src/compiler/scala/reflect/internal/Types.scala | 39
-rw-r--r--  src/compiler/scala/reflect/runtime/ToolBoxes.scala | 4
-rw-r--r--  src/compiler/scala/reflect/runtime/Universe.scala | 5
-rwxr-xr-x  src/compiler/scala/tools/nsc/ast/DocComments.scala | 59
-rw-r--r--  src/compiler/scala/tools/nsc/ast/NodePrinters.scala | 565
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala | 147
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 34
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/Settings.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/page/Template.scala | 25
-rw-r--r--  src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css | 24
-rw-r--r--  src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala | 60
-rw-r--r--  src/compiler/scala/tools/nsc/interactive/RangePositions.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/interpreter/Naming.scala | 20
-rw-r--r--  src/compiler/scala/tools/nsc/matching/ParallelMatching.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/Positions.scala | 9
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala | 24
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 286
-rw-r--r--  src/compiler/scala/tools/nsc/transform/TailCalls.scala | 126
-rw-r--r--  src/compiler/scala/tools/nsc/transform/UnCurry.scala | 88
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Duplicators.scala | 12
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Infer.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala | 54
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala | 414
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala | 50
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 283
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 5
-rw-r--r--  src/compiler/scala/tools/nsc/util/Position.scala | 13
-rw-r--r--  src/compiler/scala/tools/util/color/CString.scala | 19
-rw-r--r--  src/compiler/scala/tools/util/color/package.scala | 1
-rw-r--r--  src/library/scala/Specializable.scala | 12
-rw-r--r--  src/library/scala/collection/GenIterableLike.scala | 64
-rw-r--r--  src/library/scala/collection/GenMapLike.scala | 2
-rw-r--r--  src/library/scala/collection/GenSeqLike.scala | 157
-rw-r--r--  src/library/scala/collection/GenTraversableLike.scala | 112
-rw-r--r--  src/library/scala/collection/GenTraversableOnce.scala | 55
-rw-r--r--  src/library/scala/collection/IterableLike.scala | 1
-rw-r--r--  src/library/scala/collection/Iterator.scala | 14
-rw-r--r--  src/library/scala/collection/JavaConversions.scala | 883
-rwxr-xr-x  src/library/scala/collection/JavaConverters.scala | 484
-rw-r--r--  src/library/scala/collection/MapLike.scala | 14
-rw-r--r--  src/library/scala/collection/SeqLike.scala | 49
-rw-r--r--  src/library/scala/collection/TraversableLike.scala | 76
-rw-r--r--  src/library/scala/collection/convert/DecorateAsJava.scala | 296
-rw-r--r--  src/library/scala/collection/convert/DecorateAsScala.scala | 189
-rw-r--r--  src/library/scala/collection/convert/Decorators.scala | 46
-rw-r--r--  src/library/scala/collection/convert/WrapAsJava.scala | 256
-rw-r--r--  src/library/scala/collection/convert/WrapAsScala.scala | 193
-rw-r--r--  src/library/scala/collection/convert/Wrappers.scala | 422
-rw-r--r--  src/library/scala/collection/convert/package.scala | 18
-rw-r--r--  src/library/scala/collection/generic/GenericTraversableTemplate.scala | 25
-rw-r--r--  src/library/scala/collection/immutable/List.scala | 16
-rw-r--r--  src/library/scala/collection/mutable/ConcurrentTrieMap.scala | 2
-rw-r--r--  src/library/scala/collection/mutable/WeakHashMap.scala | 5
-rw-r--r--  src/library/scala/collection/parallel/ParIterableLike.scala | 3
-rw-r--r--  src/library/scala/collection/parallel/ParSeqLike.scala | 16
-rw-r--r--  src/library/scala/concurrent/Awaitable.scala | 2
-rw-r--r--  src/library/scala/concurrent/ConcurrentPackageObject.scala | 7
-rw-r--r--  src/library/scala/concurrent/DelayedLazyVal.scala | 3
-rw-r--r--  src/library/scala/concurrent/ExecutionContext.scala | 2
-rw-r--r--  src/library/scala/concurrent/Future.scala | 3
-rw-r--r--  src/library/scala/concurrent/Scheduler.scala | 2
-rw-r--r--  src/library/scala/concurrent/default/SchedulerImpl.scala.disabled | 2
-rw-r--r--  src/library/scala/concurrent/default/TaskImpl.scala.disabled | 2
-rw-r--r--  src/library/scala/concurrent/impl/ExecutionContextImpl.scala | 3
-rw-r--r--  src/library/scala/concurrent/impl/Promise.scala | 2
-rw-r--r--  src/library/scala/concurrent/package.scala | 3
-rw-r--r--  src/library/scala/concurrent/util/Duration.scala | 578
-rw-r--r--  src/library/scala/reflect/api/Positions.scala | 18
-rw-r--r--  src/library/scala/reflect/api/TreePrinters.scala | 2
-rw-r--r--  src/library/scala/reflect/api/Trees.scala | 45
-rw-r--r--  src/library/scala/util/Duration.scala | 485
-rw-r--r--  src/library/scala/util/Timeout.scala | 33
-rwxr-xr-x  src/library/scala/xml/Elem.scala | 8
-rwxr-xr-x  src/library/scala/xml/XML.scala | 17
-rwxr-xr-x  src/library/scala/xml/parsing/MarkupParser.scala | 22
-rw-r--r--  src/partest/scala/tools/partest/PartestTask.scala | 11
-rw-r--r--  src/partest/scala/tools/partest/nest/AntRunner.scala | 1
-rw-r--r--  src/partest/scala/tools/partest/nest/ConsoleFileManager.scala | 9
-rw-r--r--  src/partest/scala/tools/partest/nest/DirectRunner.scala | 9
-rw-r--r--  src/partest/scala/tools/partest/nest/FileManager.scala | 1
-rw-r--r--  src/partest/scala/tools/partest/nest/ReflectiveRunner.scala | 4
-rw-r--r--  src/partest/scala/tools/partest/nest/SBTRunner.scala | 4
-rw-r--r--  src/partest/scala/tools/partest/nest/Worker.scala | 1
123 files changed, 4872 insertions, 3861 deletions
diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala
index b746f68268..aab533ae8d 100644
--- a/src/actors/scala/actors/Actor.scala
+++ b/src/actors/scala/actors/Actor.scala
@@ -46,7 +46,7 @@ object Actor extends Combinators {
Terminated = Value
}
- private[actors] val tl = new ThreadLocal[ReplyReactor]
+ private[actors] val tl = new ThreadLocal[InternalReplyReactor]
// timer thread runs as daemon
private[actors] val timer = new Timer(true)
@@ -59,15 +59,15 @@ object Actor extends Combinators {
*
* @return returns the currently executing actor.
*/
- def self: Actor = self(Scheduler)
+ def self: Actor = self(Scheduler).asInstanceOf[Actor]
- private[actors] def self(sched: IScheduler): Actor =
- rawSelf(sched).asInstanceOf[Actor]
+ private[actors] def self(sched: IScheduler): InternalActor =
+ rawSelf(sched).asInstanceOf[InternalActor]
- private[actors] def rawSelf: ReplyReactor =
+ private[actors] def rawSelf: InternalReplyReactor =
rawSelf(Scheduler)
- private[actors] def rawSelf(sched: IScheduler): ReplyReactor = {
+ private[actors] def rawSelf(sched: IScheduler): InternalReplyReactor = {
val s = tl.get
if (s eq null) {
val r = new ActorProxy(Thread.currentThread, sched)
@@ -245,7 +245,7 @@ object Actor extends Combinators {
def eventloop(f: PartialFunction[Any, Unit]): Nothing =
rawSelf.react(new RecursiveProxyHandler(rawSelf, f))
- private class RecursiveProxyHandler(a: ReplyReactor, f: PartialFunction[Any, Unit])
+ private class RecursiveProxyHandler(a: InternalReplyReactor, f: PartialFunction[Any, Unit])
extends scala.runtime.AbstractPartialFunction[Any, Unit] {
def _isDefinedAt(m: Any): Boolean =
true // events are immediately removed from the mailbox
@@ -259,7 +259,7 @@ object Actor extends Combinators {
* Returns the actor which sent the last received message.
*/
def sender: OutputChannel[Any] =
- rawSelf.sender
+ rawSelf.internalSender
/**
* Sends `msg` to the actor waiting in a call to `!?`.
@@ -302,7 +302,7 @@ object Actor extends Combinators {
def andThen[b](other: => b): Unit
}
- implicit def mkBody[a](body: => a) = new Body[a] {
+ implicit def mkBody[a](body: => a) = new InternalActor.Body[a] {
def andThen[b](other: => b): Unit = rawSelf.seq(body, other)
}
@@ -397,476 +397,12 @@ object Actor extends Combinators {
* @define channel actor's mailbox
*/
@SerialVersionUID(-781154067877019505L)
-trait Actor extends AbstractActor with ReplyReactor with ActorCanReply with InputChannel[Any] with Serializable {
-
- /* The following two fields are only used when the actor
- * suspends by blocking its underlying thread, for example,
- * when waiting in a receive or synchronous send.
- */
- @volatile
- private var isSuspended = false
-
- /* This field is used to communicate the received message from
- * the invocation of send to the place where the thread of
- * the receiving actor resumes inside receive/receiveWithin.
- */
- @volatile
- private var received: Option[Any] = None
-
- protected[actors] override def scheduler: IScheduler = Scheduler
-
- private[actors] override def startSearch(msg: Any, replyTo: OutputChannel[Any], handler: PartialFunction[Any, Any]) =
- if (isSuspended) {
- () => synchronized {
- mailbox.append(msg, replyTo)
- resumeActor()
- }
- } else super.startSearch(msg, replyTo, handler)
-
- // we override this method to check `shouldExit` before suspending
- private[actors] override def searchMailbox(startMbox: MQueue[Any],
- handler: PartialFunction[Any, Any],
- resumeOnSameThread: Boolean) {
- var tmpMbox = startMbox
- var done = false
- while (!done) {
- val qel = tmpMbox.extractFirst((msg: Any, replyTo: OutputChannel[Any]) => {
- senders = List(replyTo)
- handler.isDefinedAt(msg)
- })
- if (tmpMbox ne mailbox)
- tmpMbox.foreach((m, s) => mailbox.append(m, s))
- if (null eq qel) {
- synchronized {
- // in mean time new stuff might have arrived
- if (!sendBuffer.isEmpty) {
- tmpMbox = new MQueue[Any]("Temp")
- drainSendBuffer(tmpMbox)
- // keep going
- } else {
- // very important to check for `shouldExit` at this point
- // since linked actors might have set it after we checked
- // last time (e.g., at the beginning of `react`)
- if (shouldExit) exit()
- waitingFor = handler
- // see Reactor.searchMailbox
- throw Actor.suspendException
- }
- }
- } else {
- resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
- done = true
- }
- }
- }
-
- private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
- new ActorTask(this, fun, handler, msg)
-
- /** See the companion object's `receive` method. */
- def receive[R](f: PartialFunction[Any, R]): R = {
- assert(Actor.self(scheduler) == this, "receive from channel belonging to other actor")
-
- synchronized {
- if (shouldExit) exit() // links
- drainSendBuffer(mailbox)
- }
-
- var done = false
- while (!done) {
- val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
- senders = replyTo :: senders
- val matches = f.isDefinedAt(m)
- senders = senders.tail
- matches
- })
- if (null eq qel) {
- synchronized {
- // in mean time new stuff might have arrived
- if (!sendBuffer.isEmpty) {
- drainSendBuffer(mailbox)
- // keep going
- } else {
- waitingFor = f
- isSuspended = true
- scheduler.managedBlock(blocker)
- drainSendBuffer(mailbox)
- // keep going
- }
- }
- } else {
- received = Some(qel.msg)
- senders = qel.session :: senders
- done = true
- }
- }
-
- val result = f(received.get)
- received = None
- senders = senders.tail
- result
- }
-
- /** See the companion object's `receiveWithin` method. */
- def receiveWithin[R](msec: Long)(f: PartialFunction[Any, R]): R = {
- assert(Actor.self(scheduler) == this, "receive from channel belonging to other actor")
-
- synchronized {
- if (shouldExit) exit() // links
- drainSendBuffer(mailbox)
- }
-
- // first, remove spurious TIMEOUT message from mailbox if any
- mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
-
- val receiveTimeout = () => {
- if (f.isDefinedAt(TIMEOUT)) {
- received = Some(TIMEOUT)
- senders = this :: senders
- } else
- sys.error("unhandled timeout")
- }
-
- var done = false
- while (!done) {
- val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
- senders = replyTo :: senders
- val matches = f.isDefinedAt(m)
- senders = senders.tail
- matches
- })
- if (null eq qel) {
- val todo = synchronized {
- // in mean time new stuff might have arrived
- if (!sendBuffer.isEmpty) {
- drainSendBuffer(mailbox)
- // keep going
- () => {}
- } else if (msec == 0L) {
- done = true
- receiveTimeout
- } else {
- if (onTimeout.isEmpty) {
- if (!f.isDefinedAt(TIMEOUT))
- sys.error("unhandled timeout")
-
- val thisActor = this
- onTimeout = Some(new TimerTask {
- def run() {
- thisActor.send(TIMEOUT, thisActor)
- }
- })
- Actor.timer.schedule(onTimeout.get, msec)
- }
-
- // It is possible that !onTimeout.isEmpty, but TIMEOUT is not yet in mailbox
- // See SI-4759
- waitingFor = f
- received = None
- isSuspended = true
- scheduler.managedBlock(blocker)
- drainSendBuffer(mailbox)
- // keep going
- () => {}
- }
- }
- todo()
- } else {
- synchronized {
- if (!onTimeout.isEmpty) {
- onTimeout.get.cancel()
- onTimeout = None
- }
- }
- received = Some(qel.msg)
- senders = qel.session :: senders
- done = true
- }
- }
-
- val result = f(received.get)
- received = None
- senders = senders.tail
- result
- }
-
- /** See the companion object's `react` method. */
- override def react(handler: PartialFunction[Any, Unit]): Nothing = {
- synchronized {
- if (shouldExit) exit()
- }
- super.react(handler)
- }
-
- /** See the companion object's `reactWithin` method. */
- override def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
- synchronized {
- if (shouldExit) exit()
- }
- super.reactWithin(msec)(handler)
- }
-
- /** Receives the next message from the mailbox */
- def ? : Any = receive {
- case x => x
- }
-
- // guarded by lock of this
- // never throws SuspendActorControl
- private[actors] override def scheduleActor(f: PartialFunction[Any, Any], msg: Any) =
- if (f eq null) {
- // do nothing (timeout is handled instead)
- }
- else {
- val task = new ActorTask(this, null, f, msg)
- scheduler executeFromActor task
- }
-
- /* Used for notifying scheduler when blocking inside receive/receiveWithin. */
- private object blocker extends scala.concurrent.ManagedBlocker {
- def block() = {
- Actor.this.suspendActor()
- true
- }
- def isReleasable =
- !Actor.this.isSuspended
- }
-
- private def suspendActor() = synchronized {
- while (isSuspended) {
- try {
- wait()
- } catch {
- case _: InterruptedException =>
- }
- }
- // links: check if we should exit
- if (shouldExit) exit()
- }
-
- private def resumeActor() {
- isSuspended = false
- notify()
- }
-
- private[actors] override def exiting = synchronized {
- _state == Actor.State.Terminated
- }
-
- // guarded by this
- private[actors] override def dostart() {
- // Reset various flags.
- //
- // Note that we do *not* reset `trapExit`. The reason is that
- // users should be able to set the field in the constructor
- // and before `act` is called.
- exitReason = 'normal
- shouldExit = false
-
- super.dostart()
- }
+trait Actor extends InternalActor with ReplyReactor {
override def start(): Actor = synchronized {
super.start()
this
}
- /** State of this actor */
- override def getState: Actor.State.Value = synchronized {
- if (isSuspended) {
- if (onTimeout.isEmpty)
- Actor.State.Blocked
- else
- Actor.State.TimedBlocked
- } else
- super.getState
- }
-
- // guarded by this
- private[actors] var links: List[AbstractActor] = Nil
-
- /**
- * Links `self` to actor `to`.
- *
- * @param to the actor to link to
- * @return the parameter actor
- */
- def link(to: AbstractActor): AbstractActor = {
- assert(Actor.self(scheduler) == this, "link called on actor different from self")
- this linkTo to
- to linkTo this
- to
- }
-
- /**
- * Links `self` to the actor defined by `body`.
- *
- * @param body the body of the actor to link to
- * @return the parameter actor
- */
- def link(body: => Unit): Actor = {
- assert(Actor.self(scheduler) == this, "link called on actor different from self")
- val a = new Actor {
- def act() = body
- override final val scheduler: IScheduler = Actor.this.scheduler
- }
- link(a)
- a.start()
- a
- }
-
- private[actors] def linkTo(to: AbstractActor) = synchronized {
- links = to :: links
- }
-
- /**
- * Unlinks `self` from actor `from`.
- */
- def unlink(from: AbstractActor) {
- assert(Actor.self(scheduler) == this, "unlink called on actor different from self")
- this unlinkFrom from
- from unlinkFrom this
- }
-
- private[actors] def unlinkFrom(from: AbstractActor) = synchronized {
- links = links.filterNot(from.==)
- }
-
- @volatile
- var trapExit = false
- // guarded by this
- private var exitReason: AnyRef = 'normal
- // guarded by this
- private[actors] var shouldExit = false
-
- /**
- * Terminates execution of `self` with the following effect on
- * linked actors:
- *
- * For each linked actor `a` with `trapExit` set to `'''true'''`,
- * send message `Exit(self, reason)` to `a`.
- *
- * For each linked actor `a` with `trapExit` set to `'''false'''`
- * (default), call `a.exit(reason)` if `reason != 'normal`.
- */
- protected[actors] def exit(reason: AnyRef): Nothing = {
- synchronized {
- exitReason = reason
- }
- exit()
- }
-
- /**
- * Terminates with exit reason `'normal`.
- */
- protected[actors] override def exit(): Nothing = {
- val todo = synchronized {
- if (!links.isEmpty)
- exitLinked()
- else
- () => {}
- }
- todo()
- super.exit()
- }
-
- // Assume !links.isEmpty
- // guarded by this
- private[actors] def exitLinked(): () => Unit = {
- _state = Actor.State.Terminated
- // reset waitingFor, otherwise getState returns Suspended
- waitingFor = Reactor.waitingForNone
- // remove this from links
- val mylinks = links.filterNot(this.==)
- // unlink actors
- mylinks.foreach(unlinkFrom(_))
- // return closure that locks linked actors
- () => {
- mylinks.foreach((linked: AbstractActor) => {
- linked.synchronized {
- if (!linked.exiting) {
- linked.unlinkFrom(this)
- linked.exit(this, exitReason)
- }
- }
- })
- }
- }
-
- // Assume !links.isEmpty
- // guarded by this
- private[actors] def exitLinked(reason: AnyRef): () => Unit = {
- exitReason = reason
- exitLinked()
- }
-
- // Assume !this.exiting
- private[actors] def exit(from: AbstractActor, reason: AnyRef) {
- if (trapExit) {
- this ! Exit(from, reason)
- }
- else if (reason != 'normal)
- synchronized {
- shouldExit = true
- exitReason = reason
- // resume this Actor in a way that
- // causes it to exit
- // (because shouldExit == true)
- if (isSuspended)
- resumeActor()
- else if (waitingFor ne Reactor.waitingForNone) {
- waitingFor = Reactor.waitingForNone
- // it doesn't matter what partial function we are passing here
- scheduleActor(waitingFor, null)
- /* Here we should not throw a SuspendActorControl,
- since the current method is called from an actor that
- is in the process of exiting.
-
- Therefore, the contract for scheduleActor is that
- it never throws a SuspendActorControl.
- */
- }
- }
- }
-
- /** Requires qualified private, because `RemoteActor` must
- * register a termination handler.
- */
- private[actors] def onTerminate(f: => Unit) {
- scheduler.onTerminate(this) { f }
- }
}
-
-/**
- * Used as the timeout pattern in
- * <a href="Actor.html#receiveWithin(Long)" target="contentFrame">
- * <code>receiveWithin</code></a> and
- * <a href="Actor.html#reactWithin(Long)" target="contentFrame">
- * <code>reactWithin</code></a>.
- *
- * @example {{{
- * receiveWithin(500) {
- * case (x, y) => ...
- * case TIMEOUT => ...
- * }
- * }}}
- *
- * @author Philipp Haller
- */
-case object TIMEOUT
-
-
-/** Sent to an actor with `trapExit` set to `'''true'''` whenever one of its
- * linked actors terminates.
- *
- * @param from the actor that terminated
- * @param reason the reason that caused the actor to terminate
- */
-case class Exit(from: AbstractActor, reason: AnyRef)
-
-/** Manages control flow of actor executions.
- *
- * @author Philipp Haller
- */
-private[actors] class SuspendActorControl extends ControlThrowable
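The hunk above collapses the old Actor body into `trait Actor extends InternalActor with ReplyReactor`, moving the implementation into InternalActor while leaving the public surface (act, start, !, !?, receive, react, reply) source-compatible. A minimal sketch of that unchanged surface, assuming the usual scala.actors entry points; the Counter class is a hypothetical example, not part of this commit:

import scala.actors.Actor

class Counter extends Actor {
  def act() {
    var n = 0
    loop {
      receive {
        case "inc" => n += 1
        case "get" => reply(n)      // reply is delivered to whoever sent "get"
      }
    }
  }
}

val c = (new Counter).start()       // start() still returns the Actor itself
c ! "inc"                           // asynchronous send
println(c !? "get")                 // synchronous send; prints 1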
diff --git a/src/actors/scala/actors/ActorCanReply.scala b/src/actors/scala/actors/ActorCanReply.scala
index b307aafa57..d92fb183c0 100644
--- a/src/actors/scala/actors/ActorCanReply.scala
+++ b/src/actors/scala/actors/ActorCanReply.scala
@@ -18,7 +18,7 @@ import scala.concurrent.SyncVar
* @author Philipp Haller
*/
private[actors] trait ActorCanReply extends ReactorCanReply {
- this: AbstractActor with ReplyReactor =>
+ this: AbstractActor with InternalReplyReactor =>
override def !?(msg: Any): Any = {
val replyCh = new Channel[Any](Actor.self(scheduler))
diff --git a/src/actors/scala/actors/ActorTask.scala b/src/actors/scala/actors/ActorTask.scala
index 090d0448f0..bb04302238 100644
--- a/src/actors/scala/actors/ActorTask.scala
+++ b/src/actors/scala/actors/ActorTask.scala
@@ -17,7 +17,7 @@ package scala.actors
* changes to the underlying var invisible.) I can't figure out what's supposed
* to happen, so I renamed the constructor parameter to at least be less confusing.
*/
-private[actors] class ActorTask(actor: Actor,
+private[actors] class ActorTask(actor: InternalActor,
fun: () => Unit,
handler: PartialFunction[Any, Any],
initialMsg: Any)
@@ -32,7 +32,7 @@ private[actors] class ActorTask(actor: Actor,
}
protected override def terminateExecution(e: Throwable) {
- val senderInfo = try { Some(actor.sender) } catch {
+ val senderInfo = try { Some(actor.internalSender) } catch {
case _: Exception => None
}
// !!! If this is supposed to be setting the current contents of the
@@ -45,13 +45,16 @@ private[actors] class ActorTask(actor: Actor,
e)
val todo = actor.synchronized {
- if (!actor.links.isEmpty)
+ val res = if (!actor.links.isEmpty)
actor.exitLinked(uncaught)
else {
super.terminateExecution(e)
() => {}
}
+ actor.internalPostStop
+ res
}
+
todo()
}
diff --git a/src/actors/scala/actors/Channel.scala b/src/actors/scala/actors/Channel.scala
index 62331239e8..36cee66b42 100644
--- a/src/actors/scala/actors/Channel.scala
+++ b/src/actors/scala/actors/Channel.scala
@@ -34,7 +34,7 @@ case class ! [a](ch: Channel[a], msg: a)
* @define actor channel
* @define channel channel
*/
-class Channel[Msg](val receiver: Actor) extends InputChannel[Msg] with OutputChannel[Msg] with CanReply[Msg, Any] {
+class Channel[Msg](val receiver: InternalActor) extends InputChannel[Msg] with OutputChannel[Msg] with CanReply[Msg, Any] {
type Future[+P] = scala.actors.Future[P]
diff --git a/src/actors/scala/actors/Combinators.scala b/src/actors/scala/actors/Combinators.scala
index 5276c7843e..c1a9095614 100644
--- a/src/actors/scala/actors/Combinators.scala
+++ b/src/actors/scala/actors/Combinators.scala
@@ -16,7 +16,7 @@ private[actors] trait Combinators {
* Enables the composition of suspendable closures using `andThen`,
* `loop`, `loopWhile`, etc.
*/
- implicit def mkBody[a](body: => a): Actor.Body[a]
+ implicit def mkBody[a](body: => a): InternalActor.Body[a]
/**
* Repeatedly executes `body`.
diff --git a/src/actors/scala/actors/InternalActor.scala b/src/actors/scala/actors/InternalActor.scala
new file mode 100644
index 0000000000..c94da5b9fd
--- /dev/null
+++ b/src/actors/scala/actors/InternalActor.scala
@@ -0,0 +1,509 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+package scala.actors
+import java.util.TimerTask
+import scala.util.control.ControlThrowable
+
+private[actors] object InternalActor {
+ private[actors] trait Body[a] {
+ def andThen[b](other: => b): Unit
+ }
+}
+
+private[actors] trait InternalActor extends AbstractActor with InternalReplyReactor with ActorCanReply with InputChannel[Any] with Serializable {
+
+ /* The following two fields are only used when the actor
+ * suspends by blocking its underlying thread, for example,
+ * when waiting in a receive or synchronous send.
+ */
+ @volatile
+ private[actors] var isSuspended = false
+
+ /* This field is used to communicate the received message from
+ * the invocation of send to the place where the thread of
+ * the receiving actor resumes inside receive/receiveWithin.
+ */
+ @volatile
+ private var received: Option[Any] = None
+
+ protected[actors] override def scheduler: IScheduler = Scheduler
+
+ private[actors] override def startSearch(msg: Any, replyTo: OutputChannel[Any], handler: PartialFunction[Any, Any]) =
+ if (isSuspended) {
+ () =>
+ synchronized {
+ mailbox.append(msg, replyTo)
+ resumeActor()
+ }
+ } else super.startSearch(msg, replyTo, handler)
+
+ // we override this method to check `shouldExit` before suspending
+ private[actors] override def searchMailbox(startMbox: MQueue[Any],
+ handler: PartialFunction[Any, Any],
+ resumeOnSameThread: Boolean) {
+ var tmpMbox = startMbox
+ var done = false
+ while (!done) {
+ val qel = tmpMbox.extractFirst((msg: Any, replyTo: OutputChannel[Any]) => {
+ senders = List(replyTo)
+ handler.isDefinedAt(msg)
+ })
+ if (tmpMbox ne mailbox)
+ tmpMbox.foreach((m, s) => mailbox.append(m, s))
+ if (null eq qel) {
+ synchronized {
+ // in mean time new stuff might have arrived
+ if (!sendBuffer.isEmpty) {
+ tmpMbox = new MQueue[Any]("Temp")
+ drainSendBuffer(tmpMbox)
+ // keep going
+ } else {
+ // very important to check for `shouldExit` at this point
+ // since linked actors might have set it after we checked
+ // last time (e.g., at the beginning of `react`)
+ if (shouldExit) exit()
+ waitingFor = handler
+ // see Reactor.searchMailbox
+ throw Actor.suspendException
+ }
+ }
+ } else {
+ resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
+ done = true
+ }
+ }
+ }
+
+ private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
+ new ActorTask(this, fun, handler, msg)
+
+ /** See the companion object's `receive` method. */
+ def receive[R](f: PartialFunction[Any, R]): R = {
+ assert(Actor.self(scheduler) == this, "receive from channel belonging to other actor")
+
+ synchronized {
+ if (shouldExit) exit() // links
+ drainSendBuffer(mailbox)
+ }
+
+ var done = false
+ while (!done) {
+ val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
+ senders = replyTo :: senders
+ val matches = f.isDefinedAt(m)
+ senders = senders.tail
+ matches
+ })
+ if (null eq qel) {
+ synchronized {
+ // in mean time new stuff might have arrived
+ if (!sendBuffer.isEmpty) {
+ drainSendBuffer(mailbox)
+ // keep going
+ } else {
+ waitingFor = f
+ isSuspended = true
+ scheduler.managedBlock(blocker)
+ drainSendBuffer(mailbox)
+ // keep going
+ }
+ }
+ } else {
+ received = Some(qel.msg)
+ senders = qel.session :: senders
+ done = true
+ }
+ }
+
+ val result = f(received.get)
+ received = None
+ senders = senders.tail
+ result
+ }
+
+ /** See the companion object's `receiveWithin` method. */
+ def receiveWithin[R](msec: Long)(f: PartialFunction[Any, R]): R = {
+ assert(Actor.self(scheduler) == this, "receive from channel belonging to other actor")
+
+ synchronized {
+ if (shouldExit) exit() // links
+ drainSendBuffer(mailbox)
+ }
+
+ // first, remove spurious TIMEOUT message from mailbox if any
+ mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
+
+ val receiveTimeout = () => {
+ if (f.isDefinedAt(TIMEOUT)) {
+ received = Some(TIMEOUT)
+ senders = this :: senders
+ } else
+ sys.error("unhandled timeout")
+ }
+
+ var done = false
+ while (!done) {
+ val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
+ senders = replyTo :: senders
+ val matches = f.isDefinedAt(m)
+ senders = senders.tail
+ matches
+ })
+ if (null eq qel) {
+ val todo = synchronized {
+ // in mean time new stuff might have arrived
+ if (!sendBuffer.isEmpty) {
+ drainSendBuffer(mailbox)
+ // keep going
+ () => {}
+ } else if (msec == 0L) {
+ done = true
+ receiveTimeout
+ } else {
+ if (onTimeout.isEmpty) {
+ if (!f.isDefinedAt(TIMEOUT))
+ sys.error("unhandled timeout")
+
+ val thisActor = this
+ onTimeout = Some(new TimerTask {
+ def run() {
+ thisActor.send(TIMEOUT, thisActor)
+ }
+ })
+ Actor.timer.schedule(onTimeout.get, msec)
+ }
+
+ // It is possible that !onTimeout.isEmpty, but TIMEOUT is not yet in mailbox
+ // See SI-4759
+ waitingFor = f
+ received = None
+ isSuspended = true
+ scheduler.managedBlock(blocker)
+ drainSendBuffer(mailbox)
+ // keep going
+ () => {}
+ }
+ }
+ todo()
+ } else {
+ synchronized {
+ if (!onTimeout.isEmpty) {
+ onTimeout.get.cancel()
+ onTimeout = None
+ }
+ }
+ received = Some(qel.msg)
+ senders = qel.session :: senders
+ done = true
+ }
+ }
+
+ val result = f(received.get)
+ received = None
+ senders = senders.tail
+ result
+ }
+
+ /** See the companion object's `react` method. */
+ override def react(handler: PartialFunction[Any, Unit]): Nothing = {
+ synchronized {
+ if (shouldExit) exit()
+ }
+ super.react(handler)
+ }
+
+ /** See the companion object's `reactWithin` method. */
+ override def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
+ synchronized {
+ if (shouldExit) exit()
+ }
+ super.reactWithin(msec)(handler)
+ }
+
+ /** Receives the next message from the mailbox */
+ def ? : Any = receive {
+ case x => x
+ }
+
+ // guarded by lock of this
+ // never throws SuspendActorControl
+ private[actors] override def scheduleActor(f: PartialFunction[Any, Any], msg: Any) =
+ if (f eq null) {
+ // do nothing (timeout is handled instead)
+ } else {
+ val task = new ActorTask(this, null, f, msg)
+ scheduler executeFromActor task
+ }
+
+ /* Used for notifying scheduler when blocking inside receive/receiveWithin. */
+ private object blocker extends scala.concurrent.ManagedBlocker {
+ def block() = {
+ InternalActor.this.suspendActor()
+ true
+ }
+ def isReleasable =
+ !InternalActor.this.isSuspended
+ }
+
+ private def suspendActor() = synchronized {
+ while (isSuspended) {
+ try {
+ wait()
+ } catch {
+ case _: InterruptedException =>
+ }
+ }
+ // links: check if we should exit
+ if (shouldExit) exit()
+ }
+
+ private def resumeActor() {
+ isSuspended = false
+ notify()
+ }
+
+ private[actors] override def exiting = synchronized {
+ _state == Actor.State.Terminated
+ }
+
+ // guarded by this
+ private[actors] override def dostart() {
+ // Reset various flags.
+ //
+ // Note that we do *not* reset `trapExit`. The reason is that
+ // users should be able to set the field in the constructor
+ // and before `act` is called.
+ exitReason = 'normal
+ shouldExit = false
+
+ super.dostart()
+ }
+
+ override def start(): InternalActor = synchronized {
+ super.start()
+ this
+ }
+
+ /** State of this actor */
+ override def getState: Actor.State.Value = synchronized {
+ if (isSuspended) {
+ if (onTimeout.isEmpty)
+ Actor.State.Blocked
+ else
+ Actor.State.TimedBlocked
+ } else
+ super.getState
+ }
+
+ // guarded by this
+ private[actors] var links: List[AbstractActor] = Nil
+
+ /**
+ * Links <code>self</code> to actor <code>to</code>.
+ *
+ * @param to the actor to link to
+ * @return the parameter actor
+ */
+ def link(to: AbstractActor): AbstractActor = {
+ assert(Actor.self(scheduler) == this, "link called on actor different from self")
+ this linkTo to
+ to linkTo this
+ to
+ }
+
+ /**
+ * Links <code>self</code> to the actor defined by <code>body</code>.
+ *
+ * @param body the body of the actor to link to
+ * @return the parameter actor
+ */
+ def link(body: => Unit): Actor = {
+ assert(Actor.self(scheduler) == this, "link called on actor different from self")
+ val a = new Actor {
+ def act() = body
+ override final val scheduler: IScheduler = InternalActor.this.scheduler
+ }
+ link(a)
+ a.start()
+ a
+ }
+
+ private[actors] def linkTo(to: AbstractActor) = synchronized {
+ links = to :: links
+ }
+
+ /**
+ * Unlinks <code>self</code> from actor <code>from</code>.
+ */
+ def unlink(from: AbstractActor) {
+ assert(Actor.self(scheduler) == this, "unlink called on actor different from self")
+ this unlinkFrom from
+ from unlinkFrom this
+ }
+
+ private[actors] def unlinkFrom(from: AbstractActor) = synchronized {
+ links = links.filterNot(from.==)
+ }
+
+ @volatile
+ private[actors] var _trapExit = false
+
+ def trapExit = _trapExit
+
+ def trapExit_=(value: Boolean) = _trapExit = value
+
+ // guarded by this
+ private var exitReason: AnyRef = 'normal
+ // guarded by this
+ private[actors] var shouldExit = false
+
+ /**
+ * <p>
+ * Terminates execution of <code>self</code> with the following
+ * effect on linked actors:
+ * </p>
+ * <p>
+ * For each linked actor <code>a</code> with
+ * <code>trapExit</code> set to <code>true</code>, send message
+ * <code>Exit(self, reason)</code> to <code>a</code>.
+ * </p>
+ * <p>
+ * For each linked actor <code>a</code> with
+ * <code>trapExit</code> set to <code>false</code> (default),
+ * call <code>a.exit(reason)</code> if
+ * <code>reason != 'normal</code>.
+ * </p>
+ */
+ protected[actors] def exit(reason: AnyRef): Nothing = {
+ synchronized {
+ exitReason = reason
+ }
+ exit()
+ }
+
+ /**
+ * Terminates with exit reason <code>'normal</code>.
+ */
+ protected[actors] override def exit(): Nothing = {
+ val todo = synchronized {
+ if (!links.isEmpty)
+ exitLinked()
+ else
+ () => {}
+ }
+ todo()
+ super.exit()
+ }
+
+ // Assume !links.isEmpty
+ // guarded by this
+ private[actors] def exitLinked(): () => Unit = {
+ _state = Actor.State.Terminated
+ // reset waitingFor, otherwise getState returns Suspended
+ waitingFor = Reactor.waitingForNone
+ // remove this from links
+ val mylinks = links.filterNot(this.==)
+ // unlink actors
+ mylinks.foreach(unlinkFrom(_))
+ // return closure that locks linked actors
+ () => {
+ mylinks.foreach((linked: AbstractActor) => {
+ linked.synchronized {
+ if (!linked.exiting) {
+ linked.unlinkFrom(this)
+ linked.exit(this, exitReason)
+ }
+ }
+ })
+ }
+ }
+
+ // Assume !links.isEmpty
+ // guarded by this
+ private[actors] def exitLinked(reason: AnyRef): () => Unit = {
+ exitReason = reason
+ exitLinked()
+ }
+
+ // Assume !this.exiting
+ private[actors] def exit(from: AbstractActor, reason: AnyRef) {
+ if (trapExit) {
+ this ! Exit(from, reason)
+ } else if (reason != 'normal)
+ stop(reason)
+ }
+
+ /* Requires qualified private, because <code>RemoteActor</code> must
+ * register a termination handler.
+ */
+ private[actors] def onTerminate(f: => Unit) {
+ scheduler.onTerminate(this) { f }
+ }
+
+ private[actors] def internalPostStop() = {}
+
+ private[actors] def stop(reason: AnyRef): Unit = {
+ synchronized {
+ shouldExit = true
+ exitReason = reason
+ // resume this Actor in a way that
+ // causes it to exit
+ // (because shouldExit == true)
+ if (isSuspended)
+ resumeActor()
+ else if (waitingFor ne Reactor.waitingForNone) {
+ waitingFor = Reactor.waitingForNone
+ // it doesn't matter what partial function we are passing here
+ val task = new ActorTask(this, null, waitingFor, null)
+ scheduler execute task
+ /* Here we should not throw a SuspendActorControl,
+ since the current method is called from an actor that
+ is in the process of exiting.
+
+ Therefore, the contract for scheduleActor is that
+ it never throws a SuspendActorControl.
+ */
+ }
+ }
+ }
+}
+
+/**
+ * Used as the timeout pattern in
+ * <a href="Actor.html#receiveWithin(Long)" target="contentFrame">
+ * <code>receiveWithin</code></a> and
+ * <a href="Actor.html#reactWithin(Long)" target="contentFrame">
+ * <code>reactWithin</code></a>.
+ *
+ * @example {{{
+ * receiveWithin(500) {
+ * case (x, y) => ...
+ * case TIMEOUT => ...
+ * }
+ * }}}
+ *
+ * @author Philipp Haller
+ */
+case object TIMEOUT
+
+/**
+ * Sent to an actor
+ * with `trapExit` set to `true` whenever one of its linked actors
+ * terminates.
+ *
+ * @param from the actor that terminated
+ * @param reason the reason that caused the actor to terminate
+ */
+case class Exit(from: AbstractActor, reason: AnyRef)
+
+/**
+ * Manages control flow of actor executions.
+ *
+ * @author Philipp Haller
+ */
+private[actors] class SuspendActorControl extends ControlThrowable
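The new InternalActor.scala carries the blocking receive/receiveWithin machinery, the link/trapExit/Exit handling, and the TIMEOUT object over from Actor.scala essentially verbatim. A short sketch of the timeout pattern documented above, assuming the usual scala.actors entry points (the 500 ms value is arbitrary):

import scala.actors.Actor._
import scala.actors.TIMEOUT

actor {
  receiveWithin(500) {
    case "ping"  => println("got ping")
    case TIMEOUT => println("no message within 500 ms")   // TIMEOUT is injected by the timer thread
  }
}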
diff --git a/src/actors/scala/actors/InternalReplyReactor.scala b/src/actors/scala/actors/InternalReplyReactor.scala
new file mode 100644
index 0000000000..38295138d4
--- /dev/null
+++ b/src/actors/scala/actors/InternalReplyReactor.scala
@@ -0,0 +1,161 @@
+package scala.actors
+
+import java.util.{TimerTask}
+
+/**
+ * Extends the [[scala.actors.Reactor]]
+ * trait with methods to reply to the sender of a message.
+ * Sending a message to a <code>ReplyReactor</code> implicitly
+ * passes a reference to the sender together with the message.
+ *
+ * @author Philipp Haller
+ *
+ * @define actor `ReplyReactor`
+ */
+trait InternalReplyReactor extends Reactor[Any] with ReactorCanReply {
+
+ /* A list of the current senders. The head of the list is
+ * the sender of the message that was received last.
+ */
+ @volatile
+ private[actors] var senders: List[OutputChannel[Any]] = List()
+
+ /* This option holds a TimerTask when the actor waits in a
+ * reactWithin. The TimerTask is cancelled when the actor
+ * resumes.
+ *
+ * guarded by this
+ */
+ private[actors] var onTimeout: Option[TimerTask] = None
+
+ /**
+ * Returns the $actor which sent the last received message.
+ */
+ protected[actors] def internalSender: OutputChannel[Any] = senders.head
+
+ /**
+ * Replies with <code>msg</code> to the sender.
+ */
+ protected[actors] def reply(msg: Any) {
+ internalSender ! msg
+ }
+
+ override def !(msg: Any) {
+ send(msg, Actor.rawSelf(scheduler))
+ }
+
+ override def forward(msg: Any) {
+ send(msg, Actor.sender)
+ }
+
+ private[actors] override def resumeReceiver(item: (Any, OutputChannel[Any]), handler: PartialFunction[Any, Any], onSameThread: Boolean) {
+ synchronized {
+ if (!onTimeout.isEmpty) {
+ onTimeout.get.cancel()
+ onTimeout = None
+ }
+ }
+ senders = List(item._2)
+ super.resumeReceiver(item, handler, onSameThread)
+ }
+
+ private[actors] override def searchMailbox(startMbox: MQueue[Any],
+ handler: PartialFunction[Any, Any],
+ resumeOnSameThread: Boolean) {
+ var tmpMbox = startMbox
+ var done = false
+ while (!done) {
+ val qel = tmpMbox.extractFirst((msg: Any, replyTo: OutputChannel[Any]) => {
+ senders = List(replyTo)
+ handler.isDefinedAt(msg)
+ })
+ if (tmpMbox ne mailbox)
+ tmpMbox.foreach((m, s) => mailbox.append(m, s))
+ if (null eq qel) {
+ synchronized {
+ // in mean time new stuff might have arrived
+ if (!sendBuffer.isEmpty) {
+ tmpMbox = new MQueue[Any]("Temp")
+ drainSendBuffer(tmpMbox)
+ // keep going
+ } else {
+ waitingFor = handler
+ // see Reactor.searchMailbox
+ throw Actor.suspendException
+ }
+ }
+ } else {
+ resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
+ done = true
+ }
+ }
+ }
+
+ private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
+ new ReplyReactorTask(this, fun, handler, msg)
+
+ protected[actors] override def react(handler: PartialFunction[Any, Unit]): Nothing = {
+ assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
+ super.react(handler)
+ }
+
+
+ /**
+ * Receives a message from this $actor's mailbox within a certain
+ * time span.
+ *
+ * This method never returns. Therefore, the rest of the computation
+ * has to be contained in the actions of the partial function.
+ *
+ * @param msec the time span before timeout
+ * @param handler a partial function with message patterns and actions
+ */
+ protected[actors] def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
+ assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
+
+ synchronized { drainSendBuffer(mailbox) }
+
+ // first, remove spurious TIMEOUT message from mailbox if any
+ mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
+
+ while (true) {
+ val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
+ senders = List(replyTo)
+ handler isDefinedAt m
+ })
+ if (null eq qel) {
+ synchronized {
+ // in mean time new messages might have arrived
+ if (!sendBuffer.isEmpty) {
+ drainSendBuffer(mailbox)
+ // keep going
+ } else if (msec == 0L) {
+ // throws Actor.suspendException
+ resumeReceiver((TIMEOUT, this), handler, false)
+ } else {
+ waitingFor = handler
+ val thisActor = this
+ onTimeout = Some(new TimerTask {
+ def run() { thisActor.send(TIMEOUT, thisActor) }
+ })
+ Actor.timer.schedule(onTimeout.get, msec)
+ throw Actor.suspendException
+ }
+ }
+ } else
+ resumeReceiver((qel.msg, qel.session), handler, false)
+ }
+ throw Actor.suspendException
+ }
+
+ override def getState: Actor.State.Value = synchronized {
+ if (waitingFor ne Reactor.waitingForNone) {
+ if (onTimeout.isEmpty)
+ Actor.State.Suspended
+ else
+ Actor.State.TimedSuspended
+ } else
+ _state
+ }
+
+}
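InternalReplyReactor keeps the reply-to-sender protocol of the old ReplyReactor: every send carries the sender's OutputChannel along with the message, and reply (backed by the renamed internalSender) targets it. A minimal round-trip sketch under the usual scala.actors entry points:

import scala.actors.Actor._

val echo = actor {
  loop {
    react {
      case msg => reply("echo: " + msg)   // sent back over the channel recorded with msg
    }
  }
}

println(echo !? "hello")                  // !? supplies a reply channel; prints "echo: hello"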
diff --git a/src/actors/scala/actors/OutputChannel.scala b/src/actors/scala/actors/OutputChannel.scala
index 089b3d0981..1fba684975 100644
--- a/src/actors/scala/actors/OutputChannel.scala
+++ b/src/actors/scala/actors/OutputChannel.scala
@@ -43,5 +43,5 @@ trait OutputChannel[-Msg] {
/**
* Returns the `Actor` that is receiving from this $actor.
*/
- def receiver: Actor
+ def receiver: InternalActor
}
diff --git a/src/actors/scala/actors/ReactChannel.scala b/src/actors/scala/actors/ReactChannel.scala
index fccde34272..81a166c1a4 100644
--- a/src/actors/scala/actors/ReactChannel.scala
+++ b/src/actors/scala/actors/ReactChannel.scala
@@ -12,7 +12,7 @@ package scala.actors
/**
* @author Philipp Haller
*/
-private[actors] class ReactChannel[Msg](receiver: ReplyReactor) extends InputChannel[Msg] {
+private[actors] class ReactChannel[Msg](receiver: InternalReplyReactor) extends InputChannel[Msg] {
private case class SendToReactor(channel: ReactChannel[Msg], msg: Msg)
diff --git a/src/actors/scala/actors/Reactor.scala b/src/actors/scala/actors/Reactor.scala
index 7d21e9f91e..8fc7578344 100644
--- a/src/actors/scala/actors/Reactor.scala
+++ b/src/actors/scala/actors/Reactor.scala
@@ -253,7 +253,7 @@ trait Reactor[Msg >: Null] extends OutputChannel[Msg] with Combinators {
_state
}
- implicit def mkBody[A](body: => A) = new Actor.Body[A] {
+ implicit def mkBody[A](body: => A) = new InternalActor.Body[A] {
def andThen[B](other: => B): Unit = Reactor.this.seq(body, other)
}
diff --git a/src/actors/scala/actors/ReactorCanReply.scala b/src/actors/scala/actors/ReactorCanReply.scala
index 68f9999776..dabd0832f0 100644
--- a/src/actors/scala/actors/ReactorCanReply.scala
+++ b/src/actors/scala/actors/ReactorCanReply.scala
@@ -16,7 +16,7 @@ package scala.actors
* @author Philipp Haller
*/
private[actors] trait ReactorCanReply extends CanReply[Any, Any] {
- _: ReplyReactor =>
+ _: InternalReplyReactor =>
type Future[+P] = scala.actors.Future[P]
diff --git a/src/actors/scala/actors/ReplyReactor.scala b/src/actors/scala/actors/ReplyReactor.scala
index 0e5ce00c91..0ffbbd3cce 100644
--- a/src/actors/scala/actors/ReplyReactor.scala
+++ b/src/actors/scala/actors/ReplyReactor.scala
@@ -5,165 +5,12 @@
** /____/\___/_/ |_/____/_/ | | **
** |/ **
\* */
-
package scala.actors
-import java.util.{Timer, TimerTask}
-
-/**
- * Extends the [[scala.actors.Reactor]] trait with methods to reply to the
- * sender of a message.
- *
- * Sending a message to a `ReplyReactor` implicitly passes a reference to
- * the sender together with the message.
- *
- * @author Philipp Haller
- *
- * @define actor `ReplyReactor`
- */
-trait ReplyReactor extends Reactor[Any] with ReactorCanReply {
-
- /* A list of the current senders. The head of the list is
- * the sender of the message that was received last.
- */
- @volatile
- private[actors] var senders: List[OutputChannel[Any]] = List()
-
- /* This option holds a TimerTask when the actor waits in a
- * reactWithin. The TimerTask is cancelled when the actor
- * resumes.
- *
- * guarded by this
- */
- private[actors] var onTimeout: Option[TimerTask] = None
-
- /**
- * Returns the $actor which sent the last received message.
- */
- protected[actors] def sender: OutputChannel[Any] = senders.head
-
- /**
- * Replies with `msg` to the sender.
- */
- protected[actors] def reply(msg: Any) {
- sender ! msg
- }
-
- override def !(msg: Any) {
- send(msg, Actor.rawSelf(scheduler))
- }
-
- override def forward(msg: Any) {
- send(msg, Actor.sender)
- }
-
- private[actors] override def resumeReceiver(item: (Any, OutputChannel[Any]), handler: PartialFunction[Any, Any], onSameThread: Boolean) {
- synchronized {
- if (!onTimeout.isEmpty) {
- onTimeout.get.cancel()
- onTimeout = None
- }
- }
- senders = List(item._2)
- super.resumeReceiver(item, handler, onSameThread)
- }
-
- private[actors] override def searchMailbox(startMbox: MQueue[Any],
- handler: PartialFunction[Any, Any],
- resumeOnSameThread: Boolean) {
- var tmpMbox = startMbox
- var done = false
- while (!done) {
- val qel = tmpMbox.extractFirst((msg: Any, replyTo: OutputChannel[Any]) => {
- senders = List(replyTo)
- handler.isDefinedAt(msg)
- })
- if (tmpMbox ne mailbox)
- tmpMbox.foreach((m, s) => mailbox.append(m, s))
- if (null eq qel) {
- synchronized {
- // in mean time new stuff might have arrived
- if (!sendBuffer.isEmpty) {
- tmpMbox = new MQueue[Any]("Temp")
- drainSendBuffer(tmpMbox)
- // keep going
- } else {
- waitingFor = handler
- // see Reactor.searchMailbox
- throw Actor.suspendException
- }
- }
- } else {
- resumeReceiver((qel.msg, qel.session), handler, resumeOnSameThread)
- done = true
- }
- }
- }
-
- private[actors] override def makeReaction(fun: () => Unit, handler: PartialFunction[Any, Any], msg: Any): Runnable =
- new ReplyReactorTask(this, fun, handler, msg)
-
- protected[actors] override def react(handler: PartialFunction[Any, Unit]): Nothing = {
- assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
- super.react(handler)
- }
-
- /**
- * Receives a message from this $actor's mailbox within a certain
- * time span.
- *
- * This method never returns. Therefore, the rest of the computation
- * has to be contained in the actions of the partial function.
- *
- * @param msec the time span before timeout
- * @param handler a partial function with message patterns and actions
- */
- protected[actors] def reactWithin(msec: Long)(handler: PartialFunction[Any, Unit]): Nothing = {
- assert(Actor.rawSelf(scheduler) == this, "react on channel belonging to other actor")
-
- synchronized { drainSendBuffer(mailbox) }
-
- // first, remove spurious TIMEOUT message from mailbox if any
- mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => m == TIMEOUT)
-
- while (true) {
- val qel = mailbox.extractFirst((m: Any, replyTo: OutputChannel[Any]) => {
- senders = List(replyTo)
- handler isDefinedAt m
- })
- if (null eq qel) {
- synchronized {
- // in mean time new messages might have arrived
- if (!sendBuffer.isEmpty) {
- drainSendBuffer(mailbox)
- // keep going
- } else if (msec == 0L) {
- // throws Actor.suspendException
- resumeReceiver((TIMEOUT, this), handler, false)
- } else {
- waitingFor = handler
- val thisActor = this
- onTimeout = Some(new TimerTask {
- def run() { thisActor.send(TIMEOUT, thisActor) }
- })
- Actor.timer.schedule(onTimeout.get, msec)
- throw Actor.suspendException
- }
- }
- } else
- resumeReceiver((qel.msg, qel.session), handler, false)
- }
- throw Actor.suspendException
- }
-
- override def getState: Actor.State.Value = synchronized {
- if (waitingFor ne Reactor.waitingForNone) {
- if (onTimeout.isEmpty)
- Actor.State.Suspended
- else
- Actor.State.TimedSuspended
- } else
- _state
- }
-
+@deprecated("Scala Actors are beeing removed from the standard library. Please refer to the migration guide.", "2.10")
+trait ReplyReactor extends InternalReplyReactor {
+
+ protected[actors] def sender: OutputChannel[Any] = super.internalSender
+
}
+
diff --git a/src/actors/scala/actors/ReplyReactorTask.scala b/src/actors/scala/actors/ReplyReactorTask.scala
index cb63d7e000..d38eb50381 100644
--- a/src/actors/scala/actors/ReplyReactorTask.scala
+++ b/src/actors/scala/actors/ReplyReactorTask.scala
@@ -17,13 +17,13 @@ package scala.actors
* changes to the underlying var invisible.) I can't figure out what's supposed
* to happen, so I renamed the constructor parameter to at least be less confusing.
*/
-private[actors] class ReplyReactorTask(replyReactor: ReplyReactor,
+private[actors] class ReplyReactorTask(replyReactor: InternalReplyReactor,
fun: () => Unit,
handler: PartialFunction[Any, Any],
msg: Any)
extends ReactorTask(replyReactor, fun, handler, msg) {
- var saved: ReplyReactor = _
+ var saved: InternalReplyReactor = _
protected override def beginExecution() {
saved = Actor.tl.get
diff --git a/src/actors/scala/actors/UncaughtException.scala b/src/actors/scala/actors/UncaughtException.scala
index 3e6efe3b7c..a3e7f795f1 100644
--- a/src/actors/scala/actors/UncaughtException.scala
+++ b/src/actors/scala/actors/UncaughtException.scala
@@ -20,7 +20,7 @@ package scala.actors
* @author Philipp Haller
* @author Erik Engbrecht
*/
-case class UncaughtException(actor: Actor,
+case class UncaughtException(actor: InternalActor,
message: Option[Any],
sender: Option[OutputChannel[Any]],
thread: Thread,
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index e0f31a5db2..7f8343a84e 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -111,11 +111,12 @@
<deploy-local name="scala-library" version="@{version}" repository="@{repository}" />
<deploy-local name="scala-compiler" version="@{version}" repository="@{repository}" />
<deploy-local-plugin name="continuations" version="@{version}" repository="@{repository}"/>
+ <deploy-local name="scala-actors" version="@{version}" repository="@{repository}" />
<deploy-local name="scala-dbc" version="@{version}" repository="@{repository}" />
<deploy-local name="scala-swing" version="@{version}" repository="@{repository}"/>
- <deploy-local name="scalap" version="@{version}" repository="@{repository}"/>
- <deploy-local name="scala-partest" version="@{version}" repository="@{repository}"/>
- <deploy-local name="jline" version="@{version}" repository="@{repository}"/>
+ <deploy-local name="scalap" version="@{version}" repository="@{repository}"/>
+ <deploy-local name="scala-partest" version="@{version}" repository="@{repository}"/>
+ <deploy-local name="jline" version="@{version}" repository="@{repository}"/>
</sequential>
</macrodef>
</target>
@@ -168,12 +169,13 @@
<artifact:attach type="jar" file="scala-library/scala-library-docs.jar" classifier="javadoc" />
</extra-attachments>
</deploy-remote>
- <deploy-remote name="jline" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="jline" version="@{version}" repository="@{repository}"/>
<deploy-remote name="scala-compiler" version="@{version}" repository="@{repository}" />
<deploy-remote name="scala-dbc" version="@{version}" repository="@{repository}" />
<deploy-remote name="scala-swing" version="@{version}" repository="@{repository}"/>
- <deploy-remote name="scalap" version="@{version}" repository="@{repository}"/>
- <deploy-remote name="scala-partest" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="scala-actors" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="scalap" version="@{version}" repository="@{repository}"/>
+ <deploy-remote name="scala-partest" version="@{version}" repository="@{repository}"/>
<deploy-remote-plugin name="continuations" version="@{version}" repository="@{repository}"/>
</sequential>
</macrodef>
@@ -234,13 +236,14 @@
<attribute name="version" />
<sequential>
<deploy-remote-plugin-signed name="continuations" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="scala-library" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="jline" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scala-library" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="jline" version="@{version}" repository="@{repository}"/>
<deploy-remote-signed name="scala-compiler" version="@{version}" repository="@{repository}" />
<deploy-remote-signed name="scala-dbc" version="@{version}" repository="@{repository}" />
<deploy-remote-signed name="scala-swing" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="scalap" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="scala-partest" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scala-actors" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scalap" version="@{version}" repository="@{repository}"/>
+ <deploy-remote-signed name="scala-partest" version="@{version}" repository="@{repository}"/>
</sequential>
</macrodef>
</target>
diff --git a/src/build/maven/scala-actors-pom.xml b/src/build/maven/scala-actors-pom.xml
new file mode 100644
index 0000000000..12bae2a23d
--- /dev/null
+++ b/src/build/maven/scala-actors-pom.xml
@@ -0,0 +1,62 @@
+<project
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-actors</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
+ <name>Scala Actors library</name>
+ <description>Deprecated Actors Library for Scala</description>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2006</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD-like</name>
+ <url>http://www.scala-lang.org/downloads/license.html
+ </url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:svn:http://lampsvn.epfl.ch/svn-repos/scala/scala/trunk</connection>
+ <url>https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk</url>
+ </scm>
+ <issueManagement>
+ <system>trac</system>
+ <url>http://lampsvn.epfl.ch/trac/scala
+ </url>
+ </issueManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
+</project>
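The new POM publishes the actors code as a separate org.scala-lang:scala-actors artifact whose only dependency is scala-library. Assuming those coordinates, a downstream sbt build would reference it with a line like the following (a sketch; the version string is a placeholder standing in for the @VERSION@ token above):

libraryDependencies += "org.scala-lang" % "scala-actors" % "2.10.0"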
diff --git a/src/build/pack.xml b/src/build/pack.xml
index e79895e3a8..c12cfa44bf 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -114,8 +114,8 @@ MAIN DISTRIBUTION SBAZ
version="${version.number}"
desc="The Scala library. This is the minimal requirement to run any Scala program."
link="${sbaz.universe}/scala-library-${version.number}.sbp">
- <libset dir="${dist.dir}/lib" includes="scala-library.jar,scala-dbc.jar,scala-swing.jar"/>
- <srcset dir="${dist.dir}/src" includes="scala-library-src.jar,scala-dbc-src.jar,scala-swing-src.jar"/>
+ <libset dir="${dist.dir}/lib" includes="scala-library.jar,scala-dbc.jar,scala-swing.jar,scala-actors.jar"/>
+ <srcset dir="${dist.dir}/src" includes="scala-library-src.jar,scala-dbc-src.jar,scala-swing-src.jar,scala-actors-src.jar"/>
<looseset destination="doc">
<fileset dir="${dist.dir}/doc" includes="LICENSE,README"/>
</looseset>
@@ -228,6 +228,7 @@ MAIN DISTRIBUTION SBAZ
<mvn-copy-lib mvn.artifact.name="scala-compiler"/>
<mvn-copy-lib mvn.artifact.name="scala-dbc"/>
<mvn-copy-lib mvn.artifact.name="scala-swing"/>
+ <mvn-copy-lib mvn.artifact.name="scala-actors"/>
<mvn-copy-lib mvn.artifact.name="scala-partest"/>
<mvn-copy-lib mvn.artifact.name="scalap"/>
</target>
@@ -290,11 +291,13 @@ MAIN DISTRIBUTION SBAZ
basedir="${build-docs.dir}/continuations-plugin">
<include name="**/*"/>
</jar>
- <!-- TODO - Scala swing, dbc should maybe have thier own jar, but creating it is SLOW. -->
+ <!-- TODO - Scala swing, dbc and actors should maybe have their own jar, but creating it is SLOW. -->
<copy tofile="${dists.dir}/maven/${version.number}/scala-swing/scala-swing-docs.jar"
file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
<copy tofile="${dists.dir}/maven/${version.number}/scala-dbc/scala-dbc-docs.jar"
file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
+ <copy tofile="${dists.dir}/maven/${version.number}/scala-actors/scala-actors-docs.jar"
+ file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
</target>
<target name="pack-maven.latest.unix" depends="pack-maven.docs" unless="os.win">
diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala
index bd823c3128..a2dd6fc4c3 100644
--- a/src/compiler/scala/reflect/internal/Definitions.scala
+++ b/src/compiler/scala/reflect/internal/Definitions.scala
@@ -764,9 +764,25 @@ trait Definitions extends reflect.api.StandardDefinitions {
else
removeRedundantObjects(parents)
}
+
+ def typeStringNoPackage(tp: Type) =
+ "" + tp stripPrefix tp.typeSymbol.enclosingPackage.fullName + "."
+
+ def briefParentsString(parents: List[Type]) =
+ normalizedParents(parents) map typeStringNoPackage mkString " with "
+
def parentsString(parents: List[Type]) =
normalizedParents(parents) mkString " with "
+ def typeParamsString(tp: Type) = tp match {
+ case PolyType(tparams, _) => tparams map (_.defString) mkString ("[", ",", "]")
+ case _ => ""
+ }
+ def valueParamsString(tp: Type) = tp match {
+ case MethodType(params, _) => params map (_.defString) mkString ("(", ",", ")")
+ case _ => ""
+ }
+
// members of class java.lang.{ Object, String }
lazy val Object_## = enterNewMethod(ObjectClass, nme.HASHHASH, Nil, inttype, FINAL)
lazy val Object_== = enterNewMethod(ObjectClass, nme.EQ, anyrefparam, booltype, FINAL)
@@ -970,7 +986,7 @@ trait Definitions extends reflect.api.StandardDefinitions {
case (_, restpe) => NullaryMethodType(restpe)
}
- msym setInfoAndEnter polyType(tparams, mtpe)
+ msym setInfoAndEnter genPolyType(tparams, mtpe)
}
/** T1 means one type parameter.
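The two pattern matches added above move type-parameter and value-parameter formatting out of Symbols.infoString. A standalone sketch of that split, with a simplified stand-in for the compiler's Type hierarchy (PolyType/MethodType below are not the real classes):

// Standalone sketch (not compiler code): how typeParamsString / valueParamsString
// decompose a symbol's info. The Tpe hierarchy is a stand-in for
// scala.reflect.internal.Types.
sealed trait Tpe
case class PolyType(tparams: List[String], result: Tpe) extends Tpe
case class MethodType(params: List[String], result: Tpe) extends Tpe
case class NamedType(name: String) extends Tpe

object InfoStringSketch extends App {
  def typeParamsString(tp: Tpe): String = tp match {
    case PolyType(tparams, _) => tparams.mkString("[", ",", "]")
    case _                    => ""
  }
  def valueParamsString(tp: Tpe): String = tp match {
    case MethodType(params, _) => params.mkString("(", ",", ")")
    case _                    => ""
  }

  val info = PolyType(List("A"), MethodType(List("x: A"), NamedType("A")))
  println(typeParamsString(info) + valueParamsString(info.result) + ": A")  // [A](x: A): A
}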
diff --git a/src/compiler/scala/reflect/internal/Flags.scala b/src/compiler/scala/reflect/internal/Flags.scala
index 8aae80eed4..3110d73461 100644
--- a/src/compiler/scala/reflect/internal/Flags.scala
+++ b/src/compiler/scala/reflect/internal/Flags.scala
@@ -107,7 +107,7 @@ class ModifierFlags {
// pre: PRIVATE or PROTECTED are also set
final val JAVA = 0x00100000 // symbol was defined by a Java class
final val STATIC = 0x00800000 // static field, method or class
- final val CASEACCESSOR = 0x01000000 // symbol is a case parameter (or its accessor)
+ final val CASEACCESSOR = 0x01000000 // symbol is a case parameter (or its accessor, or a GADT skolem)
final val TRAIT = 0x02000000 // symbol is a trait
final val DEFAULTPARAM = 0x02000000 // the parameter has a default value
final val PARAMACCESSOR = 0x20000000 // for field definitions generated for primary constructor
diff --git a/src/compiler/scala/reflect/internal/Importers.scala b/src/compiler/scala/reflect/internal/Importers.scala
index 1003fa804f..04381937d1 100644
--- a/src/compiler/scala/reflect/internal/Importers.scala
+++ b/src/compiler/scala/reflect/internal/Importers.scala
@@ -351,6 +351,8 @@ trait Importers { self: SymbolTable =>
new ApplyToImplicitArgs(importTree(fun), args map importTree)
case _: from.ApplyImplicitView =>
new ApplyImplicitView(importTree(fun), args map importTree)
+ case _: from.ApplyConstructor =>
+ new ApplyConstructor(importTree(fun), args map importTree)
case _ =>
new Apply(importTree(fun), args map importTree)
}
diff --git a/src/compiler/scala/reflect/internal/Kinds.scala b/src/compiler/scala/reflect/internal/Kinds.scala
index 23bff950b8..eca63c7c15 100644
--- a/src/compiler/scala/reflect/internal/Kinds.scala
+++ b/src/compiler/scala/reflect/internal/Kinds.scala
@@ -49,9 +49,15 @@ trait Kinds {
private def kindMessage(a: Symbol, p: Symbol)(f: (String, String) => String): String =
f(a+qualify(a,p), p+qualify(p,a))
+ // Normally it's nicer to print nothing rather than '>: Nothing <: Any' all over
+ // the place, but here we need it for the message to make sense.
private def strictnessMessage(a: Symbol, p: Symbol) =
- kindMessage(a, p)("%s's bounds %s are stricter than %s's declared bounds %s".format(
- _, a.info, _, p.info))
+ kindMessage(a, p)("%s's bounds%s are stricter than %s's declared bounds%s".format(
+ _, a.info, _, p.info match {
+ case tb @ TypeBounds(_, _) if tb.isEmptyBounds => " >: Nothing <: Any"
+ case tb => "" + tb
+ })
+ )
private def varianceMessage(a: Symbol, p: Symbol) =
kindMessage(a, p)("%s is %s, but %s is declared %s".format(_, varStr(a), _, varStr(p)))
@@ -62,11 +68,16 @@ trait Kinds {
_, countAsString(p.typeParams.length))
)
+ private def buildMessage(xs: List[SymPair], f: (Symbol, Symbol) => String) = (
+ if (xs.isEmpty) ""
+ else xs map f.tupled mkString ("\n", ", ", "")
+ )
+
def errorMessage(targ: Type, tparam: Symbol): String = (
- (targ+"'s type parameters do not match "+tparam+"'s expected parameters: ")
- + (arity map { case (a, p) => arityMessage(a, p) } mkString ", ")
- + (variance map { case (a, p) => varianceMessage(a, p) } mkString ", ")
- + (strictness map { case (a, p) => strictnessMessage(a, p) } mkString ", ")
+ (targ+"'s type parameters do not match "+tparam+"'s expected parameters:")
+ + buildMessage(arity, arityMessage)
+ + buildMessage(variance, varianceMessage)
+ + buildMessage(strictness, strictnessMessage)
)
}
val NoKindErrors = KindErrors(Nil, Nil, Nil)
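The effect of the buildMessage refactoring is presentational: each non-empty group of kind errors now begins on its own line instead of being comma-joined onto the header. A minimal sketch, with (String, String) pairs standing in for the compiler's SymPair:

// Sketch of the new layout for kind-error messages.
object KindMessageSketch extends App {
  def buildMessage(xs: List[(String, String)], f: (String, String) => String): String =
    if (xs.isEmpty) ""
    else xs.map(f.tupled).mkString("\n", ", ", "")

  val arity = List(("F[_]", "A"))
  println("T's type parameters do not match U's expected parameters:" +
    buildMessage(arity, (a, p) => a + " has one type parameter, but " + p + " has none"))
}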
diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala
index 9678d2b8cd..f4039cf6d3 100644
--- a/src/compiler/scala/reflect/internal/Symbols.scala
+++ b/src/compiler/scala/reflect/internal/Symbols.scala
@@ -269,11 +269,17 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
/** Create a new existential type skolem with this symbol its owner,
* based on the given symbol and origin.
*/
- def newExistentialSkolem(basis: Symbol, origin: AnyRef, name: TypeName = null, info: Type = null): TypeSkolem = {
- val skolem = newTypeSkolemSymbol(if (name eq null) basis.name.toTypeName else name, origin, basis.pos, (basis.flags | EXISTENTIAL) & ~PARAM)
- skolem setInfo (if (info eq null) basis.info cloneInfo skolem else info)
+ def newExistentialSkolem(basis: Symbol, origin: AnyRef): TypeSkolem = {
+ val skolem = newTypeSkolemSymbol(basis.name.toTypeName, origin, basis.pos, (basis.flags | EXISTENTIAL) & ~PARAM)
+ skolem setInfo (basis.info cloneInfo skolem)
}
+ // flags set up to maintain TypeSkolem's invariant: origin.isInstanceOf[Symbol] == !hasFlag(EXISTENTIAL)
+ // CASEACCESSOR | SYNTHETIC used to single this symbol out in deskolemizeGADT
+ def newGADTSkolem(name: TypeName, origin: Symbol, info: Type): TypeSkolem =
+ newTypeSkolemSymbol(name, origin, origin.pos, origin.flags & ~(EXISTENTIAL | PARAM) | CASEACCESSOR | SYNTHETIC) setInfo info
+
+
final def newExistential(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): Symbol =
newAbstractType(name, pos, EXISTENTIAL | newFlags)
@@ -495,6 +501,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// List[T] forSome { type T }
final def isExistentialSkolem = isExistentiallyBound && isSkolem
final def isExistentialQuantified = isExistentiallyBound && !isSkolem
+ final def isGADTSkolem = isSkolem && hasFlag(CASEACCESSOR | SYNTHETIC)
// class C extends D( { class E { ... } ... } ). Here, E is a class local to a constructor
final def isClassLocalToConstructor = isClass && hasFlag(INCONSTRUCTOR)
@@ -1865,7 +1872,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def unpackLocation: AnyRef = null
/** Remove private modifier from symbol `sym`s definition. If `sym` is a
- * term symbol rename it by expanding its name to avoid name clashes
+ * is not a constructor nor a static module, rename it by expanding its name to avoid name clashes
+ * @param base the fully qualified name of this class will be appended if name expansion is needed
*/
final def makeNotPrivate(base: Symbol) {
if (this.isPrivate) {
@@ -1985,6 +1993,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
else if (isTrait) ("trait", "trait", "TRT")
else if (isClass) ("class", "class", "CLS")
else if (isType) ("type", "type", "TPE")
+ else if (isClassConstructor && isPrimaryConstructor) ("primary constructor", "constructor", "PCTOR")
else if (isClassConstructor) ("constructor", "constructor", "CTOR")
else if (isSourceMethod) ("method", "method", "METH")
else if (isTerm) ("value", "value", "VAL")
@@ -2070,47 +2079,32 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
case s => " in " + s
}
def fullLocationString: String = toString + locationString
- def signatureString: String = if (hasRawInfo) infoString(rawInfo) else "<_>"
+ def signatureString: String = if (hasRawInfo) infoString(rawInfo) else "<_>"
/** String representation of symbol's definition following its name */
final def infoString(tp: Type): String = {
- def typeParamsString: String = tp match {
- case PolyType(tparams, _) if tparams.nonEmpty =>
- (tparams map (_.defString)).mkString("[", ",", "]")
- case _ =>
- ""
- }
- if (isClass)
- typeParamsString + " extends " + tp.resultType
- else if (isAliasType)
- typeParamsString + " = " + tp.resultType
- else if (isAbstractType)
- typeParamsString + {
- tp.resultType match {
- case TypeBounds(lo, hi) =>
- (if (lo.typeSymbol == NothingClass) "" else " >: " + lo) +
- (if (hi.typeSymbol == AnyClass) "" else " <: " + hi)
- case rtp =>
- "<: " + rtp
- }
- }
- else if (isModule)
- moduleClass.infoString(tp)
- else
- tp match {
- case PolyType(tparams, res) =>
- typeParamsString + infoString(res)
- case NullaryMethodType(res) =>
- infoString(res)
- case MethodType(params, res) =>
- params.map(_.defString).mkString("(", ",", ")") + infoString(res)
- case _ =>
- ": " + tp
+ def parents = (
+ if (settings.debug.value) parentsString(tp.parents)
+ else briefParentsString(tp.parents)
+ )
+ if (isType) typeParamsString(tp) + (
+ if (isClass) " extends " + parents
+ else if (isAliasType) " = " + tp.resultType
+ else tp.resultType match {
+ case rt @ TypeBounds(_, _) => "" + rt
+ case rt => " <: " + rt
}
+ )
+ else if (isModule) moduleClass.infoString(tp)
+ else tp match {
+ case PolyType(tparams, res) => typeParamsString(tp) + infoString(res)
+ case NullaryMethodType(res) => infoString(res)
+ case MethodType(params, res) => valueParamsString(tp) + infoString(res)
+ case _ => ": " + tp
+ }
}
- def infosString = infos.toString()
-
+ def infosString = infos.toString
def debugLocationString = fullLocationString + " " + debugFlagString
def debugFlagString = hasFlagsToString(-1L)
def hasFlagsToString(mask: Long): String = flagsToString(
@@ -2129,12 +2123,17 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
else if (owner.isRefinementClass) ExplicitFlags & ~OVERRIDE
else ExplicitFlags
+ // make the error message more googlable
+ def flagsExplanationString =
+ if (isGADTSkolem) " (this is a GADT skolem)"
+ else ""
+
def accessString = hasFlagsToString(PRIVATE | PROTECTED | LOCAL)
def defaultFlagString = hasFlagsToString(defaultFlagMask)
private def defStringCompose(infoString: String) = compose(
defaultFlagString,
keyString,
- varianceString + nameString + infoString
+ varianceString + nameString + infoString + flagsExplanationString
)
/** String representation of symbol's definition. It uses the
* symbol's raw info to avoid forcing types.
@@ -2477,7 +2476,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* where the skolem was introduced (this is important for knowing when to pack it
* again into ab Existential). origin is `null` only in skolemizeExistentials called
* from <:< or isAsSpecific, because here its value does not matter.
- * I elieve the following invariant holds:
+ * I believe the following invariant holds:
*
* origin.isInstanceOf[Symbol] == !hasFlag(EXISTENTIAL)
*/
@@ -2671,7 +2670,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def rawInfo: Type = NoType
protected def doCookJavaRawInfo() {}
override def accessBoundary(base: Symbol): Symbol = RootClass
- def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol = abort()
+ def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol = abort("NoSymbol.clone()")
override def originalEnclosingMethod = this
override def owner: Symbol =
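A rough sketch of the flag trick the new GADT skolems rely on: newGADTSkolem stamps the skolem with CASEACCESSOR and SYNTHETIC, and isGADTSkolem tests for that combination on a skolem. The Sym class and the SYNTHETIC literal below are assumptions of the sketch, not compiler code:

// Sketch only: CASEACCESSOR is the value shown in Flags.scala above,
// SYNTHETIC's literal is assumed here.
object GADTSkolemSketch extends App {
  val CASEACCESSOR = 0x01000000L
  val SYNTHETIC    = 0x00200000L   // assumed literal, for the sketch only

  case class Sym(flags: Long, isSkolem: Boolean) {
    def hasFlag(mask: Long) = (flags & mask) != 0L
    def isGADTSkolem        = isSkolem && hasFlag(CASEACCESSOR | SYNTHETIC)
  }

  val gadtSkolem  = Sym(CASEACCESSOR | SYNTHETIC, isSkolem = true)
  val existential = Sym(0L, isSkolem = true)
  println(gadtSkolem.isGADTSkolem)   // true
  println(existential.isGADTSkolem)  // false
}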
diff --git a/src/compiler/scala/reflect/internal/TreeInfo.scala b/src/compiler/scala/reflect/internal/TreeInfo.scala
index 769d7a9ed1..ce3de94335 100644
--- a/src/compiler/scala/reflect/internal/TreeInfo.scala
+++ b/src/compiler/scala/reflect/internal/TreeInfo.scala
@@ -17,7 +17,7 @@ abstract class TreeInfo {
val global: SymbolTable
import global._
- import definitions.{ isVarArgsList, isCastSymbol, ThrowableClass }
+ import definitions.{ isVarArgsList, isCastSymbol, ThrowableClass, TupleClass }
/* Does not seem to be used. Not sure what it does anyway.
def isOwnerDefinition(tree: Tree): Boolean = tree match {
@@ -312,6 +312,24 @@ abstract class TreeInfo {
case _ => false
}
+ /** Is this tree comprised of nothing but identifiers,
+ * but possibly in bindings or tuples? For instance
+ *
+ * foo @ (bar, (baz, quux))
+ *
+ * is a variable pattern; if the structure matches,
+ * then the remainder is inevitable.
+ */
+ def isVariablePattern(tree: Tree): Boolean = tree match {
+ case Bind(name, pat) => isVariablePattern(pat)
+ case Ident(name) => true
+ case Apply(sel, args) =>
+ ( isReferenceToScalaMember(sel, TupleClass(args.size).name.toTermName)
+ && (args forall isVariablePattern)
+ )
+ case _ => false
+ }
+
/** Is this argument node of the form <expr> : _* ?
*/
def isWildcardStarArg(tree: Tree): Boolean = tree match {
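A self-contained sketch of the isVariablePattern check on a toy pattern AST (Pat and its cases are stand-ins for compiler Trees): only binds, plain identifiers and tuples of those qualify, which is what lets the TreeBuilder change later in this patch skip the refutability filter.

sealed trait Pat
case class Bind(name: String, pat: Pat) extends Pat
case class Id(name: String) extends Pat
case class Tup(args: List[Pat]) extends Pat
case class Extractor(name: String, args: List[Pat]) extends Pat

object VariablePatternSketch extends App {
  def isVariablePattern(p: Pat): Boolean = p match {
    case Bind(_, pat) => isVariablePattern(pat)
    case Id(_)        => true
    case Tup(args)    => args forall isVariablePattern
    case _            => false
  }

  // foo @ (bar, (baz, quux)) -- irrefutable
  println(isVariablePattern(Bind("foo", Tup(List(Id("bar"), Tup(List(Id("baz"), Id("quux"))))))))  // true
  // Some(x) -- refutable
  println(isVariablePattern(Extractor("Some", List(Id("x")))))                                     // false
}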
diff --git a/src/compiler/scala/reflect/internal/TreePrinters.scala b/src/compiler/scala/reflect/internal/TreePrinters.scala
index f823110440..8ed0ee6357 100644
--- a/src/compiler/scala/reflect/internal/TreePrinters.scala
+++ b/src/compiler/scala/reflect/internal/TreePrinters.scala
@@ -433,7 +433,7 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable =>
/** Hook for extensions */
def xprintTree(treePrinter: TreePrinter, tree: Tree) =
- treePrinter.print(tree.productPrefix+tree.productIterator.mkString("(", ", ", ")"))
+ treePrinter.print(tree.printingPrefix+tree.productIterator.mkString("(", ", ", ")"))
def newTreePrinter(writer: PrintWriter): TreePrinter = new TreePrinter(writer)
def newTreePrinter(stream: OutputStream): TreePrinter = newTreePrinter(new PrintWriter(stream))
diff --git a/src/compiler/scala/reflect/internal/Trees.scala b/src/compiler/scala/reflect/internal/Trees.scala
index 9b1712b790..1a40e0105c 100644
--- a/src/compiler/scala/reflect/internal/Trees.scala
+++ b/src/compiler/scala/reflect/internal/Trees.scala
@@ -350,8 +350,9 @@ trait Trees extends api.Trees { self: SymbolTable =>
"subst[%s, %s](%s)".format(fromStr, toStr, (from, to).zipped map (_ + " -> " + _) mkString ", ")
}
- // NOTE: if symbols in `from` occur multiple times in the `tree` passed to `transform`,
- // the resulting Tree will be a graph, not a tree... this breaks all sorts of stuff,
+ // NOTE: calls shallowDuplicate on trees in `to` to avoid problems when symbols in `from`
+ // occur multiple times in the `tree` passed to `transform`,
+ // otherwise, the resulting Tree would be a graph, not a tree... this breaks all sorts of stuff,
// notably concerning the mutable aspects of Trees (such as setting their .tpe)
class TreeSubstituter(from: List[Symbol], to: List[Tree]) extends Transformer {
override def transform(tree: Tree): Tree = tree match {
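A toy illustration of the aliasing problem the NOTE describes, assuming a single mutable field stands in for a tree's tpe; it is not the substituter itself:

// Splicing the same mutable node into two places turns the tree into a graph,
// so mutating one occurrence silently changes the other; per-occurrence copies
// (what shallowDuplicate provides) avoid that.
class Node(var tpe: String)

object SharingSketch extends App {
  val replacement = new Node("Int")

  val shared = List(replacement, replacement)   // no duplication: both slots alias one node
  shared.head.tpe = "String"
  println(shared(1).tpe)                        // "String" -- the other occurrence changed too

  val copies = List.fill(2)(new Node("Int"))    // one copy per occurrence
  copies.head.tpe = "String"
  println(copies(1).tpe)                        // still "Int"
}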
diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala
index 2382413a9a..a20853adc8 100644
--- a/src/compiler/scala/reflect/internal/Types.scala
+++ b/src/compiler/scala/reflect/internal/Types.scala
@@ -1041,8 +1041,8 @@ trait Types extends api.Types { self: SymbolTable =>
baseClasses.head.newOverloaded(this, members.toList)
}
}
- /** The existential skolems and existentially quantified variables which are free in this type */
- def existentialSkolems: List[Symbol] = {
+ /** The (existential or otherwise) skolems and existentially quantified variables which are free in this type */
+ def skolemsExceptMethodTypeParams: List[Symbol] = {
var boundSyms: List[Symbol] = List()
var skolems: List[Symbol] = List()
for (t <- this) {
@@ -1050,7 +1050,8 @@ trait Types extends api.Types { self: SymbolTable =>
case ExistentialType(quantified, qtpe) =>
boundSyms = boundSyms ::: quantified
case TypeRef(_, sym, _) =>
- if ((sym hasFlag EXISTENTIAL) && !(boundSyms contains sym) && !(skolems contains sym))
+ if ((sym.isExistentialSkolem || sym.isGADTSkolem) && // treat GADT skolems like existential skolems
+ !((boundSyms contains sym) || (skolems contains sym)))
skolems = sym :: skolems
case _ =>
}
@@ -1336,9 +1337,14 @@ trait Types extends api.Types { self: SymbolTable =>
case TypeBounds(_, _) => that <:< this
case _ => lo <:< that && that <:< hi
}
- def isEmptyBounds = (lo.typeSymbolDirect eq NothingClass) && (hi.typeSymbolDirect eq AnyClass)
+ private def lowerString = if (emptyLowerBound) "" else " >: " + lo
+ private def upperString = if (emptyUpperBound) "" else " <: " + hi
+ private def emptyLowerBound = lo.typeSymbolDirect eq NothingClass
+ private def emptyUpperBound = hi.typeSymbolDirect eq AnyClass
+ def isEmptyBounds = emptyLowerBound && emptyUpperBound
+
// override def isNullable: Boolean = NullClass.tpe <:< lo;
- override def safeToString = ">: " + lo + " <: " + hi
+ override def safeToString = lowerString + upperString
override def kind = "TypeBoundsType"
}
@@ -2616,15 +2622,6 @@ trait Types extends api.Types { self: SymbolTable =>
}
}
- // TODO: I don't really know why this happens -- maybe because
- // the owner hierarchy changes? the other workaround (besides
- // repackExistential) is to explicitly pass expectedTp as the type
- // argument for the call to guard, but repacking the existential
- // somehow feels more robust
- //
- // TODO: check if optimization makes a difference, try something else
- // if necessary (cache?)
-
/** Repack existential types, otherwise they sometimes get unpacked in the
* wrong location (type inference comes up with an unexpected skolem)
*/
@@ -3329,16 +3326,18 @@ trait Types extends api.Types { self: SymbolTable =>
* may or may not be poly? (It filched the standard "canonical creator" name.)
*/
object GenPolyType {
- def apply(tparams: List[Symbol], tpe: Type): Type =
- if (tparams nonEmpty) typeFun(tparams, tpe)
- else tpe // it's okay to be forgiving here
+ def apply(tparams: List[Symbol], tpe: Type): Type = (
+ if (tparams nonEmpty) typeFun(tparams, tpe)
+ else tpe // it's okay to be forgiving here
+ )
def unapply(tpe: Type): Option[(List[Symbol], Type)] = tpe match {
- case PolyType(tparams, restpe) => Some(tparams, restpe)
- case _ => Some(List(), tpe)
+ case PolyType(tparams, restpe) => Some((tparams, restpe))
+ case _ => Some((Nil, tpe))
}
}
+ def genPolyType(params: List[Symbol], tpe: Type): Type = GenPolyType(params, tpe)
- @deprecated("use GenPolyType(...) instead")
+ @deprecated("use genPolyType(...) instead", "2.10.0")
def polyType(params: List[Symbol], tpe: Type): Type = GenPolyType(params, tpe)
/** A creator for anonymous type functions, where the symbol for the type function still needs to be created.
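A small sketch of the new TypeBounds rendering, with plain strings in place of types: each bound is emitted only when it is non-trivial, so empty bounds (>: Nothing <: Any) now print as nothing.

case class Bounds(lo: String, hi: String) {
  private def emptyLo = lo == "Nothing"
  private def emptyHi = hi == "Any"
  def isEmptyBounds   = emptyLo && emptyHi
  override def toString =
    (if (emptyLo) "" else " >: " + lo) + (if (emptyHi) "" else " <: " + hi)
}

object BoundsSketch extends App {
  println("T" + Bounds("Nothing", "Any"))      // T
  println("T" + Bounds("Nothing", "AnyRef"))   // T <: AnyRef
  println("T" + Bounds("Null", "AnyRef"))      // T >: Null <: AnyRef
}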
diff --git a/src/compiler/scala/reflect/runtime/ToolBoxes.scala b/src/compiler/scala/reflect/runtime/ToolBoxes.scala
index 8cc4d5f788..28f12b378f 100644
--- a/src/compiler/scala/reflect/runtime/ToolBoxes.scala
+++ b/src/compiler/scala/reflect/runtime/ToolBoxes.scala
@@ -1,6 +1,7 @@
package scala.reflect
package runtime
+import scala.tools.nsc
import scala.tools.nsc.reporters.Reporter
import scala.tools.nsc.reporters.StoreReporter
import scala.tools.nsc.reporters.AbstractReporter
@@ -21,8 +22,7 @@ trait ToolBoxes extends { self: Universe =>
class ToolBox(val reporter: Reporter = new StoreReporter, val options: String = "") {
- class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter: scala.tools.nsc.reporters.Reporter)
- extends ReflectGlobal(settings, reporter) {
+ class ToolBoxGlobal(settings0: nsc.Settings, reporter0: nsc.reporters.Reporter) extends ReflectGlobal(settings0, reporter0) {
import definitions._
private val trace = scala.tools.nsc.util.trace when settings.debug.value
diff --git a/src/compiler/scala/reflect/runtime/Universe.scala b/src/compiler/scala/reflect/runtime/Universe.scala
index 324fee87ab..7a5dda3d8e 100644
--- a/src/compiler/scala/reflect/runtime/Universe.scala
+++ b/src/compiler/scala/reflect/runtime/Universe.scala
@@ -39,6 +39,11 @@ class Universe extends SymbolTable {
definitions.AnyValClass // force it.
+ type TreeAnnotation = Position
+ val NoTreeAnnotation: TreeAnnotation = NoPosition
+ def positionToAnnotation(pos: Position): TreeAnnotation = pos // TODO
+ def annotationToPosition(annot: TreeAnnotation): Position = annot //TODO
+
// establish root association to avoid cyclic dependency errors later
classToScala(classOf[java.lang.Object]).initialize
diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala
index 678f7b3028..d3f4688d4b 100755
--- a/src/compiler/scala/tools/nsc/ast/DocComments.scala
+++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala
@@ -229,31 +229,36 @@ trait DocComments { self: Global =>
* 1. It takes longer to run compared to merge
* 2. The inheritdoc annotation should not be used very often, as building the comment from pieces severely
* impacts performance
+ *
+ * @param parent The source (or parent) comment
+ * @param child The child (overriding member or usecase) comment
+ * @param sym The child symbol
+ * @return The child comment with the inheritdoc sections expanded
*/
- def expandInheritdoc(src: String, dst: String, sym: Symbol): String =
- if (dst.indexOf("@inheritdoc") == -1)
- dst
+ def expandInheritdoc(parent: String, child: String, sym: Symbol): String =
+ if (child.indexOf("@inheritdoc") == -1)
+ child
else {
- val srcSections = tagIndex(src)
- val dstSections = tagIndex(dst)
- val srcTagMap = sectionTagMap(src, srcSections)
- val srcNamedParams = Map() +
- ("@param" -> paramDocs(src, "@param", srcSections)) +
- ("@tparam" -> paramDocs(src, "@tparam", srcSections)) +
- ("@throws" -> paramDocs(src, "@throws", srcSections))
+ val parentSections = tagIndex(parent)
+ val childSections = tagIndex(child)
+ val parentTagMap = sectionTagMap(parent, parentSections)
+ val parentNamedParams = Map() +
+ ("@param" -> paramDocs(parent, "@param", parentSections)) +
+ ("@tparam" -> paramDocs(parent, "@tparam", parentSections)) +
+ ("@throws" -> paramDocs(parent, "@throws", parentSections))
val out = new StringBuilder
- def replaceInheritdoc(src: String, dst: String) =
- if (dst.indexOf("@inheritdoc") == -1)
- dst
- else
- dst.replaceAllLiterally("@inheritdoc", src)
+ def replaceInheritdoc(childSection: String, parentSection: => String) =
+ if (childSection.indexOf("@inheritdoc") == -1)
+ childSection
+ else
+ childSection.replaceAllLiterally("@inheritdoc", parentSection)
- def getSourceSection(section: (Int, Int)): String = {
+ def getParentSection(section: (Int, Int)): String = {
- def getSectionHeader = extractSectionTag(dst, section) match {
- case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(dst, section)
+ def getSectionHeader = extractSectionTag(child, section) match {
+ case param@("@param"|"@tparam"|"@throws") => param + " " + extractSectionParam(child, section)
case other => other
}
@@ -261,17 +266,19 @@ trait DocComments { self: Global =>
paramMap.get(param) match {
case Some(section) =>
// Cleanup the section tag and parameter
- val sectionTextBounds = extractSectionText(src, section)
- cleanupSectionText(src.substring(sectionTextBounds._1, sectionTextBounds._2))
+ val sectionTextBounds = extractSectionText(parent, section)
+ cleanupSectionText(parent.substring(sectionTextBounds._1, sectionTextBounds._2))
case None =>
reporter.info(sym.pos, "The \"" + getSectionHeader + "\" annotation of the " + sym +
" comment contains @inheritdoc, but the corresponding section in the parent is not defined.", true)
"<invalid inheritdoc annotation>"
}
- dst.substring(section._1, section._1 + 7) match {
- case param@("@param "|"@tparam"|"@throws") => sectionString(extractSectionParam(dst, section), srcNamedParams(param.trim))
- case _ => sectionString(extractSectionTag(dst, section), srcTagMap)
+ child.substring(section._1, section._1 + 7) match {
+ case param@("@param "|"@tparam"|"@throws") =>
+ sectionString(extractSectionParam(child, section), parentNamedParams(param.trim))
+ case _ =>
+ sectionString(extractSectionTag(child, section), parentTagMap)
}
}
@@ -283,11 +290,11 @@ trait DocComments { self: Global =>
// Append main comment
out.append("/**")
- out.append(replaceInheritdoc(mainComment(src, srcSections), mainComment(dst, dstSections)))
+ out.append(replaceInheritdoc(mainComment(child, childSections), mainComment(parent, parentSections)))
// Append sections
- for (section <- dstSections)
- out.append(replaceInheritdoc(getSourceSection(section), dst.substring(section._1, section._2)))
+ for (section <- childSections)
+ out.append(replaceInheritdoc(child.substring(section._1, section._2), getParentSection(section)))
out.append("*/")
out.toString
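A string-level sketch of the substitution direction the renaming spells out: @inheritdoc markers in a child section are replaced by the matching parent section. The real method additionally resolves @param/@tparam/@throws sections by name in the parent.

object InheritdocSketch extends App {
  def replaceInheritdoc(childSection: String, parentSection: => String): String =
    if (childSection.indexOf("@inheritdoc") == -1) childSection
    else childSection.replaceAllLiterally("@inheritdoc", parentSection)

  val parent = "Sorts this collection."
  val child  = "@inheritdoc Sorting is stable for this implementation."
  println(replaceInheritdoc(child, parent))
  // Sorts this collection. Sorting is stable for this implementation.
}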
diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
index 9466d1c1f2..acbdcd501f 100644
--- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
+++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala
@@ -9,14 +9,13 @@ package ast
import compat.Platform.EOL
import symtab.Flags._
-/** The object <code>nodePrinter</code> converts the internal tree
- * representation to a string formatted as a Scala expression.
+/** The object `nodePrinter` converts the internal tree
+ * representation to a string.
*
* @author Stephane Micheloud
- * @version 1.0
+ * @author Paul Phillips
*/
abstract class NodePrinters {
-
val global: Global
import global._
@@ -25,262 +24,332 @@ abstract class NodePrinters {
}
var infolevel = InfoLevel.Quiet
- object nodeToString extends Function1[Tree, String] {
+ def nodeToString: Tree => String =
+ if (sys.props contains "scala.colors") nodeToColorizedString
+ else nodeToRegularString
+
+ object nodeToRegularString extends DefaultPrintAST with (Tree => String) {
+ def apply(tree: Tree) = stringify(tree)
+ }
+
+ object nodeToColorizedString extends ColorPrintAST with (Tree => String) {
+ def apply(tree: Tree) = stringify(tree)
+ }
+
+ trait ColorPrintAST extends DefaultPrintAST {
+ import scala.tools.util.color._
+
+ def keywordColor = Cyan
+ def typeColor = Yellow
+ def termColor = Blue
+ def flagColor = Red
+ def literalColor = Green
+
+ override def showFlags(tree: MemberDef) =
+ super.showFlags(tree) in flagColor.bright
+
+ override def showDefTreeName(tree: DefTree) =
+ if (tree.name.isTermName) tree.name.decode in termColor.bright
+ else tree.name.decode in typeColor.bright
+
+ override def showName(name: Name) =
+ if (name == nme.EMPTY || name == tpnme.EMPTY) "<empty>" in keywordColor
+ else if (name.isTermName) name.decode in termColor
+ else name.decode in typeColor
+
+ override def showLiteral(lit: Literal) =
+ super.showLiteral(lit) in literalColor.bright
+ }
+
+ trait DefaultPrintAST extends PrintAST {
+ def showDefTreeName(tree: DefTree) = showName(tree.name)
+ def showFlags(tree: MemberDef) = flagsToString(tree.symbol.flags | tree.mods.flags)
+ def showLiteral(lit: Literal) = lit.value.escapedStringValue
+ def showTypeTree(tt: TypeTree) = "<tpt>" + emptyOrComment(showType(tt))
+ def showName(name: Name) = name match {
+ case nme.EMPTY | tpnme.EMPTY => "<empty>"
+ case name => "\"" + name + "\""
+ }
+
+ def showSymbol(tree: Tree): String = {
+ val sym = tree.symbol
+ if (sym == null || sym == NoSymbol) ""
+ else sym.defString + sym.locationString
+ }
+ def showType(tree: Tree): String = {
+ val tpe = tree.tpe
+ if (tpe == null || tpe == NoType) ""
+ else "tree.tpe=" + tpe
+ }
+
+ def showAttributes(tree: Tree): String = {
+ if (infolevel == InfoLevel.Quiet) ""
+ else {
+ try { List(showSymbol(tree), showType(tree)) filterNot (_ == "") mkString ", " trim }
+ catch { case ex: Throwable => "sym= <error> " + ex.getMessage }
+ }
+ }
+ }
+
+ trait PrintAST {
private val buf = new StringBuilder
+ private var level = 0
- def apply(tree: Tree): String = {
- def traverse(tree: Tree, level: Int, comma: Boolean) {
- def println(s: String) {
- for (i <- 0 until level) buf.append(" ")
- buf.append(s)
- buf.append(EOL)
- }
- def printcln(s: String) {
- for (i <- 0 until level) buf.append(" ")
- buf.append(s)
- if (comma) buf.append(",")
- buf.append(EOL)
- }
- def annotationInfoToString(annot: AnnotationInfo): String = {
- val str = new StringBuilder
- str.append(annot.atp.toString())
- if (!annot.args.isEmpty)
- str.append(annot.args.mkString("(", ",", ")"))
- if (!annot.assocs.isEmpty)
- for (((name, value), index) <- annot.assocs.zipWithIndex) {
- if (index > 0)
- str.append(", ")
- str.append(name).append(" = ").append(value)
- }
- str.toString
- }
- def symflags(tree: Tree): String = {
- val buf = new StringBuffer
- val sym = tree.symbol
- buf append flagsToString(sym.flags)
+ def showName(name: Name): String
+ def showDefTreeName(defTree: DefTree): String
+ def showFlags(tree: MemberDef): String
+ def showLiteral(lit: Literal): String
+ def showTypeTree(tt: TypeTree): String
+ def showAttributes(tree: Tree): String // symbol and type
+
+ def showRefTreeName(tree: Tree): String = tree match {
+ case SelectFromTypeTree(qual, name) => showRefTreeName(qual) + "#" + showName(name)
+ case Select(qual, name) => showRefTreeName(qual) + "." + showName(name)
+ case Ident(name) => showName(name)
+ case _ => "" + tree
+ }
+ def showRefTree(tree: RefTree): String = {
+ def prefix0 = showRefTreeName(tree.qualifier)
+ def prefix = if (prefix0 == "") "" else (tree match {
+ case SelectFromTypeTree(_, _) => prefix0 + "#"
+ case Select(_, _) => prefix0 + "."
+ case _ => ""
+ })
+ prefix + showName(tree.name) + emptyOrComment(showAttributes(tree))
+ }
- val annots = ", annots=" + (
- if (!sym.annotations.isEmpty)
- sym.annotations.map(annotationInfoToString).mkString("[", ",", "]")
- else
- tree.asInstanceOf[MemberDef].mods.annotations)
- (if (buf.length() > 2) buf.substring(3)
- else "0") + ", // flags=" + flagsToString(sym.flags) + annots
- }
+ def emptyOrComment(s: String) = if (s == "") "" else " // " + s
- def nodeinfo(tree: Tree): String =
- if (infolevel == InfoLevel.Quiet) ""
- else {
- try {
- val buf = new StringBuilder(" // sym=" + tree.symbol)
- if (tree.hasSymbol) {
- if (tree.symbol.isPrimaryConstructor)
- buf.append(", isPrimaryConstructor")
- else if (tree.symbol.isConstructor)
- buf.append(", isConstructor")
- if (tree.symbol != NoSymbol)
- buf.append(", sym.owner=" + tree.symbol.owner)
- buf.append(", sym.tpe=" + tree.symbol.tpe)
- }
- buf.append(", tpe=" + tree.tpe)
- if (tree.tpe != null) {
- var sym = tree.tpe.termSymbol
- if (sym == NoSymbol) sym = tree.tpe.typeSymbol
- buf.append(", tpe.sym=" + sym)
- if (sym != NoSymbol) {
- buf.append(", tpe.sym.owner=" + sym.owner)
- if ((infolevel > InfoLevel.Normal) &&
- !(sym.owner eq definitions.ScalaPackageClass) &&
- !sym.isModuleClass && !sym.isPackageClass &&
- !sym.isJavaDefined) {
- val members = for (m <- tree.tpe.decls)
- yield m.toString() + ": " + m.tpe + ", "
- buf.append(", tpe.decls=" + members)
- }
- }
- }
- buf.toString
- } catch {
- case ex: Throwable =>
- return " // sym= <error> " + ex.getMessage
- }
- }
- def nodeinfo2(tree: Tree): String =
- (if (comma) "," else "") + nodeinfo(tree)
+ def stringify(tree: Tree): String = {
+ buf.clear()
+ level = 0
+ traverse(tree)
+ buf.toString
+ }
+ def traverseAny(x: Any) {
+ x match {
+ case t: Tree => traverse(t)
+ case xs: List[_] => printMultiline("List", "")(xs foreach traverseAny)
+ case _ => println("" + x)
+ }
+ }
+ def println(s: String) = printLine(s, "")
+
+ def printLine(value: String, comment: String) {
+ buf append " " * level
+ buf append value
+ if (comment != "") {
+ if (value != "")
+ buf append " "
- def applyCommon(name: String, tree: Tree, fun: Tree, args: List[Tree]) {
- println(name + "(" + nodeinfo(tree))
- traverse(fun, level + 1, true)
- if (args.isEmpty)
- println(" Nil // no argument")
- else {
- val n = args.length
- println(" List( // " + n + " arguments(s)")
- for (i <- 0 until n)
- traverse(args(i), level + 2, i < n-1)
- println(" )")
- }
- printcln(")")
+ buf append "// "
+ buf append comment
+ }
+ buf append EOL
+ }
+
+ def annotationInfoToString(annot: AnnotationInfo): String = {
+ val str = new StringBuilder
+ str.append(annot.atp.toString())
+ if (!annot.args.isEmpty)
+ str.append(annot.args.mkString("(", ",", ")"))
+ if (!annot.assocs.isEmpty)
+ for (((name, value), index) <- annot.assocs.zipWithIndex) {
+ if (index > 0)
+ str.append(", ")
+ str.append(name).append(" = ").append(value)
}
+ str.toString
+ }
+ def printModifiers(tree: MemberDef) {
+ val annots0 = tree.symbol.annotations match {
+ case Nil => tree.mods.annotations
+ case xs => xs map annotationInfoToString
+ }
+ val annots = annots0 match {
+ case Nil => ""
+ case xs => " " + xs.mkString("@{ ", ", ", " }")
+ }
+ val flagString = showFlags(tree) match {
+ case "" => "0"
+ case s => s
+ }
+ println(flagString + annots)
+ }
+
+ def applyCommon(tree: Tree, fun: Tree, args: List[Tree]) {
+ printMultiline(tree) {
+ traverse(fun)
+ traverseList("Nil", "argument")(args)
+ }
+ }
+
+ def printMultiline(tree: Tree)(body: => Unit) {
+ printMultiline(tree.printingPrefix, showAttributes(tree))(body)
+ }
+ def printMultiline(prefix: String, comment: String)(body: => Unit) {
+ printLine(prefix + "(", comment)
+ indent(body)
+ println(")")
+ }
- tree match {
- case AppliedTypeTree(tpt, args) => applyCommon("AppliedTypeTree", tree, tpt, args)
- case Apply(fun, args) => applyCommon("Apply", tree, fun, args)
- case ApplyDynamic(fun, args) => applyCommon("ApplyDynamic", tree, fun, args)
+ @inline private def indent[T](body: => T): T = {
+ level += 1
+ try body
+ finally level -= 1
+ }
- case Block(stats, expr) =>
- println("Block(" + nodeinfo(tree))
- if (stats.isEmpty)
- println(" List(), // no statement")
- else {
- val n = stats.length
- println(" List( // " + n + " statement(s)")
- for (i <- 0 until n)
- traverse(stats(i), level + 2, i < n-1)
- println(" ),")
- }
- traverse(expr, level + 1, false)
- printcln(")")
- case ClassDef(mods, name, tparams, impl) =>
- println("ClassDef(" + nodeinfo(tree))
- println(" " + symflags(tree))
- println(" \"" + name + "\",")
- if (tparams.isEmpty)
- println(" List(), // no type parameter")
- else {
- val n = tparams.length
- println(" List( // " + n + " type parameter(s)")
- for (i <- 0 until n)
- traverse(tparams(i), level + 2, i < n-1)
- println(" ),")
- }
- traverse(impl, level + 1, false)
- printcln(")")
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- println("DefDef(" + nodeinfo(tree))
- println(" " + symflags(tree))
- println(" \"" + name + "\",")
- if (tparams.isEmpty)
- println(" List(), // no type parameter")
- else {
- val n = tparams.length
- println(" List( // " + n + " type parameter(s)")
- for (i <- 0 until n)
- traverse(tparams(i), level + 2, i < n-1)
- println(" ),")
+ def traverseList(ifEmpty: String, what: String)(trees: List[Tree]) {
+ if (trees.isEmpty)
+ println(ifEmpty)
+ else if (trees.tail.isEmpty)
+ traverse(trees.head)
+ else {
+ printLine("", trees.length + " " + what + "s")
+ trees foreach traverse
+ }
+ }
+
+ def printSingle(tree: Tree, name: Name) {
+ println(tree.printingPrefix + "(" + showName(name) + ")" + showAttributes(tree))
+ }
+
+ def traverse(tree: Tree) {
+ tree match {
+ case AppliedTypeTree(tpt, args) => applyCommon(tree, tpt, args)
+ case ApplyDynamic(fun, args) => applyCommon(tree, fun, args)
+ case Apply(fun, args) => applyCommon(tree, fun, args)
+
+ case Throw(Ident(name)) =>
+ printSingle(tree, name)
+
+ case Function(vparams, body) =>
+ printMultiline(tree) {
+ traverseList("()", "parameter")(vparams)
+ traverse(body)
+ }
+ case Try(block, catches, finalizer) =>
+ printMultiline(tree) {
+ traverse(block)
+ traverseList("{}", "case")(catches)
+ if (finalizer ne EmptyTree)
+ traverse(finalizer)
+ }
+
+ case Match(selector, cases) =>
+ printMultiline(tree) {
+ traverse(selector)
+ traverseList("", "case")(cases)
+ }
+ case CaseDef(pat, guard, body) =>
+ printMultiline(tree) {
+ traverse(pat)
+ if (guard ne EmptyTree)
+ traverse(guard)
+ traverse(body)
+ }
+ case Block(stats, expr) =>
+ printMultiline(tree) {
+ traverseList("{}", "statement")(stats)
+ traverse(expr)
+ }
+ case cd @ ClassDef(mods, name, tparams, impl) =>
+ printMultiline(tree) {
+ printModifiers(cd)
+ println(showDefTreeName(cd))
+ traverseList("[]", "type parameter")(tparams)
+ traverse(impl)
+ }
+ case md @ ModuleDef(mods, name, impl) =>
+ printMultiline(tree) {
+ printModifiers(md)
+ println(showDefTreeName(md))
+ traverse(impl)
+ }
+ case dd @ DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
+ printMultiline(tree) {
+ printModifiers(dd)
+ println(showDefTreeName(dd))
+ traverseList("[]", "type parameter")(tparams)
+ vparamss match {
+ case Nil => println("Nil")
+ case Nil :: Nil => println("List(Nil)")
+ case ps :: Nil =>
+ printLine("", "1 parameter list")
+ ps foreach traverse
+ case pss =>
+ printLine("", pss.length + " parameter lists")
+ pss foreach (ps => traverseList("()", "parameter")(ps))
}
- val n = vparamss.length
- if (n == 1 && vparamss(0).isEmpty)
- println(" List(List()), // no parameter")
- else {
- println(" List(")
- for (i <- 0 until n) {
- val m = vparamss(i).length
- println(" List( // " + m + " parameter(s)")
- for (j <- 0 until m)
- traverse(vparamss(i)(j), level + 3, j < m-1)
- println(" )")
+ traverse(tpt)
+ traverse(rhs)
+ }
+ case EmptyTree =>
+ println(showName(nme.EMPTY))
+ case lit @ Literal(value) =>
+ println(showLiteral(lit))
+ case New(tpt) =>
+ printMultiline(tree)(traverse(tpt))
+ case Super(This(qual), mix) =>
+ println("Super(This(" + showName(qual) + "), " + showName(mix) + ")")
+ case Super(qual, mix) =>
+ printMultiline(tree) {
+ traverse(qual)
+ showName(mix)
+ }
+ case Template(parents, self, body) =>
+ printMultiline(tree) {
+ val ps0 = parents map { p =>
+ if (p.tpe eq null) p match {
+ case x: RefTree => showRefTree(x)
+ case x => "" + x
}
- println(" ),")
- }
- println(" " + tpt + ",")
- traverse(rhs, level + 1, false)
- printcln(")")
- case EmptyTree =>
- printcln("EmptyTree")
- case Ident(name) =>
- printcln("Ident(\"" + name + "\")" + nodeinfo2(tree))
- case Literal(value) =>
- printcln("Literal(" + value + ")")
- case New(tpt) =>
- println("New(" + nodeinfo(tree))
- traverse(tpt, level + 1, false)
- printcln(")")
- case Select(qualifier, selector) =>
- println("Select(" + nodeinfo(tree))
- traverse(qualifier, level + 1, true)
- printcln(" \"" + selector + "\")")
- case Super(qual, mix) =>
- println("Super(\"" + mix + "\")" + nodeinfo(tree))
- traverse(qual, level + 1, true)
- case Template(parents, self, body) =>
- println("Template(" + nodeinfo(tree))
- println(" " + parents.map(p =>
- if (p.tpe ne null) p.tpe.typeSymbol else "null-" + p
- ) + ", // parents")
- traverse(self, level + 1, true)
- if (body.isEmpty)
- println(" List() // no body")
- else {
- val n = body.length
- println(" List( // body")
- for (i <- 0 until n)
- traverse(body(i), level + 2, i < n-1)
- println(" )")
- }
- printcln(")")
- case This(qual) =>
- println("This(\"" + qual + "\")" + nodeinfo2(tree))
- case TypeApply(fun, args) =>
- println("TypeApply(" + nodeinfo(tree))
- traverse(fun, level + 1, true)
- if (args.isEmpty)
- println(" List() // no argument")
- else {
- val n = args.length
- println(" List(")
- for (i <- 0 until n)
- traverse(args(i), level + 1, i < n-1)
- println(" )")
+ else showName(newTypeName(p.tpe.typeSymbol.fullName))
}
- printcln(")")
- case TypeTree() =>
- printcln("TypeTree()" + nodeinfo2(tree))
- case Typed(expr, tpt) =>
- println("Typed(" + nodeinfo(tree))
- traverse(expr, level + 1, true)
- traverse(tpt, level + 1, false)
- printcln(")")
- case ValDef(mods, name, tpt, rhs) =>
- println("ValDef(" + nodeinfo(tree))
- println(" " + symflags(tree))
- println(" \"" + name + "\",")
- traverse(tpt, level + 1, true)
- traverse(rhs, level + 1, false)
- printcln(")")
- case PackageDef(pid, stats) =>
- println("PackageDef(")
- traverse(pid, level + 1, false)
- println(",\n")
- for (stat <- stats)
- traverse(stat, level + 1, false)
- printcln(")")
- case _ =>
- tree match {
- case p: Product =>
- if (p.productArity != 0) {
- println(p.productPrefix+"(")
- for (elem <- (0 until p.productArity) map p.productElement) {
- def printElem(elem: Any, level: Int): Unit = elem match {
- case t: Tree =>
- traverse(t, level, false)
- case xs: List[_] =>
- print("List(")
- for (x <- xs) printElem(x, level+1)
- printcln(")")
- case _ =>
- println(elem.toString)
- }
- printElem(elem, level+1)
- }
- printcln(")")
- } else printcln(p.productPrefix)
- }
- }
+ printLine(ps0 mkString ", ", "parents")
+ traverse(self)
+ traverseList("{}", "statement")(body)
+ }
+ case This(qual) =>
+ printSingle(tree, qual)
+ case TypeApply(fun, args) =>
+ printMultiline(tree) {
+ traverse(fun)
+ traverseList("[]", "type argument")(args)
+ }
+ case tt @ TypeTree() =>
+ println(showTypeTree(tt))
+
+ case Typed(expr, tpt) =>
+ printMultiline(tree) {
+ traverse(expr)
+ traverse(tpt)
+ }
+ case vd @ ValDef(mods, name, tpt, rhs) =>
+ printMultiline(tree) {
+ printModifiers(vd)
+ println(showDefTreeName(vd))
+ traverse(tpt)
+ traverse(rhs)
+ }
+ case td @ TypeDef(mods, name, tparams, rhs) =>
+ printMultiline(tree) {
+ printModifiers(td)
+ println(showDefTreeName(td))
+ traverseList("[]", "type parameter")(tparams)
+ traverse(rhs)
+ }
+
+ case PackageDef(pid, stats) =>
+ printMultiline("PackageDef", "")(pid :: stats foreach traverse)
+
+ case _ =>
+ tree match {
+ case t: RefTree => println(showRefTree(t))
+ case t if t.productArity == 0 => println(tree.printingPrefix)
+ case t => printMultiline(tree)(tree.productIterator foreach traverseAny)
+ }
}
- buf setLength 0
- traverse(tree, 0, false)
- buf.toString
}
}
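A compact sketch of the indentation bookkeeping the rewritten printer uses: a single mutable level plus an indent() bracket, instead of threading a level argument through every call. The names mirror the patch, but the sketch is standalone:

object IndentSketch extends App {
  private val buf   = new StringBuilder
  private var level = 0

  def println(s: String): Unit = { buf append ("  " * level); buf append s; buf append "\n" }
  def indent[T](body: => T): T = { level += 1; try body finally level -= 1 }

  def printMultiline(prefix: String)(body: => Unit): Unit = {
    println(prefix + "(")
    indent(body)
    println(")")
  }

  printMultiline("Apply") {
    println("Ident(\"f\")")
    printMultiline("List")(println("Literal(1)"))
  }
  print(buf)   // nested nodes come out indented one level per printMultiline
}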
diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
index c1d6c1a4d4..3302c11127 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala
@@ -352,144 +352,17 @@ abstract class TreeBrowsers {
* Tree.
*/
object TreeInfo {
-
/** Return the case class name and the Name, if the node defines one */
- def treeName(t: Tree): (String, Name) = t match {
- case ProgramTree(units) =>
- ("Program", EMPTY)
-
- case UnitTree(unit) =>
- ("CompilationUnit", newTermName("" + unit))
-
- case DocDef(comment, definition) =>
- ("DocDef", EMPTY)
-
- case ClassDef(mods, name, tparams, impl) =>
- ("ClassDef", name)
-
- case PackageDef(packaged, impl) =>
- ("PackageDef", EMPTY)
-
- case ModuleDef(mods, name, impl) =>
- ("ModuleDef", name)
-
- case ValDef(mods, name, tpe, rhs) =>
- ("ValDef", name)
-
- case DefDef(mods, name, tparams, vparams, tpe, rhs) =>
- ("DefDef", name)
-
- case TypeDef(mods, name, tparams, rhs) =>
- ("TypeDef", name)
-
- case Import(expr, selectors) =>
- ("Import", EMPTY)
-
- case CaseDef(pat, guard, body) =>
- ("CaseDef", EMPTY)
-
- case Template(parents, self, body) =>
- ("Template", EMPTY)
-
- case LabelDef(name, params, rhs) =>
- ("LabelDef", name)
-
- case Block(stats, expr) =>
- ("Block", EMPTY)
-
- case Alternative(trees) =>
- ("Alternative", EMPTY)
-
- case Bind(name, rhs) =>
- ("Bind", name)
-
- case UnApply(fun, args) =>
- ("UnApply", EMPTY)
-
- case Match(selector, cases) =>
- ("Visitor", EMPTY)
-
- case Function(vparams, body) =>
- ("Function", EMPTY)
-
- case Assign(lhs, rhs) =>
- ("Assign", EMPTY)
-
- case If(cond, thenp, elsep) =>
- ("If", EMPTY)
-
- case Return(expr) =>
- ("Return", EMPTY)
-
- case Throw(expr) =>
- ("Throw", EMPTY)
-
- case New(init) =>
- ("New", EMPTY)
-
- case Typed(expr, tpe) =>
- ("Typed", EMPTY)
-
- case TypeApply(fun, args) =>
- ("TypeApply", EMPTY)
-
- case Apply(fun, args) =>
- ("Apply", EMPTY)
-
- case ApplyDynamic(qual, args) =>
- ("Apply", EMPTY)
-
- case Super(qualif, mix) =>
- ("Super", newTermName("mix: " + mix))
-
- case This(qualifier) =>
- ("This", qualifier)
-
- case Select(qualifier, selector) =>
- ("Select", selector)
-
- case Ident(name) =>
- ("Ident", name)
-
- case Literal(value) =>
- ("Literal", EMPTY)
-
- case TypeTree() =>
- ("TypeTree", EMPTY)
-
- case Annotated(annot, arg) =>
- ("Annotated", EMPTY)
-
- case SingletonTypeTree(ref) =>
- ("SingletonType", EMPTY)
-
- case SelectFromTypeTree(qualifier, selector) =>
- ("SelectFromType", selector)
-
- case CompoundTypeTree(template) =>
- ("CompoundType", EMPTY)
-
- case AppliedTypeTree(tpe, args) =>
- ("AppliedType", EMPTY)
-
- case TypeBoundsTree(lo, hi) =>
- ("TypeBoundsTree", EMPTY)
-
- case ExistentialTypeTree(tpt, whereClauses) =>
- ("ExistentialTypeTree", EMPTY)
-
- case Try(block, catcher, finalizer) =>
- ("Try", EMPTY)
-
- case EmptyTree =>
- ("Empty", EMPTY)
-
- case ArrayValue(elemtpt, trees) =>
- ("ArrayValue", EMPTY)
-
- case Star(t) =>
- ("Star", EMPTY)
- }
+ def treeName(t: Tree): (String, Name) = ((t.printingPrefix, t match {
+ case UnitTree(unit) => newTermName("" + unit)
+ case Super(_, mix) => newTermName("mix: " + mix)
+ case This(qual) => qual
+ case Select(_, selector) => selector
+ case Ident(name) => name
+ case SelectFromTypeTree(_, selector) => selector
+ case x: DefTree => x.name
+ case _ => EMPTY
+ }))
/** Return a list of children for the given tree node */
def children(t: Tree): List[Tree] = t match {
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 0d2fbc5372..0bc88d1efd 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -262,29 +262,25 @@ abstract class TreeBuilder {
else if (stats.length == 1) stats.head
else Block(stats.init, stats.last)
+ def makeFilter(tree: Tree, condition: Tree, scrutineeName: String): Tree = {
+ val cases = List(
+ CaseDef(condition, EmptyTree, Literal(Constant(true))),
+ CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))
+ )
+ val matchTree = makeVisitor(cases, false, scrutineeName)
+
+ atPos(tree.pos)(Apply(Select(tree, nme.withFilter), matchTree :: Nil))
+ }
+
/** Create tree for for-comprehension generator <val pat0 <- rhs0> */
def makeGenerator(pos: Position, pat: Tree, valeq: Boolean, rhs: Tree): Enumerator = {
val pat1 = patvarTransformer.transform(pat)
val rhs1 =
- if (valeq) rhs
- else matchVarPattern(pat1) match {
- case Some(_) =>
- rhs
- case None =>
- atPos(rhs.pos) {
- Apply(
- Select(rhs, nme.filter),
- List(
- makeVisitor(
- List(
- CaseDef(pat1.duplicate, EmptyTree, Literal(Constant(true))),
- CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))),
- false,
- nme.CHECK_IF_REFUTABLE_STRING
- )))
- }
- }
- if (valeq) ValEq(pos, pat1, rhs1) else ValFrom(pos, pat1, rhs1)
+ if (valeq || treeInfo.isVariablePattern(pat)) rhs
+ else makeFilter(rhs, pat1.duplicate, nme.CHECK_IF_REFUTABLE_STRING)
+
+ if (valeq) ValEq(pos, pat1, rhs1)
+ else ValFrom(pos, pat1, rhs1)
}
def makeParam(pname: TermName, tpe: Tree) =
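At source level, the filter built by makeFilter corresponds roughly to the expansion below (a sketch, not the trees the compiler actually builds); variable patterns skip the filter entirely:

object GeneratorFilterSketch extends App {
  val xs: List[Option[Int]] = List(Some(1), None, Some(3))

  // for (Some(n) <- xs) yield n  expands (roughly) to:
  val expanded = xs.withFilter {
    case Some(_) => true
    case _       => false
  }.map { case Some(n) => n }

  println(expanded)                        // List(1, 3)
  println(for (Some(n) <- xs) yield n)     // same result
}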
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index 877c51ebc1..d31eafff48 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -645,7 +645,7 @@ abstract class TypeFlowAnalysis {
} while(!done)
lastInstruction.clear()
- for(b <- isOnPerimeter; val lastIns = b.toList.reverse find isOnWatchlist) {
+ for (b <- isOnPerimeter; lastIns = b.toList.reverse find isOnWatchlist) {
lastInstruction += (b -> lastIns.get.asInstanceOf[opcodes.CALL_METHOD])
}
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index a734b2b92b..dfe9081ee5 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -262,7 +262,7 @@ abstract class Inliners extends SubComponent {
def inlineWithoutTFA(inputBlocks: Traversable[BasicBlock], callsites: Function1[BasicBlock, List[opcodes.CALL_METHOD]]): Int = {
var inlineCount = 0
import scala.util.control.Breaks._
- for(x <- inputBlocks; val easyCake = callsites(x); if easyCake.nonEmpty) {
+ for(x <- inputBlocks; easyCake = callsites(x); if easyCake.nonEmpty) {
breakable {
for(ocm <- easyCake) {
assert(ocm.method.isEffectivelyFinal && ocm.method.owner.isEffectivelyFinal)
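Both this hunk and the TypeFlowAnalysis one above drop the deprecated `val` from mid-for-comprehension value definitions; a minimal example of the two spellings:

object ForValSketch extends App {
  val blocks = List(List(1, 2), Nil, List(3))

  // old:  for (b <- blocks; val nonEmpty = b.nonEmpty; if nonEmpty) ...
  // new:
  for (b <- blocks; nonEmpty = b.nonEmpty; if nonEmpty)
    println(b)   // prints List(1, 2) and List(3)
}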
diff --git a/src/compiler/scala/tools/nsc/doc/Settings.scala b/src/compiler/scala/tools/nsc/doc/Settings.scala
index 6c74951156..45a2ad78b4 100644
--- a/src/compiler/scala/tools/nsc/doc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/doc/Settings.scala
@@ -97,4 +97,6 @@ class Settings(error: String => Unit) extends scala.tools.nsc.Settings(error) {
docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator
)
val isScaladocSpecific: String => Boolean = scaladocSpecific map (_.name)
+
+ override def isScaladoc = true
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
index 4f05678d85..0116e02e0e 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala
@@ -119,7 +119,7 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) {
def writeTemplate(tpl: DocTemplateEntity) {
if (!(written contains tpl)) {
- writeForThis(new page.Template(tpl))
+ writeForThis(new page.Template(universe, tpl))
written += tpl
tpl.templates map writeTemplate
}
diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
index b58c71eaa9..1544dafc69 100644
--- a/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/HtmlPage.scala
@@ -22,6 +22,16 @@ abstract class HtmlPage extends Page { thisPage =>
/** The title of this page. */
protected def title: String
+ /** The page description */
+ protected def description: String =
+ // unless overwritten, will display the title in a spaced format, keeping - and .
+ title.replaceAll("[^a-zA-Z0-9\\.\\-]+", " ").replaceAll("\\-+", " - ").replaceAll(" +", " ")
+
+ /** The page keywords */
+ protected def keywords: String =
+ // unless overwritten, same as description, minus the " - "
+ description.replaceAll(" - ", " ")
+
/** Additional header elements (links, scripts, meta tags, etc.) required for this page. */
protected def headers: NodeSeq
@@ -35,6 +45,8 @@ abstract class HtmlPage extends Page { thisPage =>
<html>
<head>
<title>{ title }</title>
+ <meta name="description" content={ description }/>
+ <meta name="keywords" content={ keywords }/>
<meta http-equiv="content-type" content={ "text/html; charset=" + site.encoding }/>
{ headers }
</head>
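A quick sketch of the default description/keywords derivation, reusing the same regex chain on a hypothetical page title:

object MetaTagsSketch extends App {
  val title = "List - scala.collection.immutable.List - Scala Library"   // hypothetical title

  val description =
    title.replaceAll("[^a-zA-Z0-9\\.\\-]+", " ").replaceAll("\\-+", " - ").replaceAll(" +", " ")
  val keywords = description.replaceAll(" - ", " ")

  println(description)
  println(keywords)
}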
diff --git a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
index f19f449d2c..f67abc58da 100644
--- a/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/SyntaxHigh.scala
@@ -219,24 +219,24 @@ private[html] object SyntaxHigh {
parse(" ", i+1)
case '&' =>
parse("&amp;", i+1)
- case '<' =>
+ case '<' if i+1 < buf.length =>
val ch = buf(i+1).toChar
if (ch == '-' || ch == ':' || ch == '%')
parse("<span class=\"kw\">&lt;"+ch+"</span>", i+2)
else
parse("&lt;", i+1)
case '>' =>
- if (buf(i+1) == ':')
+ if (i+1 < buf.length && buf(i+1) == ':')
parse("<span class=\"kw\">&gt;:</span>", i+2)
else
parse("&gt;", i+1)
case '=' =>
- if (buf(i+1) == '>')
+ if (i+1 < buf.length && buf(i+1) == '>')
parse("<span class=\"kw\">=&gt;</span>", i+2)
else
parse(buf(i).toChar.toString, i+1)
case '/' =>
- if (buf(i+1) == '/' || buf(i+1) == '*') {
+ if (i+1 < buf.length && (buf(i+1) == '/' || buf(i+1) == '*')) {
val c = comment(i+1)
parse("<span class=\"cmt\">"+c+"</span>", i+c.length)
} else
@@ -257,9 +257,9 @@ private[html] object SyntaxHigh {
else
parse(buf(i).toChar.toString, i+1)
case _ =>
- if (i == 0 || !Character.isJavaIdentifierPart(buf(i-1).toChar)) {
+ if (i == 0 || (i >= 1 && !Character.isJavaIdentifierPart(buf(i-1).toChar))) {
if (Character.isDigit(buf(i)) ||
- (buf(i) == '.' && Character.isDigit(buf(i+1)))) {
+ (buf(i) == '.' && i + 1 < buf.length && Character.isDigit(buf(i+1)))) {
val s = numlit(i)
parse("<span class=\"num\">"+s+"</span>", i+s.length)
} else {
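All of these hunks apply the same guarded-lookahead pattern: check that buf(i+1) exists before reading it, so a snippet ending in '<', '>', '=' or '/' no longer overruns the buffer. A minimal sketch of the pattern:

object LookaheadSketch extends App {
  def highlightArrow(buf: Array[Char], i: Int): String =
    if (buf(i) == '=' && i + 1 < buf.length && buf(i + 1) == '>') "=>"
    else buf(i).toString

  println(highlightArrow("x => y".toCharArray, 2))   // =>
  println(highlightArrow("x =".toCharArray, 2))      // =  (no out-of-bounds read)
}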
diff --git a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
index e35286b281..0c9c88c0f5 100644
--- a/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
+++ b/src/compiler/scala/tools/nsc/doc/html/page/Template.scala
@@ -11,13 +11,19 @@ package page
import model._
import scala.xml.{ NodeSeq, Text, UnprefixedAttribute }
-class Template(tpl: DocTemplateEntity) extends HtmlPage {
+class Template(universe: doc.Universe, tpl: DocTemplateEntity) extends HtmlPage {
val path =
templateToPath(tpl)
- val title =
- tpl.qualifiedName
+ def title = {
+ val s = universe.settings
+
+ tpl.name +
+ ( if (!s.doctitle.isDefault) " - " + s.doctitle.value else "" ) +
+ ( if (!s.docversion.isDefault) (" " + s.docversion.value) else "" ) +
+ " - " + tpl.qualifiedName
+ }
val headers =
<xml:group>
@@ -339,6 +345,17 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
}
+ val fullSignature: Seq[scala.xml.Node] = {
+ mbr match {
+ case nte: NonTemplateMemberEntity if nte.isUseCase =>
+ <div class="full-signature-block toggleContainer">
+ <span class="toggle">Full Signature</span>
+ <div class="hiddenContent full-signature-usecase">{ signature(nte.useCaseOf.get,true) }</div>
+ </div>
+ case _ => NodeSeq.Empty
+ }
+ }
+
val selfType: Seq[scala.xml.Node] = mbr match {
case dtpl: DocTemplateEntity if (isSelf && !dtpl.selfType.isEmpty && !isReduced) =>
<dt>Self Type</dt>
@@ -460,7 +477,7 @@ class Template(tpl: DocTemplateEntity) extends HtmlPage {
}
// end attributes block vals ---
- val attributesInfo = attributes ++ definitionClasses ++ selfType ++ annotations ++ deprecation ++ migration ++ sourceLink ++ mainComment
+ val attributesInfo = attributes ++ definitionClasses ++ fullSignature ++ selfType ++ annotations ++ deprecation ++ migration ++ sourceLink ++ mainComment
val attributesBlock =
if (attributesInfo.isEmpty)
NodeSeq.Empty
diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
index 4f552b7169..6fb83c133e 100644
--- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
+++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/template.css
@@ -340,6 +340,30 @@ div.members > ol > li:last-child {
color: darkgreen;
}
+.full-signature-usecase h4 span {
+ font-size: 10pt;
+}
+
+.full-signature-usecase > #signature {
+ padding-top: 0px;
+}
+
+#template .full-signature-usecase > .signature.closed {
+ background: none;
+}
+
+#template .full-signature-usecase > .signature.opened {
+ background: none;
+}
+
+.full-signature-block {
+ padding: 5px 0 0;
+ border-top: 1px solid #EBEBEB;
+ margin-top: 5px;
+ margin-bottom: 5px;
+}
+
+
/* Comments text formating */
.cmt {}
diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
index b088c643cb..bc5cd4a958 100644
--- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
+++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala
@@ -491,7 +491,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
else
jump("}}}")
blockEnded("code block")
- Code(getRead)
+ Code(normalizeIndentation(getRead))
}
/** {{{ title ::= ('=' inline '=' | "==" inline "==" | ...) '\n' }}} */
@@ -732,6 +732,64 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory =>
nextChar()
}
+ /**
+ * Eliminates the (common) leading spaces in all lines, based on the first line.
+ * For indented pieces of code, it reduces the indent to the least whitespace prefix:
+ * {{{
+ * indented example
+ * another indented line
+ * if (condition)
+ * then do something;
+ * ^ this is the least whitespace prefix
+ * }}}
+ */
+ def normalizeIndentation(_code: String): String = {
+
+ var code = _code.trim
+ var maxSkip = Integer.MAX_VALUE
+ var crtSkip = 0
+ var wsArea = true
+ var index = 0
+ var firstLine = true
+ var emptyLine = true
+
+ while (index < code.length) {
+ code(index) match {
+ case ' ' =>
+ if (wsArea)
+ crtSkip += 1
+ case c =>
+ wsArea = (c == '\n')
+ maxSkip = if (firstLine || emptyLine) maxSkip else if (maxSkip <= crtSkip) maxSkip else crtSkip
+ crtSkip = if (c == '\n') 0 else crtSkip
+ firstLine = if (c == '\n') false else firstLine
+ emptyLine = if (c == '\n') true else false
+ }
+ index += 1
+ }
+
+ if (maxSkip == 0)
+ code
+ else {
+ index = 0
+ val builder = new StringBuilder
+ while (index < code.length) {
+ builder.append(code(index))
+ if (code(index) == '\n') {
+ // we want to skip as many spaces as are available; if there are fewer spaces
+ // (as on empty lines), do not over-consume them
+ index += 1
+ val limit = index + maxSkip
+ while ((index < code.length) && (code(index) == ' ') && index < limit)
+ index += 1
+ }
+ else
+ index += 1
+ }
+ builder.toString
+ }
+ }
+
def checkParaEnded(): Boolean = {
(char == endOfText) ||
((char == endOfLine) && {
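For illustration (standalone sketch, not part of the patch), the intended effect of the new normalization on a {{{...}}} block: the text is trimmed first, then the smallest leading-space run found on the remaining lines is stripped from each of them.

    val raw = "val x = 1\n    val y = 2\n    x + y"
    // expected: normalizeIndentation(raw) == "val x = 1\nval y = 2\nx + y"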
diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
index d08a363a9d..88e3827403 100644
--- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
@@ -189,7 +189,7 @@ self: scala.tools.nsc.Global =>
override def validatePositions(tree: Tree) {
def reportTree(prefix : String, tree : Tree) {
val source = if (tree.pos.isDefined) tree.pos.source else ""
- inform("== "+prefix+" tree ["+tree.id+"] of type "+tree.productPrefix+" at "+tree.pos.show+source)
+ inform("== "+prefix+" tree ["+tree.id+"] of type "+tree.printingPrefix+" at "+tree.pos.show+source)
inform("")
inform(treeStatus(tree))
inform("")
diff --git a/src/compiler/scala/tools/nsc/interpreter/Naming.scala b/src/compiler/scala/tools/nsc/interpreter/Naming.scala
index 8e215cf63b..19266442cb 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Naming.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Naming.scala
@@ -11,16 +11,18 @@ package interpreter
*/
trait Naming {
def unmangle(str: String): String = {
+ val ESC = '\u001b'
val cleaned = removeIWPackages(removeLineWrapper(str))
- var ctrlChars = 0
- cleaned map { ch =>
- if (ch.isControl && !ch.isWhitespace) {
- ctrlChars += 1
- if (ctrlChars > 5) return "[line elided for control chars: possibly a scala signature]"
- else '?'
- }
- else ch
- }
+ // Looking to exclude binary data which hoses the terminal, but
+ // let through the subset of it we need, like whitespace and also
+ // <ESC> for ansi codes.
+ val binaryChars = cleaned count (ch => ch < 32 && !ch.isWhitespace && ch != ESC)
+ // Lots of binary chars - translate all supposed whitespace into spaces
+ if (binaryChars > 5)
+ cleaned map (ch => if (ch.isWhitespace) ' ' else if (ch < 32) '?' else ch)
+ // Not lots - preserve whitespace and ESC
+ else
+ cleaned map (ch => if (ch.isWhitespace || ch == ESC) ch else if (ch < 32) '?' else ch)
}
// The two name forms this is catching are the two sides of this assignment:
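As a rough standalone illustration of the policy described above (assumed helper name, not part of the patch):

    def scrub(line: String): String = {
      val ESC = '\u001b'
      val binaryChars = line count (ch => ch < 32 && !ch.isWhitespace && ch != ESC)
      if (binaryChars > 5)   // looks like binary data: flatten whitespace as well
        line map (ch => if (ch.isWhitespace) ' ' else if (ch < 32) '?' else ch)
      else                   // mostly text: keep whitespace and ANSI escape chars
        line map (ch => if (ch.isWhitespace || ch == ESC) ch else if (ch < 32) '?' else ch)
    }

    scrub("res0: Int = 42\n")   // unchanged: ordinary REPL output passes through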
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index 1285e29d4a..be5a9907b8 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -309,7 +309,7 @@ trait ParallelMatching extends ast.TreeDSL
}
lazy val cases =
- for ((tag, indices) <- literalMap.toList) yield {
+ for ((tag, indices) <- literalMap.toList.sortBy(_._1)) yield {
val newRows = indices map (i => addDefaultVars(i)(rest rows i))
val r = remake(newRows ++ defaultRows, includeScrut = false)
val r2 = make(r.tvars, r.rows map (x => x rebind bindVars(tag, x.subst)))
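A tiny standalone illustration (Int tags assumed, not part of the patch) of why the sort matters: iteration order over a Map is unspecified, so sorting by tag makes the emitted cases deterministic from one compile to the next.

    val literalMap = Map(3 -> List(0), 1 -> List(4), 2 -> List(2))
    literalMap.toList.sortBy(_._1)
    // always List((1,List(4)), (2,List(2)), (3,List(0))), whatever the hashing order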
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
index c76a04c6ba..f5335fb0f5 100644
--- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
+++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala
@@ -8,7 +8,7 @@ package reporters
import java.io.{ BufferedReader, IOException, PrintWriter }
import util._
-import scala.tools.util.StringOps.countElementsAsString
+import scala.tools.util.StringOps
/**
* This class implements a Reporter that displays messages on a text
@@ -40,7 +40,7 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr
* @return ...
*/
private def getCountString(severity: Severity): String =
- countElementsAsString((severity).count, label(severity))
+ StringOps.countElementsAsString((severity).count, label(severity))
/** Prints the message. */
def printMessage(msg: String) { writer.print(msg + "\n"); writer.flush() }
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 5f3c7ec32c..d1224fc79d 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -204,4 +204,7 @@ trait ScalaSettings extends AbsScalaSettings
*/
val pluginOptions = MultiStringSetting("-P", "plugin:opt", "Pass an option to a plugin") .
withHelpSyntax("-P:<plugin>:<opt>")
+
+ /** Test whether this is scaladoc we're looking at */
+ def isScaladoc = false
}
diff --git a/src/compiler/scala/tools/nsc/symtab/Positions.scala b/src/compiler/scala/tools/nsc/symtab/Positions.scala
index c96c709fb0..680b06f8ce 100644
--- a/src/compiler/scala/tools/nsc/symtab/Positions.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Positions.scala
@@ -14,6 +14,15 @@ self: scala.tools.nsc.symtab.SymbolTable =>
type Position = scala.tools.nsc.util.Position
val NoPosition = scala.tools.nsc.util.NoPosition
+ type TreeAnnotation = scala.tools.nsc.util.TreeAnnotation
+ val NoTreeAnnotation: TreeAnnotation = NoPosition
+ def positionToAnnotation(pos: Position): TreeAnnotation = pos
+ def annotationToPosition(annot: TreeAnnotation): Position = annot.pos
+ override def _checkSetAnnotation(tree: Tree, annot: TreeAnnotation): Unit = {
+ if (tree.pos != NoPosition && tree.pos != annot.pos) debugwarn("Overwriting annotation "+ tree.annotation +" of tree "+ tree +" with annotation "+ annot)
+ // if ((tree.annotation.isInstanceOf[scala.tools.nsc.util.Position] || !annot.isInstanceOf[scala.tools.nsc.util.Position]) && tree.isInstanceOf[Block])
+ // println("Updating block from "+ tree.annotation +" to "+ annot)
+ }
def focusPos(pos: Position): Position = pos.focus
def isRangePos(pos: Position): Boolean = pos.isRange
def showPos(pos: Position): String = pos.show
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index 942ec1fa86..f9ff147e82 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -126,9 +126,13 @@ abstract class SymbolLoaders {
ok = false
if (settings.debug.value) ex.printStackTrace()
val msg = ex.getMessage()
- globalError(
- if (msg eq null) "i/o error while loading " + root.name
- else "error while loading " + root.name + ", " + msg);
+ // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
+ // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
+ // that are not in their correct place (see bug for details)
+ if (!settings.isScaladoc)
+ globalError(
+ if (msg eq null) "i/o error while loading " + root.name
+ else "error while loading " + root.name + ", " + msg);
}
try {
val start = currentTime
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 1cd4ab21ea..61668b1a8a 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -446,7 +446,11 @@ abstract class ClassfileParser {
def classNameToSymbol(name: Name): Symbol = {
def loadClassSymbol(name: Name): Symbol = {
val file = global.classPath findSourceFile ("" +name) getOrElse {
- warning("Class " + name + " not found - continuing with a stub.")
+ // SI-5593 Scaladoc's current strategy is to visit all packages in search of user code that can be documented
+ // therefore, it will rummage through the classpath triggering errors whenever it encounters package objects
+ // that are not in their correct place (see bug for details)
+ if (!settings.isScaladoc)
+ warning("Class " + name + " not found - continuing with a stub.")
return NoSymbol.newClass(name.toTypeName)
}
val completer = new global.loaders.ClassfileLoader(file)
diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
index e11a5a4ad9..4b847fa94a 100644
--- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala
@@ -108,7 +108,7 @@ abstract class TypeParser {
val method = new ConstructorInfo(declType, attrs, Array[MSILType]())
val flags = Flags.JAVA
val owner = clazz
- val methodSym = owner.newMethod(NoPosition, nme.CONSTRUCTOR).setFlag(flags)
+ val methodSym = owner.newMethod(nme.CONSTRUCTOR, NoPosition, flags)
val rettype = clazz.tpe
val mtype = methodType(Array[MSILType](), rettype);
val mInfo = mtype(methodSym)
@@ -224,14 +224,14 @@ abstract class TypeParser {
if (canBeTakenAddressOf) {
clazzBoxed.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata
- else polyType(ownTypeParams, classInfoAsInMetadata) )
+ else genPolyType(ownTypeParams, classInfoAsInMetadata) )
clazzBoxed.setFlag(flags)
val rawValueInfoType = ClassInfoType(definitions.anyvalparam, instanceDefs, clazz)
clazz.setInfo( if (ownTypeParams.isEmpty) rawValueInfoType
- else polyType(ownTypeParams, rawValueInfoType) )
+ else genPolyType(ownTypeParams, rawValueInfoType) )
} else {
clazz.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata
- else polyType(ownTypeParams, classInfoAsInMetadata) )
+ else genPolyType(ownTypeParams, classInfoAsInMetadata) )
}
// TODO I don't remember if statics.setInfo and staticModule.setInfo should also know about type params
@@ -284,7 +284,7 @@ abstract class TypeParser {
else
getCLRType(field.FieldType)
val owner = if (field.IsStatic()) statics else clazz;
- val sym = owner.newValue(NoPosition, name).setFlag(flags).setInfo(fieldType);
+ val sym = owner.newValue(name, NoPosition, flags).setInfo(fieldType);
// TODO: set private within!!! -> look at typechecker/Namers.scala
(if (field.IsStatic()) staticDefs else instanceDefs).enter(sym);
clrTypes.fields(sym) = field;
@@ -313,7 +313,7 @@ abstract class TypeParser {
val name: Name = if (gparamsLength == 0) prop.Name else nme.apply;
val flags = translateAttributes(getter);
val owner: Symbol = if (getter.IsStatic) statics else clazz;
- val methodSym = owner.newMethod(NoPosition, name).setFlag(flags)
+ val methodSym = owner.newMethod(name, NoPosition, flags)
val mtype: Type = if (gparamsLength == 0) NullaryMethodType(propType) // .NET properties can't be polymorphic
else methodType(getter, getter.ReturnType)(methodSym)
methodSym.setInfo(mtype);
@@ -337,7 +337,7 @@ abstract class TypeParser {
val flags = translateAttributes(setter);
val mtype = methodType(setter, definitions.UnitClass.tpe);
val owner: Symbol = if (setter.IsStatic) statics else clazz;
- val methodSym = owner.newMethod(NoPosition, name).setFlag(flags)
+ val methodSym = owner.newMethod(name, NoPosition, flags)
methodSym.setInfo(mtype(methodSym))
methodSym.setFlag(Flags.ACCESSOR);
(if (setter.IsStatic) staticDefs else instanceDefs).enter(methodSym);
@@ -424,14 +424,14 @@ abstract class TypeParser {
val flags = Flags.JAVA | Flags.FINAL
for (cmpName <- ENUM_CMP_NAMES) {
- val enumCmp = clazz.newMethod(NoPosition, cmpName)
+ val enumCmp = clazz.newMethod(cmpName)
val enumCmpType = JavaMethodType(enumCmp.newSyntheticValueParams(List(clazz.tpe)), definitions.BooleanClass.tpe)
enumCmp.setFlag(flags).setInfo(enumCmpType)
instanceDefs.enter(enumCmp)
}
for (bitLogName <- ENUM_BIT_LOG_NAMES) {
- val enumBitLog = clazz.newMethod(NoPosition, bitLogName)
+ val enumBitLog = clazz.newMethod(bitLogName)
val enumBitLogType = JavaMethodType(enumBitLog.newSyntheticValueParams(List(clazz.tpe)), clazz.tpe /* was classInfo, infinite typer */)
enumBitLog.setFlag(flags).setInfo(enumBitLogType)
instanceDefs.enter(enumBitLog)
@@ -469,7 +469,7 @@ abstract class TypeParser {
val flags = translateAttributes(method);
val owner = if (method.IsStatic()) statics else clazz;
- val methodSym = owner.newMethod(NoPosition, getName(method)).setFlag(flags)
+ val methodSym = owner.newMethod(getName(method), NoPosition, flags)
/* START CLR generics (snippet 3) */
val newMethodTParams = populateMethodTParams(method, methodSym)
/* END CLR generics (snippet 3) */
@@ -480,7 +480,7 @@ abstract class TypeParser {
val mtype = methodType(method, rettype);
if (mtype == null) return;
/* START CLR generics (snippet 4) */
- val mInfo = if (method.IsGeneric) polyType(newMethodTParams, mtype(methodSym))
+ val mInfo = if (method.IsGeneric) genPolyType(newMethodTParams, mtype(methodSym))
else mtype(methodSym)
/* END CLR generics (snippet 4) */
/* START CLR non-generics (snippet 4)
@@ -500,7 +500,7 @@ abstract class TypeParser {
}
private def createMethod(name: Name, flags: Long, mtype: Symbol => Type, method: MethodInfo, statik: Boolean): Symbol = {
- val methodSym: Symbol = (if (statik) statics else clazz).newMethod(NoPosition, name)
+ val methodSym: Symbol = (if (statik) statics else clazz).newMethod(name)
methodSym.setFlag(flags).setInfo(mtype(methodSym))
(if (statik) staticDefs else instanceDefs).enter(methodSym)
if (method != null)
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 4c3972519a..5104518dd9 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -28,6 +28,9 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
/** the following two members override abstract members in Transform */
val phaseName: String = "extmethods"
+ /** The following flags may be set by this phase: */
+ override def phaseNewFlags: Long = notPRIVATE
+
def newTransformer(unit: CompilationUnit): Transformer =
new Extender(unit)
@@ -101,6 +104,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
case Template(_, _, _) =>
if (currentOwner.isDerivedValueClass) {
extensionDefs(currentOwner.companionModule) = new mutable.ListBuffer[Tree]
+ currentOwner.primaryConstructor.makeNotPrivate(NoSymbol)
super.transform(tree)
} else if (currentOwner.isStaticOwner) {
super.transform(tree)
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 639e060812..adbb7d43d0 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -24,7 +24,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
/** This map contains a binding (class -> info) if
* the class with this info at phase mixinPhase has been treated for mixin composition
*/
- private val treatedClassInfos = perRunCaches.newMap[Symbol, Type]()
+ private val treatedClassInfos = perRunCaches.newMap[Symbol, Type]() withDefaultValue NoType
/** Map a lazy, mixedin field accessor to it's trait member accessor */
private val initializer = perRunCaches.newMap[Symbol, Symbol]
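A small standalone illustration (String stand-ins, not part of the patch) of the withDefaultValue idiom adopted here and again below for singleUseFields: a defaulted map lets the "already treated?" check read as plain application instead of an Option comparison.

    import scala.collection.mutable

    val treated = mutable.Map[String, String]() withDefaultValue ""
    treated("absent")            // "" instead of a NoSuchElementException
    treated("absent") != "done"  // true, so the 'not yet treated' guard stays a one-liner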
@@ -104,7 +104,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
*/
private val toInterfaceMap = new TypeMap {
def apply(tp: Type): Type = mapOver( tp match {
- case TypeRef(pre, sym, args) if (sym.isImplClass) =>
+ case TypeRef(pre, sym, args) if sym.isImplClass =>
typeRef(pre, beforeMixin(sym.toInterface), args)
case _ => tp
})
@@ -113,10 +113,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
/** The implementation class corresponding to a currently compiled interface.
* todo: try to use Symbol.implClass instead?
*/
- private def implClass(iface: Symbol): Symbol = {
- val impl = iface.implClass
- if (impl != NoSymbol) impl else erasure.implClass(iface)
- }
+ private def implClass(iface: Symbol) = iface.implClass orElse (erasure implClass iface)
/** Returns the symbol that is accessed by a super-accessor in a mixin composition.
*
@@ -168,6 +165,29 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
debuglog("new member of " + clazz + ":" + member.defString)
clazz.info.decls enter member setFlag MIXEDIN
}
+ def cloneAndAddMember(mixinClass: Symbol, mixinMember: Symbol, clazz: Symbol): Symbol =
+ addMember(clazz, cloneBeforeErasure(mixinClass, mixinMember, clazz))
+
+ def cloneBeforeErasure(mixinClass: Symbol, mixinMember: Symbol, clazz: Symbol): Symbol = {
+ val newSym = beforeErasure {
+ // since we used `mixinMember` from the interface that represents the trait that's
+ // being mixed in, we have to instantiate the interface type params (that may occur in mixinMember's
+ // info) as they are seen from the class. We can't use the member that we get from the
+ // implementation class, as it's a clone that was made after erasure, and thus it does not
+ // know its info at the beginning of erasure anymore.
+ // Optimize: no need if mixinClass has no typeparams.
+ mixinMember cloneSymbol clazz modifyInfo (info =>
+ if (mixinClass.typeParams.isEmpty) info
+ else (clazz.thisType baseType mixinClass) memberInfo mixinMember
+ )
+ }
+ // clone before erasure got rid of type info we'll need to generate a javaSig
+ // now we'll have the type info at (the beginning of) erasure in our history,
+ // and now newSym has the info that's been transformed to fit this period
+ // (no need for asSeenFrom as phase.erasedTypes)
+ // TODO: verify we need the updateInfo and document why
+ newSym updateInfo (mixinMember.info cloneInfo newSym)
+ }
def needsExpandedSetterName(field: Symbol) = !field.isLazy && (
if (field.isMethod) field.hasStableFlag
@@ -181,9 +201,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - lazy fields don't get a setter.
*/
def addLateInterfaceMembers(clazz: Symbol) {
- if ((treatedClassInfos get clazz) != Some(clazz.info)) {
+ def makeConcrete(member: Symbol) =
+ member setPos clazz.pos resetFlag (DEFERRED | lateDEFERRED)
+
+ if (treatedClassInfos(clazz) != clazz.info) {
treatedClassInfos(clazz) = clazz.info
- assert(phase == currentRun.mixinPhase)
+ assert(phase == currentRun.mixinPhase, phase)
/** Create a new getter. Getters are never private or local. They are
* always accessors and deferred. */
@@ -210,8 +233,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
}
clazz.info // make sure info is up to date, so that implClass is set.
- val impl = implClass(clazz)
- assert(impl != NoSymbol)
+ val impl = implClass(clazz) orElse abort("No impl class for " + clazz)
for (member <- impl.info.decls) {
if (!member.isMethod && !member.isModule && !member.isModuleVar) {
@@ -242,139 +264,107 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - for every super accessor in T, add an implementation of that accessor
* - for every module in T, add a module
*/
- def addMixedinMembers(clazz: Symbol, unit : CompilationUnit) {
- def cloneBeforeErasure(iface: Symbol, clazz: Symbol, imember: Symbol): Symbol = {
- val newSym = beforeErasure {
- val res = imember.cloneSymbol(clazz)
- // since we used the member (imember) from the interface that represents the trait that's being mixed in,
- // have to instantiate the interface type params (that may occur in imember's info) as they are seen from the class
- // we can't use the member that we get from the implementation class, as it's a clone that was made after erasure,
- // and thus it does not know its info at the beginning of erasure anymore
- // optimize: no need if iface has no typeparams
- if(iface.typeParams nonEmpty) res.setInfo(clazz.thisType.baseType(iface).memberInfo(imember))
- res
- } // clone before erasure got rid of type info we'll need to generate a javaSig
- // now we'll have the type info at (the beginning of) erasure in our history,
- newSym.updateInfo(imember.info.cloneInfo(newSym)) // and now newSym has the info that's been transformed to fit this period (no need for asSeenFrom as phase.erasedTypes)
- newSym // TODO: verify we need the updateInfo and document why
- }
-
- if (!(clazz hasFlag JAVA) && (treatedClassInfos get clazz) != Some(clazz.info)) {
- treatedClassInfos(clazz) = clazz.info
-
- assert(!clazz.isTrait, clazz)
- assert(clazz.info.parents.nonEmpty, clazz)
-
- // first complete the superclass with mixed in members
- addMixedinMembers(clazz.superClass, unit)
-
- //Console.println("adding members of " + clazz.info.baseClasses.tail.takeWhile(superclazz !=) + " to " + clazz);//DEBUG
-
- /** Mix in members of implementation class mixinClass into class clazz */
- def mixinImplClassMembers(impl: Symbol, iface: Symbol) {
- assert(
- // XXX this should be impl.isImplClass, except that we get impl classes
- // coming through under -optimise which do not agree that they are (because
- // the IMPLCLASS flag is unset, I believe.) See ticket #4285.
- nme.isImplClassName(impl.name) || impl.isImplClass,
- "%s (%s) is not a an implementation class, it cannot mix in %s".format(
- impl, impl.defaultFlagString, iface)
- )
- if (!impl.isImplClass) {
- debugwarn("!!! " + impl + " has an impl class name, but !isImplClass: " + impl.defaultFlagString + ", mixing in " + iface)
- }
+ def addMixedinMembers(clazz: Symbol, unit: CompilationUnit) {
+ def cloneAndAddMixinMember(mixinClass: Symbol, mixinMember: Symbol): Symbol = (
+ cloneAndAddMember(mixinClass, mixinMember, clazz)
+ setPos clazz.pos
+ resetFlag DEFERRED | lateDEFERRED
+ )
- for (member <- impl.info.decls) {
- if (isForwarded(member)) {
- val imember = member.overriddenSymbol(iface)
- // atPhase(currentRun.erasurePhase){
- // println(""+(clazz, iface, clazz.typeParams, iface.typeParams, imember, clazz.thisType.baseType(iface), clazz.thisType.baseType(iface).memberInfo(imember), imember.info substSym(iface.typeParams, clazz.typeParams) ))
- // }
- // Console.println("mixin member "+member+":"+member.tpe+member.locationString+" "+imember+" "+imember.overridingSymbol(clazz)+" to "+clazz+" with scope "+clazz.info.decls)//DEBUG
- if (imember.overridingSymbol(clazz) == NoSymbol &&
- clazz.info.findMember(member.name, 0, lateDEFERRED, false).alternatives.contains(imember)) {
- val member1 = addMember(
- clazz,
- cloneBeforeErasure(iface, clazz, imember) setPos clazz.pos resetFlag (DEFERRED | lateDEFERRED))
- member1.asInstanceOf[TermSymbol] setAlias member;
- }
- }
+ /** Mix in members of implementation class mixinClass into class clazz */
+ def mixinImplClassMembers(mixinClass: Symbol, mixinInterface: Symbol) {
+ assert(mixinClass.isImplClass, "Not an impl class:" +
+ ((mixinClass.debugLocationString, mixinInterface.debugLocationString)))
+
+ for (member <- mixinClass.info.decls ; if isForwarded(member)) {
+ val imember = member overriddenSymbol mixinInterface
+ imember overridingSymbol clazz match {
+ case NoSymbol =>
+ if (clazz.info.findMember(member.name, 0, lateDEFERRED, false).alternatives contains imember)
+ cloneAndAddMixinMember(mixinInterface, imember).asInstanceOf[TermSymbol] setAlias member
+ case _ =>
}
}
+ }
- /** Mix in members of trait mixinClass into class clazz. Also,
- * for each lazy field in mixinClass, add a link from its mixed in member to its
- * initializer method inside the implclass.
- */
- def mixinTraitMembers(mixinClass: Symbol) {
- // For all members of a trait's interface do:
- for (member <- mixinClass.info.decls) {
- if (isConcreteAccessor(member)) {
- if (isOverriddenAccessor(member, clazz.info.baseClasses)) {
- debugwarn("!!! is overridden val: "+member.fullLocationString)
+ /** Mix in members of trait mixinClass into class clazz. Also,
+ * for each lazy field in mixinClass, add a link from its mixed in member to its
+ * initializer method inside the implclass.
+ */
+ def mixinTraitMembers(mixinClass: Symbol) {
+ // For all members of a trait's interface do:
+ for (mixinMember <- mixinClass.info.decls) {
+ if (isConcreteAccessor(mixinMember)) {
+ if (isOverriddenAccessor(mixinMember, clazz.info.baseClasses))
+ debugwarn("!!! is overridden val: "+mixinMember.fullLocationString)
+ else {
+ // mixin field accessors
+ val mixedInAccessor = cloneAndAddMixinMember(mixinClass, mixinMember)
+ if (mixinMember.isLazy) {
+ initializer(mixedInAccessor) = (
+ implClass(mixinClass).info.decl(mixinMember.name)
+ orElse abort("Could not find initializer for " + mixinMember.name)
+ )
}
- else {
- // mixin field accessors
- val member1 = addMember(
- clazz,
- cloneBeforeErasure(mixinClass, clazz, member) //member.cloneSymbol(clazz)
- setPos clazz.pos
- resetFlag (DEFERRED | lateDEFERRED))
- // println("mixing in: "+ (member, member.info, member1.info))
- // atPhase(currentRun.erasurePhase){
- // println("before erasure: "+ (member.info, member1.info))
- // }
- if (member.isLazy) {
- var init = implClass(mixinClass).info.decl(member.name)
- assert(init != NoSymbol, "Could not find initializer for " + member.name)
- initializer(member1) = init
+ if (!mixinMember.isSetter)
+ mixinMember.tpe match {
+ case MethodType(Nil, ConstantType(_)) =>
+ // mixinMember is a constant; only getter is needed
+ ;
+ case MethodType(Nil, TypeRef(_, UnitClass, _)) =>
+ // mixinMember is a value of type unit. No field needed
+ ;
+ case _ => // otherwise mixin a field as well
+ // atPhase: the private field is moved to the implementation class by erasure,
+ // so it can no longer be found in the mixinMember's owner (the trait)
+ val accessed = beforePickler(mixinMember.accessed)
+ // #3857, need to retain info before erasure when cloning (since cloning only
+ // carries over the current entry in the type history)
+ val sym = beforeErasure {
+ // so we have a type history entry before erasure
+ clazz.newValue(nme.getterToLocal(mixinMember.name), mixinMember.pos).setInfo(mixinMember.tpe.resultType)
+ }
+ sym updateInfo mixinMember.tpe.resultType // info at current phase
+
+ val newFlags = (
+ ( PrivateLocal )
+ | ( mixinMember getFlag MUTABLE | LAZY)
+ | ( if (mixinMember.hasStableFlag) 0 else MUTABLE )
+ )
+
+ addMember(clazz, sym setFlag newFlags setAnnotations accessed.annotations)
}
- if (!member.isSetter)
- member.tpe match {
- case MethodType(Nil, ConstantType(_)) =>
- // member is a constant; only getter is needed
- ;
- case MethodType(Nil, TypeRef(_, UnitClass, _)) =>
- // member is a value of type unit. No field needed
- ;
- case _ => // otherwise mixin a field as well
- // atPhase: the private field is moved to the implementation class by erasure,
- // so it can no longer be found in the member's owner (the trait)
- val accessed = beforePickler(member.accessed)
- val sym = beforeErasure { // #3857, need to retain info before erasure when cloning (since cloning only carries over the current entry in the type history)
- clazz.newValue(nme.getterToLocal(member.name), member.pos).setInfo(member.tpe.resultType) // so we have a type history entry before erasure
- }
- sym.updateInfo(member.tpe.resultType) // info at current phase
- addMember(clazz,
- sym
- setFlag (PrivateLocal | member.getFlag(MUTABLE | LAZY))
- setFlag (if (!member.hasStableFlag) MUTABLE else 0)
- setAnnotations accessed.annotations)
- }
- }
- }
- else if (member.isSuperAccessor) { // mixin super accessors
- val member1 = addMember(clazz, member.cloneSymbol(clazz)) setPos clazz.pos
- assert(member1.alias != NoSymbol, member1)
- val alias1 = rebindSuper(clazz, member.alias, mixinClass)
- member1.asInstanceOf[TermSymbol] setAlias alias1
-
- }
- else if (member.isMethod && member.isModule && member.hasNoFlags(LIFTED | BRIDGE)) {
- // mixin objects: todo what happens with abstract objects?
- addMember(clazz, member.cloneSymbol(clazz, member.flags & ~(DEFERRED | lateDEFERRED)) setPos clazz.pos)
}
}
+ else if (mixinMember.isSuperAccessor) { // mixin super accessors
+ val superAccessor = addMember(clazz, mixinMember.cloneSymbol(clazz)) setPos clazz.pos
+ assert(superAccessor.alias != NoSymbol, superAccessor)
+ val alias1 = rebindSuper(clazz, mixinMember.alias, mixinClass)
+ superAccessor.asInstanceOf[TermSymbol] setAlias alias1
+ }
+ else if (mixinMember.isMethod && mixinMember.isModule && mixinMember.hasNoFlags(LIFTED | BRIDGE)) {
+ // mixin objects: todo what happens with abstract objects?
+ addMember(clazz, mixinMember.cloneSymbol(clazz, mixinMember.flags & ~(DEFERRED | lateDEFERRED)) setPos clazz.pos)
+ }
}
+ }
- for (mc <- clazz.mixinClasses)
- if (mc hasFlag lateINTERFACE) {
- // @SEAN: adding trait tracking so we don't have to recompile transitive closures
- unit.depends += mc
- addLateInterfaceMembers(mc)
- mixinTraitMembers(mc)
- mixinImplClassMembers(implClass(mc), mc)
- }
+ if (clazz.isJavaDefined || treatedClassInfos(clazz) == clazz.info)
+ return
+
+ treatedClassInfos(clazz) = clazz.info
+ assert(!clazz.isTrait && clazz.info.parents.nonEmpty, clazz)
+
+ // first complete the superclass with mixed in members
+ addMixedinMembers(clazz.superClass, unit)
+
+ //Console.println("adding members of " + clazz.info.baseClasses.tail.takeWhile(superclazz !=) + " to " + clazz);//DEBUG
+ for (mc <- clazz.mixinClasses ; if mc hasFlag lateINTERFACE) {
+ // @SEAN: adding trait tracking so we don't have to recompile transitive closures
+ unit.depends += mc
+ addLateInterfaceMembers(mc)
+ mixinTraitMembers(mc)
+ mixinImplClassMembers(implClass(mc), mc)
}
}
@@ -436,9 +426,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* Such fields will be nulled after the initializer has memoized the lazy value.
*/
def singleUseFields(templ: Template): collection.Map[Symbol, List[Symbol]] = {
- val usedIn = new mutable.HashMap[Symbol, List[Symbol]] {
- override def default(key: Symbol) = Nil
- }
+ val usedIn = mutable.HashMap[Symbol, List[Symbol]]() withDefaultValue Nil
object SingleUseTraverser extends Traverser {
override def traverse(tree: Tree) {
@@ -477,7 +465,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
new MixinTransformer(unit)
class MixinTransformer(unit : CompilationUnit) extends Transformer {
-
/** Within a static implementation method: the parameter referring to the
* current object. Undefined everywhere else.
*/
@@ -1129,18 +1116,20 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* - refer to fields in some implementation class via an abstract method in the interface.
*/
private def postTransform(tree: Tree): Tree = {
+ def siteWithinImplClass = currentOwner.enclClass.isImplClass
val sym = tree.symbol
+
// change every node type that refers to an implementation class to its
// corresponding interface, unless the node's symbol is an implementation class.
if (tree.tpe.typeSymbol.isImplClass && ((sym eq null) || !sym.isImplClass))
tree.tpe = toInterface(tree.tpe)
tree match {
- case Template(parents, self, body) =>
+ case templ @ Template(parents, self, body) =>
// change parents of templates to conform to parents in the symbol info
val parents1 = currentOwner.info.parents map (t => TypeTree(t) setPos tree.pos)
// mark fields which can be nulled afterward
- lazyValNullables = nullableFields(tree.asInstanceOf[Template]) withDefaultValue Set()
+ lazyValNullables = nullableFields(templ) withDefaultValue Set()
// add all new definitions to current class or interface
treeCopy.Template(tree, parents1, self, addNewDefs(currentOwner, body))
@@ -1179,18 +1168,23 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// - if `m` refers to a trait, insert a static call to the corresponding static
// implementation
// - otherwise return tree unchanged
- if (mix == tpnme.EMPTY && currentOwner.enclClass.isImplClass)
- assert(false, "illegal super in trait: " + currentOwner.enclClass + " " + tree);
+ assert(
+ !(mix == tpnme.EMPTY && siteWithinImplClass),
+ "illegal super in trait: " + currentOwner.enclClass + " " + tree
+ )
+
if (sym.owner hasFlag lateINTERFACE) {
if (sym.hasAccessorFlag) {
assert(args.isEmpty, args)
val sym1 = sym.overridingSymbol(currentOwner.enclClass)
typedPos(tree.pos)((transformSuper(qual) DOT sym1)())
- } else {
+ }
+ else {
staticCall(beforePrevPhase(sym.overridingSymbol(implClass(sym.owner))))
}
- } else {
- assert(!currentOwner.enclClass.isImplClass, currentOwner.enclClass)
+ }
+ else {
+ assert(!siteWithinImplClass, currentOwner.enclClass)
tree
}
case _ =>
@@ -1208,8 +1202,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
// refer to fields in some implementation class via an abstract
// getter in the interface.
val iface = toInterface(sym.owner.tpe).typeSymbol
- val getter = sym.getter(iface)
- assert(getter != NoSymbol, sym)
+ val getter = sym getter iface orElse abort("No getter for " + sym + " in " + iface)
+
typedPos(tree.pos)((qual DOT getter)())
case Assign(Apply(lhs @ Select(qual, _), List()), rhs) =>
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index fdb5c7e52e..6ebecb02c6 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -36,6 +36,8 @@ abstract class TailCalls extends Transform {
}
}
+ private def hasSynthCaseSymbol(t: Tree) = (t.symbol ne null) && (t.symbol hasFlag (Flags.CASE | Flags.SYNTHETIC))
+
/**
* A Tail Call Transformer
*
@@ -87,10 +89,22 @@ abstract class TailCalls extends Transform {
class TailCallElimination(unit: CompilationUnit) extends Transformer {
private val defaultReason = "it contains a recursive call not in tail position"
+ /** Has the label been accessed? Then its symbol is in this set. */
+ private val accessed = new collection.mutable.HashSet[Symbol]()
+ // `accessed` was stored as a boolean in the current context -- this is no longer tenable:
+ // now that jumps to labels in tail position are themselves considered to be in tail position,
+ // a downstream context may access the label, and the upstream one would be none the wiser.
+ // This is necessary because tail calls may now occur in places where they seem syntactically
+ // impossible (since we consider jumps to labels that are in tail position, such as matchEnd(x) {x})
+
+
class Context() {
/** The current method */
var method: Symbol = NoSymbol
+ // symbols of label defs in this method that are in tail position
+ var tailLabels: Set[Symbol] = Set()
+
/** The current tail-call label */
var label: Symbol = NoSymbol
@@ -104,24 +118,20 @@ abstract class TailCalls extends Transform {
var failReason = defaultReason
var failPos = method.pos
- /** Has the label been accessed? */
- var accessed = false
-
def this(that: Context) = {
this()
this.method = that.method
this.tparams = that.tparams
this.tailPos = that.tailPos
- this.accessed = that.accessed
this.failPos = that.failPos
this.label = that.label
+ this.tailLabels = that.tailLabels
}
def this(dd: DefDef) {
this()
this.method = dd.symbol
this.tparams = dd.tparams map (_.symbol)
this.tailPos = true
- this.accessed = false
this.failPos = dd.pos
/** Create a new method symbol for the current method and store it in
@@ -141,14 +151,14 @@ abstract class TailCalls extends Transform {
def isEligible = method.isEffectivelyFinal
// @tailrec annotation indicates mandatory transformation
def isMandatory = method.hasAnnotation(TailrecClass) && !forMSIL
- def isTransformed = isEligible && accessed
+ def isTransformed = isEligible && accessed(label)
def tailrecFailure() = unit.error(failPos, "could not optimize @tailrec annotated " + method + ": " + failReason)
def newThis(pos: Position) = method.newValue(nme.THIS, pos, SYNTHETIC) setInfo currentClass.typeOfThis
override def toString(): String = (
"" + method.name + " tparams: " + tparams + " tailPos: " + tailPos +
- " accessed: " + accessed + "\nLabel: " + label + "\nLabel type: " + label.info
+ " Label: " + label + " Label type: " + label.info
)
}
@@ -206,7 +216,7 @@ abstract class TailCalls extends Transform {
def rewriteTailCall(recv: Tree): Tree = {
debuglog("Rewriting tail recursive call: " + fun.pos.lineContent.trim)
- ctx.accessed = true
+ accessed += ctx.label
typedPos(fun.pos)(Apply(Ident(ctx.label), recv :: transformArgs))
}
@@ -242,10 +252,16 @@ abstract class TailCalls extends Transform {
unit.error(tree.pos, "@tailrec annotated method contains no recursive calls")
}
}
- debuglog("Considering " + dd.name + " for tailcalls")
+
+ // labels are local to a method, so only traverse the rhs of a defdef
+ val collectTailPosLabels = new TailPosLabelsTraverser
+ collectTailPosLabels traverse rhs0
+ newCtx.tailLabels = collectTailPosLabels.tailLabels.toSet
+
+ debuglog("Considering " + dd.name + " for tailcalls, with labels in tailpos: "+ newCtx.tailLabels)
val newRHS = transform(rhs0, newCtx)
- deriveDefDef(tree)(rhs =>
+ deriveDefDef(tree){rhs =>
if (newCtx.isTransformed) {
/** We have rewritten the tree, but there may be nested recursive calls remaining.
* If @tailrec is given we need to fail those now.
@@ -270,8 +286,22 @@ abstract class TailCalls extends Transform {
newRHS
}
+ }
+
+ // a translated match
+ case Block(stats, expr) if stats forall hasSynthCaseSymbol =>
+ // the assumption is once we encounter a case, the remainder of the block will consist of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ treeCopy.Block(tree,
+ noTailTransforms(prologue) ++ transformTrees(cases),
+ transform(expr)
)
+ // a translated casedef
+ case LabelDef(_, _, body) if hasSynthCaseSymbol(tree) =>
+ deriveLabelDef(tree)(transform)
+
case Block(stats, expr) =>
treeCopy.Block(tree,
noTailTransforms(stats),
@@ -308,8 +338,18 @@ abstract class TailCalls extends Transform {
case Apply(fun, args) =>
if (fun.symbol == Boolean_or || fun.symbol == Boolean_and)
treeCopy.Apply(tree, fun, transformTrees(args))
- else
- rewriteApply(fun, fun, Nil, args)
+ else if (fun.symbol.isLabel && args.nonEmpty && args.tail.isEmpty && ctx.tailLabels(fun.symbol)) {
+ // this is to detect tail calls in translated matches:
+ // it's a one-argument call to a label that is in tail position and that looks like label(x) {x};
+ // thus, the argument to the call is in tail position and we don't need to jump to the label -- we tail-jump instead
+ val saved = ctx.tailPos
+ ctx.tailPos = true
+ debuglog("in tailpos label: "+ args.head)
+ val res = transform(args.head)
+ ctx.tailPos = saved
+ if (res ne args.head) res // we tail-called -- TODO: shield from false-positives where we rewrite but don't tail-call
+ else rewriteApply(fun, fun, Nil, args)
+ } else rewriteApply(fun, fun, Nil, args)
case Alternative(_) | Star(_) | Bind(_, _) =>
sys.error("We should've never gotten inside a pattern")
@@ -320,4 +360,66 @@ abstract class TailCalls extends Transform {
}
}
}
+
+ // collect the LabelDefs (generated by the pattern matcher) in a DefDef that are in tail position
+ // the labels all look like: matchEnd(x) {x}
+ // then, in a forward jump `matchEnd(expr)`, `expr` is considered in tail position (and the matchEnd jump is replaced by the jump generated by expr)
+ class TailPosLabelsTraverser extends Traverser {
+ val tailLabels = new collection.mutable.ListBuffer[Symbol]()
+
+ private var maybeTail: Boolean = true // since we start in the rhs of a DefDef
+
+ def traverse(tree: Tree, maybeTailNew: Boolean): Unit = {
+ val saved = maybeTail
+ maybeTail = maybeTailNew
+ try traverse(tree)
+ finally maybeTail = saved
+ }
+
+ def traverseNoTail(tree: Tree) = traverse(tree, false)
+ def traverseTreesNoTail(trees: List[Tree]) = trees foreach traverseNoTail
+
+ override def traverse(tree: Tree) = tree match {
+ case LabelDef(_, List(arg), body@Ident(_)) if arg.symbol == body.symbol => // we're looking for label(x){x} in tail position, since that means `a` is in tail position in a call `label(a)`
+ if (maybeTail) tailLabels += tree.symbol
+
+ // a translated casedef
+ case LabelDef(_, _, body) if hasSynthCaseSymbol(tree) =>
+ traverse(body)
+
+ // a translated match
+ case Block(stats, expr) if stats forall hasSynthCaseSymbol =>
+ // the assumption is once we encounter a case, the remainder of the block will consist of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+ traverseTreesNoTail(prologue) // selector (may be absent)
+ traverseTrees(cases)
+ traverse(expr)
+
+ case CaseDef(pat, guard, body) =>
+ traverse(body)
+
+ case Match(selector, cases) =>
+ traverseNoTail(selector)
+ traverseTrees(cases)
+
+ case dd @ DefDef(_, _, _, _, _, _) => // we are run per-method
+
+ case Block(stats, expr) =>
+ traverseTreesNoTail(stats)
+ traverse(expr)
+
+ case If(cond, thenp, elsep) =>
+ traverse(thenp)
+ traverse(elsep)
+
+ case Try(block, catches, finalizer) =>
+ traverseNoTail(block)
+ traverseTreesNoTail(catches)
+ traverseNoTail(finalizer)
+
+ case EmptyTree | Super(_, _) | This(_) | Select(_, _) | Ident(_) | Literal(_) | Function(_, _) | TypeTree() =>
+ case _ => super.traverse(tree)
+ }
+ }
}
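At the user level, tracking tail-position labels keeps recursive calls that end up inside pattern-matcher-generated labels (matchEnd and friends) eligible for the rewrite. A minimal sketch of the shape this is meant to cover, assuming the virtual pattern matcher (-Yvirtpatmat) translates the match into such a label block:

    @annotation.tailrec
    def length(xs: List[Int], acc: Int = 0): Int = xs match {
      case Nil     => acc
      case _ :: tl => length(tl, acc + 1)   // recursive call sits inside a translated case label
    }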
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index ee565530b7..e54e0289bb 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -168,7 +168,7 @@ abstract class UnCurry extends InfoTransform
private def nonLocalReturnTry(body: Tree, key: Symbol, meth: Symbol) = {
localTyper typed {
val extpe = nonLocalReturnExceptionType(meth.tpe.finalResultType)
- val ex = meth.newValue(body.pos, nme.ex) setInfo extpe
+ val ex = meth.newValue(nme.ex, body.pos) setInfo extpe
val pat = gen.mkBindForCase(ex, NonLocalReturnControlClass, List(meth.tpe.finalResultType))
val rhs = (
IF ((ex DOT nme.key)() OBJ_EQ Ident(key))
@@ -237,8 +237,10 @@ abstract class UnCurry extends InfoTransform
def targs = fun.tpe.typeArgs
def isPartial = fun.tpe.typeSymbol == PartialFunctionClass
+ // if the function was eta-expanded, it's not a match without a selector
if (fun1 ne fun) fun1
else {
+ assert(!(opt.virtPatmat && isPartial)) // empty-selector matches have already been translated into instantiations of anonymous (partial) functions
val (formals, restpe) = (targs.init, targs.last)
val anonClass = owner.newAnonymousFunctionClass(fun.pos, inConstructorFlag)
def parents =
@@ -286,52 +288,54 @@ abstract class UnCurry extends InfoTransform
def defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))
- val casesNoSynthCatchAll = dropSyntheticCatchAll(cases)
+// val casesNoSynthCatchAll = dropSyntheticCatchAll(cases)
gen.mkUncheckedMatch(
- if (casesNoSynthCatchAll exists treeInfo.isDefaultCase) Literal(Constant(true))
- else substTree(wrap(Match(selector, (casesNoSynthCatchAll map transformCase) :+ defaultCase)).duplicate)
+ if (cases exists treeInfo.isDefaultCase) Literal(Constant(true))
+ else substTree(wrap(Match(selector, (cases map transformCase) :+ defaultCase)).duplicate)
)
}
- override def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = {
- object noOne extends Transformer {
- override val treeCopy = newStrictTreeCopier // must duplicate everything
- val one = _match.tpe member newTermName("one")
- override def transform(tree: Tree): Tree = tree match {
- case Apply(fun, List(a)) if fun.symbol == one =>
- // blow one's argument away since all we want to know is whether the match succeeds or not
- // (the alternative, making `one` CBN, would entail moving away from Option)
- Apply(fun.duplicate, List(gen.mkZeroContravariantAfterTyper(a.tpe)))
- case _ =>
- super.transform(tree)
- }
- }
- substTree(Apply(Apply(TypeApply(Select(_match.duplicate, _match.tpe.member(newTermName("isSuccess"))), targs map (_.duplicate)), List(scrut.duplicate)), List(noOne.transform(matcher))))
- }
-
- override def caseVirtualizedMatchOpt(orig: Tree, zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], epilogue: Tree, wrap: Tree => Tree) = {
- object dropMatchResAssign extends Transformer {
- // override val treeCopy = newStrictTreeCopier // will duplicate below
- override def transform(tree: Tree): Tree = tree match {
- // don't compute the result of the match -- remove the block for the RHS (emitted by pmgen.one), except for the assignment to keepGoing
- case gen.VirtualCaseDef(assignKeepGoing, matchRes, zero) if assignKeepGoing.lhs.symbol eq keepGoing.symbol =>
- Block(List(assignKeepGoing), zero)
- case _ =>
- super.transform(tree)
- }
- }
- val statsNoMatchRes: List[Tree] = stats map (dropMatchResAssign.transform) toList
- val idaBlock = wrap(Block(
- zero ::
- x ::
- /* drop matchRes def */
- keepGoing ::
- statsNoMatchRes,
- NOT(REF(keepGoing.symbol)) // replace `if (keepGoing) throw new MatchError(...) else matchRes` epilogue by `!keepGoing`
- ))
- substTree(idaBlock.duplicate) // duplicate on block as a whole to ensure valdefs are properly cloned and substed
- }
+ override def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = {assert(false); orig}
+ // {
+ // object noOne extends Transformer {
+ // override val treeCopy = newStrictTreeCopier // must duplicate everything
+ // val one = _match.tpe member newTermName("one")
+ // override def transform(tree: Tree): Tree = tree match {
+ // case Apply(fun, List(a)) if fun.symbol == one =>
+ // // blow one's argument away since all we want to know is whether the match succeeds or not
+ // // (the alternative, making `one` CBN, would entail moving away from Option)
+ // Apply(fun.duplicate, List(gen.mkZeroContravariantAfterTyper(a.tpe)))
+ // case _ =>
+ // super.transform(tree)
+ // }
+ // }
+ // substTree(Apply(Apply(TypeApply(Select(_match.duplicate, _match.tpe.member(newTermName("isSuccess"))), targs map (_.duplicate)), List(scrut.duplicate)), List(noOne.transform(matcher))))
+ // }
+
+ override def caseVirtualizedMatchOpt(orig: Tree, zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], epilogue: Tree, wrap: Tree => Tree) = {assert(false); orig}
+ // {
+ // object dropMatchResAssign extends Transformer {
+ // // override val treeCopy = newStrictTreeCopier // will duplicate below
+ // override def transform(tree: Tree): Tree = tree match {
+ // // don't compute the result of the match -- remove the block for the RHS (emitted by pmgen.one), except for the assignment to keepGoing
+ // case gen.VirtualCaseDef(assignKeepGoing, matchRes, zero) if assignKeepGoing.lhs.symbol eq keepGoing.symbol =>
+ // Block(List(assignKeepGoing), zero)
+ // case _ =>
+ // super.transform(tree)
+ // }
+ // }
+ // val statsNoMatchRes: List[Tree] = stats map (dropMatchResAssign.transform) toList
+ // val idaBlock = wrap(Block(
+ // zero ::
+ // x ::
+ // /* drop matchRes def */
+ // keepGoing ::
+ // statsNoMatchRes,
+ // NOT(REF(keepGoing.symbol)) // replace `if (keepGoing) throw new MatchError(...) else matchRes` epilogue by `!keepGoing`
+ // ))
+ // substTree(idaBlock.duplicate) // duplicate on block as a whole to ensure valdefs are properly cloned and substed
+ // }
}
DefDef(m, isDefinedAtTransformer(fun.body))
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index ed9fee986f..e37f5784c9 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -684,7 +684,7 @@ trait ContextErrors {
def errMsg = {
val location = if (sym.isClassConstructor) owner0 else pre.widen
- underlying(sym).fullLocationString + " cannot be accessed in " +
+ underlyingSymbol(sym).fullLocationString + " cannot be accessed in " +
location + explanation
}
NormalTypeError(tree, errMsg, ErrorKinds.Access)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index 29831c8469..eb0d489901 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -256,8 +256,16 @@ abstract class Duplicators extends Analyzer {
case ldef @ LabelDef(name, params, rhs) =>
// log("label def: " + ldef)
ldef.tpe = null
- val params1 = params map (p => Ident(updateSym(p.symbol)))
- super.typed(treeCopy.LabelDef(tree, name, params1, rhs), mode, pt)
+ // since typer does not create the symbols for a LabelDef's params,
+ // we do that manually here -- we should really refactor LabelDef to be a subclass of DefDef
+ def newParam(p: Tree): Ident = {
+ val newsym = p.symbol.cloneSymbol //(context.owner) // TODO owner?
+ Ident(newsym.setInfo(fixType(p.symbol.info)))
+ }
+ val params1 = params map newParam
+ val rhs1 = (new TreeSubstituter(params map (_.symbol), params1) transform rhs) // TODO: duplicate?
+ rhs1.tpe = null
+ super.typed(treeCopy.LabelDef(tree, name, params1, rhs1), mode, pt)
case Bind(name, _) =>
// log("bind: " + tree)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 8b3bc253fd..a59622d4df 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -1099,7 +1099,7 @@ trait Infer {
// since instantiateTypeVar wants to modify the skolem that corresponds to the method's type parameter,
// and it uses the TypeVar's origin to locate it, deskolemize the existential skolem to the method tparam skolem
// (the existential skolem was created by adaptConstrPattern to introduce the type slack necessary to soundly deal with variant type parameters)
- case skolem if skolem.isExistentialSkolem => freshVar(skolem.deSkolemize.asInstanceOf[TypeSymbol])
+ case skolem if skolem.isGADTSkolem => freshVar(skolem.deSkolemize.asInstanceOf[TypeSymbol])
case p => freshVar(p)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index f32ad9293c..5287fad3bb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -169,6 +169,16 @@ trait MethodSynthesis {
self: Namer =>
import NamerErrorGen._
+
+ /** TODO - synthesize method.
+ */
+ def enterImplicitClass(tree: ClassDef) {
+ /** e.g.
+ val ClassDef(mods, name, tparams, impl) = tree
+ val converter = ImplicitClassConverter(tree).createAndEnterSymbol()
+ ...
+ */
+ }
def enterGetterSetter(tree: ValDef) {
val ValDef(mods, name, _, _) = tree
@@ -230,14 +240,33 @@ trait MethodSynthesis {
}
trait Derived {
+ /** The tree from which we are deriving a synthetic member. */
+ def tree: Tree
def name: TermName
def flagsMask: Long
def flagsExtra: Long
+
+ /** The tree, symbol, and type completer for the synthetic member. */
def completer(sym: Symbol): Type
+ def derivedSym: Symbol
+ def derivedTree: Tree
}
- trait DerivedFromValDef extends Derived {
- /** The declaration from which we are deriving.
- */
+
+ trait DerivedFromMemberDef extends Derived {
+ def tree: MemberDef
+
+ // Final methods to make the rest easier to reason about.
+ final def mods = tree.mods
+ final def basisSym = tree.symbol
+ final def enclClass = basisSym.enclClass
+ final def derivedFlags: Long = basisSym.flags & flagsMask | flagsExtra
+ }
+
+ trait DerivedFromClassDef extends DerivedFromMemberDef {
+ def tree: ClassDef
+ }
+
+ trait DerivedFromValDef extends DerivedFromMemberDef {
def tree: ValDef
/** Which meta-annotation is associated with this kind of entity.
@@ -245,14 +274,8 @@ trait MethodSynthesis {
*/
def category: Symbol
- // Final methods to make the rest easier to reason about.
- final def mods = tree.mods
- final def basisSym = tree.symbol
- final def enclClass = basisSym.enclClass
-
final def completer(sym: Symbol) = namerOf(sym).accessorTypeCompleter(tree, isSetter)
final def fieldSelection = Select(This(enclClass), basisSym)
- final def derivedFlags: Long = basisSym.flags & flagsMask | flagsExtra
final def derivedMods: Modifiers = mods & flagsMask | flagsExtra mapAnnotations (_ => Nil)
def derivedSym: Symbol = tree.symbol
@@ -312,6 +335,19 @@ trait MethodSynthesis {
private def setterDef = DefDef(derivedSym, setterRhs)
override def derivedTree: Tree = if (setterParam == NoSymbol) EmptyTree else setterDef
}
+
+ /** A synthetic method which performs the implicit conversion implied by
+ * the declaration of an implicit class. Yet to be written.
+ */
+ case class ImplicitClassConverter(tree: ClassDef) extends DerivedFromClassDef {
+ def completer(sym: Symbol): Type = ???
+ def derivedSym: Symbol = ???
+ def derivedTree: DefDef = ???
+ def flagsExtra: Long = ???
+ def flagsMask: Long = ???
+ def name: TermName = ???
+ }
+
case class Getter(tree: ValDef) extends DerivedGetter {
def name = tree.name
def category = GetterTargetClass
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index c5fb13a5a9..6b27c27652 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -665,6 +665,10 @@ trait Namers extends MethodSynthesis {
"If possible, define " + tree.symbol + " in " + owner.skipPackageObject + " instead."
)
}
+
+ // Suggested location only.
+ if (mods.isImplicit)
+ enterImplicitClass(tree)
}
// this logic is needed in case typer was interrupted half
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
index b060fd7121..de7f03dc62 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala
@@ -43,13 +43,13 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
val outer = newTermName("<outer>")
val runOrElse = newTermName("runOrElse")
val zero = newTermName("zero")
- val _match = newTermName("__match") // don't call it __match, since that will trigger virtual pattern matching...
+ val _match = newTermName("__match") // don't call it __match, since that will trigger virtual pattern matching...
def counted(str: String, i: Int) = newTermName(str+i)
}
object MatchTranslator {
- def apply(typer: Typer): MatchTranslation = {
+ def apply(typer: Typer): MatchTranslation with CodegenCore = {
import typer._
// typing `_match` to decide which MatchTranslator to create adds 4% to quick.comp.timer
newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match {
@@ -116,10 +116,6 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
trait MatchTranslation extends MatchMonadInterface { self: TreeMakers with CodegenCore =>
import typer.{typed, context, silent, reallyExists}
- private def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
- case TypeRef(_, RepeatedParamClass, args) => appliedType(SeqClass.typeConstructor, args)
- case _ => tp
- }
/** Implement a pattern match by turning its cases (including the implicit failure case)
* into the corresponding (monadic) extractors, and combining them with the `orElse` combinator.
@@ -131,18 +127,15 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
* thus, you must typecheck the result (and that will in turn translate nested matches)
* this could probably be optimized... (but note that the matchStrategy must be solved for each nested pattern match)
*/
- def translateMatch(scrut: Tree, cases: List[CaseDef], pt: Type): Tree = {
+ def translateMatch(scrut: Tree, cases: List[CaseDef], pt: Type, scrutType: Type, matchFailGenOverride: Option[Tree => Tree] = None): Tree = {
// we don't transform after typers
// (that would require much more sophistication when generating trees,
// and the only place that emits Matches after typers is for exception handling anyway)
assert(phase.id <= currentRun.typerPhase.id, phase)
- val scrutType = repeatedToSeq(elimAnonymousClass(scrut.tpe.widen))
-
- val scrutSym = freshSym(scrut.pos, pureType(scrutType))
- val okPt = repeatedToSeq(pt)
+ val scrutSym = freshSym(scrut.pos, pureType(scrutType)) setFlag (Flags.CASE | SYNTHETIC) // the flags allow us to detect generated matches by looking at the scrutinee's symbol (needed to avoid recursing endlessly on generated switches)
// pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental
- combineCases(scrut, scrutSym, cases map translateCase(scrutSym, okPt), okPt, matchOwner)
+ combineCases(scrut, scrutSym, cases map translateCase(scrutSym, pt), pt, matchOwner, matchFailGenOverride)
}
// return list of typed CaseDefs that are supported by the backend (typed/bind/wildcard)
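
repeatedToSeq is removed here because the rewrite of a repeated-parameter type T* to Seq[T] now happens at the call site (see the Typers.scala hunk further down, where translateMatch receives an explicit scrutType). A hedged standalone illustration, unrelated to the compiler internals above, of why the Seq view is the right one:

    // Inside a method, a repeated parameter Int* is already a Seq[Int],
    // so a match against it is simply a match against a Seq.
    object RepeatedToSeqDemo extends App {
      def describe(xs: Int*): String = xs match {
        case Seq()         => "empty"
        case Seq(x)        => "one: " + x
        case Seq(x, y, _*) => "starts with " + x + ", " + y
      }
      println(describe())        // empty
      println(describe(1))       // one: 1
      println(describe(1, 2, 3)) // starts with 1, 2
    }
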
@@ -154,13 +147,12 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
// if they're already simple enough to be handled by the back-end, we're done
if (caseDefs forall treeInfo.isCatchCase) caseDefs
else {
- val okPt = repeatedToSeq(pt)
val switch = {
val bindersAndCases = caseDefs map { caseDef =>
// generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there)
// if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this)
val caseScrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
- (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, okPt)(caseDef), EmptySubstitution))
+ (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution))
}
(emitTypeSwitch(bindersAndCases, pt) map (_.map(fixerUpper(matchOwner, pos).apply(_).asInstanceOf[CaseDef])))
@@ -168,7 +160,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
val catches = switch getOrElse {
val scrutSym = freshSym(pos, pureType(ThrowableClass.tpe))
- val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, okPt)(caseDef), EmptySubstitution))}
+ val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))}
val exSym = freshSym(pos, pureType(ThrowableClass.tpe), "ex")
@@ -177,7 +169,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer =>
CaseDef(
Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping?
EmptyTree,
- combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, scrut => Throw(CODE.REF(exSym)))
+ combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(CODE.REF(exSym))))
)
})
}
@@ -706,10 +698,10 @@ class Foo(x: Other) { x._1 } // no error in this order
def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree]) =
(cases, Nil)
- def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] =
+ def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree]): Option[Tree] =
None
- // for catch
+ // for catch (no need to customize match failure)
def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] =
None
@@ -733,23 +725,25 @@ class Foo(x: Other) { x._1 } // no error in this order
private[this] var currSub: Substitution = null
// build Tree that chains `next` after the current extractor
- def chainBefore(next: Tree, pt: Type): Tree
+ def chainBefore(next: Tree)(casegen: Casegen): Tree
+ }
+
+ trait NoNewBinders extends TreeMaker {
+ protected val localSubstitution: Substitution = EmptySubstitution
}
- case class TrivialTreeMaker(tree: Tree) extends TreeMaker {
- val localSubstitution: Substitution = EmptySubstitution
- def chainBefore(next: Tree, pt: Type): Tree = tree
+ case class TrivialTreeMaker(tree: Tree) extends TreeMaker with NoNewBinders {
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = tree
}
- case class BodyTreeMaker(body: Tree, matchPt: Type) extends TreeMaker {
- val localSubstitution: Substitution = EmptySubstitution
- def chainBefore(next: Tree, pt: Type): Tree = // assert(next eq EmptyTree)
- atPos(body.pos)(substitution(codegen.one(body, body.tpe, matchPt))) // since SubstOnly treemakers are dropped, need to do it here
+ case class BodyTreeMaker(body: Tree, matchPt: Type) extends TreeMaker with NoNewBinders {
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(next eq EmptyTree)
+ atPos(body.pos)(casegen.one(substitution(body))) // since SubstOnly treemakers are dropped, need to do it here
}
case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker {
val localSubstitution = Substitution(prevBinder, CODE.REF(nextBinder))
- def chainBefore(next: Tree, pt: Type): Tree = substitution(next)
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = substitution(next)
}
abstract class FunTreeMaker extends TreeMaker {
@@ -766,8 +760,8 @@ class Foo(x: Other) { x._1 } // no error in this order
lazy val nextBinder = freshSym(pos, nextBinderTp)
lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder)))
- def chainBefore(next: Tree, pt: Type): Tree =
- atPos(pos)(codegen.flatMapCond(cond, res, nextBinder, nextBinderTp, substitution(next)))
+ def chainBefore(next: Tree)(casegen: Casegen): Tree =
+ atPos(pos)(casegen.flatMapCond(cond, res, nextBinder, substitution(next)))
}
/**
@@ -778,11 +772,11 @@ class Foo(x: Other) { x._1 } // no error in this order
* in this function's body, and all the subsequent ones, references to the symbols in `from` will be replaced by the corresponding tree in `to`
*/
case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol, localSubstitution: Substitution)(extractorReturnsBoolean: Boolean) extends FunTreeMaker {
- def chainBefore(next: Tree, pt: Type): Tree = {
- val condAndNext = extraCond map (codegen.ifThenElseZero(_, next)) getOrElse next
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
+ val condAndNext = extraCond map (casegen.ifThenElseZero(_, next)) getOrElse next
atPos(extractor.pos)(
- if (extractorReturnsBoolean) codegen.flatMapCond(extractor, CODE.UNIT, nextBinder, nextBinder.info.widen, substitution(condAndNext))
- else codegen.flatMap(extractor, nextBinder, substitution(condAndNext))
+ if (extractorReturnsBoolean) casegen.flatMapCond(extractor, CODE.UNIT, nextBinder, substitution(condAndNext))
+ else casegen.flatMap(extractor, nextBinder, substitution(condAndNext))
)
}
@@ -791,10 +785,10 @@ class Foo(x: Other) { x._1 } // no error in this order
// TODO: allow user-defined unapplyProduct
case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree], localSubstitution: Substitution) extends TreeMaker { import CODE._
- def chainBefore(next: Tree, pt: Type): Tree = {
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val nullCheck = REF(prevBinder) OBJ_NE NULL
val cond = extraCond map (nullCheck AND _) getOrElse nullCheck
- codegen.ifThenElseZero(cond, substitution(next))
+ casegen.ifThenElseZero(cond, substitution(next))
}
override def toString = "P"+(prevBinder, extraCond getOrElse "", localSubstitution)
@@ -907,61 +901,39 @@ class Foo(x: Other) { x._1 } // no error in this order
override def toString = "ET"+(prevBinder, patTree)
}
- case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker {
+ case class AlternativesTreeMaker(prevBinder: Symbol, var altss: List[List[TreeMaker]], pos: Position) extends TreeMaker with NoNewBinders {
// don't substitute prevBinder to nextBinder, a set of alternatives does not need to introduce a new binder, simply reuse the previous one
- val localSubstitution: Substitution = EmptySubstitution
override private[TreeMakers] def incorporateOuterSubstitution(outerSubst: Substitution): Unit = {
super.incorporateOuterSubstitution(outerSubst)
altss = altss map (alts => propagateSubstitution(alts, substitution))
}
- def chainBefore(next: Tree, pt: Type): Tree = { import CODE._
- // next does not contain deftrees, is pretty short
- val canDuplicate = {
- var okToInline = true
- var sizeBudget = 100 / (altss.length max 1) // yep, totally arbitrary!
- object travOkToInline extends Traverser { override def traverse(tree: Tree): Unit = if (sizeBudget >= 0) { sizeBudget -= 1; tree match {
- case TypeApply(_, _) | Apply(_, _) | Select(_, _)
- | Block(_, _) | Assign(_, _) | If(_, _, _) | Typed(_, _) => super.traverse(tree) // these are allowed if their subtrees are
- case EmptyTree | This(_) | New(_) | Literal(_) | Ident(_) => // these are always ok
- case _ if tree.isType => // these are always ok
- case _ => okToInline = false //; println("not inlining: "+ (tree, tree.getClass))
- }}}
- travOkToInline.traverse(next)
- // println("(okToInline, sizeBudget): "+ (okToInline, sizeBudget))
- okToInline && sizeBudget > 0 // must be strict comparison
- }
+ def chainBefore(next: Tree)(codegenAlt: Casegen): Tree = { import CODE._
+ atPos(pos){
+ // one alternative may still generate multiple trees (e.g., an extractor call + equality test)
+ // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers
+ val combinedAlts = altss map (altTreeMakers =>
+ ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(TRUE)))(casegen))
+ )
- atPos(pos)(
- if (canDuplicate) {
- altss map {altTreeMakers =>
- combineExtractors(altTreeMakers :+ TrivialTreeMaker(substitution(next).duplicate), pt)
- } reduceLeft codegen.typedOrElse(pt)
- } else {
- val rest = freshSym(pos, functionType(List(), inMatchMonad(pt)), "rest")
- // rest.info.member(nme.apply).withAnnotation(AnnotationInfo(ScalaInlineClass.tpe, Nil, Nil))
-
- // one alternative may still generate multiple trees (e.g., an extractor call + equality test)
- // (for now,) alternatives may not bind variables (except wildcards), so we don't care about the final substitution built internally by makeTreeMakers
- val combinedAlts = altss map (altTreeMakers =>
- combineExtractors(altTreeMakers :+ TrivialTreeMaker(REF(rest) APPLY ()), pt)
- )
- BLOCK(
- VAL(rest) === Function(Nil, substitution(next)),
- combinedAlts reduceLeft codegen.typedOrElse(pt)
- )
- }
- )
+ val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanClass.tpe)(combinedAlts, Some(x => FALSE))
+ codegenAlt.ifThenElseZero(findAltMatcher, substitution(next))
+ }
}
}
- case class GuardTreeMaker(guardTree: Tree) extends TreeMaker {
- val localSubstitution: Substitution = EmptySubstitution
- def chainBefore(next: Tree, pt: Type): Tree = codegen.flatMapGuard(substitution(guardTree), next)
+ case class GuardTreeMaker(guardTree: Tree) extends TreeMaker with NoNewBinders {
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = casegen.flatMapGuard(substitution(guardTree), next)
override def toString = "G("+ guardTree +")"
}
+ // combineExtractors changes the current substitution's of the tree makers in `treeMakers`
+ // requires propagateSubstitution(treeMakers) has been called
+ def combineExtractors(treeMakers: List[TreeMaker])(casegen: Casegen): Tree =
+ treeMakers.foldRight(EmptyTree: Tree)((a, b) => a.chainBefore(b)(casegen))
+
+
def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker])
// a foldLeft to accumulate the localSubstitution left-to-right
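
The recurring change in this hunk is that chainBefore no longer takes an expected type: it takes the Casegen that renders the current layer, and combineExtractors folds the makers right-to-left so the case body ends up innermost. A hedged toy model with plain functions (none of these names exist in the compiler) showing the shape of that fold:

    // Toy model of chainBefore/combineExtractors: each "tree maker" wraps the
    // continuation it is given; folding right chains them so the body
    // (the last maker) is innermost.
    object ChainBeforeDemo extends App {
      type Casegen = String => String              // stand-in for the codegen
      type Maker   = (String, Casegen) => String   // stand-in for TreeMaker.chainBefore

      val typeTest: Maker = (next, gen) => "if (x.isInstanceOf[Int]) { " + next + " } else fail"
      val guard:    Maker = (next, gen) => "if (x > 0) { " + next + " } else fail"
      val body:     Maker = (next, gen) => gen("x + 1") // a body ignores `next`

      def combineExtractors(makers: List[Maker])(gen: Casegen): String =
        makers.foldRight("<empty>")((maker, next) => maker(next, gen))

      println(combineExtractors(List(typeTest, guard, body))(res => "return " + res))
      // if (x.isInstanceOf[Int]) { if (x > 0) { return x + 1 } else fail } else fail
    }
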
@@ -976,51 +948,42 @@ class Foo(x: Other) { x._1 } // no error in this order
}
// calls propagateSubstitution on the treemakers
- def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol): Tree = {
- val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution)) // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
- combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, CODE.MATCHERROR(_))
+ def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = {
+ // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them
+ val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution))
+ combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, matchFailGenOverride)
}
- def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFail: Tree => Tree): Tree = fixerUpper(owner, scrut.pos){
- emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt).getOrElse{
- val (matcher, hasDefault, toHoist) =
- if (casesNoSubstOnly nonEmpty) {
- // when specified, need to propagate pt explicitly (type inferencer can't handle it)
- val optPt =
- if (isFullyDefined(pt)) inMatchMonad(pt)
- else NoType
+ def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree =
+ fixerUpper(owner, scrut.pos){
+ val ptDefined = if (isFullyDefined(pt)) pt else NoType
+ def matchFailGen = (matchFailGenOverride orElse Some(CODE.MATCHERROR(_: Tree)))
- // do this check on casesNoSubstOnly, since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
+ emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride).getOrElse{
+ if (casesNoSubstOnly nonEmpty) {
+ // before optimizing, check casesNoSubstOnly for presence of a default case,
+ // since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one
// exhaustivity and reachability must be checked before optimization as well
- // TODO: improve, a trivial type test before the body still makes for a default case
- // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op)
- val hasDefault = casesNoSubstOnly.nonEmpty && {
- val nonTrivLast = casesNoSubstOnly.last
- nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker]
- }
+ // TODO: improve notion of trivial/irrefutable -- a trivial type test before the body still makes for a default case
+ // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op)
+ // irrefutability checking should use the approximation framework also used for CSE, unreachability and exhaustivity checking
+ val synthCatchAll =
+ if (casesNoSubstOnly.nonEmpty && {
+ val nonTrivLast = casesNoSubstOnly.last
+ nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker]
+ }) None
+ else matchFailGen
val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt)
- val combinedCases =
- cases.map(combineExtractors(_, pt)).reduceLeft(codegen.typedOrElse(optPt))
+ val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases map combineExtractors, synthCatchAll)
- (combinedCases, hasDefault, toHoist)
- } else (codegen.zero, false, Nil)
-
- // catch-all
- val catchAll =
- if (hasDefault) None // no need for a catch-all when there's already a default
- else Some(matchFail)
- val expr = codegen.runOrElse(scrut, scrutSym, matcher, if (isFullyDefined(pt)) pt else NoType, catchAll)
- if (toHoist isEmpty) expr
- else Block(toHoist, expr)
+ if (toHoist isEmpty) matchRes else Block(toHoist, matchRes)
+ } else {
+ codegen.matcher(scrut, scrutSym, pt)(Nil, matchFailGen)
+ }
+ }
}
- }
-
- // combineExtractors changes the current substitution's of the tree makers in `treeMakers`
- // requires propagateSubstitution(treeMakers) has been called
- def combineExtractors(treeMakers: List[TreeMaker], pt: Type): Tree =
- treeMakers.foldRight (EmptyTree: Tree) (_.chainBefore(_, pt))
// TODO: do this during tree construction, but that will require tracking the current owner in treemakers
// TODO: assign more fine-grained positions
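
The catch-all decision above now reads: if the last case (after substitution propagation) starts with a BodyTreeMaker, the user already supplied a default and no synthetic catch-all is emitted; otherwise the failure generator is the caller's override (for example FALSE when generating isDefinedAt) or a MatchError thrower. A hedged sketch of that orElse decision using stand-in types:

    // Stand-ins only: a String result models the generated tree.
    object CatchAllDemo extends App {
      def synthCatchAll(lastCaseIsDefault: Boolean,
                        matchFailGenOverride: Option[String => String]): Option[String => String] =
        if (lastCaseIsDefault) None
        else matchFailGenOverride orElse Some((scrut: String) => "throw new MatchError(" + scrut + ")")

      println(synthCatchAll(true, None))                                       // None: the user default suffices
      println(synthCatchAll(false, None) map (_("x")))                         // Some(throw new MatchError(x))
      println(synthCatchAll(false, Some((_: String) => "false")) map (_("x"))) // Some(false)
    }
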
@@ -1070,34 +1033,36 @@ class Foo(x: Other) { x._1 } // no error in this order
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
trait CodegenCore extends MatchMonadInterface {
private var ctr = 0
- def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") = {ctr += 1;
- // assert(owner ne null)
- // assert(owner ne NoSymbol)
- NoSymbol.newTermSymbol(vpmName.counted(prefix, ctr), pos) setInfo repackExistential(tp)
- }
+ def freshName(prefix: String) = {ctr += 1; vpmName.counted(prefix, ctr)}
+
+ // assert(owner ne null); assert(owner ne NoSymbol)
+ def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") =
+ NoSymbol.newTermSymbol(freshName(prefix), pos) setInfo /*repackExistential*/(tp)
// codegen relevant to the structure of the translation (how extractors are combined)
trait AbsCodegen {
- def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, catchAll: Option[Tree => Tree]): Tree
- def one(res: Tree, bodyPt: Type, matchPt: Type): Tree
- def zero: Tree
- def flatMap(prev: Tree, b: Symbol, next: Tree): Tree
- def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree
-
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree
- def flatMapGuard(cond: Tree, next: Tree): Tree
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree
- def fun(arg: Symbol, body: Tree): Tree
- def ifThenElseZero(c: Tree, then: Tree): Tree
- def _equals(checker: Tree, binder: Symbol): Tree
+ // local / context-free
def _asInstanceOf(b: Symbol, tp: Type): Tree
+ def _equals(checker: Tree, binder: Symbol): Tree
+ def _isInstanceOf(b: Symbol, tp: Type): Tree
+ def and(a: Tree, b: Tree): Tree
+ def drop(tgt: Tree)(n: Int): Tree
+ def index(tgt: Tree)(i: Int): Tree
def mkZero(tp: Type): Tree
-
def tupleSel(binder: Symbol)(i: Int): Tree
- def index(tgt: Tree)(i: Int): Tree
- def drop(tgt: Tree)(n: Int): Tree
- def and(a: Tree, b: Tree): Tree
- def _isInstanceOf(b: Symbol, tp: Type): Tree
+ }
+
+ // structure
+ trait Casegen extends AbsCodegen { import CODE._
+ def one(res: Tree): Tree
+
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree
+ def flatMapGuard(cond: Tree, next: Tree): Tree
+ def ifThenElseZero(c: Tree, then: Tree): Tree = IF (c) THEN then ELSE zero
+ protected def zero: Tree
}
def codegen: AbsCodegen
@@ -1112,21 +1077,20 @@ class Foo(x: Other) { x._1 } // no error in this order
def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n))
def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya
def and(a: Tree, b: Tree): Tree = a AND b
- def ifThenElseZero(c: Tree, then: Tree): Tree = IF (c) THEN then ELSE zero
// the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise, as neither tp nor pt need contain an abstract type, we're just casting wildly)
- def _asInstanceOf(t: Tree, tp: Type, force: Boolean = false): Tree = { val tpX = repackExistential(tp)
+ def _asInstanceOf(t: Tree, tp: Type, force: Boolean = false): Tree = { val tpX = /*repackExistential*/(tp)
if (!force && (t.tpe ne NoType) && t.isTyped && typesConform(t.tpe, tpX)) t //{ println("warning: emitted redundant asInstanceOf: "+(t, t.tpe, tp)); t } //.setType(tpX)
else gen.mkAsInstanceOf(t, tpX, true, false)
}
- def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), repackExistential(tp), true, false)
- // { val tpX = repackExistential(tp)
+ def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), /*repackExistential*/(tp), true, false)
+ // { val tpX = /*repackExistential*/(tp)
// if (typesConform(b.info, tpX)) { println("warning: emitted spurious isInstanceOf: "+(b, tp)); TRUE }
// else gen.mkIsInstanceOf(REF(b), tpX, true, false)
// }
- def _asInstanceOf(b: Symbol, tp: Type): Tree = { val tpX = repackExistential(tp)
+ def _asInstanceOf(b: Symbol, tp: Type): Tree = { val tpX = /*repackExistential*/(tp)
if (typesConform(b.info, tpX)) REF(b) //{ println("warning: emitted redundant asInstanceOf: "+(b, b.info, tp)); REF(b) } //.setType(tpX)
else gen.mkAsInstanceOf(REF(b), tpX, true, false)
}
@@ -1166,28 +1130,29 @@ class Foo(x: Other) { x._1 } // no error in this order
trait PureCodegen extends CodegenCore with PureMatchMonadInterface {
def codegen: AbsCodegen = pureCodegen
- object pureCodegen extends CommonCodegen { import CODE._
+ object pureCodegen extends CommonCodegen with Casegen { import CODE._
//// methods in MatchingStrategy (the monad companion) -- used directly in translation
// __match.runOrElse(`scrut`)(`scrutSym` => `matcher`)
// TODO: consider catchAll, or virtualized matching will break in exception handlers
- def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, catchAll: Option[Tree => Tree]): Tree
- = _match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, matcher))
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
+ _match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, cases map (f => f(this)) reduceLeft typedOrElse))
+
// __match.one(`res`)
- def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = (_match(vpmName.one)) (res)
+ def one(res: Tree): Tree = (_match(vpmName.one)) (res)
// __match.zero
- def zero: Tree = _match(vpmName.zero)
+ protected def zero: Tree = _match(vpmName.zero)
// __match.guard(`c`, `then`)
- def guard(c: Tree, then: Tree, tp: Type): Tree = _match(vpmName.guard) APPLY (c, then)
+ def guard(c: Tree, then: Tree): Tree = _match(vpmName.guard) APPLY (c, then)
//// methods in the monad instance -- used directly in translation
// `prev`.flatMap(`b` => `next`)
def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = (prev DOT vpmName.flatMap)(fun(b, next))
// `thisCase`.orElse(`elseCase`)
- def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = (thisCase DOT vpmName.orElse) APPLY (elseCase)
+ def typedOrElse(thisCase: Tree, elseCase: Tree): Tree = (thisCase DOT vpmName.orElse) APPLY (elseCase)
// __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`)
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree = flatMap(guard(cond, res, nextBinderTp), nextBinder, next)
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = flatMap(guard(cond, res), nextBinder, next)
// __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`)
- def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), pureType(UnitClass.tpe), next)
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), next)
}
}
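
The pure codegen above emits calls against a user-supplied __match object whose operations the comments spell out (runOrElse, one, zero, guard, flatMap, orElse). A hedged, standalone model of such a matching strategy built on Option -- an illustration of the protocol, not the compiler's code:

    object OptionMatchDemo extends App {
      // A matching strategy with the shape the pure codegen targets;
      // the name __match is only illustrative here.
      object __match {
        def runOrElse[T, U](scrut: T)(matcher: T => Option[U]): U =
          matcher(scrut) getOrElse (throw new MatchError(scrut))
        def one[T](x: T): Option[T] = Some(x)
        def zero: Option[Nothing]   = None
        def guard[T](cond: Boolean, thenBranch: => T): Option[T] =
          if (cond) Some(thenBranch) else None
      }

      // hand-written translation of:  (x: Any) match { case i: Int if i > 0 => i + 1 }
      def inc(x: Any): Int =
        __match.runOrElse(x) { x1 =>
          __match.guard(x1.isInstanceOf[Int], x1.asInstanceOf[Int]) flatMap { i =>
            __match.guard(i > 0, i) flatMap { i2 =>
              __match.one(i2 + 1)
            }
          } orElse __match.zero
        }

      println(inc(4)) // 5
      // inc("no") or inc(-1) would throw a MatchError, like a non-exhaustive match
    }
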
@@ -1456,8 +1421,8 @@ class Foo(x: Other) { x._1 } // no error in this order
}
// TODO: finer-grained duplication
- def chainBefore(next: Tree, pt: Type): Tree = // assert(codegen eq optimizedCodegen)
- atPos(pos)(optimizedCodegen.flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate))
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = // assert(codegen eq optimizedCodegen)
+ atPos(pos)(casegen.asInstanceOf[optimizedCodegen.OptimizedCasegen].flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate))
}
case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._
@@ -1474,12 +1439,11 @@ class Foo(x: Other) { x._1 } // no error in this order
oldSubs.foldLeft(Substitution(from, to))(_ >> _)
}
- def chainBefore(next: Tree, pt: Type): Tree = {
+ def chainBefore(next: Tree)(casegen: Casegen): Tree = {
val cond = REF(dropped_priors.reverse.collectFirst{case (_, Some(ctm: ReusedCondTreeMaker)) => ctm}.get.storedCond)
- IF (cond) THEN BLOCK(
- substitution(next).duplicate // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift, and its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
- ) ELSE codegen.zero
+ // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift, and in its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S)
+ casegen.ifThenElseZero(cond, substitution(next).duplicate)
}
}
}
@@ -1551,7 +1515,7 @@ class Foo(x: Other) { x._1 } // no error in this order
}
}
- class RegularSwitchMaker(scrutSym: Symbol) extends SwitchMaker {
+ class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree]) extends SwitchMaker {
val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe)
val alternativesSupported = true
@@ -1574,14 +1538,14 @@ class Foo(x: Other) { x._1 } // no error in this order
}
def defaultSym: Symbol = scrutSym
- def defaultBody: Tree = { import CODE._; MATCHERROR(REF(scrutSym)) }
+ def defaultBody: Tree = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse MATCHERROR(REF(scrutSym)) }
def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) {
DEFAULT ==> body
}}
}
- override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] = { import CODE._
- val regularSwitchMaker = new RegularSwitchMaker(scrutSym)
+ override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree]): Option[Tree] = { import CODE._
+ val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride)
// TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result
if (regularSwitchMaker.switchableTpe(scrutSym.tpe)) {
val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt)
@@ -1589,7 +1553,7 @@ class Foo(x: Other) { x._1 } // no error in this order
else {
// match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut)
val scrutToInt: Tree =
- if(scrutSym.tpe =:= IntClass.tpe) REF(scrutSym)
+ if (scrutSym.tpe =:= IntClass.tpe) REF(scrutSym)
else (REF(scrutSym) DOT (nme.toInt))
Some(BLOCK(
VAL(scrutSym) === scrut,
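
RegularSwitchMaker accepts Byte, Short, Int and Char scrutinees, and the block above converts a non-Int scrutinee with .toInt so the backend still sees an int switch. A hedged source-level illustration of the same idea:

    // A match on Char values can be switched on after widening to Int,
    // which is essentially what scrutToInt arranges.
    object CharSwitchDemo extends App {
      def classify(c: Char): String = c.toInt match {
        case 10 | 13 => "newline"
        case 32      => "space"
        case _       => "other"
      }
      println(classify('\n')) // newline
      println(classify(' '))  // space
      println(classify('x'))  // other
    }
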
@@ -1657,8 +1621,7 @@ class Foo(x: Other) { x._1 } // no error in this order
// for example, `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard
// this is a special instance of the advanced inlining optimization that takes a method call on
// an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases
- object optimizedCodegen extends CommonCodegen /*with AbsOptimizedCodegen*/ { import CODE._
- lazy val zeroSym = freshSym(NoPosition, optionType(NothingClass.tpe), "zero")
+ object optimizedCodegen extends CommonCodegen { import CODE._
/** Inline runOrElse and get rid of Option allocations
*
@@ -1666,67 +1629,92 @@ class Foo(x: Other) { x._1 } // no error in this order
* the matcher's optional result is encoded as a flag, keepGoing, where keepGoing == true encodes result.isEmpty,
* if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x
*/
- @inline private def dontStore(tp: Type) = (tp.typeSymbol eq UnitClass) || (tp.typeSymbol eq NothingClass)
- lazy val keepGoing = freshSym(NoPosition, BooleanClass.tpe, "keepGoing") setFlag MUTABLE
- lazy val matchRes = freshSym(NoPosition, AnyClass.tpe, "matchRes") setFlag MUTABLE
- def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, catchAll: Option[Tree => Tree]) = {
- matchRes.info = if (resTp ne NoType) resTp.widen else AnyClass.tpe // we don't always know resTp, and it might be AnyVal, in which case we can't assign NULL
- if (dontStore(resTp)) matchRes resetFlag MUTABLE // don't assign to Unit-typed var's, in fact, make it a val -- conveniently also works around SI-5245
- BLOCK(
- VAL(zeroSym) === REF(NoneModule), // TODO: can we just get rid of explicitly emitted zero? don't know how to do that as a local rewrite...
- VAL(scrutSym) === scrut,
- VAL(matchRes) === mkZero(matchRes.info), // must cast to deal with GADT typing, hence the private mkZero above
- VAL(keepGoing) === TRUE,
- matcher,
- catchAll map { catchAllGen => (IF (REF(keepGoing)) THEN catchAllGen(REF(scrutSym)) ELSE REF(matchRes)) } getOrElse REF(matchRes)
- )
- }
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = {
+ val matchEnd = NoSymbol.newLabel(freshName("matchEnd"), NoPosition) setFlag (SYNTHETIC | Flags.CASE)
+ val matchRes = NoSymbol.newValueParameter(newTermName("x"), NoPosition, SYNTHETIC) setInfo restpe
+ matchEnd setInfo MethodType(List(matchRes), restpe)
+
+ def newCaseSym = NoSymbol.newLabel(freshName("case"), NoPosition) setInfo MethodType(Nil, restpe) setFlag (SYNTHETIC | Flags.CASE)
+ var nextCase = newCaseSym
+ def caseDef(mkCase: Casegen => Tree): Tree = {
+ val currCase = nextCase
+ nextCase = newCaseSym
+ val casegen = new OptimizedCasegen(matchEnd, nextCase)
+ LabelDef(currCase, Nil, mkCase(casegen))
+ }
- // only used to wrap the RHS of a body
- def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = {
- BLOCK(
- REF(keepGoing) === FALSE, // comes before assignment to matchRes, so the latter is in tail positions (can ignore the trailing zero -- will disappear when we flatten blocks, which is TODO)
- if (dontStore(matchPt)) res else (REF(matchRes) === res), // runOrElse hasn't been called yet, so matchRes.isMutable is irrelevant, also, tp may be a subtype of resTp used in runOrElse...
- zero // to have a nice lub for lubs -- otherwise we'll get a boxed unit here -- TODO: get rid of all those dangling else zero's
+ def catchAll = matchFailGen map { matchFailGen =>
+ val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives
+ LabelDef(nextCase, Nil, matchEnd APPLY (matchFailGen(scrutRef))) // need to jump to matchEnd with result generated by matchFailGen (could be `FALSE` for isDefinedAt)
+ } toList
+ // catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default)
+ // if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd
+
+ // the generated block is taken apart in TailCalls under the following assumptions
+ // the assumption is once we encounter a case, the remainder of the block will consist of cases
+ // the prologue may be empty, usually it is the valdef that stores the scrut
+ // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef])
+
+ // scrutSym == NoSymbol when generating an alternatives matcher
+ val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives
+ Block(
+ scrutDef ++ (cases map caseDef) ++ catchAll,
+ LabelDef(matchEnd, List(matchRes), REF(matchRes))
)
}
- def zero: Tree = REF(zeroSym)
-
- def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
- val tp = inMatchMonad(b.tpe)
- val prevSym = freshSym(prev.pos, tp, "o")
- val isEmpty = tp member vpmName.isEmpty
- val get = tp member vpmName.get
+ class OptimizedCasegen(matchEnd: Symbol, nextCase: Symbol) extends CommonCodegen with Casegen {
+ def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree =
+ optimizedCodegen.matcher(scrut, scrutSym, restpe)(cases, matchFailGen)
+
+ // only used to wrap the RHS of a body
+ // res: T
+ // returns MatchMonad[T]
+ def one(res: Tree): Tree = matchEnd APPLY (res)
+ protected def zero: Tree = nextCase APPLY ()
+
+ // prev: MatchMonad[T]
+ // b: T
+ // next: MatchMonad[U]
+ // returns MatchMonad[U]
+ def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = {
+ val tp = inMatchMonad(b.tpe)
+ val prevSym = freshSym(prev.pos, tp, "o")
+ val isEmpty = tp member vpmName.isEmpty
+ val get = tp member vpmName.get
+
+ BLOCK(
+ VAL(prevSym) === prev,
+ // must be isEmpty and get as we don't control the target of the call (prev is an extractor call)
+ ifThenElseZero(NOT(prevSym DOT isEmpty), Substitution(b, prevSym DOT get)(next))
+ )
+ }
- BLOCK(
- VAL(prevSym) === prev,
- IF (prevSym DOT isEmpty) THEN zero ELSE Substitution(b, prevSym DOT get)(next) // must be isEmpty and get as we don't control the target of the call (could be the result of a user-defined extractor)
- )
- }
+ // cond: Boolean
+ // res: T
+ // nextBinder: T
+ // next == MatchMonad[U]
+ // returns MatchMonad[U]
+ def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree =
+ ifThenElseZero(cond, BLOCK(
+ VAL(nextBinder) === res,
+ next
+ ))
- def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = {
- BLOCK(
- thisCase,
- IF (REF(keepGoing)) THEN elseCase ELSE zero // leave trailing zero for now, otherwise typer adds () anyway
- )
+ // guardTree: Boolean
+ // next: MatchMonad[T]
+ // returns MatchMonad[T]
+ def flatMapGuard(guardTree: Tree, next: Tree): Tree =
+ ifThenElseZero(guardTree, next)
+
+ def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree =
+ ifThenElseZero(cond, BLOCK(
+ condSym === TRUE,
+ nextBinder === res,
+ next
+ ))
}
- def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree =
- IF (cond) THEN BLOCK(
- VAL(nextBinder) === res,
- next
- ) ELSE zero
-
- def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree =
- IF (cond) THEN BLOCK(
- condSym === TRUE,
- nextBinder === res,
- next
- ) ELSE zero
-
- def flatMapGuard(guardTree: Tree, next: Tree): Tree =
- IF (guardTree) THEN next ELSE zero
}
}
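
The rewritten optimizedCodegen replaces the keepGoing/matchRes variables with labels: each case gets a label that jumps to the next case's label on failure (zero) and to matchEnd with its result on success (one), and the synthetic catch-all is just one more label that jumps to matchEnd with the failure result. A hedged sketch of the generated control flow, using local defs in place of LabelDefs (all names invented):

    // Shape of the generated code for:
    //   x match { case i: Int => i + 1; case s: String => s.length }
    object LabelJumpDemo extends App {
      def run(x: Any): Int = {
        def matchEnd(res: Int): Int = res                    // LabelDef(matchEnd, List(matchRes), ...)
        def case2(): Int =                                    // second case label
          if (x.isInstanceOf[String]) matchEnd(x.asInstanceOf[String].length)
          else matchFail()
        def case1(): Int =                                    // first case label
          if (x.isInstanceOf[Int]) matchEnd(x.asInstanceOf[Int] + 1)
          else case2()                                        // "zero" = jump to the next case
        def matchFail(): Int = throw new MatchError(x)        // synthetic catch-all (matchFailGen)
        case1()
      }
      println(run(41))    // 42
      println(run("abc")) // 3
    }
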
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index ec42d251ff..d98d248231 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -266,7 +266,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
(if (showLocation)
sym1.locationString +
(if (sym1.isAliasType) ", which equals "+self.memberInfo(sym1)
- else if (sym1.isAbstractType) " with bounds "+self.memberInfo(sym1)
+ else if (sym1.isAbstractType) " with bounds"+self.memberInfo(sym1)
else if (sym1.isTerm) " of type "+self.memberInfo(sym1)
else "")
else "")
@@ -1445,7 +1445,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
private def transformApply(tree: Apply): Tree = tree match {
case Apply(
- Select(qual, nme.filter),
+ Select(qual, nme.filter | nme.withFilter),
List(Function(
List(ValDef(_, pname, tpt, _)),
Match(_, CaseDef(pat1, _, _) :: _))))
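
This hunk lets RefChecks recognize the withFilter call (not just filter) that the for-comprehension desugaring inserts in front of a refutable generator pattern. A hedged illustration, not taken from this patch, of the desugared shape being matched:

    // A for-comprehension whose generator pattern may not match everything is
    // guarded by withFilter before the map -- roughly:
    object WithFilterDemo extends App {
      val xs: List[Any] = List(1, "two", 3)

      // for (i: Int <- xs) yield i * 10   desugars roughly to:
      val desugared = xs
        .withFilter { case _: Int => true; case _ => false }
        .map { case i: Int => i * 10 }

      println(desugared) // List(10, 30)
    }
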
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index 2f4eff30d2..da87d38ab0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -136,7 +136,7 @@ trait SyntheticMethods extends ast.TreeDSL {
* where that is the given method's first parameter.
*/
def thatTest(eqmeth: Symbol): Tree =
- gen.mkIsInstanceOf(Ident(eqmeth.firstParam), typeCaseType(clazz), true, false)
+ gen.mkIsInstanceOf(Ident(eqmeth.firstParam), classExistentialType(clazz), true, false)
/** (that.asInstanceOf[this.C])
* where that is the given method's first parameter.
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index ed263cbbef..105c2c0b98 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -263,8 +263,8 @@ abstract class TreeCheckers extends Analyzer {
tree match {
case x: PackageDef =>
- if (sym.ownerChain contains currentOwner) ()
- else fail(sym + " owner chain does not contain currentOwner " + currentOwner)
+ if ((sym.ownerChain contains currentOwner) || currentOwner == definitions.EmptyPackageClass) ()
+ else fail(sym + " owner chain does not contain currentOwner " + currentOwner + sym.ownerChain)
case _ =>
def cond(s: Symbol) = !s.isTerm || s.isMethod || s == sym.owner
diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
index 1434002121..6efa595d99 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala
@@ -157,7 +157,7 @@ trait TypeDiagnostics {
}
// todo: use also for other error messages
- def existentialContext(tp: Type) = tp.existentialSkolems match {
+ def existentialContext(tp: Type) = tp.skolemsExceptMethodTypeParams match {
case Nil => ""
case xs => " where " + (disambiguate(xs map (_.existentialToString)) mkString ", ")
}
@@ -252,12 +252,17 @@ trait TypeDiagnostics {
}
"" // no elaborable variance situation found
}
- def foundReqMsg(found: Type, req: Type): String = (
- withDisambiguation(Nil, found, req)(
+ // TODO - figure out how to avoid doing any work at all
+ // when the message will never be seen. I thought context.reportErrors
+ // being false would do that, but if I return "<suppressed>" under
+ // that condition, I see it.
+ def foundReqMsg(found: Type, req: Type): String = {
+ def baseMessage = (
";\n found : " + found.toLongString + existentialContext(found) + explainAlias(found) +
"\n required: " + req + existentialContext(req) + explainAlias(req)
- ) + explainVariance(found, req)
- )
+ )
+ withDisambiguation(Nil, found, req)(baseMessage) + explainVariance(found, req)
+ }
case class TypeDiag(tp: Type, sym: Symbol) extends Ordered[TypeDiag] {
// save the name because it will be mutated until it has been
@@ -307,16 +312,37 @@ trait TypeDiagnostics {
)
}
}
- private def typeDiags(locals: List[Symbol], types: Type*): List[TypeDiag] = {
- object SymExtractor {
- def unapply(x: Any) = x match {
- case t @ ConstantType(_) => Some(t -> t.underlying.typeSymbol)
- case t @ TypeRef(_, sym, _) => if (locals contains sym) None else Some(t -> sym)
- case _ => None
+ /** This is tricky stuff - we need to traverse types deeply to
+ * explain name ambiguities, which may occur anywhere. However
+ * when lub explosions come through it knocks us into an n^2
+ * disaster, see SI-5580. This is trying to perform the initial
+ * filtering of possibly ambiguous types in a sufficiently
+ * aggressive way that the state space won't explode.
+ */
+ private def typeDiags(locals: List[Symbol], types0: Type*): List[TypeDiag] = {
+ val types = types0.toList
+ // If two different type diag instances are seen for a given
+ // key (either the string representation of a type, or the simple
+ // name of a symbol) then keep them for disambiguation.
+ val strings = mutable.Map[String, Set[TypeDiag]]() withDefaultValue Set()
+ val names = mutable.Map[Name, Set[TypeDiag]]() withDefaultValue Set()
+
+ def record(t: Type, sym: Symbol) = {
+ val diag = TypeDiag(t, sym)
+
+ strings("" + t) += diag
+ names(sym.name) += diag
+ }
+ for (tpe <- types ; t <- tpe) {
+ t match {
+ case ConstantType(_) => record(t, t.underlying.typeSymbol)
+ case TypeRef(_, sym, _) => record(t, sym)
+ case _ => ()
}
}
- for (tp <- types.toList; SymExtractor(t, sym) <- tp) yield TypeDiag(t, sym)
+ val collisions = strings.values ++ names.values filter (_.size > 1)
+ collisions.flatten.toList
}
/** The distinct pairs from an ordered list. */
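
The rewritten typeDiags avoids the SI-5580 blow-up by bucketing candidates by the type's string form and by the symbol's simple name, and only keeping buckets that contain more than one distinct entry -- those are the only ones that need "a.B vs c.B"-style disambiguation. A hedged standalone model of that filtering:

    import scala.collection.mutable

    // Toy model: keep only entries whose rendered name collides with a
    // different entry; everything else needs no disambiguation.
    object CollisionFilterDemo extends App {
      case class Diag(fullName: String, simpleName: String)

      def collisions(diags: List[Diag]): List[Diag] = {
        val byName = mutable.Map[String, Set[Diag]]() withDefaultValue Set()
        diags foreach (d => byName(d.simpleName) += d)
        (byName.values filter (_.size > 1)).flatten.toList
      }

      val input = List(Diag("a.B", "B"), Diag("c.B", "B"), Diag("d.C", "C"))
      println(collisions(input)) // only the two B's survive
    }
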
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 25f3e7af5c..49ce9712df 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -909,8 +909,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
def apply(tp: Type) = mapOver(tp) match {
case TypeRef(NoPrefix, tpSym, Nil) if variance != 0 && tpSym.isTypeParameterOrSkolem && tpSym.owner.isTerm =>
val bounds = if (variance == 1) TypeBounds.upper(tpSym.tpe) else TypeBounds.lower(tpSym.tpe)
- val skolem = context.owner.newExistentialSkolem(tpSym, tpSym, unit.freshTypeName("?"+tpSym.name), bounds)
- // println("mapping "+ tpSym +" to "+ skolem + " : "+ bounds +" -- pt= "+ pt)
+ // origin must be the type param so we can deskolemize
+ val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
+ // println("mapping "+ tpSym +" to "+ skolem + " : "+ bounds +" -- pt= "+ pt +" in "+ context.owner +" at "+ context.tree )
skolems += skolem
skolem.tpe
case tp1 => tp1
@@ -928,9 +929,19 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
freeVars foreach ctorContext.scope.enter
newTyper(ctorContext).infer.inferConstructorInstance(tree1, clazz.typeParams, ptSafe)
- // tree1's type-slack skolems will be deskolemized (to the method type parameter skolems)
- // once the containing CaseDef has been type checked (see typedCase)
- tree1
+ // simplify types without losing safety,
+ // so that error messages don't unnecessarily refer to skolems
+ val extrapolate = new ExistentialExtrapolation(freeVars) extrapolate (_: Type)
+ val extrapolated = tree1.tpe match {
+ case MethodType(ctorArgs, res) => // ctorArgs are actually in a covariant position, since this is the type of the subpatterns of the pattern represented by this Apply node
+ ctorArgs foreach (p => p.info = extrapolate(p.info)) // no need to clone, this is OUR method type
+ copyMethodType(tree1.tpe, ctorArgs, extrapolate(res))
+ case tp => tp
+ }
+
+ // once the containing CaseDef has been type checked (see typedCase),
+ // tree1's remaining type-slack skolems will be deskolemized (to the method type parameter skolems)
+ tree1 setType extrapolated
} else {
tree
}
@@ -1095,7 +1106,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val found = tree.tpe
val req = pt
if (!found.isErroneous && !req.isErroneous) {
- if (!context.reportErrors && isPastTyper && req.existentialSkolems.nonEmpty) {
+ if (!context.reportErrors && isPastTyper && req.skolemsExceptMethodTypeParams.nonEmpty) {
// Ignore type errors raised in later phases that are due to mismatching types with existential skolems
// We have lift crashing in 2.9 with an adapt failure in the pattern matcher.
// Here's my hypothesis why this happens. The pattern matcher defines a variable of type
@@ -1112,7 +1123,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
//
// val x = expr
context.unit.warning(tree.pos, "recovering from existential Skolem type error in tree \n" + tree + "\nwith type " + tree.tpe + "\n expected type = " + pt + "\n context = " + context.tree)
- adapt(tree, mode, deriveTypeWithWildcards(pt.existentialSkolems)(pt))
+ adapt(tree, mode, deriveTypeWithWildcards(pt.skolemsExceptMethodTypeParams)(pt))
} else {
// create an actual error
AdaptTypeError(tree, found, req)
@@ -1147,6 +1158,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val qtpe = qual.tpe.widen
( !isPastTyper
&& qual.isTerm
+ && !qual.isInstanceOf[Super]
&& ((qual.symbol eq null) || !qual.symbol.isTerm || qual.symbol.isValue)
&& !qtpe.isError
&& !qtpe.typeSymbol.isBottomClass
@@ -1162,12 +1174,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
)
}
- def adaptToMember(qual: Tree, searchTemplate: Type): Tree =
- adaptToMember(qual, searchTemplate, true, true)
- def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean): Tree =
- adaptToMember(qual, searchTemplate, reportAmbiguous, true)
-
- def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = {
+ def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean = true, saveErrors: Boolean = true): Tree = {
if (isAdaptableWithView(qual)) {
qual.tpe.widen.normalize match {
case et: ExistentialType =>
@@ -1273,8 +1280,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
unit.error(clazz.pos, "value class may not be a "+
(if (clazz.owner.isTerm) "local class" else "member of another class"))
val constr = clazz.primaryConstructor
- if ((constr hasFlag (PRIVATE | PROTECTED)) || constr.privateWithin != NoSymbol)
- unit.error(constr.pos, "value class must have public primary constructor")
clazz.info.decls.toList.filter(acc => acc.isMethod && (acc hasFlag PARAMACCESSOR)) match {
case List(acc) =>
def isUnderlyingAcc(sym: Symbol) =
@@ -2112,26 +2117,161 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// body1 = checkNoEscaping.locals(context.scope, pt, body1)
val treeWithSkolems = treeCopy.CaseDef(cdef, pat1, guard1, body1) setType body1.tpe
- // undo adaptConstrPattern's evil deeds, as they confuse the old pattern matcher
- // TODO: Paul, can we do the deskolemization lazily in the old pattern matcher
- object deskolemizeOnce extends TypeMap {
- def apply(tp: Type): Type = mapOver(tp) match {
- case TypeRef(pre, sym, args) if sym.isExistentialSkolem && sym.deSkolemize.isSkolem && sym.deSkolemize.owner.isTerm =>
- typeRef(NoPrefix, sym.deSkolemize, args)
- case tp1 => tp1
- }
- }
-
- new TypeMapTreeSubstituter(deskolemizeOnce).traverse(treeWithSkolems)
+ new TypeMapTreeSubstituter(deskolemizeGADTSkolems).traverse(treeWithSkolems)
treeWithSkolems // now without skolems, actually
}
+ // undo adaptConstrPattern's evil deeds, as they confuse the old pattern matcher
+ // the flags are used to avoid accidentally deskolemizing unrelated skolems of skolems
+ object deskolemizeGADTSkolems extends TypeMap {
+ def apply(tp: Type): Type = mapOver(tp) match {
+ case TypeRef(pre, sym, args) if sym.isGADTSkolem =>
+ typeRef(NoPrefix, sym.deSkolemize, args)
+ case tp1 => tp1
+ }
+ }
+
def typedCases(cases: List[CaseDef], pattp: Type, pt: Type): List[CaseDef] =
cases mapConserve { cdef =>
- newTyper(context.makeNewScope(cdef, context.owner)).typedCase(cdef, pattp, pt)
+ val caseTyped = newTyper(context.makeNewScope(cdef, context.owner)).typedCase(cdef, pattp, pt)
+ if (opt.virtPatmat) {
+ val tpPacked = packedType(caseTyped, context.owner)
+ caseTyped setType tpPacked
+ } else caseTyped
+ }
+
+ def adaptCase(cdef: CaseDef, mode: Int, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe))
+
+ def translateMatch(selector: Tree, cases: List[CaseDef], mode: Int, resTp: Type, scrutTp: Type = NoType, matchFailGen: Option[Tree => Tree] = None) = {
+ val selector1 = if(scrutTp eq NoType) checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType)) else selector
+ val selectorTp = if(scrutTp eq NoType) packCaptured(selector1.tpe.widen) else scrutTp
+ val casesTyped = typedCases(cases, selectorTp, resTp)
+ val (ownType, needAdapt) = if (isFullyDefined(resTp)) (resTp, false) else weakLub(casesTyped map (_.tpe.deconst))
+ val casesAdapted = if (!needAdapt) casesTyped else casesTyped map (adaptCase(_, mode, ownType))
+ // val (owntype0, needAdapt) = ptOrLub(casesTyped map (x => repackExistential(x.tpe)))
+ // val owntype = elimAnonymousClass(owntype0)
+
+ def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match {
+ case TypeRef(_, RepeatedParamClass, args) => appliedType(SeqClass.typeConstructor, args)
+ case _ => tp
+ }
+
+ def isSynthSelector(selector: Tree): Boolean = selector match {
+ case Ident(_) if selector.symbol.hasFlag(SYNTHETIC | CASE) => true
+ case Select(sel, nme.toInt) => isSynthSelector(sel) // switch may need to convert to int first
+ case _ => false
}
+ if (isSynthSelector(selector1)) { // a switch
+ (Match(selector1, casesAdapted) setType ownType, ownType) // setType of the Match to avoid recursing endlessly
+ } else {
+ val scrutType = repeatedToSeq(elimAnonymousClass(selectorTp))
+ (MatchTranslator(this).translateMatch(selector1, casesAdapted, repeatedToSeq(ownType), scrutType, matchFailGen), ownType)
+ }
+ }
+
+ // TODO: use this to synthesize (partial)function implementation for matches from the get-go,
+ // instead of the dirty post-factum hacks in uncurry -- typedMatchAnonFun is currently not used due to mindboggling failures (see virtpatmat_anonfun_for.scala)
+ def typedMatchAnonFun(tree: Tree, cases: List[CaseDef], mode: Int, pt0: Type, selOverride: Option[(List[Symbol], Tree)] = None) = {
+ val pt = deskolemizeGADTSkolems(pt0)
+ val targs = pt.normalize.typeArgs
+ val arity = if (isFunctionType(pt)) targs.length - 1 else 1
+ val scrutTp0 = if (arity == 1) targs.head else /* arity > 1 */ tupleType(targs.init)
+ val scrutTp = packCaptured(scrutTp0)
+ val ptRes = targs.last // may not be fully defined
+ val isPartial = pt.typeSymbol == PartialFunctionClass
+ val cname = tpnme.ANON_FUN_NAME
+ val funThis = This(cname)
+ // used to create a new context for pattern matching translation so that
+ // we can easily rejig the owner structure when we have the actual symbols for these methods
+ // (after type checking them, but type checking requires translation -- this seems like the easiest way to end this vicious cycle)
+ val applySentinel = NoSymbol.newMethod(nme.apply)
+ val idaSentinel = NoSymbol.newMethod(nme._isDefinedAt)
+
+ def mkParams = {
+ val params =
+ for (i <- List.range(0, arity)) yield atPos(tree.pos.focusStart) {
+ ValDef(Modifiers(SYNTHETIC | PARAM), unit.freshTermName("x" + i + "$"), TypeTree(), EmptyTree)
+ }
+ val ids = params map (p => Ident(p.name))
+
+ val paramsRef = selOverride match {
+ case None => atPos(tree.pos.focusStart) { if (arity == 1) ids.head else gen.mkTuple(ids) }
+ case Some((_, sel)) => sel.duplicate // we'll replace the symbols that refer to the function's original syms by the ones introduced by the DefDef once the method's been type checked (until then, we don't know them)
+ }
+
+ (params, paramsRef) // paramsRef can't be typed until after match has been translated, thus supply explicit scrutTp to translate below
+ }
+
+ import CODE._
+
+ // need to duplicate the cases before typing them to generate the apply method, or the symbols will be all messed up
+ val casesTrue = if (isPartial) cases map (c => deriveCaseDef(c)(x => TRUE).duplicate) else Nil
+
+ val (applyMethod, parents) = {
+ val (params, paramsRef) = mkParams
+ val (body, resTp) = newTyper(context.make(context.tree, applySentinel)).translateMatch(paramsRef, cases, mode, ptRes, scrutTp, if (isPartial) Some(scrut => (funThis DOT nme.missingCase) (scrut)) else None)
+
+ def abstractFunctionType = {
+ val sym = AbstractFunctionClass(arity)
+ typeRef(sym.typeConstructor.prefix, sym, targs.init :+ resTp)
+ }
+
+ val parents =
+ if (isFunctionType(pt)) List(abstractFunctionType, SerializableClass.tpe)
+ else if (isPartial) List(appliedType(AbstractPartialFunctionClass.typeConstructor, List(scrutTp, resTp)), SerializableClass.tpe)
+ else List(ObjectClass.tpe, pt, SerializableClass.tpe)
+
+ (atPos(tree.pos.focus)(DefDef(Modifiers(FINAL), nme.apply, Nil, List(params), TypeTree() setType resTp, body)), parents)
+ }
+
+ def isDefinedAtMethod = {
+ val (params, paramsRef) = mkParams
+ val (body, _) = newTyper(context.make(context.tree, idaSentinel)).translateMatch(paramsRef, casesTrue, mode, BooleanClass.tpe, scrutTp, Some(scrutinee => FALSE))
+ atPos(tree.pos.focus)(
+ DefDef(Modifiers(FINAL), nme._isDefinedAt, Nil, List(params), TypeTree() setType BooleanClass.tpe, body)
+ )
+ }
+
+ val members = if (!isPartial) List(applyMethod) else List(applyMethod, isDefinedAtMethod)
+
+ val cmods = Modifiers(FINAL | SYNTHETIC /*TODO: when do we need INCONSTRUCTOR ?*/) withAnnotations (
+ List(NEW(SerialVersionUIDAttr, LIT(0))))
+ val cdef =
+ ClassDef(cmods, cname, Nil,
+ Template(parents map (TypeTree() setType _), emptyValDef, Modifiers(0), Nil, List(Nil), members, tree.pos)
+ )
+ val funInst = (Block(List(cdef), Apply(Select(New(Ident(cname)), nme.CONSTRUCTOR), Nil)))
+
+ val res = typed(funInst, mode, pt)
+
+ // now that we have the symbols corresponding to the apply/isDefinedAt methods,
+ // we can fix up the result of fixerUpper... URGH
+ // fixerUpper nests the top-level definitions generated in the match under context.owner, but they should be owned by the apply/isDefinedAt method
+ res foreach {
+ case d: DefDef if (d.symbol.name == nme.apply) =>
+ d.rhs.changeOwner(applySentinel -> d.symbol)
+ case d: DefDef if (d.symbol.name == nme._isDefinedAt) =>
+ d.rhs.changeOwner(idaSentinel -> d.symbol)
+ case _ =>
+ }
+
+ selOverride match {
+ case None => res
+ case Some((paramSyms, sel)) =>
+ object substParamSyms extends Transformer {
+ override def transform(t: Tree): Tree = t match {
+ case d: DefDef if (d.symbol.name == nme.apply) || (d.symbol.name == nme._isDefinedAt) && (d.symbol.owner == res.tpe.typeSymbol) =>
+ deriveDefDef(d)(rhs => rhs.substTreeSyms(paramSyms, d.vparamss.head.map(_.symbol)))
+ case _ =>
+ super.transform(t)
+ }
+ }
+ substParamSyms.transform(res)
+ }
+ }
+
/**
* @param fun ...
* @param mode ...
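
typedMatchAnonFun above derives both methods of the anonymous (partial) function from a single list of cases: apply keeps the bodies and uses missingCase as its failure generator, while isDefinedAt reuses the same patterns with every body replaced by TRUE and the failure generator returning FALSE. A hedged hand-written equivalent of that synthesis for a small partial function (plain Scala, no compiler types; the generated class actually extends AbstractPartialFunction):

    // What { case i: Int if i > 0 => i * 2 } : PartialFunction[Any, Int]
    // roughly expands to: one anonymous class, two methods from the same cases.
    object AnonPartialFunctionDemo extends App {
      val pf: PartialFunction[Any, Int] = new PartialFunction[Any, Int] {
        def apply(x: Any): Int = x match {
          case i: Int if i > 0 => i * 2
          case _               => throw new MatchError(x) // stands in for missingCase, which throws by default
        }
        def isDefinedAt(x: Any): Boolean = x match {
          case i: Int if i > 0 => true                    // same pattern, body replaced by true
          case _               => false                   // failure override: FALSE
        }
      }
      println(pf.isDefinedAt(3))       // true
      println(pf(3))                   // 6
      println(pf.isDefinedAt("three")) // false
    }
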
@@ -2144,14 +2284,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
return MaxFunctionArityError(fun)
def decompose(pt: Type): (Symbol, List[Type], Type) =
- if ((isFunctionType(pt)
- ||
- pt.typeSymbol == PartialFunctionClass &&
- numVparams == 1 && fun.body.isInstanceOf[Match])
- && // see bug901 for a reason why next conditions are needed
- (pt.normalize.typeArgs.length - 1 == numVparams
- ||
- fun.vparams.exists(_.tpt.isEmpty)))
+ if ((isFunctionType(pt) || (pt.typeSymbol == PartialFunctionClass && numVparams == 1 && fun.body.isInstanceOf[Match])) && // see bug901 for a reason why next conditions are needed
+ ( pt.normalize.typeArgs.length - 1 == numVparams
+ || fun.vparams.exists(_.tpt.isEmpty)
+ ))
(pt.typeSymbol, pt.normalize.typeArgs.init, pt.normalize.typeArgs.last)
else
(FunctionClass(numVparams), fun.vparams map (x => NoType), WildcardType)
@@ -2193,13 +2329,27 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
// for (vparam <- vparams) {
// checkNoEscaping.locals(context.scope, WildcardType, vparam.tpt); ()
// }
- val body1 = typed(fun.body, respt)
- val formals = vparamSyms map (_.tpe)
- val restpe = packedType(body1, fun.symbol).deconst.resultType
- val funtpe = typeRef(clazz.tpe.prefix, clazz, formals :+ restpe)
-// body = checkNoEscaping.locals(context.scope, restpe, body)
- treeCopy.Function(fun, vparams, body1).setType(funtpe)
- }
+
+ def recompose(from: Type, to: Type) =
+ if(clazz == PartialFunctionClass) appliedType(PartialFunctionClass.typeConstructor, List(from, to))
+ else functionType(List(from), to)
+
+ fun.body match {
+ case Match(sel, cases) if opt.virtPatmat =>
+ val typedSel = typed(sel, EXPRmode | BYVALmode, WildcardType)
+ // go to outer context -- must discard the context that was created for the Function since we're discarding the function
+ // thus, its symbol, which serves as the current context.owner, is not the right owner
+ // you won't know you're using the wrong owner until lambda lift crashes (unless you know better than to use the wrong owner)
+ newTyper(context.outer).typedMatchAnonFun(fun, cases, mode, recompose(typedSel.tpe, respt), Some((vparamSyms, typedSel)))
+ case _ =>
+ val body1 = typed(fun.body, respt)
+ val formals = vparamSyms map (_.tpe)
+ val restpe = packedType(body1, fun.symbol).deconst.resultType
+ val funtpe = typeRef(clazz.tpe.prefix, clazz, formals :+ restpe)
+ // body = checkNoEscaping.locals(context.scope, restpe, body)
+ treeCopy.Function(fun, vparams, body1).setType(funtpe)
+ }
+ }
}
def typedRefinement(stats: List[Tree]) {
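For orientation, a user-level sketch (ordinary library-level code, not part of this patch) of the kind of function literal the hunk above routes through typedMatchAnonFun: a Function whose body is a single Match, typed against an expected PartialFunction type, so that both apply and isDefinedAt can be derived from the cases.

    object PartialFunctionSketch {
      // A function literal whose body is one match, typed against PartialFunction:
      // the cases drive both apply and isDefinedAt.
      val render: PartialFunction[Any, String] = {
        case i: Int    => "int: " + i
        case s: String => "string: " + s
      }

      def main(args: Array[String]): Unit = {
        println(render.isDefinedAt(42))     // true  -- an Int case exists
        println(render.isDefinedAt(3.14))   // false -- no case covers Double
        println(render.lift(1))             // Some(int: 1)
      }
    }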
@@ -3410,7 +3560,15 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
} else {
var thenp1 = typed(thenp, pt)
var elsep1 = typed(elsep, pt)
- val (owntype, needAdapt) = ptOrLub(List(thenp1.tpe, elsep1.tpe))
+
+ lazy val thenTp = packedType(thenp1, context.owner)
+ lazy val elseTp = packedType(elsep1, context.owner)
+ val (owntype, needAdapt) =
+ // virtpatmat needs more aggressive unification of skolemized types, but lub is not robust enough --> middle ground
+ if (opt.virtPatmat && !isPastTyper && thenTp =:= elseTp) (thenTp, true) // this breaks src/library/scala/collection/immutable/TrieIterator.scala
+ // TODO: skolemize (lub of packed types) when that no longer crashes on files/pos/t4070b.scala
+ else ptOrLub(List(thenp1.tpe, elsep1.tpe))
+
if (needAdapt) { //isNumericValueType(owntype)) {
thenp1 = adapt(thenp1, mode, owntype)
elsep1 = adapt(elsep1, mode, owntype)
@@ -3420,7 +3578,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
def typedMatch(tree: Tree, selector: Tree, cases: List[CaseDef]): Tree = {
- if (selector == EmptyTree) {
+ if (opt.virtPatmat && !isPastTyper) {
+ if (selector ne EmptyTree) typed(translateMatch(selector, cases, mode, pt)._1, mode, pt)
+ else typedMatchAnonFun(tree, cases, mode, pt)
+ } else if (selector == EmptyTree) {
val arity = if (isFunctionType(pt)) pt.normalize.typeArgs.length - 1 else 1
val params = for (i <- List.range(0, arity)) yield
atPos(tree.pos.focusStart) {
@@ -3434,32 +3595,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
} else {
val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
var cases1 = typedCases(cases, packCaptured(selector1.tpe.widen), pt)
-
- if (isPastTyper || !opt.virtPatmat) {
- val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe))
- if (needAdapt) {
- cases1 = cases1 map (adaptCase(_, owntype))
- }
- treeCopy.Match(tree, selector1, cases1) setType owntype
- } else { // don't run translator after typers (see comments in PatMatVirtualiser)
- val (owntype0, needAdapt) = ptOrLub(cases1 map (x => repackExistential(x.tpe)))
- val owntype = elimAnonymousClass(owntype0)
- if (needAdapt) cases1 = cases1 map (adaptCase(_, owntype))
-
- (MatchTranslator(this)).translateMatch(selector1, cases1, owntype) match {
- case Block(vd :: Nil, tree@Match(selector, cases)) =>
- val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType))
- var cases1 = typedCases(cases, packCaptured(selector1.tpe.widen), pt)
- val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe))
- if (needAdapt)
- cases1 = cases1 map (adaptCase(_, owntype))
- typed(Block(vd :: Nil, treeCopy.Match(tree, selector1, cases1) setType owntype))
- case translated =>
- // TODO: get rid of setType owntype -- it should all typecheck
- // must call typed, not typed1, or we overflow the stack when emitting switches
- typed(translated, mode, WildcardType) setType owntype
- }
+ val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe))
+ if (needAdapt) {
+ cases1 = cases1 map (adaptCase(_, mode, owntype))
}
+ treeCopy.Match(tree, selector1, cases1) setType owntype
}
}
@@ -4229,9 +4369,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
}
}
- def adaptCase(cdef: CaseDef, tpe: Type): CaseDef =
- deriveCaseDef(cdef)(adapt(_, mode, tpe))
-
// begin typed1
val sym: Symbol = tree.symbol
if ((sym ne null) && (sym ne NoSymbol)) sym.initialize
@@ -4340,7 +4477,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe)))
if (needAdapt) {
block1 = adapt(block1, mode, owntype)
- catches1 = catches1 map (adaptCase(_, owntype))
+ catches1 = catches1 map (adaptCase(_, mode, owntype))
}
if(!isPastTyper && opt.virtPatmat) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index cc272b7b8d..4f5b6868ae 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -45,6 +45,11 @@ trait Unapplies extends ast.TreeDSL
case BooleanClass => Nil
case OptionClass | SomeClass =>
val prod = tp.typeArgs.head
+// the spec doesn't allow just any subtype of Product, it *must* be TupleN[...] -- see run/virtpatmat_extends_product.scala
+// this breaks plenty of stuff, though...
+// val targs =
+// if (isTupleType(prod)) getProductArgs(prod)
+// else List(prod)
val targs = getProductArgs(prod)
if (targs.isEmpty || targs.tail.isEmpty) List(prod) // special n == 0 || n == 1
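To make the commented-out stricter check concrete, here is a hedged, self-contained illustration (invented names) of an extractor whose unapply result is a subtype of Product2 rather than a TupleN: the current getProductArgs-based treatment accepts it as a two-place pattern, which is what run/virtpatmat_extends_product.scala exercises, while a strictly spec-reading compiler would only accept TupleN results.

    // Hypothetical extractor result that is a Product2 but not a Tuple2.
    class Ratio(val _1: Int, val _2: Int) extends Product2[Int, Int] {
      def canEqual(that: Any): Boolean = that.isInstanceOf[Ratio]
    }

    object Ratio {
      def unapply(s: String): Option[Ratio] = s.split('/') match {
        case Array(n, d) => Some(new Ratio(n.toInt, d.toInt))
        case _           => None
      }
    }

    object RatioDemo {
      def main(args: Array[String]): Unit = {
        "3/4" match {
          case Ratio(n, d) => println(n + " over " + d)  // two binders, via the Product2 parent
          case _           => println("no match")
        }
      }
    }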
diff --git a/src/compiler/scala/tools/nsc/util/Position.scala b/src/compiler/scala/tools/nsc/util/Position.scala
index 53c767be20..bc74717366 100644
--- a/src/compiler/scala/tools/nsc/util/Position.scala
+++ b/src/compiler/scala/tools/nsc/util/Position.scala
@@ -33,6 +33,16 @@ object Position {
}
}
+/**
+ * A tree does not directly store a Position. It stores a TreeAnnotation, which /typically/ is a Position.
+ *
+ * A TreeAnnotation may encompass more than just a Position, though, depending on the exact subclass of TreeAnnotation.
+ */
+trait TreeAnnotation {
+ def pos: Position
+}
+
+
/** The Position class and its subclasses represent positions of ASTs and symbols.
* Except for NoPosition and FakePos, every position refers to a SourceFile
* and to an offset in the sourcefile (its `point`). For batch compilation,
@@ -77,7 +87,8 @@ object Position {
* pos.makeTransparent converts an opaque range position into a transparent one.
* returns all other positions unchanged.
*/
-trait Position {
+trait Position extends TreeAnnotation {
+ def pos: Position = this
/** An optional value containing the source file referred to by this position, or
* None if not defined.
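A minimal standalone sketch of the relationship introduced here, using simplified stand-ins rather than the compiler's actual classes: every Position is its own TreeAnnotation, while a richer annotation still exposes a Position, so pos-based code keeps working.

    // Simplified stand-ins for illustration only; the real classes live in
    // scala.tools.nsc.util and carry far more structure.
    trait TreeAnnotation { def pos: Position }
    trait Position extends TreeAnnotation { def pos: Position = this }

    case object NoPosition extends Position
    final case class OffsetPosition(point: Int) extends Position

    // An annotation that carries more than a Position but still exposes one.
    final case class Commented(pos: Position, text: String) extends TreeAnnotation

    object TreeAnnotationDemo {
      def main(args: Array[String]): Unit = {
        val anns: List[TreeAnnotation] =
          List(OffsetPosition(42), Commented(OffsetPosition(7), "synthetic"))
        anns.foreach(a => println(a.pos))
      }
    }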
diff --git a/src/compiler/scala/tools/util/color/CString.scala b/src/compiler/scala/tools/util/color/CString.scala
index d0785eaeff..fa57229f09 100644
--- a/src/compiler/scala/tools/util/color/CString.scala
+++ b/src/compiler/scala/tools/util/color/CString.scala
@@ -11,18 +11,27 @@ final class CString(val uncolorized: String, val colorized: String) {
def visibleLength = uncolorized.length
def colorizedLength = colorized.length
def show() = Console println colorized
- def bytes() = colorized map (ch => ch.toByte)
- def > = show()
+ def bytes() = colorized map (_.toByte)
+ def >() = show()
def append(x: CString): CString = new CString(uncolorized + x.uncolorized, colorized + x.colorized)
def +(other: CString): CString = this append other
+
override def toString = colorized
}
class CStringOps(str: String) {
- /** Enables for example
+ /** String to String operation.
* println("foo" in Red)
- * println("foo" in Magenta.bright)
+ * println("bar" in Magenta.bright)
+ */
+ def in(ansi: Ansi): String = ansi colorize str
+
+ /** Gave in to one bit of punctuation, because everyone adds
+ * strings with '+' and we need something with higher precedence
+ * for it to be at all satisfying.
+ *
+ * "foo" %> Red + "bar" %> Magenta.bright
*/
- def in(ansi: Ansi): CString = new CString(str, ansi colorize str)
+ def %>(ansi: Ansi): CString = new CString(str, in(ansi))
}
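A self-contained toy re-creation of the precedence point made in that comment (these are stand-ins, not the compiler's color classes): operators starting with '%' bind tighter than '+', so the expression in the doc parses as ("foo" %> red) + ("bar" %> blue) without parentheses.

    import scala.language.implicitConversions

    final class CStr(val plain: String, val colored: String) {
      def +(other: CStr): CStr = new CStr(plain + other.plain, colored + other.colored)
      override def toString = colored
    }
    final class Color(code: String) {
      def colorize(s: String): String = code + s + "\u001b[0m"
    }
    final class CStrOps(s: String) {
      def in(c: Color): String = c.colorize(s)       // plain String result
      def %>(c: Color): CStr   = new CStr(s, in(c))  // CString-like result, higher precedence than +
    }

    object ColorDemo {
      implicit def cstrOps(s: String): CStrOps = new CStrOps(s)
      def main(args: Array[String]): Unit = {
        val red  = new Color("\u001b[31m")
        val blue = new Color("\u001b[34m")
        // Parses as ("foo" %> red) + ("bar" %> blue) because %> binds tighter than +.
        println("foo" %> red + "bar" %> blue)
      }
    }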
diff --git a/src/compiler/scala/tools/util/color/package.scala b/src/compiler/scala/tools/util/color/package.scala
index 7c7c7dab74..3b3e85751e 100644
--- a/src/compiler/scala/tools/util/color/package.scala
+++ b/src/compiler/scala/tools/util/color/package.scala
@@ -18,4 +18,5 @@ package object color {
case x: AnsiForeground => x.flip
}
implicit def implicitCStringOps(str: String): CStringOps = new CStringOps(str)
+ implicit def implicitCString(str: String): CString = new CString(str, str)
}
diff --git a/src/library/scala/Specializable.scala b/src/library/scala/Specializable.scala
index 67126b3069..d5e22195d2 100644
--- a/src/library/scala/Specializable.scala
+++ b/src/library/scala/Specializable.scala
@@ -20,10 +20,10 @@ object Specializable {
// Smuggle a list of types by way of a tuple upon which Group is parameterized.
class Group[T >: Null](value: T) extends SpecializedGroup { }
- final val Primitives = new Group(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit)
- final val Everything = new Group(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit, AnyRef)
- final val Bits32AndUp = new Group(Int, Long, Float, Double)
- final val Integral = new Group(Byte, Short, Int, Long, Char)
- final val AllNumeric = new Group(Byte, Short, Int, Long, Char, Float, Double)
- final val BestOfBreed = new Group(Int, Double, Boolean, Unit, AnyRef)
+ final val Primitives = new Group((Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit))
+ final val Everything = new Group((Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit, AnyRef))
+ final val Bits32AndUp = new Group((Int, Long, Float, Double))
+ final val Integral = new Group((Byte, Short, Int, Long, Char))
+ final val AllNumeric = new Group((Byte, Short, Int, Long, Char, Float, Double))
+ final val BestOfBreed = new Group((Int, Double, Boolean, Unit, AnyRef))
}
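The constructor calls are now tuple literals because Group is parameterized on a single tuple type that encodes the member types. A hedged usage sketch (assuming the 2.10-era specialized annotation, which accepts a group in place of an explicit type list):

    // Specialize Cell only for the 32-bit-and-up numeric primitives by naming the group.
    class Cell[@specialized(Specializable.Bits32AndUp) T](val value: T) {
      def map[@specialized(Specializable.Bits32AndUp) U](f: T => U): Cell[U] = new Cell(f(value))
    }

    object CellDemo {
      def main(args: Array[String]): Unit = {
        val c = new Cell(1.5).map(_ * 2)   // picks the Double variant when compiled with specialization on
        println(c.value)                   // 3.0
      }
    }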
diff --git a/src/library/scala/collection/GenIterableLike.scala b/src/library/scala/collection/GenIterableLike.scala
index 7e68733afd..8fa5981969 100644
--- a/src/library/scala/collection/GenIterableLike.scala
+++ b/src/library/scala/collection/GenIterableLike.scala
@@ -40,17 +40,18 @@ trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] {
/** Checks if the other iterable collection contains the same elements in the same order as this $coll.
*
- * $orderDependent
- * $willNotTerminateInf
- *
* @param that the collection to compare with.
* @tparam B the type of the elements of collection `that`.
* @return `true`, if both collections contain the same elements in the same order, `false` otherwise.
*
* @usecase def sameElements(that: GenIterable[A]): Boolean
+ * @inheritdoc
*
- * @param that the collection to compare with.
- * @return `true`, if both collections contain the same elements in the same order, `false` otherwise.
+ * $orderDependent
+ * $willNotTerminateInf
+ *
+ * @param that the collection to compare with.
+ * @return `true`, if both collections contain the same elements in the same order, `false` otherwise.
*/
def sameElements[A1 >: A](that: GenIterable[A1]): Boolean
@@ -58,8 +59,6 @@ trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] {
* by combining corresponding elements in pairs.
* If one of the two collections is longer than the other, its remaining elements are ignored.
*
- * $orderDependent
- *
* @param that The iterable providing the second half of each result pair
* @tparam A1 the type of the first half of the returned pairs (this is always a supertype
* of the collection's element type `A`).
@@ -71,19 +70,20 @@ trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] {
* of the returned collection is the minimum of the lengths of this $coll and `that`.
*
* @usecase def zip[B](that: GenIterable[B]): $Coll[(A, B)]
+ * @inheritdoc
*
- * @param that The iterable providing the second half of each result pair
- * @tparam B the type of the second half of the returned pairs
- * @return a new $coll containing pairs consisting of
- * corresponding elements of this $coll and `that`. The length
- * of the returned collection is the minimum of the lengths of this $coll and `that`.
+ * $orderDependent
+ *
+ * @param that The iterable providing the second half of each result pair
+ * @tparam B the type of the second half of the returned pairs
+ * @return a new $coll containing pairs consisting of
+ * corresponding elements of this $coll and `that`. The length
+ * of the returned collection is the minimum of the lengths of this $coll and `that`.
*/
def zip[A1 >: A, B, That](that: GenIterable[B])(implicit bf: CBF[Repr, (A1, B), That]): That
/** Zips this $coll with its indices.
*
- * $orderDependent
- *
* @tparam A1 the type of the first half of the returned pairs (this is always a supertype
* of the collection's element type `A`).
* @tparam That the class of the returned collection. Where possible, `That` is
@@ -98,11 +98,14 @@ trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] {
* $coll paired with their index. Indices start at `0`.
*
* @usecase def zipWithIndex: $Coll[(A, Int)]
+ * @inheritdoc
*
- * @return A new $coll containing pairs consisting of all elements of this
- * $coll paired with their index. Indices start at `0`.
- * @example
- * `List("a", "b", "c").zipWithIndex = List(("a", 0), ("b", 1), ("c", 2))`
+ * $orderDependent
+ *
+ * @return A new $coll containing pairs consisting of all elements of this
+ * $coll paired with their index. Indices start at `0`.
+ * @example
+ * `List("a", "b", "c").zipWithIndex = List(("a", 0), ("b", 1), ("c", 2))`
*
*/
def zipWithIndex[A1 >: A, That](implicit bf: CBF[Repr, (A1, Int), That]): That
@@ -112,8 +115,6 @@ trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] {
* If one of the two collections is shorter than the other,
* placeholder elements are used to extend the shorter collection to the length of the longer.
*
- * $orderDependent
- *
* @param that the iterable providing the second half of each result pair
* @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`.
* @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll.
@@ -124,16 +125,19 @@ trait GenIterableLike[+A, +Repr] extends Any with GenTraversableLike[A, Repr] {
* If `that` is shorter than this $coll, `thatElem` values are used to pad the result.
*
* @usecase def zipAll[B](that: Iterable[B], thisElem: A, thatElem: B): $Coll[(A, B)]
- *
- * @param that The iterable providing the second half of each result pair
- * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`.
- * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll.
- * @tparam B the type of the second half of the returned pairs
- * @return a new $coll containing pairs consisting of
- * corresponding elements of this $coll and `that`. The length
- * of the returned collection is the maximum of the lengths of this $coll and `that`.
- * If this $coll is shorter than `that`, `thisElem` values are used to pad the result.
- * If `that` is shorter than this $coll, `thatElem` values are used to pad the result.
+ * @inheritdoc
+ *
+ * $orderDependent
+ *
+ * @param that The iterable providing the second half of each result pair
+ * @param thisElem the element to be used to fill up the result if this $coll is shorter than `that`.
+ * @param thatElem the element to be used to fill up the result if `that` is shorter than this $coll.
+ * @tparam B the type of the second half of the returned pairs
+ * @return a new $coll containing pairs consisting of
+ * corresponding elements of this $coll and `that`. The length
+ * of the returned collection is the maximum of the lengths of this $coll and `that`.
+ * If this $coll is shorter than `that`, `thisElem` values are used to pad the result.
+ * If `that` is shorter than this $coll, `thatElem` values are used to pad the result.
*/
def zipAll[B, A1 >: A, That](that: GenIterable[B], thisElem: A1, thatElem: B)(implicit bf: CBF[Repr, (A1, B), That]): That
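The recurring edit in these collection files is the same: each @usecase gains an @inheritdoc line so the simplified signature inherits the main comment, and usecase-only material ($orderDependent, $willNotTerminateInf, duplicated @param/@return text) moves inside the usecase body. A hedged toy example of the resulting shape (hypothetical trait, not library code):

    /** @define willNotTerminateInf This method will not terminate for infinite-sized collections. */
    trait DocPatternDemo[+A] {
      /** Builds a new collection by applying a function to all elements of this collection.
       *
       *  @param  f    the function to apply to each element.
       *  @tparam B    the element type of the returned collection.
       *  @tparam That the class of the returned collection.
       *  @return a new collection resulting from applying `f` to each element.
       *
       *  @usecase def map[B](f: A => B): Iterable[B]
       *    @inheritdoc
       *
       *    $willNotTerminateInf
       *
       *    @return a new collection resulting from applying `f` to each element.
       */
      def map[B, That](f: A => B)(implicit ev: Ordering[B]): That   // Ordering is a stand-in implicit
    }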
diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala
index 12ecbcf140..114169c849 100644
--- a/src/library/scala/collection/GenMapLike.scala
+++ b/src/library/scala/collection/GenMapLike.scala
@@ -41,6 +41,8 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals
* @return the value associated with `key` if it exists,
* otherwise the result of the `default` computation.
* @usecase def getOrElse(key: A, default: => B): B
+ * @inheritdoc
+ * @tparam B the result type of the default computation.
*/
def getOrElse[B1 >: B](key: A, default: => B1): B1
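A small usage note on the documented contract: the default is a by-name parameter, so it is only computed when the key is missing. For example:

    object GetOrElseDemo {
      def main(args: Array[String]): Unit = {
        val ports = Map("http" -> 80, "https" -> 443)
        println(ports.getOrElse("ftp", 21))                    // 21: key absent, default evaluated
        println(ports.getOrElse("http", sys.error("unused")))  // 80: default never evaluated
      }
    }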
diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala
index cb0e96fcbb..755abcd2bf 100644
--- a/src/library/scala/collection/GenSeqLike.scala
+++ b/src/library/scala/collection/GenSeqLike.scala
@@ -115,21 +115,21 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
/** Finds index of first occurrence of some value in this $coll.
*
- * $mayNotTerminateInf
- *
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
* @return the index of the first element of this $coll that is equal (wrt `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def indexOf(elem: A): Int
+ * @inheritdoc
+ *
+ * $mayNotTerminateInf
+ *
*/
def indexOf[B >: A](elem: B): Int = indexOf(elem, 0)
/** Finds index of first occurrence of some value in this $coll after or at some start index.
*
- * $mayNotTerminateInf
- *
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
* @param from the start index
@@ -137,19 +137,25 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* to `elem`, or `-1`, if none exists.
*
* @usecase def indexOf(elem: A, from: Int): Int
+ * @inheritdoc
+ *
+ * $mayNotTerminateInf
+ *
*/
def indexOf[B >: A](elem: B, from: Int): Int = indexWhere(elem ==, from)
/** Finds index of last occurrence of some value in this $coll.
*
- * $willNotTerminateInf
- *
* @param elem the element value to search for.
* @tparam B the type of the element `elem`.
* @return the index of the last element of this $coll that is equal (wrt `==`)
* to `elem`, or `-1`, if none exists.
*
* @usecase def lastIndexOf(elem: A): Int
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
+ *
*/
def lastIndexOf[B >: A](elem: B): Int = lastIndexWhere(elem ==)
@@ -162,6 +168,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* to `elem`, or `-1`, if none exists.
*
* @usecase def lastIndexOf(elem: A, end: Int): Int
+ * @inheritdoc
*/
def lastIndexOf[B >: A](elem: B, end: Int): Int = lastIndexWhere(elem ==, end)
@@ -195,10 +202,6 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* Builds a new collection by applying a function to all elements of this $coll and
* collecting the results in reversed order.
*
- * $willNotTerminateInf
- *
- * Note: `xs.reverseMap(f)` is the same as `xs.reverse.map(f)` but might be more efficient.
- *
* @param f the function to apply to each element.
* @tparam B the element type of the returned collection.
* @tparam That $thatinfo
@@ -207,10 +210,14 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* `f` to each element of this $coll and collecting the results in reversed order.
*
* @usecase def reverseMap[B](f: A => B): $Coll[B]
+ * @inheritdoc
*
- * Note: `xs.reverseMap(f)` is the same as `xs.reverse.map(f)` but might be more efficient.
- * @return a new $coll resulting from applying the given function
- * `f` to each element of this $coll and collecting the results in reversed order.
+ * $willNotTerminateInf
+ *
+ * Note: `xs.reverseMap(f)` is the same as `xs.reverse.map(f)` but might be more efficient.
+ *
+ * @return a new $coll resulting from applying the given function
+ * `f` to each element of this $coll and collecting the results in reversed order.
*/
def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
@@ -254,10 +261,13 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* @return a new $coll consisting of all elements of this $coll
* except that `replaced` elements starting from `from` are replaced
* by `patch`.
+ *
* @usecase def patch(from: Int, that: GenSeq[A], replaced: Int): $Coll[A]
- * @return a new $coll consisting of all elements of this $coll
- * except that `replaced` elements starting from `from` are replaced
- * by `patch`.
+ * @inheritdoc
+ *
+ * @return a new $coll consisting of all elements of this $coll
+ * except that `replaced` elements starting from `from` are replaced
+ * by `patch`.
*/
def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That
@@ -268,20 +278,33 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* @tparam That $thatinfo
* @param bf $bfinfo
* @return a new $coll which is a copy of this $coll with the element at position `index` replaced by `elem`.
+ *
* @usecase def updated(index: Int, elem: A): $Coll[A]
- * @return a copy of this $coll with the element at position `index` replaced by `elem`.
+ * @inheritdoc
+ *
+ * @return a copy of this $coll with the element at position `index` replaced by `elem`.
*/
def updated[B >: A, That](index: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That
/** A copy of the $coll with an element prepended.
*
- * Note that :-ending operators are right associative (see example).
- * A mnemonic for `+:` vs. `:+` is: the COLon goes on the COLlection side.
+ * @param elem the prepended element
+ * @tparam B the element type of the returned $coll.
+ * @tparam That $thatinfo
+ * @param bf $bfinfo
+ * @return a new collection of type `That` consisting of `elem` followed
+ * by all elements of this $coll.
+ *
+ * @usecase def +:(elem: A): $Coll[A]
+ * @inheritdoc
+ *
+ * Note that :-ending operators are right associative (see example).
+ * A mnemonic for `+:` vs. `:+` is: the COLon goes on the COLlection side.
*
- * Also, the original $coll is not modified, so you will want to capture the result.
+ * Also, the original $coll is not modified, so you will want to capture the result.
*
- * Example:
- * {{{
+ * Example:
+ * {{{
* scala> val x = LinkedList(1)
* x: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
*
@@ -290,17 +313,10 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* scala> println(x)
* LinkedList(1)
- * }}}
+ * }}}
*
- * @param elem the prepended element
- * @tparam B the element type of the returned $coll.
- * @tparam That $thatinfo
- * @param bf $bfinfo
- * @return a new collection of type `That` consisting of `elem` followed
- * by all elements of this $coll.
- * @usecase def +:(elem: A): $Coll[A]
- * @return a new $coll consisting of `elem` followed
- * by all elements of this $coll.
+ * @return a new $coll consisting of `elem` followed
+ * by all elements of this $coll.
*/
def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That
@@ -308,18 +324,20 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* A mnemonic for `+:` vs. `:+` is: the COLon goes on the COLlection side.
*
- * $willNotTerminateInf
* @param elem the appended element
* @tparam B the element type of the returned $coll.
* @tparam That $thatinfo
* @param bf $bfinfo
* @return a new collection of type `That` consisting of
* all elements of this $coll followed by `elem`.
+ *
* @usecase def :+(elem: A): $Coll[A]
- * @return a new $coll consisting of
- * all elements of this $coll followed by `elem`.
- * @example
- * {{{
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
+ *
+ * Example:
+ * {{{
* scala> import scala.collection.mutable.LinkedList
* import scala.collection.mutable.LinkedList
*
@@ -331,7 +349,10 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
*
* scala> println(a)
* LinkedList(1)
- * }}}
+ * }}}
+ *
+ * @return a new $coll consisting of
+ * all elements of this $coll followed by `elem`.
*/
def :+[B >: A, That](elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That
@@ -346,9 +367,11 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* all elements of this $coll followed by the minimal number of occurrences of `elem` so
* that the resulting collection has a length of at least `len`.
* @usecase def padTo(len: Int, elem: A): $Coll[A]
- * @return a new $coll consisting of
- * all elements of this $coll followed by the minimal number of occurrences of `elem` so
- * that the resulting $coll has a length of at least `len`.
+ * @inheritdoc
+ *
+ * @return a new $coll consisting of
+ * all elements of this $coll followed by the minimal number of occurrences of `elem` so
+ * that the resulting $coll has a length of at least `len`.
*/
def padTo[B >: A, That](len: Int, elem: B)(implicit bf: CanBuildFrom[Repr, B, That]): That
@@ -368,13 +391,6 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
/** Produces a new sequence which contains all elements of this $coll and also all elements of
* a given sequence. `xs union ys` is equivalent to `xs ++ ys`.
- * $willNotTerminateInf
- *
- * Another way to express this
- * is that `xs union ys` computes the order-presevring multi-set union of `xs` and `ys`.
- * `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets.
- *
- * $willNotTerminateInf
*
* @param that the sequence to add.
* @tparam B the element type of the returned $coll.
@@ -382,9 +398,18 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* @param bf $bfinfo
* @return a new collection of type `That` which contains all elements of this $coll
* followed by all elements of `that`.
+ *
* @usecase def union(that: GenSeq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * followed by all elements of `that`.
+ * @inheritdoc
+ *
+ * Another way to express this
+ * is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`.
+ * `union` is hence a counterpart of `diff` and `intersect` which also work on multi-sets.
+ *
+ * $willNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * followed by all elements of `that`.
*/
def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = this ++ that
@@ -393,7 +418,6 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
union(that: GenSeq[B])(bf)
/** Computes the multiset difference between this $coll and another sequence.
- * $willNotTerminateInf
*
* @param that the sequence of elements to remove
* @tparam B the element type of the returned $coll.
@@ -404,17 +428,21 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* If an element value `x` appears
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
* part of the result, but any following occurrences will.
+ *
* @usecase def diff(that: GenSeq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * except some of occurrences of elements that also appear in `that`.
- * If an element value `x` appears
- * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
- * part of the result, but any following occurrences will.
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * except some of occurrences of elements that also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
+ * part of the result, but any following occurrences will.
*/
def diff[B >: A](that: GenSeq[B]): Repr
/** Computes the multiset intersection between this $coll and another sequence.
- * $mayNotTerminateInf
*
* @param that the sequence of elements to intersect with.
* @tparam B the element type of the returned $coll.
@@ -425,12 +453,17 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal
* If an element value `x` appears
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
* in the result, but any following occurrences will be omitted.
+ *
* @usecase def intersect(that: GenSeq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * which also appear in `that`.
- * If an element value `x` appears
- * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
- * in the result, but any following occurrences will be omitted.
+ * @inheritdoc
+ *
+ * $mayNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
*/
def intersect[B >: A](that: GenSeq[B]): Repr
diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala
index dd5f602c41..fd03e0f446 100644
--- a/src/library/scala/collection/GenTraversableLike.scala
+++ b/src/library/scala/collection/GenTraversableLike.scala
@@ -136,6 +136,7 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* but this is not necessary.
*
* @usecase def foreach(f: A => Unit): Unit
+ * @inheritdoc
*/
def foreach[U](f: A => U): Unit
@@ -149,17 +150,15 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* `f` to each element of this $coll and collecting the results.
*
* @usecase def map[B](f: A => B): $Coll[B]
- *
- * @return a new $coll resulting from applying the given function
- * `f` to each element of this $coll and collecting the results.
+ * @inheritdoc
+ * @return a new $coll resulting from applying the given function
+ * `f` to each element of this $coll and collecting the results.
*/
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That
/** Builds a new collection by applying a partial function to all elements of this $coll
* on which the function is defined.
*
- * $collectExample
- *
* @param pf the partial function which filters and maps the $coll.
* @tparam B the element type of the returned collection.
* @tparam That $thatinfo
@@ -169,36 +168,18 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* The order of the elements is preserved.
*
* @usecase def collect[B](pf: PartialFunction[A, B]): $Coll[B]
+ * @inheritdoc
*
- * @return a new $coll resulting from applying the given partial function
- * `pf` to each element on which it is defined and collecting the results.
- * The order of the elements is preserved.
+ * $collectExample
+ *
+ * @return a new $coll resulting from applying the given partial function
+ * `pf` to each element on which it is defined and collecting the results.
+ * The order of the elements is preserved.
*/
def collect[B, That](pf: PartialFunction[A, B])(implicit bf: CanBuildFrom[Repr, B, That]): That
/** Builds a new collection by applying a function to all elements of this $coll
- * and using the elements of the resulting collections. For example:
- *
- * {{{
- * def getWords(lines: Seq[String]): Seq[String] = lines flatMap (line => line split "\\W+")
- * }}}
- *
- * The type of the resulting collection is guided by the static type of $coll. This might
- * cause unexpected results sometimes. For example:
- *
- * {{{
- * // lettersOf will return a Seq[Char] of likely repeated letters, instead of a Set
- * def lettersOf(words: Seq[String]) = words flatMap (word => word.toSet)
- *
- * // lettersOf will return a Set[Char], not a Seq
- * def lettersOf(words: Seq[String]) = words.toSet flatMap (word => word.toSeq)
- *
- * // xs will be a an Iterable[Int]
- * val xs = Map("a" -> List(11,111), "b" -> List(22,222)).flatMap(_._2)
- *
- * // ys will be a Map[Int, Int]
- * val ys = Map("a" -> List(1 -> 11,1 -> 111), "b" -> List(2 -> 22,2 -> 222)).flatMap(_._2)
- * }}}
+ * and using the elements of the resulting collections.
*
* @param f the function to apply to each element.
* @tparam B the element type of the returned collection.
@@ -208,33 +189,39 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* `f` to each element of this $coll and concatenating the results.
*
* @usecase def flatMap[B](f: A => GenTraversableOnce[B]): $Coll[B]
+ * @inheritdoc
*
- * @return a new $coll resulting from applying the given collection-valued function
- * `f` to each element of this $coll and concatenating the results.
- */
- def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
-
- /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the
- * right hand operand. The element type of the $coll is the most specific superclass encompassing
- * the element types of the two operands (see example).
+ * For example:
*
- * Example:
- * {{{
- * scala> val a = LinkedList(1)
- * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
+ * {{{
+ * def getWords(lines: Seq[String]): Seq[String] = lines flatMap (line => line split "\\W+")
+ * }}}
*
- * scala> val b = LinkedList(2)
- * b: scala.collection.mutable.LinkedList[Int] = LinkedList(2)
+ * The type of the resulting collection is guided by the static type of $coll. This might
+ * cause unexpected results sometimes. For example:
*
- * scala> val c = a ++ b
- * c: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
+ * {{{
+ * // lettersOf will return a Seq[Char] of likely repeated letters, instead of a Set
+ * def lettersOf(words: Seq[String]) = words flatMap (word => word.toSet)
*
- * scala> val d = LinkedList('a')
- * d: scala.collection.mutable.LinkedList[Char] = LinkedList(a)
+ * // lettersOf will return a Set[Char], not a Seq
+ * def lettersOf(words: Seq[String]) = words.toSet flatMap (word => word.toSeq)
*
- * scala> val e = c ++ d
- * e: scala.collection.mutable.LinkedList[AnyVal] = LinkedList(1, 2, a)
- * }}}
+ * // xs will be an Iterable[Int]
+ * val xs = Map("a" -> List(11,111), "b" -> List(22,222)).flatMap(_._2)
+ *
+ * // ys will be a Map[Int, Int]
+ * val ys = Map("a" -> List(1 -> 11,1 -> 111), "b" -> List(2 -> 22,2 -> 222)).flatMap(_._2)
+ * }}}
+ *
+ * @return a new $coll resulting from applying the given collection-valued function
+ * `f` to each element of this $coll and concatenating the results.
+ */
+ def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
+
+ /** Returns a new $coll containing the elements from the left hand operand followed by the elements from the
+ * right hand operand. The element type of the $coll is the most specific superclass encompassing
+ * the element types of the two operands.
*
* @param that the traversable to append.
* @tparam B the element type of the returned collection.
@@ -244,9 +231,28 @@ trait GenTraversableLike[+A, +Repr] extends Any with GenTraversableOnce[A] with
* of this $coll followed by all elements of `that`.
*
* @usecase def ++[B](that: GenTraversableOnce[B]): $Coll[B]
+ * @inheritdoc
+ *
+ * Example:
+ * {{{
+ * scala> val a = LinkedList(1)
+ * a: scala.collection.mutable.LinkedList[Int] = LinkedList(1)
+ *
+ * scala> val b = LinkedList(2)
+ * b: scala.collection.mutable.LinkedList[Int] = LinkedList(2)
+ *
+ * scala> val c = a ++ b
+ * c: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
+ *
+ * scala> val d = LinkedList('a')
+ * d: scala.collection.mutable.LinkedList[Char] = LinkedList(a)
+ *
+ * scala> val e = c ++ d
+ * e: scala.collection.mutable.LinkedList[AnyVal] = LinkedList(1, 2, a)
+ * }}}
*
- * @return a new $coll which contains all elements of this $coll
- * followed by all elements of `that`.
+ * @return a new $coll which contains all elements of this $coll
+ * followed by all elements of `that`.
*/
def ++[B >: A, That](that: GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index 67ea4cdb00..019d3d0785 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -124,7 +124,7 @@ trait GenTraversableOnce[+A] extends Any {
* scala> val b = (a /:\ 5)(_+_)
* b: Int = 15
* }}}*/
- @deprecated("use fold instead")
+ @deprecated("use fold instead", "2.10.0")
def /:\[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = fold(z)(op)
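A migration note implied by the updated deprecation message: the same result is obtained with fold. For example, with a small list of numbers:

    object FoldDemo {
      def main(args: Array[String]): Unit = {
        val a = List(1, 2, 3, 4)
        // was: val b = (a /:\ 5)(_ + _)
        val b = a.fold(5)(_ + _)
        println(b)   // 15
      }
    }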
/** Applies a binary operator to a start value and all elements of this $coll,
@@ -316,11 +316,12 @@ trait GenTraversableOnce[+A] extends Any {
* @return the sum of all elements of this $coll with respect to the `+` operator in `num`.
*
* @usecase def sum: A
+ * @inheritdoc
*
- * @return the sum of all elements in this $coll of numbers of type `Int`.
- * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
- * can be used as element type of the $coll and as result type of `sum`.
- * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
+ * @return the sum of all elements in this $coll of numbers of type `Int`.
+ * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
+ * can be used as element type of the $coll and as result type of `sum`.
+ * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
*
*/
def sum[A1 >: A](implicit num: Numeric[A1]): A1
@@ -333,11 +334,12 @@ trait GenTraversableOnce[+A] extends Any {
* @return the product of all elements of this $coll with respect to the `*` operator in `num`.
*
* @usecase def product: A
+ * @inheritdoc
*
- * @return the product of all elements in this $coll of numbers of type `Int`.
- * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
- * can be used as element type of the $coll and as result type of `product`.
- * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
+ * @return the product of all elements in this $coll of numbers of type `Int`.
+ * Instead of `Int`, any other type `T` with an implicit `Numeric[T]` implementation
+ * can be used as element type of the $coll and as result type of `product`.
+ * Examples of such types are: `Long`, `Float`, `Double`, `BigInt`.
*/
def product[A1 >: A](implicit num: Numeric[A1]): A1
@@ -348,7 +350,9 @@ trait GenTraversableOnce[+A] extends Any {
* @return the smallest element of this $coll with respect to the ordering `cmp`.
*
* @usecase def min: A
- * @return the smallest element of this $coll
+ * @inheritdoc
+ *
+ * @return the smallest element of this $coll
*/
def min[A1 >: A](implicit ord: Ordering[A1]): A
@@ -359,7 +363,9 @@ trait GenTraversableOnce[+A] extends Any {
* @return the largest element of this $coll with respect to the ordering `cmp`.
*
* @usecase def max: A
- * @return the largest element of this $coll.
+ * @inheritdoc
+ *
+ * @return the largest element of this $coll.
*/
def max[A1 >: A](implicit ord: Ordering[A1]): A
@@ -387,12 +393,13 @@ trait GenTraversableOnce[+A] extends Any {
* Copying will stop once either the end of the current $coll is reached,
* or the end of the array is reached.
*
- * $willNotTerminateInf
- *
* @param xs the array to fill.
* @tparam B the type of the elements of the array.
*
* @usecase def copyToArray(xs: Array[A]): Unit
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
*/
def copyToArray[B >: A](xs: Array[B]): Unit
@@ -401,13 +408,14 @@ trait GenTraversableOnce[+A] extends Any {
* Copying will stop once either the end of the current $coll is reached,
* or the end of the array is reached.
*
- * $willNotTerminateInf
- *
* @param xs the array to fill.
* @param start the starting index.
* @tparam B the type of the elements of the array.
*
* @usecase def copyToArray(xs: Array[A], start: Int): Unit
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
*/
def copyToArray[B >: A](xs: Array[B], start: Int): Unit
@@ -450,15 +458,18 @@ trait GenTraversableOnce[+A] extends Any {
def mkString: String
/** Converts this $coll to an array.
- * $willNotTerminateInf
*
* @tparam B the type of the elements of the array. A `ClassManifest` for
* this type must be available.
* @return an array containing all elements of this $coll.
*
* @usecase def toArray: Array[A]
- * @return an array containing all elements of this $coll.
- * A `ClassManifest` must be available for the element type of this $coll.
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
+ *
+ * @return an array containing all elements of this $coll.
+ * A `ClassManifest` must be available for the element type of this $coll.
*/
def toArray[A1 >: A: ClassManifest]: Array[A1]
@@ -530,11 +541,13 @@ trait GenTraversableOnce[+A] extends Any {
* pair in the map. Duplicate keys will be overwritten by later keys:
* if this is an unordered collection, which key is in the resulting map
* is undefined.
- * $willNotTerminateInf
* @return a map containing all elements of this $coll.
+ *
* @usecase def toMap[T, U]: Map[T, U]
- * @return a map of type `immutable.Map[T, U]`
- * containing all key/value pairs of type `(T, U)` of this $coll.
+ * @inheritdoc
+ * $willNotTerminateInf
+ * @return a map of type `immutable.Map[T, U]`
+ * containing all key/value pairs of type `(T, U)` of this $coll.
*/
def toMap[K, V](implicit ev: A <:< (K, V)): GenMap[K, V]
}
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index 73d4efe125..fb6d154952 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -67,6 +67,7 @@ self =>
* Subclasses should re-implement this method if a more efficient implementation exists.
*
* @usecase def foreach(f: A => Unit): Unit
+ * @inheritdoc
*/
def foreach[U](f: A => U): Unit =
iterator.foreach(f)
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index d46d215e0c..7b57a91c41 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -322,7 +322,9 @@ trait Iterator[+A] extends TraversableOnce[A] {
* @return a new iterator that first yields the values produced by this
* iterator followed by the values produced by iterator `that`.
* @note Reuse: $consumesTwoAndProducesOneIterator
+ *
* @usecase def ++(that: => Iterator[A]): Iterator[A]
+ * @inheritdoc
*/
def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = new AbstractIterator[B] {
// optimize a little bit to prevent n log n behavior.
@@ -609,7 +611,9 @@ trait Iterator[+A] extends TraversableOnce[A] {
* followed by the minimal number of occurrences of `elem` so
* that the number of produced values is at least `len`.
* @note Reuse: $consumesAndProducesIterator
+ *
* @usecase def padTo(len: Int, elem: A): Iterator[A]
+ * @inheritdoc
*/
def padTo[A1 >: A](len: Int, elem: A1): Iterator[A1] = new AbstractIterator[A1] {
private var count = 0
@@ -658,7 +662,9 @@ trait Iterator[+A] extends TraversableOnce[A] {
* If this iterator is shorter than `that`, `thisElem` values are used to pad the result.
* If `that` is shorter than this iterator, `thatElem` values are used to pad the result.
* @note Reuse: $consumesTwoAndProducesOneIterator
+ *
* @usecase def zipAll[B](that: Iterator[B], thisElem: A, thatElem: B): Iterator[(A, B)]
+ * @inheritdoc
*/
def zipAll[B, A1 >: A, B1 >: B](that: Iterator[B], thisElem: A1, thatElem: B1): Iterator[(A1, B1)] = new AbstractIterator[(A1, B1)] {
def hasNext = self.hasNext || that.hasNext
@@ -682,7 +688,9 @@ trait Iterator[+A] extends TraversableOnce[A] {
* but this is not necessary.
*
* @note Reuse: $consumesIterator
+ *
* @usecase def foreach(f: A => Unit): Unit
+ * @inheritdoc
*/
def foreach[U](f: A => U) { while (hasNext) f(next()) }
@@ -1057,15 +1065,17 @@ trait Iterator[+A] extends TraversableOnce[A] {
* Copying will stop once either the end of the current iterator is reached,
* or the end of the array is reached, or `len` elements have been copied.
*
- * $willNotTerminateInf
- *
* @param xs the array to fill.
* @param start the starting index.
* @param len the maximal number of elements to copy.
* @tparam B the type of the elements of the array.
*
* @note Reuse: $consumesIterator
+ *
* @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
*/
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int): Unit = {
var i = start
diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala
index 50919e506a..75ab3f28f5 100644
--- a/src/library/scala/collection/JavaConversions.scala
+++ b/src/library/scala/collection/JavaConversions.scala
@@ -8,6 +8,9 @@
package scala.collection
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import convert._
+
/** A collection of implicit conversions supporting interoperability between
* Scala and Java collections.
*
@@ -46,877 +49,81 @@ package scala.collection
* @author Martin Odersky
* @since 2.8
*/
-object JavaConversions {
+object JavaConversions extends WrapAsScala with WrapAsJava {
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type ConcurrentMapWrapper[A, B] = Wrappers.ConcurrentMapWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type DictionaryWrapper[A, B] = Wrappers.DictionaryWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type IterableWrapper[A] = Wrappers.IterableWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type IteratorWrapper[A] = Wrappers.IteratorWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JCollectionWrapper[A] = Wrappers.JCollectionWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JConcurrentMapWrapper[A, B] = Wrappers.JConcurrentMapWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JDictionaryWrapper[A, B] = Wrappers.JDictionaryWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JEnumerationWrapper[A] = Wrappers.JEnumerationWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JIterableWrapper[A] = Wrappers.JIterableWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JIteratorWrapper[A] = Wrappers.JIteratorWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JListWrapper[A] = Wrappers.JListWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JMapWrapper[A, B] = Wrappers.JMapWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JPropertiesWrapper = Wrappers.JPropertiesWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JSetWrapper[A] = Wrappers.JSetWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MapWrapper[A, B] = Wrappers.MapWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableBufferWrapper[A] = Wrappers.MutableBufferWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableMapWrapper[A, B] = Wrappers.MutableMapWrapper[A, B]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableSeqWrapper[A] = Wrappers.MutableSeqWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableSetWrapper[A] = Wrappers.MutableSetWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type SeqWrapper[A] = Wrappers.SeqWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type SetWrapper[A] = Wrappers.SetWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type ToIteratorWrapper[A] = Wrappers.ToIteratorWrapper[A]
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val DictionaryWrapper = Wrappers.DictionaryWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val IterableWrapper = Wrappers.IterableWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val IteratorWrapper = Wrappers.IteratorWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JCollectionWrapper = Wrappers.JCollectionWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JConcurrentMapWrapper = Wrappers.JConcurrentMapWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JDictionaryWrapper = Wrappers.JDictionaryWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JEnumerationWrapper = Wrappers.JEnumerationWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JIterableWrapper = Wrappers.JIterableWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JIteratorWrapper = Wrappers.JIteratorWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JListWrapper = Wrappers.JListWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JMapWrapper = Wrappers.JMapWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JPropertiesWrapper = Wrappers.JPropertiesWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JSetWrapper = Wrappers.JSetWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableBufferWrapper = Wrappers.MutableBufferWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableMapWrapper = Wrappers.MutableMapWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSeqWrapper = Wrappers.MutableSeqWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSetWrapper = Wrappers.MutableSetWrapper
+ @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val SeqWrapper = Wrappers.SeqWrapper
+
// Note to implementors: the cavalcade of deprecated methods herein should
// serve as a warning to any who follow: don't overload implicit methods.
- import java.{ lang => jl, util => ju }
- import java.util.{ concurrent => juc }
-
- // Scala => Java
-
- /**
- * Implicitly converts a Scala Iterator to a Java Iterator.
- * The returned Java Iterator is backed by the provided Scala
- * Iterator and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Iterator was previously obtained from an implicit or
- * explicit call of `asIterator(java.util.Iterator)` then the original
- * Java Iterator will be returned.
- *
- * @param i The Iterator to be converted.
- * @return A Java Iterator view of the argument.
- */
- implicit def asJavaIterator[A](it: Iterator[A]): ju.Iterator[A] = it match {
- case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]]
- case _ => IteratorWrapper(it)
- }
-
- /**
- * Implicitly converts a Scala Iterator to a Java Enumeration.
- * The returned Java Enumeration is backed by the provided Scala
- * Iterator and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Iterator was previously obtained from an implicit or
- * explicit call of `asIterator(java.util.Enumeration)` then the
- * original Java Enumeration will be returned.
- *
- * @param i The Iterator to be converted.
- * @return A Java Enumeration view of the argument.
- */
- implicit def asJavaEnumeration[A](it: Iterator[A]): ju.Enumeration[A] = it match {
- case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]]
- case _ => IteratorWrapper(it)
- }
-
- /**
- * Implicitly converts a Scala Iterable to a Java Iterable.
- * The returned Java Iterable is backed by the provided Scala
- * Iterable and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Iterable was previously obtained from an implicit or
- * explicit call of `asIterable(java.lang.Iterable)` then the original
- * Java Iterable will be returned.
- *
- * @param i The Iterable to be converted.
- * @return A Java Iterable view of the argument.
- */
- implicit def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match {
- case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]]
- case _ => IterableWrapper(i)
- }
-
- /**
- * Implicitly converts a Scala Iterable to an immutable Java
- * Collection.
- *
- * If the Scala Iterable was previously obtained from an implicit or
- * explicit call of `asSizedIterable(java.util.Collection)` then the original
- * Java Collection will be returned.
- *
- * @param i The SizedIterable to be converted.
- * @return A Java Collection view of the argument.
- */
- implicit def asJavaCollection[A](it: Iterable[A]): ju.Collection[A] = it match {
- case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
- case _ => new IterableWrapper(it)
- }
-
- /**
- * Implicitly converts a Scala mutable Buffer to a Java List.
- * The returned Java List is backed by the provided Scala
- * Buffer and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Buffer was previously obtained from an implicit or
- * explicit call of `asBuffer(java.util.List)` then the original
- * Java List will be returned.
- *
- * @param b The Buffer to be converted.
- * @return A Java List view of the argument.
- */
- implicit def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match {
- case JListWrapper(wrapped) => wrapped
- case _ => new MutableBufferWrapper(b)
- }
@deprecated("use bufferAsJavaList instead", "2.9.0")
def asJavaList[A](b : mutable.Buffer[A]): ju.List[A] = bufferAsJavaList[A](b)
- /**
- * Implicitly converts a Scala mutable Seq to a Java List.
- * The returned Java List is backed by the provided Scala
- * Seq and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Seq was previously obtained from an implicit or
- * explicit call of `asSeq(java.util.List)` then the original
- * Java List will be returned.
- *
- * @param b The Seq to be converted.
- * @return A Java List view of the argument.
- */
- implicit def mutableSeqAsJavaList[A](seq: mutable.Seq[A]): ju.List[A] = seq match {
- case JListWrapper(wrapped) => wrapped
- case _ => new MutableSeqWrapper(seq)
- }
@deprecated("use mutableSeqAsJavaList instead", "2.9.0")
def asJavaList[A](b : mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList[A](b)
- /**
- * Implicitly converts a Scala Seq to a Java List.
- * The returned Java List is backed by the provided Scala
- * Seq and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Seq was previously obtained from an implicit or
- * explicit call of `asSeq(java.util.List)` then the original
- * Java List will be returned.
- *
- * @param b The Seq to be converted.
- * @return A Java List view of the argument.
- */
- implicit def seqAsJavaList[A](seq: Seq[A]): ju.List[A] = seq match {
- case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
- case _ => new SeqWrapper(seq)
- }
-
@deprecated("use seqAsJavaList instead", "2.9.0")
def asJavaList[A](b : Seq[A]): ju.List[A] = seqAsJavaList[A](b)
- /**
- * Implicitly converts a Scala mutable Set to a Java Set.
- * The returned Java Set is backed by the provided Scala
- * Set and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Set was previously obtained from an implicit or
- * explicit call of `asSet(java.util.Set)` then the original
- * Java Set will be returned.
- *
- * @param s The Set to be converted.
- * @return A Java Set view of the argument.
- */
- implicit def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match {
- case JSetWrapper(wrapped) => wrapped
- case _ => new MutableSetWrapper(s)
- }
-
@deprecated("use mutableSetAsJavaSet instead", "2.9.0")
def asJavaSet[A](s : mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet[A](s)
- /**
- * Implicitly converts a Scala Set to a Java Set.
- * The returned Java Set is backed by the provided Scala
- * Set and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Set was previously obtained from an implicit or
- * explicit call of asSet(java.util.Set) then the original
- * Java Set will be returned.
- *
- * @param s The Set to be converted.
- * @return A Java Set view of the argument.
- */
- implicit def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match {
- case JSetWrapper(wrapped) => wrapped
- case _ => new SetWrapper(s)
- }
-
@deprecated("use setAsJavaSet instead", "2.9.0")
def asJavaSet[A](s: Set[A]): ju.Set[A] = setAsJavaSet[A](s)
- /**
- * Implicitly converts a Scala mutable Map to a Java Map.
- * The returned Java Map is backed by the provided Scala
- * Map and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala Map was previously obtained from an implicit or
- * explicit call of `asMap(java.util.Map)` then the original
- * Java Map will be returned.
- *
- * @param m The Map to be converted.
- * @return A Java Map view of the argument.
- */
- implicit def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
- case JMapWrapper(wrapped) => wrapped
- case _ => new MutableMapWrapper(m)
- }
-
@deprecated("use mutableMapAsJavaMap instead", "2.9.0")
def asJavaMap[A, B](m : mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap[A, B](m)
- /**
- * Implicitly converts a Scala mutable `Map` to a Java `Dictionary`.
- *
- * The returned Java `Dictionary` is backed by the provided Scala
- * `Dictionary` and any side-effects of using it via the Java interface
- * will be visible via the Scala interface and vice versa.
- *
- * If the Scala `Dictionary` was previously obtained from an implicit or
- * explicit call of `asMap(java.util.Dictionary)` then the original
- * Java Dictionary will be returned.
- *
- * @param m The `Map` to be converted.
- * @return A Java `Dictionary` view of the argument.
- */
- implicit def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
- case JDictionaryWrapper(wrapped) => wrapped
- case _ => new DictionaryWrapper(m)
- }
-
- /**
- * Implicitly converts a Scala `Map` to a Java `Map`.
- *
- * The returned Java `Map` is backed by the provided Scala `Map` and
- * any side-effects of using it via the Java interface will be visible
- * via the Scala interface and vice versa.
- *
- * If the Scala `Map` was previously obtained from an implicit or
- * explicit call of `asMap(java.util.Map)` then the original
- * Java `Map` will be returned.
- *
- * @param m The `Map` to be converted.
- * @return A Java `Map` view of the argument.
- */
- implicit def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match {
- //case JConcurrentMapWrapper(wrapped) => wrapped
- case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]]
- case _ => new MapWrapper(m)
- }
-
@deprecated("use mapAsJavaMap instead", "2.9.0")
def asJavaMap[A, B](m : Map[A, B]): ju.Map[A, B] = mapAsJavaMap[A, B](m)
- /**
- * Implicitly converts a Scala mutable `ConcurrentMap` to a Java
- * `ConcurrentMap`.
- *
- * The returned Java `ConcurrentMap` is backed by the provided Scala
- * `ConcurrentMap` and any side-effects of using it via the Java interface
- * will be visible via the Scala interface and vice versa.
- *
- * If the Scala `ConcurrentMap` was previously obtained from an implicit or
- * explicit call of `asConcurrentMap(java.util.concurrent.ConcurrentMap)`
- * then the original Java ConcurrentMap will be returned.
- *
- * @param m The `ConcurrentMap` to be converted.
- * @return A Java `ConcurrentMap` view of the argument.
- */
- implicit def asJavaConcurrentMap[A, B](m: mutable.ConcurrentMap[A, B]): juc.ConcurrentMap[A, B] = m match {
- case JConcurrentMapWrapper(wrapped) => wrapped
- case _ => new ConcurrentMapWrapper(m)
- }
-
- // Java => Scala
-
- /**
- * Implicitly converts a Java `Iterator` to a Scala `Iterator`.
- *
- * The returned Scala `Iterator` is backed by the provided Java `Iterator`
- * and any side-effects of using it via the Scala interface will be visible
- * via the Java interface and vice versa.
- *
- * If the Java `Iterator` was previously obtained from an implicit or
- * explicit call of `asIterator(scala.collection.Iterator)` then the
- * original Scala `Iterator` will be returned.
- *
- * @param i The `Iterator` to be converted.
- * @return A Scala `Iterator` view of the argument.
- */
- implicit def asScalaIterator[A](it: ju.Iterator[A]): Iterator[A] = it match {
- case IteratorWrapper(wrapped) => wrapped
- case _ => JIteratorWrapper(it)
- }
-
- /**
- * Implicitly converts a Java Enumeration to a Scala Iterator.
- * The returned Scala Iterator is backed by the provided Java
- * Enumeration and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java Enumeration was previously obtained from an implicit or
- * explicit call of `enumerationAsScalaIterator(scala.collection.Iterator)`
- * then the original Scala Iterator will be returned.
- *
- * @param i The Enumeration to be converted.
- * @return A Scala Iterator view of the argument.
- */
- implicit def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = i match {
- case IteratorWrapper(wrapped) => wrapped
- case _ => JEnumerationWrapper(i)
- }
-
- /**
- * Implicitly converts a Java `Iterable` to a Scala `Iterable`.
- *
- * The returned Scala `Iterable` is backed by the provided Java `Iterable`
- * and any side-effects of using it via the Scala interface will be visible
- * via the Java interface and vice versa.
- *
- * If the Java `Iterable` was previously obtained from an implicit or
- * explicit call of `iterableAsScalaIterable(scala.collection.Iterable)`
- * then the original Scala Iterable will be returned.
- *
- * @param i The Iterable to be converted.
- * @return A Scala Iterable view of the argument.
- */
- implicit def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = i match {
- case IterableWrapper(wrapped) => wrapped
- case _ => JIterableWrapper(i)
- }
-
@deprecated("use iterableAsScalaIterable instead", "2.9.0")
def asScalaIterable[A](i : jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable[A](i)
- /**
- * Implicitly converts a Java `Collection` to a Scala `Iterable`.
- *
- * If the Java `Collection` was previously obtained from an implicit or
- * explicit call of `collectionAsScalaIterable(scala.collection.SizedIterable)`
- * then the original Scala `Iterable` will be returned.
- *
- * @param i The Collection to be converted.
- * @return A Scala Iterable view of the argument.
- */
- implicit def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = i match {
- case IterableWrapper(wrapped) => wrapped
- case _ => JCollectionWrapper(i)
- }
@deprecated("use collectionAsScalaIterable instead", "2.9.0")
def asScalaIterable[A](i : ju.Collection[A]): Iterable[A] = collectionAsScalaIterable[A](i)
- /**
- * Implicitly converts a Java `List` to a Scala mutable `Buffer`.
- *
- * The returned Scala `Buffer` is backed by the provided Java `List`
- * and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java `List` was previously obtained from an implicit or
- * explicit call of `asScalaBuffer(scala.collection.mutable.Buffer)`
- * then the original Scala `Buffer` will be returned.
- *
- * @param l The `List` to be converted.
- * @return A Scala mutable `Buffer` view of the argument.
- */
- implicit def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = l match {
- case MutableBufferWrapper(wrapped) => wrapped
- case _ =>new JListWrapper(l)
- }
-
- /**
- * Implicitly converts a Java Set to a Scala mutable Set.
- * The returned Scala Set is backed by the provided Java
- * Set and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java Set was previously obtained from an implicit or
- * explicit call of `asScalaSet(scala.collection.mutable.Set)` then
- * the original Scala Set will be returned.
- *
- * @param s The Set to be converted.
- * @return A Scala mutable Set view of the argument.
- */
- implicit def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = s match {
- case MutableSetWrapper(wrapped) => wrapped
- case _ =>new JSetWrapper(s)
- }
-
- /**
- * Implicitly converts a Java `Map` to a Scala mutable `Map`.
- *
- * The returned Scala `Map` is backed by the provided Java `Map` and any
- * side-effects of using it via the Scala interface will be visible via
- * the Java interface and vice versa.
- *
- * If the Java `Map` was previously obtained from an implicit or
- * explicit call of `mapAsScalaMap(scala.collection.mutable.Map)` then
- * the original Scala Map will be returned.
- *
- * @param m The Map to be converted.
- * @return A Scala mutable Map view of the argument.
- */
- implicit def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = m match {
- //case ConcurrentMapWrapper(wrapped) => wrapped
- case MutableMapWrapper(wrapped) => wrapped
- case _ => new JMapWrapper(m)
- }
-
@deprecated("use mapAsScalaMap instead", "2.9.0")
def asScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap[A, B](m)
- /**
- * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap.
- * The returned Scala ConcurrentMap is backed by the provided Java
- * ConcurrentMap and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java ConcurrentMap was previously obtained from an implicit or
- * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
- * then the original Scala ConcurrentMap will be returned.
- *
- * @param m The ConcurrentMap to be converted.
- * @return A Scala mutable ConcurrentMap view of the argument.
- */
- implicit def asScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] = m match {
- case cmw: ConcurrentMapWrapper[a, b] => cmw.underlying
- case _ => new JConcurrentMapWrapper(m)
- }
-
- /**
- * Implicitly converts a Java `Dictionary` to a Scala mutable
- * `Map[String, String]`.
- *
- * The returned Scala `Map[String, String]` is backed by the provided Java
- * `Dictionary` and any side-effects of using it via the Scala interface
- * will be visible via the Java interface and vice versa.
- *
- * @param m The Dictionary to be converted.
- * @return A Scala mutable Map[String, String] view of the argument.
- */
- implicit def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match {
- case DictionaryWrapper(wrapped) => wrapped
- case _ => new JDictionaryWrapper(p)
- }
-
- /**
- * Implicitly converts a Java `Properties` to a Scala mutable `Map[String, String]`.
- *
- * The returned Scala `Map[String, String]` is backed by the provided Java
- * `Properties` and any side-effects of using it via the Scala interface
- * will be visible via the Java interface and vice versa.
- *
- * @param m The Properties to be converted.
- * @return A Scala mutable Map[String, String] view of the argument.
- */
- implicit def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match {
- case _ => new JPropertiesWrapper(p)
- }
-
@deprecated("use propertiesAsScalaMap instead", "2.9.0")
def asScalaMap(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p)
-
- // Private implementations (shared by JavaConverters) ...
-
- trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] {
- val underlying: Iterable[A]
- def size = underlying.size
- override def iterator = IteratorWrapper(underlying.iterator)
- override def isEmpty = underlying.isEmpty
- }
-
- case class IteratorWrapper[A](underlying: Iterator[A])
- extends ju.Iterator[A] with ju.Enumeration[A] {
- def hasNext = underlying.hasNext
- def next() = underlying.next
- def hasMoreElements = underlying.hasNext
- def nextElement() = underlying.next
- def remove() = throw new UnsupportedOperationException
- }
-
- class ToIteratorWrapper[A](underlying : Iterator[A]) {
- def asJava = new IteratorWrapper(underlying)
- }
-
- case class JIteratorWrapper[A](underlying: ju.Iterator[A]) extends AbstractIterator[A] with Iterator[A] {
- def hasNext = underlying.hasNext
- def next() = underlying.next
- }
-
- case class JEnumerationWrapper[A](underlying: ju.Enumeration[A]) extends AbstractIterator[A] with Iterator[A] {
- def hasNext = underlying.hasMoreElements
- def next() = underlying.nextElement
- }
-
- case class IterableWrapper[A](underlying: Iterable[A])
- extends ju.AbstractCollection[A]
- with IterableWrapperTrait[A] { }
-
- case class JIterableWrapper[A](underlying: jl.Iterable[A]) extends AbstractIterable[A] with Iterable[A] {
- def iterator = underlying.iterator
- def newBuilder[B] = new mutable.ArrayBuffer[B]
- }
-
- case class JCollectionWrapper[A](underlying: ju.Collection[A]) extends AbstractIterable[A] with Iterable[A] {
- def iterator = underlying.iterator
- override def size = underlying.size
- override def isEmpty = underlying.isEmpty
- def newBuilder[B] = new mutable.ArrayBuffer[B]
- }
-
- case class SeqWrapper[A](underlying: Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] {
- def get(i: Int) = underlying(i)
- }
-
- case class MutableSeqWrapper[A](underlying: mutable.Seq[A])
- extends ju.AbstractList[A] with IterableWrapperTrait[A] {
- def get(i: Int) = underlying(i)
- override def set(i: Int, elem: A) = {
- val p = underlying(i)
- underlying(i) = elem
- p
- }
- }
-
- case class MutableBufferWrapper[A](underlying: mutable.Buffer[A])
- extends ju.AbstractList[A] with IterableWrapperTrait[A] {
- def get(i: Int) = underlying(i)
- override def set(i: Int, elem: A) = { val p = underlying(i); underlying(i) = elem; p }
- override def add(elem: A) = { underlying append elem; true }
- override def remove(i: Int) = underlying remove i
- }
-
- case class JListWrapper[A](val underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] {
- def length = underlying.size
- override def isEmpty = underlying.isEmpty
- override def iterator: Iterator[A] = underlying.iterator
- def apply(i: Int) = underlying.get(i)
- def update(i: Int, elem: A) = underlying.set(i, elem)
- def +=:(elem: A) = { underlying.subList(0, 0) add elem; this }
- def +=(elem: A): this.type = { underlying add elem; this }
- def insertAll(i: Int, elems: Traversable[A]) = {
- val ins = underlying.subList(0, i)
- elems.seq.foreach(ins.add(_))
- }
- def remove(i: Int) = underlying.remove(i)
- def clear() = underlying.clear()
- def result = this
- }
-
- class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] {
- self =>
- def size = underlying.size
- def iterator = new ju.Iterator[A] {
- val ui = underlying.iterator
- var prev: Option[A] = None
- def hasNext = ui.hasNext
- def next = { val e = ui.next; prev = Some(e); e }
- def remove = prev match {
- case Some(e) =>
- underlying match {
- case ms: mutable.Set[a] =>
- ms remove e
- prev = None
- case _ =>
- throw new UnsupportedOperationException("remove")
- }
- case _ =>
- throw new IllegalStateException("next must be called at least once before remove")
- }
- }
- }
-
- case class MutableSetWrapper[A](underlying: mutable.Set[A]) extends SetWrapper[A](underlying) {
- override def add(elem: A) = {
- val sz = underlying.size
- underlying += elem
- sz < underlying.size
- }
- override def remove(elem: AnyRef) =
- try underlying remove elem.asInstanceOf[A]
- catch { case ex: ClassCastException => false }
- override def clear() = underlying.clear()
- }
-
- case class JSetWrapper[A](underlying: ju.Set[A])
- extends mutable.AbstractSet[A]
- with mutable.Set[A]
- with mutable.SetLike[A, JSetWrapper[A]] {
-
- override def size = underlying.size
-
- def iterator = underlying.iterator
-
- def contains(elem: A): Boolean = underlying.contains(elem)
-
- def +=(elem: A): this.type = { underlying add elem; this }
- def -=(elem: A): this.type = { underlying remove elem; this }
-
- override def add(elem: A): Boolean = underlying add elem
- override def remove(elem: A): Boolean = underlying remove elem
- override def clear() = underlying.clear()
-
- override def empty = JSetWrapper(new ju.HashSet[A])
- }
-
- class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] { self =>
- override def size = underlying.size
-
- override def get(key: AnyRef): B = try {
- underlying get key.asInstanceOf[A] match {
- case None => null.asInstanceOf[B]
- case Some(v) => v
- }
- } catch {
- case ex: ClassCastException => null.asInstanceOf[B]
- }
-
- override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
- def size = self.size
-
- def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
- val ui = underlying.iterator
- var prev : Option[A] = None
-
- def hasNext = ui.hasNext
-
- def next() = {
- val (k, v) = ui.next
- prev = Some(k)
- new ju.Map.Entry[A, B] {
- def getKey = k
- def getValue = v
- def setValue(v1 : B) = self.put(k, v1)
- override def hashCode = k.hashCode + v.hashCode
- override def equals(other: Any) = other match {
- case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
- case _ => false
- }
- }
- }
-
- def remove() {
- prev match {
- case Some(k) =>
- underlying match {
- case mm: mutable.Map[a, _] =>
- mm remove k
- prev = None
- case _ =>
- throw new UnsupportedOperationException("remove")
- }
- case _ =>
- throw new IllegalStateException("next must be called at least once before remove")
- }
- }
- }
- }
- }
-
- case class MutableMapWrapper[A, B](underlying: mutable.Map[A, B])
- extends MapWrapper[A, B](underlying) {
- override def put(k: A, v: B) = underlying.put(k, v) match {
- case Some(v1) => v1
- case None => null.asInstanceOf[B]
- }
-
- override def remove(k: AnyRef): B = try {
- underlying remove k.asInstanceOf[A] match {
- case None => null.asInstanceOf[B]
- case Some(v) => v
- }
- } catch {
- case ex: ClassCastException => null.asInstanceOf[B]
- }
-
- override def clear() = underlying.clear()
- }
-
- trait JMapWrapperLike[A, B, +Repr <: mutable.MapLike[A, B, Repr] with mutable.Map[A, B]]
- extends mutable.Map[A, B] with mutable.MapLike[A, B, Repr] {
- def underlying: ju.Map[A, B]
-
- override def size = underlying.size
-
- def get(k: A) = {
- val v = underlying get k
- if (v != null)
- Some(v)
- else if (underlying containsKey k)
- Some(null.asInstanceOf[B])
- else
- None
- }
-
- def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this }
- def -=(key: A): this.type = { underlying remove key; this }
-
- override def put(k: A, v: B): Option[B] = {
- val r = underlying.put(k, v)
- if (r != null) Some(r) else None
- }
-
- override def update(k: A, v: B) { underlying.put(k, v) }
-
- override def remove(k: A): Option[B] = {
- val r = underlying remove k
- if (r != null) Some(r) else None
- }
-
- def iterator: Iterator[(A, B)] = new AbstractIterator[(A, B)] {
- val ui = underlying.entrySet.iterator
- def hasNext = ui.hasNext
- def next() = { val e = ui.next(); (e.getKey, e.getValue) }
- }
-
- override def clear() = underlying.clear()
-
- override def empty: Repr = null.asInstanceOf[Repr]
- }
-
- case class JMapWrapper[A, B](val underlying : ju.Map[A, B])
- extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] {
- override def empty = JMapWrapper(new ju.HashMap[A, B])
- }
-
- class ConcurrentMapWrapper[A, B](override val underlying: mutable.ConcurrentMap[A, B])
- extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] {
-
- def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match {
- case Some(v) => v
- case None => null.asInstanceOf[B]
- }
-
- def remove(k: AnyRef, v: AnyRef) = try {
- underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B])
- } catch {
- case ex: ClassCastException =>
- false
- }
-
- def replace(k: A, v: B): B = underlying.replace(k, v) match {
- case Some(v) => v
- case None => null.asInstanceOf[B]
- }
-
- def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
- }
-
- case class JConcurrentMapWrapper[A, B](val underlying: juc.ConcurrentMap[A, B])
- extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with mutable.ConcurrentMap[A, B] {
- override def get(k: A) = {
- val v = underlying get k
- if (v != null) Some(v)
- else None
- }
-
- override def empty = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[A, B])
-
- def putIfAbsent(k: A, v: B): Option[B] = {
- val r = underlying.putIfAbsent(k, v)
- if (r != null) Some(r) else None
- }
-
- def remove(k: A, v: B): Boolean = underlying.remove(k, v)
-
- def replace(k: A, v: B): Option[B] = {
- val prev = underlying.replace(k, v)
- if (prev != null) Some(prev) else None
- }
-
- def replace(k: A, oldvalue: B, newvalue: B): Boolean =
- underlying.replace(k, oldvalue, newvalue)
- }
-
- case class DictionaryWrapper[A, B](underlying: mutable.Map[A, B])
- extends ju.Dictionary[A, B] {
- def size: Int = underlying.size
- def isEmpty: Boolean = underlying.isEmpty
- def keys: ju.Enumeration[A] = asJavaEnumeration(underlying.keysIterator)
- def elements: ju.Enumeration[B] = asJavaEnumeration(underlying.valuesIterator)
- def get(key: AnyRef) = try {
- underlying get key.asInstanceOf[A] match {
- case None => null.asInstanceOf[B]
- case Some(v) => v
- }
- } catch {
- case ex: ClassCastException => null.asInstanceOf[B]
- }
- def put(key: A, value: B): B = underlying.put(key, value) match {
- case Some(v) => v
- case None => null.asInstanceOf[B]
- }
- override def remove(key: AnyRef) = try {
- underlying remove key.asInstanceOf[A] match {
- case None => null.asInstanceOf[B]
- case Some(v) => v
- }
- } catch {
- case ex: ClassCastException => null.asInstanceOf[B]
- }
- }
-
- case class JDictionaryWrapper[A, B](underlying: ju.Dictionary[A, B])
- extends mutable.AbstractMap[A, B] with mutable.Map[A, B] {
- override def size: Int = underlying.size
-
- def get(k: A) = {
- val v = underlying get k
- if (v != null) Some(v) else None
- }
-
- def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this }
- def -=(key: A): this.type = { underlying remove key; this }
-
- override def put(k: A, v: B): Option[B] = {
- val r = underlying.put(k, v)
- if (r != null) Some(r) else None
- }
-
- override def update(k: A, v: B) { underlying.put(k, v) }
-
- override def remove(k: A): Option[B] = {
- val r = underlying remove k
- if (r != null) Some(r) else None
- }
-
- def iterator = enumerationAsScalaIterator(underlying.keys) map (k => (k, underlying get k))
-
- override def clear() = underlying.clear()
- }
-
- case class JPropertiesWrapper(underlying: ju.Properties)
- extends mutable.AbstractMap[String, String]
- with mutable.Map[String, String]
- with mutable.MapLike[String, String, JPropertiesWrapper] {
-
- override def size = underlying.size
-
- def get(k: String) = {
- val v = underlying get k
- if (v != null) Some(v.asInstanceOf[String]) else None
- }
-
- def +=(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this }
- def -=(key: String): this.type = { underlying remove key; this }
-
- override def put(k: String, v: String): Option[String] = {
- val r = underlying.put(k, v)
- if (r != null) Some(r.asInstanceOf[String]) else None
- }
-
- override def update(k: String, v: String) { underlying.put(k, v) }
-
- override def remove(k: String): Option[String] = {
- val r = underlying remove k
- if (r != null) Some(r.asInstanceOf[String]) else None
- }
-
- def iterator: Iterator[(String, String)] = new AbstractIterator[(String, String)] {
- val ui = underlying.entrySet.iterator
- def hasNext = ui.hasNext
- def next() = {
- val e = ui.next()
- (e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String])
- }
- }
-
- override def clear() = underlying.clear()
-
- override def empty = JPropertiesWrapper(new ju.Properties)
-
- def getProperty(key: String) = underlying.getProperty(key)
-
- def getProperty(key: String, defaultValue: String) =
- underlying.getProperty(key, defaultValue)
-
- def setProperty(key: String, value: String) =
- underlying.setProperty(key, value)
- }
}
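
The round-trip behavior described throughout the removed documentation above, wrapping on the way in and unwrapping back to the original instance on the way out, can be sketched as follows, assuming the pre-refactor `scala.collection.JavaConversions._` implicits shown in this file are in scope (the map contents are illustrative only):

    import scala.collection.JavaConversions._
    import scala.collection.mutable

    val m = mutable.Map(1 -> "one")
    val jm: java.util.Map[Int, String] = m   // implicitly wrapped in a MutableMapWrapper
    jm.put(2, "two")                         // mutation through the Java view ...
    assert(m(2) == "two")                    // ... is visible through the Scala map

    val back: mutable.Map[Int, String] = jm  // mapAsScalaMap unwraps the wrapper
    assert(back eq m)                        // the original instance is returned
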
diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala
index d213e60112..07e8518cb0 100755
--- a/src/library/scala/collection/JavaConverters.scala
+++ b/src/library/scala/collection/JavaConverters.scala
@@ -8,6 +8,14 @@
package scala.collection
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import convert._
+
+// TODO: I cleaned all this documentation up in JavaConversions, but the
+// documentation in here is basically the pre-cleaned-up version with minor
+// additions. Would be nice to have in one place.
+
+
/** A collection of decorators that allow to convert between
* Scala and Java collections using `asScala` and `asJava` methods.
*
@@ -48,494 +56,48 @@ package scala.collection
* @author Martin Odersky
* @since 2.8.1
*/
-
-trait JavaConverters {
- import java.{ lang => jl, util => ju }
- import java.util.{ concurrent => juc }
- import JavaConversions._
-
- // TODO: I cleaned all this documentation up in JavaConversions, but the
- // documentation in here is basically the pre-cleaned-up version with minor
- // additions. Would be nice to have in one place.
-
- // Conversion decorator classes
-
- /** Generic class containing the `asJava` converter method */
- class AsJava[C](op: => C) {
- /** Converts a Scala collection to the corresponding Java collection */
- def asJava: C = op
- }
-
- /** Generic class containing the `asScala` converter method */
- class AsScala[C](op: => C) {
- /** Converts a Java collection to the corresponding Scala collection */
- def asScala: C = op
- }
-
- /** Generic class containing the `asJavaCollection` converter method */
- class AsJavaCollection[A](i: Iterable[A]) {
- /** Converts a Scala `Iterable` to a Java `Collection` */
- def asJavaCollection: ju.Collection[A] = JavaConversions.asJavaCollection(i)
- }
-
- /** Generic class containing the `asJavaEnumeration` converter method */
- class AsJavaEnumeration[A](i: Iterator[A]) {
- /** Converts a Scala `Iterator` to a Java `Enumeration` */
- def asJavaEnumeration: ju.Enumeration[A] = JavaConversions.asJavaEnumeration(i)
- }
-
- /** Generic class containing the `asJavaDictionary` converter method */
- class AsJavaDictionary[A, B](m : mutable.Map[A, B]) {
- /** Converts a Scala `Map` to a Java `Dictionary` */
- def asJavaDictionary: ju.Dictionary[A, B] = JavaConversions.asJavaDictionary(m)
- }
-
- // Scala => Java
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a
- * Java `Iterator`. The returned Java `Iterator` is backed by the provided Scala
- * `Iterator` and any side-effects of using it via the Java interface will
- * be visible via the Scala interface and vice versa.
- *
- * If the Scala `Iterator` was previously obtained from an implicit or explicit
- * call of `asIterator(java.util.Iterator)` then the original Java `Iterator`
- * will be returned by the `asJava` method.
- *
- * @param i The `Iterator` to be converted.
- * @return An object with an `asJava` method that returns a Java `Iterator` view of the argument.
- */
- implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] =
- new AsJava(asJavaIterator(i))
-
- /**
- * Adds an `asJavaEnumeration` method that implicitly converts a Scala
- * `Iterator` to a Java `Enumeration`. The returned Java `Enumeration` is
- * backed by the provided Scala `Iterator` and any side-effects of using
- * it via the Java interface will be visible via the Scala interface and
- * vice versa.
- *
- * If the Scala `Iterator` was previously obtained from an implicit or
- * explicit call of `asIterator(java.util.Enumeration)` then the
- * original Java `Enumeration` will be returned.
- *
- * @param i The `Iterator` to be converted.
- * @return An object with an `asJavaEnumeration` method that returns a Java
- * `Enumeration` view of the argument.
- */
- implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] =
- new AsJavaEnumeration(i)
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala `Iterable` to
- * a Java `Iterable`.
- *
- * The returned Java `Iterable` is backed by the provided Scala `Iterable`
- * and any side-effects of using it via the Java interface will be visible
- * via the Scala interface and vice versa.
- *
- * If the Scala `Iterable` was previously obtained from an implicit or
- * explicit call of `asIterable(java.lang.Iterable)` then the original
- * Java `Iterable` will be returned.
- *
- * @param i The `Iterable` to be converted.
- * @return An object with an `asJavaCollection` method that returns a Java
- * `Iterable` view of the argument.
- */
- implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] =
- new AsJava(asJavaIterable(i))
-
- /**
- * Adds an `asJavaCollection` method that implicitly converts a Scala
- * `Iterable` to an immutable Java `Collection`.
- *
- * If the Scala `Iterable` was previously obtained from an implicit or
- * explicit call of `asSizedIterable(java.util.Collection)` then the
- * original Java `Collection` will be returned.
- *
- * @param i The `SizedIterable` to be converted.
- * @return An object with an `asJava` method that returns a Java
- * `Collection` view of the argument.
- */
- implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] =
- new AsJavaCollection(i)
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer`
- * to a Java `List`.
- *
- * The returned Java `List` is backed by the provided Scala `Buffer` and any
- * side-effects of using it via the Java interface will be visible via the
- * Scala interface and vice versa.
- *
- * If the Scala `Buffer` was previously obtained from an implicit or explicit
- * call of `asBuffer(java.util.List)` then the original Java `List` will be
- * returned.
- *
- * @param b The `Buffer` to be converted.
- * @return An object with an `asJava` method that returns a Java `List` view
- * of the argument.
- */
- implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] =
- new AsJava(bufferAsJavaList(b))
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala mutable `Seq`
- * to a Java `List`.
- *
- * The returned Java `List` is backed by the provided Scala `Seq` and any
- * side-effects of using it via the Java interface will be visible via the
- * Scala interface and vice versa.
- *
- * If the Scala `Seq` was previously obtained from an implicit or explicit
- * call of `asSeq(java.util.List)` then the original Java `List` will be
- * returned.
- *
- * @param b The `Seq` to be converted.
- * @return An object with an `asJava` method that returns a Java `List`
- * view of the argument.
- */
- implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] =
- new AsJava(mutableSeqAsJavaList(b))
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala `Seq` to a
- * Java `List`.
- *
- * The returned Java `List` is backed by the provided Scala `Seq` and any
- * side-effects of using it via the Java interface will be visible via the
- * Scala interface and vice versa.
- *
- * If the Scala `Seq` was previously obtained from an implicit or explicit
- * call of `asSeq(java.util.List)` then the original Java `List` will be
- * returned.
- *
- * @param b The `Seq` to be converted.
- * @return An object with an `asJava` method that returns a Java `List`
- * view of the argument.
- */
- implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] =
- new AsJava(seqAsJavaList(b))
+object JavaConverters extends DecorateAsJava with DecorateAsScala {
+ @deprecated("Don't access these decorators directly.", "2.10.0")
+ type AsJava[A] = Decorators.AsJava[A]
+ @deprecated("Don't access these decorators directly.", "2.10.0")
+ type AsScala[A] = Decorators.AsScala[A]
+ @deprecated("Don't access these decorators directly.", "2.10.0")
+ type AsJavaCollection[A] = Decorators.AsJavaCollection[A]
+ @deprecated("Don't access these decorators directly.", "2.10.0")
+ type AsJavaEnumeration[A] = Decorators.AsJavaEnumeration[A]
+ @deprecated("Don't access these decorators directly.", "2.10.0")
+ type AsJavaDictionary[A, B] = Decorators.AsJavaDictionary[A, B]
@deprecated("Use bufferAsJavaListConverter instead", "2.9.0")
def asJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = bufferAsJavaListConverter(b)
+
@deprecated("Use mutableSeqAsJavaListConverter instead", "2.9.0")
def asJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = mutableSeqAsJavaListConverter(b)
+
@deprecated("Use seqAsJavaListConverter instead", "2.9.0")
def asJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = seqAsJavaListConverter(b)
- /**
- * Adds an `asJava` method that implicitly converts a Scala mutable `Set`
- * to a Java `Set`.
- *
- * The returned Java `Set` is backed by the provided Scala `Set` and any
- * side-effects of using it via the Java interface will be visible via
- * the Scala interface and vice versa.
- *
- * If the Scala `Set` was previously obtained from an implicit or explicit
- * call of `asSet(java.util.Set)` then the original Java `Set` will be
- * returned.
- *
- * @param s The `Set` to be converted.
- * @return An object with an `asJava` method that returns a Java `Set` view
- * of the argument.
- */
- implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] =
- new AsJava(mutableSetAsJavaSet(s))
-
@deprecated("Use mutableSetAsJavaSetConverter instead", "2.9.0")
def asJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = mutableSetAsJavaSetConverter(s)
- /**
- * Adds an `asJava` method that implicitly converts a Scala `Set` to a
- * Java `Set`.
- *
- * The returned Java `Set` is backed by the provided Scala `Set` and any
- * side-effects of using it via the Java interface will be visible via
- * the Scala interface and vice versa.
- *
- * If the Scala `Set` was previously obtained from an implicit or explicit
- * call of `asSet(java.util.Set)` then the original Java `Set` will be
- * returned.
- *
- * @param s The `Set` to be converted.
- * @return An object with an `asJava` method that returns a Java `Set` view
- * of the argument.
- */
- implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] =
- new AsJava(setAsJavaSet(s))
-
@deprecated("Use setAsJavaSetConverter instead", "2.9.0")
def asJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = setAsJavaSetConverter(s)
- /**
- * Adds an `asJava` method that implicitly converts a Scala mutable `Map`
- * to a Java `Map`.
- *
- * The returned Java `Map` is backed by the provided Scala `Map` and any
- * side-effects of using it via the Java interface will be visible via the
- * Scala interface and vice versa.
- *
- * If the Scala `Map` was previously obtained from an implicit or explicit
- * call of `asMap(java.util.Map)` then the original Java `Map` will be
- * returned.
- *
- * @param m The `Map` to be converted.
- * @return An object with an `asJava` method that returns a Java `Map` view
- * of the argument.
- */
- implicit def mutableMapAsJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] =
- new AsJava(mutableMapAsJavaMap(m))
-
@deprecated("use mutableMapAsJavaMapConverter instead", "2.9.0")
def asJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] = mutableMapAsJavaMapConverter(m)
- /**
- * Adds an `asJavaDictionary` method that implicitly converts a Scala
- * mutable `Map` to a Java `Dictionary`.
- *
- * The returned Java `Dictionary` is backed by the provided Scala
- * `Dictionary` and any side-effects of using it via the Java interface
- * will be visible via the Scala interface and vice versa.
- *
- * If the Scala `Dictionary` was previously obtained from an implicit or
- * explicit call of `asMap(java.util.Dictionary)` then the original
- * Java `Dictionary` will be returned.
- *
- * @param m The `Map` to be converted.
- * @return An object with an `asJavaDictionary` method that returns a
- * Java `Dictionary` view of the argument.
- */
- implicit def asJavaDictionaryConverter[A, B](m : mutable.Map[A, B]): AsJavaDictionary[A, B] =
- new AsJavaDictionary(m)
-
- /**
- * Adds an `asJava` method that implicitly converts a Scala `Map` to
- * a Java `Map`.
- *
- * The returned Java `Map` is backed by the provided Scala `Map` and any
- * side-effects of using it via the Java interface will be visible via
- * the Scala interface and vice versa.
- *
- * If the Scala `Map` was previously obtained from an implicit or explicit
- * call of `asMap(java.util.Map)` then the original Java `Map` will be
- * returned.
- *
- * @param m The `Map` to be converted.
- * @return An object with an `asJava` method that returns a Java `Map` view
- * of the argument.
- */
- implicit def mapAsJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] =
- new AsJava(mapAsJavaMap(m))
-
@deprecated("Use mapAsJavaMapConverter instead", "2.9.0")
def asJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] = mapAsJavaMapConverter(m)
- /**
- * Adds an `asJava` method that implicitly converts a Scala mutable
- * `ConcurrentMap` to a Java `ConcurrentMap`.
- *
- * The returned Java `ConcurrentMap` is backed by the provided Scala
- * `ConcurrentMap` and any side-effects of using it via the Java interface
- * will be visible via the Scala interface and vice versa.
- *
- * If the Scala `ConcurrentMap` was previously obtained from an implicit or
- * explicit call of `asConcurrentMap(java.util.concurrent.ConcurrentMap)`
- * then the original Java `ConcurrentMap` will be returned.
- *
- * @param m The `ConcurrentMap` to be converted.
- * @return An object with an `asJava` method that returns a Java
- * `ConcurrentMap` view of the argument.
- */
- implicit def asJavaConcurrentMapConverter[A, B](m: mutable.ConcurrentMap[A, B]): AsJava[juc.ConcurrentMap[A, B]] =
- new AsJava(asJavaConcurrentMap(m))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java `Iterator` to
- * a Scala `Iterator`.
- *
- * The returned Scala `Iterator` is backed by the provided Java `Iterator`
- * and any side-effects of using it via the Scala interface will be visible
- * via the Java interface and vice versa.
- *
- * If the Java `Iterator` was previously obtained from an implicit or
- * explicit call of `asIterator(scala.collection.Iterator)` then the
- * original Scala `Iterator` will be returned.
- *
- * @param i The `Iterator` to be converted.
- * @return An object with an `asScala` method that returns a Scala
- * `Iterator` view of the argument.
- */
- implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] =
- new AsScala(asScalaIterator(i))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java `Enumeration`
- * to a Scala `Iterator`.
- *
- * The returned Scala `Iterator` is backed by the provided Java
- * `Enumeration` and any side-effects of using it via the Scala interface
- * will be visible via the Java interface and vice versa.
- *
- * If the Java `Enumeration` was previously obtained from an implicit or
- * explicit call of `asEnumeration(scala.collection.Iterator)` then the
- * original Scala `Iterator` will be returned.
- *
- * @param i The `Enumeration` to be converted.
- * @return An object with an `asScala` method that returns a Scala
- * `Iterator` view of the argument.
- */
- implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] =
- new AsScala(enumerationAsScalaIterator(i))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java `Iterable` to
- * a Scala `Iterable`.
- *
- * The returned Scala `Iterable` is backed by the provided Java `Iterable`
- * and any side-effects of using it via the Scala interface will be visible
- * via the Java interface and vice versa.
- *
- * If the Java `Iterable` was previously obtained from an implicit or
- * explicit call of `asIterable(scala.collection.Iterable)` then the original
- * Scala `Iterable` will be returned.
- *
- * @param i The `Iterable` to be converted.
- * @return An object with an `asScala` method that returns a Scala `Iterable`
- * view of the argument.
- */
- implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] =
- new AsScala(iterableAsScalaIterable(i))
-
@deprecated("Use iterableAsScalaIterableConverter instead", "2.9.0")
def asScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = iterableAsScalaIterableConverter(i)
- /**
- * Adds an `asScala` method that implicitly converts a Java `Collection` to
- * a Scala `Iterable`.
- *
- * If the Java `Collection` was previously obtained from an implicit or
- * explicit call of `asCollection(scala.collection.SizedIterable)` then
- * the original Scala `SizedIterable` will be returned.
- *
- * @param i The `Collection` to be converted.
- * @return An object with an `asScala` method that returns a Scala
- * `SizedIterable` view of the argument.
- */
- implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] =
- new AsScala(collectionAsScalaIterable(i))
-
@deprecated("Use collectionAsScalaIterableConverter instead", "2.9.0")
def asScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] = collectionAsScalaIterableConverter(i)
- /**
- * Adds an `asScala` method that implicitly converts a Java `List` to a
- * Scala mutable `Buffer`.
- *
- * The returned Scala `Buffer` is backed by the provided Java `List` and
- * any side-effects of using it via the Scala interface will be visible via
- * the Java interface and vice versa.
- *
- * If the Java `List` was previously obtained from an implicit or explicit
- * call of `asList(scala.collection.mutable.Buffer)` then the original
- * Scala `Buffer` will be returned.
- *
- * @param l The `List` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `Buffer` view of the argument.
- */
- implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] =
- new AsScala(asScalaBuffer(l))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java `Set` to a
- * Scala mutable `Set`.
- *
- * The returned Scala `Set` is backed by the provided Java `Set` and any
- * side-effects of using it via the Scala interface will be visible via
- * the Java interface and vice versa.
- *
- * If the Java `Set` was previously obtained from an implicit or explicit
- * call of `asSet(scala.collection.mutable.Set)` then the original
- * Scala `Set` will be returned.
- *
- * @param s The `Set` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `Set` view of the argument.
- */
- implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] =
- new AsScala(asScalaSet(s))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala
- * mutable `Map`. The returned Scala `Map` is backed by the provided Java
- * `Map` and any side-effects of using it via the Scala interface will
- * be visible via the Java interface and vice versa.
- *
- * If the Java `Map` was previously obtained from an implicit or explicit
- * call of `asMap(scala.collection.mutable.Map)` then the original
- * Scala `Map` will be returned.
- *
- * @param m The `Map` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `Map` view of the argument.
- */
- implicit def mapAsScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] =
- new AsScala(mapAsScalaMap(m))
-
@deprecated("Use mapAsScalaMapConverter instead", "2.9.0")
def asScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] = mapAsScalaMapConverter(m)
- /**
- * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap`
- * to a Scala mutable `ConcurrentMap`. The returned Scala `ConcurrentMap` is
- * backed by the provided Java `ConcurrentMap` and any side-effects of using
- * it via the Scala interface will be visible via the Java interface and
- * vice versa.
- *
- * If the Java `ConcurrentMap` was previously obtained from an implicit or
- * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
- * then the original Scala `ConcurrentMap` will be returned.
- *
- * @param m The `ConcurrentMap` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `ConcurrentMap` view of the argument.
- */
- implicit def asScalaConcurrentMapConverter[A, B](m: juc.ConcurrentMap[A, B]): AsScala[mutable.ConcurrentMap[A, B]] =
- new AsScala(asScalaConcurrentMap(m))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java `Dictionary`
- * to a Scala mutable `Map[String, String]`. The returned Scala
- * `Map[String, String]` is backed by the provided Java `Dictionary` and
- * any side-effects of using it via the Scala interface will be visible via
- * the Java interface and vice versa.
- *
- * @param m The `Dictionary` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `Map[String, String]` view of the argument.
- */
- implicit def dictionaryAsScalaMapConverter[A, B](p: ju.Dictionary[A, B]): AsScala[mutable.Map[A, B]] =
- new AsScala(dictionaryAsScalaMap(p))
-
- /**
- * Adds an `asScala` method that implicitly converts a Java `Properties`
- * to a Scala mutable `Map[String, String]`. The returned Scala
- * `Map[String, String]` is backed by the provided Java `Properties` and
- * any side-effects of using it via the Scala interface will be visible via
- * the Java interface and vice versa.
- *
- * @param m The `Properties` to be converted.
- * @return An object with an `asScala` method that returns a Scala mutable
- * `Map[String, String]` view of the argument.
- */
- implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] =
- new AsScala(propertiesAsScalaMap(p))
-
@deprecated("Use propertiesAsScalaMapConverter instead", "2.9.0")
- def asScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] =
- propertiesAsScalaMapConverter(p)
-
-}
-
-object JavaConverters extends JavaConverters \ No newline at end of file
+ def asScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = propertiesAsScalaMapConverter(p)
+} \ No newline at end of file
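
After the refactor, the `JavaConverters` object keeps the decorator style documented in `DecorateAsJava.scala` below; a minimal usage sketch, mirroring the example given in that file:

    import scala.collection.JavaConverters._

    val sl = new scala.collection.mutable.ListBuffer[Int]
    val jl: java.util.List[Int] = sl.asJava                    // explicit, decorator-based conversion
    val sl2: scala.collection.mutable.Buffer[Int] = jl.asScala
    assert(sl eq sl2)                                          // converting back returns the original
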
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 07116e99dd..8f88e62791 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -91,14 +91,18 @@ self =>
* @param kv the key/value pair
* @tparam B1 the type of the value in the key/value pair.
* @return a new map with the new binding added to this map
+ *
* @usecase def + (kv: (A, B)): Map[A, B]
+ * @inheritdoc
*/
def + [B1 >: B] (kv: (A, B1)): Map[A, B1]
/** Removes a key from this map, returning a new map.
* @param key the key to be removed
* @return a new map without a binding for `key`
+ *
* @usecase def - (key: A): Map[A, B]
+ * @inheritdoc
*/
def - (key: A): This
@@ -115,7 +119,9 @@ self =>
* @tparam B1 the result type of the default computation.
* @return the value associated with `key` if it exists,
* otherwise the result of the `default` computation.
+ *
* @usecase def getOrElse(key: A, default: => B): B
+ * @inheritdoc
*/
def getOrElse[B1 >: B](key: A, default: => B1): B1 = get(key) match {
case Some(v) => v
@@ -255,7 +261,9 @@ self =>
* @param value the value
* @tparam B1 the type of the added value
* @return A new map with the new key/value mapping added to this map.
+ *
* @usecase def updated(key: A, value: B): Map[A, B]
+ * @inheritdoc
*/
def updated [B1 >: B](key: A, value: B1): Map[A, B1] = this + ((key, value))
@@ -269,8 +277,10 @@ self =>
* @param kvs the remaining key/value pairs
* @tparam B1 the type of the added values
* @return a new map with the given bindings added to this map
+ *
* @usecase def + (kvs: (A, B)*): Map[A, B]
- * @param the key/value pairs
+ * @inheritdoc
+ * @param the key/value pairs
*/
def + [B1 >: B] (kv1: (A, B1), kv2: (A, B1), kvs: (A, B1) *): Map[A, B1] =
this + kv1 + kv2 ++ kvs
@@ -280,7 +290,9 @@ self =>
* @param kvs the collection containing the added key/value pairs
* @tparam B1 the type of the added values
* @return a new map with the given bindings added to this map
+ *
* @usecase def ++ (xs: Traversable[(A, B)]): Map[A, B]
+ * @inheritdoc
*/
def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] =
((repr: Map[A, B1]) /: xs.seq) (_ + _)
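
The `@usecase` signatures annotated with `@inheritdoc` above are the simplified forms shown to users; in terms of those signatures (values are illustrative only):

    val m  = Map("a" -> 1)
    val m2 = m + ("b" -> 2)        // def + (kv: (A, B)): Map[A, B]
    val v  = m2.getOrElse("c", 0)  // def getOrElse(key: A, default: => B): B, yields 0 here
    val m3 = m2.updated("a", 10)   // def updated(key: A, value: B): Map[A, B]
    val m4 = m3 - "b"              // def - (key: A): Map[A, B]
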
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index 526ea7e240..fd1d42d7e9 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -410,13 +410,6 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
/** Produces a new sequence which contains all elements of this $coll and also all elements of
* a given sequence. `xs union ys` is equivalent to `xs ++ ys`.
- * $willNotTerminateInf
- *
- * Another way to express this
- * is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`.
- * `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets.
- *
- * $willNotTerminateInf
*
* @param that the sequence to add.
* @tparam B the element type of the returned $coll.
@@ -425,14 +418,21 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
* @return a new collection of type `That` which contains all elements of this $coll
* followed by all elements of `that`.
* @usecase def union(that: Seq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * followed by all elements of `that`.
+ * @inheritdoc
+ *
+ * Another way to express this
+ * is that `xs union ys` computes the order-preserving multi-set union of `xs` and `ys`.
+ * `union` is hence a counter-part of `diff` and `intersect` which also work on multi-sets.
+ *
+ * $willNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * followed by all elements of `that`.
*/
override def union[B >: A, That](that: GenSeq[B])(implicit bf: CanBuildFrom[Repr, B, That]): That =
this ++ that
/** Computes the multiset difference between this $coll and another sequence.
- * $willNotTerminateInf
*
* @param that the sequence of elements to remove
* @tparam B the element type of the returned $coll.
@@ -444,11 +444,15 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
* part of the result, but any following occurrences will.
* @usecase def diff(that: Seq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * except some of occurrences of elements that also appear in `that`.
- * If an element value `x` appears
- * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
- * part of the result, but any following occurrences will.
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * except some of occurrences of elements that also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will not form
+ * part of the result, but any following occurrences will.
*/
def diff[B >: A](that: GenSeq[B]): Repr = {
val occ = occCounts(that.seq)
@@ -463,7 +467,6 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def diff[B >: A](that: Seq[B]): Repr = diff(that: GenSeq[B])
/** Computes the multiset intersection between this $coll and another sequence.
- * $mayNotTerminateInf
*
* @param that the sequence of elements to intersect with.
* @tparam B the element type of the returned $coll.
@@ -475,11 +478,15 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
* in the result, but any following occurrences will be omitted.
* @usecase def intersect(that: Seq[A]): $Coll[A]
- * @return a new $coll which contains all elements of this $coll
- * which also appear in `that`.
- * If an element value `x` appears
- * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
- * in the result, but any following occurrences will be omitted.
+ * @inheritdoc
+ *
+ * $mayNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
*/
def intersect[B >: A](that: GenSeq[B]): Repr = {
val occ = occCounts(that.seq)
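
The multiset semantics restated under the `@usecase` tags above can be illustrated with small sequences (element values are illustrative only):

    Seq(1, 1, 2, 3) diff Seq(1)         // Seq(1, 2, 3): only the first occurrence of 1 is removed
    Seq(1, 1, 2) intersect Seq(1, 2, 2) // Seq(1, 2): occurrences are kept up to their count in `that`
    Seq(1, 2) union Seq(2, 3)           // Seq(1, 2, 2, 3): equivalent to Seq(1, 2) ++ Seq(2, 3)
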
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 0da1cfb913..1f5beb5109 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -103,10 +103,6 @@ trait TraversableLike[+A, +Repr] extends Any
/** Applies a function `f` to all elements of this $coll.
*
- * Note: this method underlies the implementation of most other bulk operations.
- * It's important to implement this method in an efficient way.
- *
- *
* @param f the function that is applied for its side-effect to every element.
* The result of function `f` is discarded.
*
@@ -115,6 +111,11 @@ trait TraversableLike[+A, +Repr] extends Any
* but this is not necessary.
*
* @usecase def foreach(f: A => Unit): Unit
+ * @inheritdoc
+ *
+ * Note: this method underlies the implementation of most other bulk operations.
+ * It's important to implement this method in an efficient way.
+ *
*/
def foreach[U](f: A => U): Unit
@@ -165,17 +166,6 @@ trait TraversableLike[+A, +Repr] extends Any
* the resulting collection rather than the left one.
* Mnemonic: the COLon is on the side of the new COLlection type.
*
- * Example:
- * {{{
- * scala> val x = List(1)
- * x: List[Int] = List(1)
- *
- * scala> val y = LinkedList(2)
- * y: scala.collection.mutable.LinkedList[Int] = LinkedList(2)
- *
- * scala> val z = x ++: y
- * z: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
- * }}}
* @param that the traversable to append.
* @tparam B the element type of the returned collection.
* @tparam That $thatinfo
@@ -184,9 +174,22 @@ trait TraversableLike[+A, +Repr] extends Any
* of this $coll followed by all elements of `that`.
*
* @usecase def ++:[B](that: TraversableOnce[B]): $Coll[B]
- *
- * @return a new $coll which contains all elements of this $coll
- * followed by all elements of `that`.
+ * @inheritdoc
+ *
+ * Example:
+ * {{{
+ * scala> val x = List(1)
+ * x: List[Int] = List(1)
+ *
+ * scala> val y = LinkedList(2)
+ * y: scala.collection.mutable.LinkedList[Int] = LinkedList(2)
+ *
+ * scala> val z = x ++: y
+ * z: scala.collection.mutable.LinkedList[Int] = LinkedList(1, 2)
+ * }}}
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * followed by all elements of `that`.
*/
def ++:[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
@@ -284,11 +287,12 @@ trait TraversableLike[+A, +Repr] extends Any
* The order of the elements is preserved.
*
* @usecase def filterMap[B](f: A => Option[B]): $Coll[B]
+ * @inheritdoc
*
- * @param pf the partial function which filters and maps the $coll.
- * @return a new $coll resulting from applying the given option-valued function
- * `f` to each element and collecting all defined results.
- * The order of the elements is preserved.
+ * @param pf the partial function which filters and maps the $coll.
+ * @return a new $coll resulting from applying the given option-valued function
+ * `f` to each element and collecting all defined results.
+ * The order of the elements is preserved.
def filterMap[B, That](f: A => Option[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
for (x <- this)
@@ -588,8 +592,6 @@ trait TraversableLike[+A, +Repr] extends Any
* Copying will stop once either the end of the current $coll is reached,
* or the end of the array is reached, or `len` elements have been copied.
*
- * $willNotTerminateInf
- *
* @param xs the array to fill.
* @param start the starting index.
* @param len the maximal number of elements to copy.
@@ -597,6 +599,9 @@ trait TraversableLike[+A, +Repr] extends Any
*
*
* @usecase def copyToArray(xs: Array[A], start: Int, len: Int): Unit
+ * @inheritdoc
+ *
+ * $willNotTerminateInf
*/
def copyToArray[B >: A](xs: Array[B], start: Int, len: Int) {
var i = start
@@ -694,10 +699,11 @@ trait TraversableLike[+A, +Repr] extends Any
* that satisfies predicate `p` and collecting the results.
*
* @usecase def map[B](f: A => B): $Coll[B]
+ * @inheritdoc
*
- * @return a new $coll resulting from applying the given function
- * `f` to each element of the outer $coll that satisfies
- * predicate `p` and collecting the results.
+ * @return a new $coll resulting from applying the given function
+ * `f` to each element of the outer $coll that satisfies
+ * predicate `p` and collecting the results.
*/
def map[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
@@ -710,9 +716,6 @@ trait TraversableLike[+A, +Repr] extends Any
* outer $coll containing this `WithFilter` instance that satisfy
* predicate `p` and concatenating the results.
*
- * The type of the resulting collection will be guided by the static type
- * of the outer $coll.
- *
* @param f the function to apply to each element.
* @tparam B the element type of the returned collection.
* @tparam That $thatinfo
@@ -723,11 +726,15 @@ trait TraversableLike[+A, +Repr] extends Any
* concatenating the results.
*
* @usecase def flatMap[B](f: A => TraversableOnce[B]): $Coll[B]
+ * @inheritdoc
+ *
+ * The type of the resulting collection will be guided by the static type
+ * of the outer $coll.
*
- * @return a new $coll resulting from applying the given
- * collection-valued function `f` to each element of the
- * outer $coll that satisfies predicate `p` and concatenating
- * the results.
+ * @return a new $coll resulting from applying the given
+ * collection-valued function `f` to each element of the
+ * outer $coll that satisfies predicate `p` and concatenating
+ * the results.
*/
def flatMap[B, That](f: A => GenTraversableOnce[B])(implicit bf: CanBuildFrom[Repr, B, That]): That = {
val b = bf(repr)
@@ -747,6 +754,7 @@ trait TraversableLike[+A, +Repr] extends Any
* but this is not necessary.
*
* @usecase def foreach(f: A => Unit): Unit
+ * @inheritdoc
*/
def foreach[U](f: A => U): Unit =
for (x <- self)
diff --git a/src/library/scala/collection/convert/DecorateAsJava.scala b/src/library/scala/collection/convert/DecorateAsJava.scala
new file mode 100644
index 0000000000..76837d937c
--- /dev/null
+++ b/src/library/scala/collection/convert/DecorateAsJava.scala
@@ -0,0 +1,296 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import Decorators._
+import WrapAsJava._
+
+/** A collection of decorators that allow converting between
+ * Scala and Java collections using the `asScala` and `asJava` methods.
+ *
+ * The following conversions are supported via `asJava` and `asScala`:
+ *
+ * - `scala.collection.Iterable` <=> `java.lang.Iterable`
+ * - `scala.collection.Iterator` <=> `java.util.Iterator`
+ * - `scala.collection.mutable.Buffer` <=> `java.util.List`
+ * - `scala.collection.mutable.Set` <=> `java.util.Set`
+ * - `scala.collection.mutable.Map` <=> `java.util.Map`
+ * - `scala.collection.mutable.ConcurrentMap` <=> `java.util.concurrent.ConcurrentMap`
+ *
+ * In all cases, converting from a source type to a target type and back
+ * again will return the original source object, e.g.
+ * {{{
+ * import scala.collection.JavaConverters._
+ *
+ * val sl = new scala.collection.mutable.ListBuffer[Int]
+ * val jl : java.util.List[Int] = sl.asJava
+ * val sl2 : scala.collection.mutable.Buffer[Int] = jl.asScala
+ * assert(sl eq sl2)
+ * }}}
+ * The following conversions are also supported, but the
+ * Scala-to-Java direction is handled by a more specifically named method:
+ * `asJavaCollection`, `asJavaEnumeration`, `asJavaDictionary`.
+ *
+ * - `scala.collection.Iterable` <=> `java.util.Collection`
+ * - `scala.collection.Iterator` <=> `java.util.Enumeration`
+ * - `scala.collection.mutable.Map` <=> `java.util.Dictionary`
+ *
+ * In addition, the following one way conversions are provided via `asJava`:
+ *
+ * - `scala.collection.Seq` => `java.util.List`
+ * - `scala.collection.mutable.Seq` => `java.util.List`
+ * - `scala.collection.Set` => `java.util.Set`
+ * - `scala.collection.Map` => `java.util.Map`
+ *
+ * @author Martin Odersky
+ * @since 2.8.1
+ */
+
+trait DecorateAsJava {
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a
+ * Java `Iterator`. The returned Java `Iterator` is backed by the provided Scala
+ * `Iterator` and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Iterator` was previously obtained from an implicit or explicit
+ * call of `asIterator(java.util.Iterator)` then the original Java `Iterator`
+ * will be returned by the `asJava` method.
+ *
+ * @param i The `Iterator` to be converted.
+ * @return An object with an `asJava` method that returns a Java `Iterator` view of the argument.
+ */
+ implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] =
+ new AsJava(asJavaIterator(i))
+
+ /**
+ * Adds an `asJavaEnumeration` method that implicitly converts a Scala
+ * `Iterator` to a Java `Enumeration`. The returned Java `Enumeration` is
+ * backed by the provided Scala `Iterator` and any side-effects of using
+ * it via the Java interface will be visible via the Scala interface and
+ * vice versa.
+ *
+ * If the Scala `Iterator` was previously obtained from an implicit or
+ * explicit call of `asIterator(java.util.Enumeration)` then the
+ * original Java `Enumeration` will be returned.
+ *
+ * @param i The `Iterator` to be converted.
+ * @return An object with an `asJavaEnumeration` method that returns a Java
+ * `Enumeration` view of the argument.
+ */
+ implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] =
+ new AsJavaEnumeration(i)
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Iterable` to
+ * a Java `Iterable`.
+ *
+ * The returned Java `Iterable` is backed by the provided Scala `Iterable`
+ * and any side-effects of using it via the Java interface will be visible
+ * via the Scala interface and vice versa.
+ *
+ * If the Scala `Iterable` was previously obtained from an implicit or
+ * explicit call of `asIterable(java.lang.Iterable)` then the original
+ * Java `Iterable` will be returned.
+ *
+ * @param i The `Iterable` to be converted.
+ * @return An object with an `asJava` method that returns a Java
+ * `Iterable` view of the argument.
+ */
+ implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] =
+ new AsJava(asJavaIterable(i))
+
+ /**
+ * Adds an `asJavaCollection` method that implicitly converts a Scala
+ * `Iterable` to an immutable Java `Collection`.
+ *
+ * If the Scala `Iterable` was previously obtained from an implicit or
+ * explicit call of `collectionAsScalaIterable(java.util.Collection)` then the
+ * original Java `Collection` will be returned.
+ *
+ * @param i The `Iterable` to be converted.
+ * @return An object with an `asJavaCollection` method that returns a Java
+ * `Collection` view of the argument.
+ */
+ implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] =
+ new AsJavaCollection(i)
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer`
+ * to a Java `List`.
+ *
+ * The returned Java `List` is backed by the provided Scala `Buffer` and any
+ * side-effects of using it via the Java interface will be visible via the
+ * Scala interface and vice versa.
+ *
+ * If the Scala `Buffer` was previously obtained from an implicit or explicit
+ * call of `asBuffer(java.util.List)` then the original Java `List` will be
+ * returned.
+ *
+ * @param b The `Buffer` to be converted.
+ * @return An object with an `asJava` method that returns a Java `List` view
+ * of the argument.
+ */
+ implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] =
+ new AsJava(bufferAsJavaList(b))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Seq`
+ * to a Java `List`.
+ *
+ * The returned Java `List` is backed by the provided Scala `Seq` and any
+ * side-effects of using it via the Java interface will be visible via the
+ * Scala interface and vice versa.
+ *
+ * If the Scala `Seq` was previously obtained from an implicit or explicit
+ * call of `asSeq(java.util.List)` then the original Java `List` will be
+ * returned.
+ *
+ * @param b The `Seq` to be converted.
+ * @return An object with an `asJava` method that returns a Java `List`
+ * view of the argument.
+ */
+ implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] =
+ new AsJava(mutableSeqAsJavaList(b))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Seq` to a
+ * Java `List`.
+ *
+ * The returned Java `List` is backed by the provided Scala `Seq` and any
+ * side-effects of using it via the Java interface will be visible via the
+ * Scala interface and vice versa.
+ *
+ * If the Scala `Seq` was previously obtained from an implicit or explicit
+ * call of `asSeq(java.util.List)` then the original Java `List` will be
+ * returned.
+ *
+ * @param b The `Seq` to be converted.
+ * @return An object with an `asJava` method that returns a Java `List`
+ * view of the argument.
+ */
+ implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] =
+ new AsJava(seqAsJavaList(b))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Set`
+ * to a Java `Set`.
+ *
+ * The returned Java `Set` is backed by the provided Scala `Set` and any
+ * side-effects of using it via the Java interface will be visible via
+ * the Scala interface and vice versa.
+ *
+ * If the Scala `Set` was previously obtained from an implicit or explicit
+ * call of `asSet(java.util.Set)` then the original Java `Set` will be
+ * returned.
+ *
+ * @param s The `Set` to be converted.
+ * @return An object with an `asJava` method that returns a Java `Set` view
+ * of the argument.
+ */
+ implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] =
+ new AsJava(mutableSetAsJavaSet(s))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Set` to a
+ * Java `Set`.
+ *
+ * The returned Java `Set` is backed by the provided Scala `Set` and any
+ * side-effects of using it via the Java interface will be visible via
+ * the Scala interface and vice versa.
+ *
+ * If the Scala `Set` was previously obtained from an implicit or explicit
+ * call of `asSet(java.util.Set)` then the original Java `Set` will be
+ * returned.
+ *
+ * @param s The `Set` to be converted.
+ * @return An object with an `asJava` method that returns a Java `Set` view
+ * of the argument.
+ */
+ implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] =
+ new AsJava(setAsJavaSet(s))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable `Map`
+ * to a Java `Map`.
+ *
+ * The returned Java `Map` is backed by the provided Scala `Map` and any
+ * side-effects of using it via the Java interface will be visible via the
+ * Scala interface and vice versa.
+ *
+ * If the Scala `Map` was previously obtained from an implicit or explicit
+ * call of `asMap(java.util.Map)` then the original Java `Map` will be
+ * returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return An object with an `asJava` method that returns a Java `Map` view
+ * of the argument.
+ */
+ implicit def mutableMapAsJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] =
+ new AsJava(mutableMapAsJavaMap(m))
+
+ /**
+ * Adds an `asJavaDictionary` method that implicitly converts a Scala
+ * mutable `Map` to a Java `Dictionary`.
+ *
+ * The returned Java `Dictionary` is backed by the provided Scala
+ * `Map` and any side-effects of using it via the Java interface
+ * will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Map` was previously obtained from an implicit or
+ * explicit call of `dictionaryAsScalaMap(java.util.Dictionary)` then the original
+ * Java `Dictionary` will be returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return An object with an `asJavaDictionary` method that returns a
+ * Java `Dictionary` view of the argument.
+ */
+ implicit def asJavaDictionaryConverter[A, B](m : mutable.Map[A, B]): AsJavaDictionary[A, B] =
+ new AsJavaDictionary(m)
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala `Map` to
+ * a Java `Map`.
+ *
+ * The returned Java `Map` is backed by the provided Scala `Map` and any
+ * side-effects of using it via the Java interface will be visible via
+ * the Scala interface and vice versa.
+ *
+ * If the Scala `Map` was previously obtained from an implicit or explicit
+ * call of `asMap(java.util.Map)` then the original Java `Map` will be
+ * returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return An object with an `asJava` method that returns a Java `Map` view
+ * of the argument.
+ */
+ implicit def mapAsJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] =
+ new AsJava(mapAsJavaMap(m))
+
+ /**
+ * Adds an `asJava` method that implicitly converts a Scala mutable
+ * `ConcurrentMap` to a Java `ConcurrentMap`.
+ *
+ * The returned Java `ConcurrentMap` is backed by the provided Scala
+ * `ConcurrentMap` and any side-effects of using it via the Java interface
+ * will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `ConcurrentMap` was previously obtained from an implicit or
+ * explicit call of `asConcurrentMap(java.util.concurrent.ConcurrentMap)`
+ * then the original Java `ConcurrentMap` will be returned.
+ *
+ * @param m The `ConcurrentMap` to be converted.
+ * @return An object with an `asJava` method that returns a Java
+ * `ConcurrentMap` view of the argument.
+ */
+ implicit def asJavaConcurrentMapConverter[A, B](m: mutable.ConcurrentMap[A, B]): AsJava[juc.ConcurrentMap[A, B]] =
+ new AsJava(asJavaConcurrentMap(m))
+}
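A usage sketch of these decorators (assuming the standard `JavaConverters` object, which mixes in `DecorateAsJava` and `DecorateAsScala`):

{{{
import scala.collection.JavaConverters._
import scala.collection.mutable

object AsJavaDemo extends App {
  val buf = mutable.Buffer(1, 2, 3)
  val jlist: java.util.List[Int] = buf.asJava  // explicit, decorator-based conversion
  jlist.add(4)                                 // writes through to the Scala buffer
  println(buf)                                 // Buffer(1, 2, 3, 4)
  assert(jlist.asScala eq buf)                 // converting back returns the original
}
}}}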
diff --git a/src/library/scala/collection/convert/DecorateAsScala.scala b/src/library/scala/collection/convert/DecorateAsScala.scala
new file mode 100644
index 0000000000..bb14228e67
--- /dev/null
+++ b/src/library/scala/collection/convert/DecorateAsScala.scala
@@ -0,0 +1,189 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import Decorators._
+import WrapAsScala._
+
+trait DecorateAsScala {
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Iterator` to
+ * a Scala `Iterator`.
+ *
+ * The returned Scala `Iterator` is backed by the provided Java `Iterator`
+ * and any side-effects of using it via the Scala interface will be visible
+ * via the Java interface and vice versa.
+ *
+ * If the Java `Iterator` was previously obtained from an implicit or
+ * explicit call of `asIterator(scala.collection.Iterator)` then the
+ * original Scala `Iterator` will be returned.
+ *
+ * @param i The `Iterator` to be converted.
+ * @return An object with an `asScala` method that returns a Scala
+ * `Iterator` view of the argument.
+ */
+ implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] =
+ new AsScala(asScalaIterator(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Enumeration`
+ * to a Scala `Iterator`.
+ *
+ * The returned Scala `Iterator` is backed by the provided Java
+ * `Enumeration` and any side-effects of using it via the Scala interface
+ * will be visible via the Java interface and vice versa.
+ *
+ * If the Java `Enumeration` was previously obtained from an implicit or
+ * explicit call of `asEnumeration(scala.collection.Iterator)` then the
+ * original Scala `Iterator` will be returned.
+ *
+ * @param i The `Enumeration` to be converted.
+ * @return An object with an `asScala` method that returns a Scala
+ * `Iterator` view of the argument.
+ */
+ implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] =
+ new AsScala(enumerationAsScalaIterator(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Iterable` to
+ * a Scala `Iterable`.
+ *
+ * The returned Scala `Iterable` is backed by the provided Java `Iterable`
+ * and any side-effects of using it via the Scala interface will be visible
+ * via the Java interface and vice versa.
+ *
+ * If the Java `Iterable` was previously obtained from an implicit or
+ * explicit call of `asIterable(scala.collection.Iterable)` then the original
+ * Scala `Iterable` will be returned.
+ *
+ * @param i The `Iterable` to be converted.
+ * @return An object with an `asScala` method that returns a Scala `Iterable`
+ * view of the argument.
+ */
+ implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] =
+ new AsScala(iterableAsScalaIterable(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Collection` to
+ * a Scala `Iterable`.
+ *
+ * If the Java `Collection` was previously obtained from an implicit or
+ * explicit call of `asJavaCollection(scala.collection.Iterable)` then
+ * the original Scala `Iterable` will be returned.
+ *
+ * @param i The `Collection` to be converted.
+ * @return An object with an `asScala` method that returns a Scala
+ * `Iterable` view of the argument.
+ */
+ implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] =
+ new AsScala(collectionAsScalaIterable(i))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `List` to a
+ * Scala mutable `Buffer`.
+ *
+ * The returned Scala `Buffer` is backed by the provided Java `List` and
+ * any side-effects of using it via the Scala interface will be visible via
+ * the Java interface and vice versa.
+ *
+ * If the Java `List` was previously obtained from an implicit or explicit
+ * call of `asList(scala.collection.mutable.Buffer)` then the original
+ * Scala `Buffer` will be returned.
+ *
+ * @param l The `List` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `Buffer` view of the argument.
+ */
+ implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] =
+ new AsScala(asScalaBuffer(l))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Set` to a
+ * Scala mutable `Set`.
+ *
+ * The returned Scala `Set` is backed by the provided Java `Set` and any
+ * side-effects of using it via the Scala interface will be visible via
+ * the Java interface and vice versa.
+ *
+ * If the Java `Set` was previously obtained from an implicit or explicit
+ * call of `asSet(scala.collection.mutable.Set)` then the original
+ * Scala `Set` will be returned.
+ *
+ * @param s The `Set` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `Set` view of the argument.
+ */
+ implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] =
+ new AsScala(asScalaSet(s))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala
+ * mutable `Map`. The returned Scala `Map` is backed by the provided Java
+ * `Map` and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java `Map` was previously obtained from an implicit or explicit
+ * call of `asMap(scala.collection.mutable.Map)` then the original
+ * Scala `Map` will be returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `Map` view of the argument.
+ */
+ implicit def mapAsScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] =
+ new AsScala(mapAsScalaMap(m))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap`
+ * to a Scala mutable `ConcurrentMap`. The returned Scala `ConcurrentMap` is
+ * backed by the provided Java `ConcurrentMap` and any side-effects of using
+ * it via the Scala interface will be visible via the Java interface and
+ * vice versa.
+ *
+ * If the Java `ConcurrentMap` was previously obtained from an implicit or
+ * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
+ * then the original Scala `ConcurrentMap` will be returned.
+ *
+ * @param m The `ConcurrentMap` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `ConcurrentMap` view of the argument.
+ */
+ implicit def asScalaConcurrentMapConverter[A, B](m: juc.ConcurrentMap[A, B]): AsScala[mutable.ConcurrentMap[A, B]] =
+ new AsScala(asScalaConcurrentMap(m))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Dictionary`
+ * to a Scala mutable `Map[String, String]`. The returned Scala
+ * `Map[String, String]` is backed by the provided Java `Dictionary` and
+ * any side-effects of using it via the Scala interface will be visible via
+ * the Java interface and vice versa.
+ *
+ * @param p The `Dictionary` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `Map[String, String]` view of the argument.
+ */
+ implicit def dictionaryAsScalaMapConverter[A, B](p: ju.Dictionary[A, B]): AsScala[mutable.Map[A, B]] =
+ new AsScala(dictionaryAsScalaMap(p))
+
+ /**
+ * Adds an `asScala` method that implicitly converts a Java `Properties`
+ * to a Scala mutable `Map[String, String]`. The returned Scala
+ * `Map[String, String]` is backed by the provided Java `Properties` and
+ * any side-effects of using it via the Scala interface will be visible via
+ * the Java interface and vice versa.
+ *
+ * @param p The `Properties` to be converted.
+ * @return An object with an `asScala` method that returns a Scala mutable
+ * `Map[String, String]` view of the argument.
+ */
+ implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] =
+ new AsScala(propertiesAsScalaMap(p))
+}
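A corresponding sketch for the `asScala` direction, using `java.util.Properties` as the source (again via the standard `JavaConverters` object):

{{{
import scala.collection.JavaConverters._

object AsScalaDemo extends App {
  val props = new java.util.Properties
  props.setProperty("lang", "scala")

  // propertiesAsScalaMapConverter yields a mutable Map[String, String] view
  val m = props.asScala
  m("version") = "2.10"                    // writes through to props
  println(props.getProperty("version"))    // 2.10
}
}}}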
diff --git a/src/library/scala/collection/convert/Decorators.scala b/src/library/scala/collection/convert/Decorators.scala
new file mode 100644
index 0000000000..3bdd9a0f1c
--- /dev/null
+++ b/src/library/scala/collection/convert/Decorators.scala
@@ -0,0 +1,46 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+
+private[collection] trait Decorators {
+ /** Generic class containing the `asJava` converter method */
+ class AsJava[A](op: => A) {
+ /** Converts a Scala collection to the corresponding Java collection */
+ def asJava: A = op
+ }
+
+ /** Generic class containing the `asScala` converter method */
+ class AsScala[A](op: => A) {
+ /** Converts a Java collection to the corresponding Scala collection */
+ def asScala: A = op
+ }
+
+ /** Generic class containing the `asJavaCollection` converter method */
+ class AsJavaCollection[A](i: Iterable[A]) {
+ /** Converts a Scala `Iterable` to a Java `Collection` */
+ def asJavaCollection: ju.Collection[A] = JavaConversions.asJavaCollection(i)
+ }
+
+ /** Generic class containing the `asJavaEnumeration` converter method */
+ class AsJavaEnumeration[A](i: Iterator[A]) {
+ /** Converts a Scala `Iterator` to a Java `Enumeration` */
+ def asJavaEnumeration: ju.Enumeration[A] = JavaConversions.asJavaEnumeration(i)
+ }
+
+ /** Generic class containing the `asJavaDictionary` converter method */
+ class AsJavaDictionary[A, B](m : mutable.Map[A, B]) {
+ /** Converts a Scala `Map` to a Java `Dictionary` */
+ def asJavaDictionary: ju.Dictionary[A, B] = JavaConversions.asJavaDictionary(m)
+ }
+}
+
+private[collection] object Decorators extends Decorators
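The converted value is passed by name (`op: => A`), so the wrapper is only built when `asJava` or `asScala` is actually invoked. A self-contained sketch of the same decorator pattern with hypothetical names:

{{{
object DecoratorSketch extends App {
  // Holder with a single conversion method; op is by-name, evaluated on demand.
  class AsUpper(op: => String) {
    def asUpper: String = op
  }
  implicit def stringAsUpper(s: String): AsUpper = new AsUpper(s.toUpperCase)

  println("scala".asUpper)   // SCALA
}
}}}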
diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala
new file mode 100644
index 0000000000..6274518d1a
--- /dev/null
+++ b/src/library/scala/collection/convert/WrapAsJava.scala
@@ -0,0 +1,256 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import Wrappers._
+
+trait WrapAsJava {
+ /**
+ * Implicitly converts a Scala Iterator to a Java Iterator.
+ * The returned Java Iterator is backed by the provided Scala
+ * Iterator and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Iterator was previously obtained from an implicit or
+ * explicit call of `asIterator(java.util.Iterator)` then the original
+ * Java Iterator will be returned.
+ *
+ * @param it The Iterator to be converted.
+ * @return A Java Iterator view of the argument.
+ */
+ implicit def asJavaIterator[A](it: Iterator[A]): ju.Iterator[A] = it match {
+ case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]]
+ case _ => IteratorWrapper(it)
+ }
+
+ /**
+ * Implicitly converts a Scala Iterator to a Java Enumeration.
+ * The returned Java Enumeration is backed by the provided Scala
+ * Iterator and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Iterator was previously obtained from an implicit or
+ * explicit call of `asIterator(java.util.Enumeration)` then the
+ * original Java Enumeration will be returned.
+ *
+ * @param it The Iterator to be converted.
+ * @return A Java Enumeration view of the argument.
+ */
+ implicit def asJavaEnumeration[A](it: Iterator[A]): ju.Enumeration[A] = it match {
+ case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]]
+ case _ => IteratorWrapper(it)
+ }
+
+ /**
+ * Implicitly converts a Scala Iterable to a Java Iterable.
+ * The returned Java Iterable is backed by the provided Scala
+ * Iterable and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Iterable was previously obtained from an implicit or
+ * explicit call of `asIterable(java.lang.Iterable)` then the original
+ * Java Iterable will be returned.
+ *
+ * @param i The Iterable to be converted.
+ * @return A Java Iterable view of the argument.
+ */
+ implicit def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match {
+ case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]]
+ case _ => IterableWrapper(i)
+ }
+
+ /**
+ * Implicitly converts a Scala Iterable to an immutable Java
+ * Collection.
+ *
+ * If the Scala Iterable was previously obtained from an implicit or
+ * explicit call of `collectionAsScalaIterable(java.util.Collection)` then the original
+ * Java Collection will be returned.
+ *
+ * @param it The Iterable to be converted.
+ * @return A Java Collection view of the argument.
+ */
+ implicit def asJavaCollection[A](it: Iterable[A]): ju.Collection[A] = it match {
+ case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
+ case _ => new IterableWrapper(it)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable Buffer to a Java List.
+ * The returned Java List is backed by the provided Scala
+ * Buffer and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Buffer was previously obtained from an implicit or
+ * explicit call of `asBuffer(java.util.List)` then the original
+ * Java List will be returned.
+ *
+ * @param b The Buffer to be converted.
+ * @return A Java List view of the argument.
+ */
+ implicit def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match {
+ case JListWrapper(wrapped) => wrapped
+ case _ => new MutableBufferWrapper(b)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable Seq to a Java List.
+ * The returned Java List is backed by the provided Scala
+ * Seq and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Seq was previously obtained from an implicit or
+ * explicit call of `asSeq(java.util.List)` then the original
+ * Java List will be returned.
+ *
+ * @param seq The Seq to be converted.
+ * @return A Java List view of the argument.
+ */
+ implicit def mutableSeqAsJavaList[A](seq: mutable.Seq[A]): ju.List[A] = seq match {
+ case JListWrapper(wrapped) => wrapped
+ case _ => new MutableSeqWrapper(seq)
+ }
+
+ /**
+ * Implicitly converts a Scala Seq to a Java List.
+ * The returned Java List is backed by the provided Scala
+ * Seq and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Seq was previously obtained from an implicit or
+ * explicit call of `asSeq(java.util.List)` then the original
+ * Java List will be returned.
+ *
+ * @param seq The Seq to be converted.
+ * @return A Java List view of the argument.
+ */
+ implicit def seqAsJavaList[A](seq: Seq[A]): ju.List[A] = seq match {
+ case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
+ case _ => new SeqWrapper(seq)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable Set to a Java Set.
+ * The returned Java Set is backed by the provided Scala
+ * Set and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Set was previously obtained from an implicit or
+ * explicit call of `asSet(java.util.Set)` then the original
+ * Java Set will be returned.
+ *
+ * @param s The Set to be converted.
+ * @return A Java Set view of the argument.
+ */
+ implicit def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match {
+ case JSetWrapper(wrapped) => wrapped
+ case _ => new MutableSetWrapper(s)
+ }
+
+ /**
+ * Implicitly converts a Scala Set to a Java Set.
+ * The returned Java Set is backed by the provided Scala
+ * Set and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Set was previously obtained from an implicit or
+ * explicit call of `asSet(java.util.Set)` then the original
+ * Java Set will be returned.
+ *
+ * @param s The Set to be converted.
+ * @return A Java Set view of the argument.
+ */
+ implicit def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match {
+ case JSetWrapper(wrapped) => wrapped
+ case _ => new SetWrapper(s)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable Map to a Java Map.
+ * The returned Java Map is backed by the provided Scala
+ * Map and any side-effects of using it via the Java interface will
+ * be visible via the Scala interface and vice versa.
+ *
+ * If the Scala Map was previously obtained from an implicit or
+ * explicit call of `asMap(java.util.Map)` then the original
+ * Java Map will be returned.
+ *
+ * @param m The Map to be converted.
+ * @return A Java Map view of the argument.
+ */
+ implicit def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match {
+ //case JConcurrentMapWrapper(wrapped) => wrapped
+ case JMapWrapper(wrapped) => wrapped
+ case _ => new MutableMapWrapper(m)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable `Map` to a Java `Dictionary`.
+ *
+ * The returned Java `Dictionary` is backed by the provided Scala
+ * `Map` and any side-effects of using it via the Java interface
+ * will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `Map` was previously obtained from an implicit or
+ * explicit call of `dictionaryAsScalaMap(java.util.Dictionary)` then the original
+ * Java Dictionary will be returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return A Java `Dictionary` view of the argument.
+ */
+ implicit def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match {
+ //case JConcurrentMapWrapper(wrapped) => wrapped
+ case JDictionaryWrapper(wrapped) => wrapped
+ case _ => new DictionaryWrapper(m)
+ }
+
+ /**
+ * Implicitly converts a Scala `Map` to a Java `Map`.
+ *
+ * The returned Java `Map` is backed by the provided Scala `Map` and
+ * any side-effects of using it via the Java interface will be visible
+ * via the Scala interface and vice versa.
+ *
+ * If the Scala `Map` was previously obtained from an implicit or
+ * explicit call of `asMap(java.util.Map)` then the original
+ * Java `Map` will be returned.
+ *
+ * @param m The `Map` to be converted.
+ * @return A Java `Map` view of the argument.
+ */
+ implicit def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match {
+ //case JConcurrentMapWrapper(wrapped) => wrapped
+ case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]]
+ case _ => new MapWrapper(m)
+ }
+
+ /**
+ * Implicitly converts a Scala mutable `ConcurrentMap` to a Java
+ * `ConcurrentMap`.
+ *
+ * The returned Java `ConcurrentMap` is backed by the provided Scala
+ * `ConcurrentMap` and any side-effects of using it via the Java interface
+ * will be visible via the Scala interface and vice versa.
+ *
+ * If the Scala `ConcurrentMap` was previously obtained from an implicit or
+ * explicit call of `asConcurrentMap(java.util.concurrent.ConcurrentMap)`
+ * then the original Java ConcurrentMap will be returned.
+ *
+ * @param m The `ConcurrentMap` to be converted.
+ * @return A Java `ConcurrentMap` view of the argument.
+ */
+ implicit def asJavaConcurrentMap[A, B](m: mutable.ConcurrentMap[A, B]): juc.ConcurrentMap[A, B] = m match {
+ case JConcurrentMapWrapper(wrapped) => wrapped
+ case _ => new ConcurrentMapWrapper(m)
+ }
+}
+
+object WrapAsJava extends WrapAsJava { }
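A sketch of the implicit (wrap-as-Java) style; note how the pattern matches above unwrap an already-wrapped collection instead of nesting wrappers:

{{{
import scala.collection.convert.WrapAsJava._
import scala.collection.convert.WrapAsScala._
import scala.collection.mutable

object WrapAsJavaDemo extends App {
  val s = mutable.Set("a", "b")
  val js: java.util.Set[String] = s     // implicit mutableSetAsJavaSet
  js.add("c")                           // visible through the Scala view
  println(s)                            // contains a, b, c

  val back: mutable.Set[String] = js    // implicit asScalaSet unwraps the wrapper
  assert(back eq s)
}
}}}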
diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala
new file mode 100644
index 0000000000..02b58f55a4
--- /dev/null
+++ b/src/library/scala/collection/convert/WrapAsScala.scala
@@ -0,0 +1,193 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import Wrappers._
+
+trait WrapAsScala {
+ /**
+ * Implicitly converts a Java `Iterator` to a Scala `Iterator`.
+ *
+ * The returned Scala `Iterator` is backed by the provided Java `Iterator`
+ * and any side-effects of using it via the Scala interface will be visible
+ * via the Java interface and vice versa.
+ *
+ * If the Java `Iterator` was previously obtained from an implicit or
+ * explicit call of `asIterator(scala.collection.Iterator)` then the
+ * original Scala `Iterator` will be returned.
+ *
+ * @param it The `Iterator` to be converted.
+ * @return A Scala `Iterator` view of the argument.
+ */
+ implicit def asScalaIterator[A](it: ju.Iterator[A]): Iterator[A] = it match {
+ case IteratorWrapper(wrapped) => wrapped
+ case _ => JIteratorWrapper(it)
+ }
+
+ /**
+ * Implicitly converts a Java Enumeration to a Scala Iterator.
+ * The returned Scala Iterator is backed by the provided Java
+ * Enumeration and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java Enumeration was previously obtained from an implicit or
+ * explicit call of `asJavaEnumeration(scala.collection.Iterator)`
+ * then the original Scala Iterator will be returned.
+ *
+ * @param i The Enumeration to be converted.
+ * @return A Scala Iterator view of the argument.
+ */
+ implicit def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = i match {
+ case IteratorWrapper(wrapped) => wrapped
+ case _ => JEnumerationWrapper(i)
+ }
+
+ /**
+ * Implicitly converts a Java `Iterable` to a Scala `Iterable`.
+ *
+ * The returned Scala `Iterable` is backed by the provided Java `Iterable`
+ * and any side-effects of using it via the Scala interface will be visible
+ * via the Java interface and vice versa.
+ *
+ * If the Java `Iterable` was previously obtained from an implicit or
+ * explicit call of `asJavaIterable(scala.collection.Iterable)`
+ * then the original Scala Iterable will be returned.
+ *
+ * @param i The Iterable to be converted.
+ * @return A Scala Iterable view of the argument.
+ */
+ implicit def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = i match {
+ case IterableWrapper(wrapped) => wrapped
+ case _ => JIterableWrapper(i)
+ }
+
+ /**
+ * Implicitly converts a Java `Collection` to a Scala `Iterable`.
+ *
+ * If the Java `Collection` was previously obtained from an implicit or
+ * explicit call of `asJavaCollection(scala.collection.Iterable)`
+ * then the original Scala `Iterable` will be returned.
+ *
+ * @param i The Collection to be converted.
+ * @return A Scala Iterable view of the argument.
+ */
+ implicit def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = i match {
+ case IterableWrapper(wrapped) => wrapped
+ case _ => JCollectionWrapper(i)
+ }
+
+ /**
+ * Implicitly converts a Java `List` to a Scala mutable `Buffer`.
+ *
+ * The returned Scala `Buffer` is backed by the provided Java `List`
+ * and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java `List` was previously obtained from an implicit or
+ * explicit call of `bufferAsJavaList(scala.collection.mutable.Buffer)`
+ * then the original Scala `Buffer` will be returned.
+ *
+ * @param l The `List` to be converted.
+ * @return A Scala mutable `Buffer` view of the argument.
+ */
+ implicit def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = l match {
+ case MutableBufferWrapper(wrapped) => wrapped
+ case _ =>new JListWrapper(l)
+ }
+
+ /**
+ * Implicitly converts a Java Set to a Scala mutable Set.
+ * The returned Scala Set is backed by the provided Java
+ * Set and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java Set was previously obtained from an implicit or
+ * explicit call of `mutableSetAsJavaSet(scala.collection.mutable.Set)` then
+ * the original Scala Set will be returned.
+ *
+ * @param s The Set to be converted.
+ * @return A Scala mutable Set view of the argument.
+ */
+ implicit def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = s match {
+ case MutableSetWrapper(wrapped) => wrapped
+ case _ =>new JSetWrapper(s)
+ }
+
+ /**
+ * Implicitly converts a Java `Map` to a Scala mutable `Map`.
+ *
+ * The returned Scala `Map` is backed by the provided Java `Map` and any
+ * side-effects of using it via the Scala interface will be visible via
+ * the Java interface and vice versa.
+ *
+ * If the Java `Map` was previously obtained from an implicit or
+ * explicit call of `mutableMapAsJavaMap(scala.collection.mutable.Map)` then
+ * the original Scala Map will be returned.
+ *
+ * @param m The Map to be converted.
+ * @return A Scala mutable Map view of the argument.
+ */
+ implicit def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = m match {
+ //case ConcurrentMapWrapper(wrapped) => wrapped
+ case MutableMapWrapper(wrapped) => wrapped
+ case _ => new JMapWrapper(m)
+ }
+
+ /**
+ * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap.
+ * The returned Scala ConcurrentMap is backed by the provided Java
+ * ConcurrentMap and any side-effects of using it via the Scala interface will
+ * be visible via the Java interface and vice versa.
+ *
+ * If the Java ConcurrentMap was previously obtained from an implicit or
+ * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)`
+ * then the original Scala ConcurrentMap will be returned.
+ *
+ * @param m The ConcurrentMap to be converted.
+ * @return A Scala mutable ConcurrentMap view of the argument.
+ */
+ implicit def asScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] = m match {
+ case cmw: ConcurrentMapWrapper[a, b] => cmw.underlying
+ case _ => new JConcurrentMapWrapper(m)
+ }
+
+ /**
+ * Implicitly converts a Java `Dictionary` to a Scala mutable
+ * `Map[String, String]`.
+ *
+ * The returned Scala `Map[String, String]` is backed by the provided Java
+ * `Dictionary` and any side-effects of using it via the Scala interface
+ * will be visible via the Java interface and vice versa.
+ *
+ * @param p The Dictionary to be converted.
+ * @return A Scala mutable Map[String, String] view of the argument.
+ */
+ implicit def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match {
+ case DictionaryWrapper(wrapped) => wrapped
+ case _ => new JDictionaryWrapper(p)
+ }
+
+ /**
+ * Implicitly converts a Java `Properties` to a Scala mutable `Map[String, String]`.
+ *
+ * The returned Scala `Map[String, String]` is backed by the provided Java
+ * `Properties` and any side-effects of using it via the Scala interface
+ * will be visible via the Java interface and vice versa.
+ *
+ * @param p The Properties to be converted.
+ * @return A Scala mutable Map[String, String] view of the argument.
+ */
+ implicit def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match {
+ case _ => new JPropertiesWrapper(p)
+ }
+}
+
+object WrapAsScala extends WrapAsScala { }
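A sketch of the wrap-as-Scala direction, where a `java.util.List` can be used directly with Scala collection operations:

{{{
import scala.collection.convert.WrapAsScala._
import scala.collection.mutable

object WrapAsScalaDemo extends App {
  val jl = new java.util.ArrayList[Int]
  jl.add(1); jl.add(2); jl.add(3)

  val buf: mutable.Buffer[Int] = jl   // implicit asScalaBuffer
  buf += 4                            // writes through to the ArrayList
  println(jl)                         // [1, 2, 3, 4]
  println(buf.map(_ * 2))             // ArrayBuffer(2, 4, 6, 8)
}
}}}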
diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala
new file mode 100644
index 0000000000..8136e462cb
--- /dev/null
+++ b/src/library/scala/collection/convert/Wrappers.scala
@@ -0,0 +1,422 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+package convert
+
+import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
+import WrapAsScala._
+import WrapAsJava._
+
+/** Don't put the implementations in the same scope as the implicits
+ * which utilize them, or they will stow away into every scope which
+ * extends one of those implementations. See SI-5580.
+ */
+private[collection] trait Wrappers {
+ trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] {
+ val underlying: Iterable[A]
+ def size = underlying.size
+ override def iterator = IteratorWrapper(underlying.iterator)
+ override def isEmpty = underlying.isEmpty
+ }
+
+ case class IteratorWrapper[A](underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] {
+ def hasNext = underlying.hasNext
+ def next() = underlying.next
+ def hasMoreElements = underlying.hasNext
+ def nextElement() = underlying.next
+ def remove() = throw new UnsupportedOperationException
+ }
+
+ class ToIteratorWrapper[A](underlying : Iterator[A]) {
+ def asJava = new IteratorWrapper(underlying)
+ }
+
+ case class JIteratorWrapper[A](underlying: ju.Iterator[A]) extends AbstractIterator[A] with Iterator[A] {
+ def hasNext = underlying.hasNext
+ def next() = underlying.next
+ }
+
+ case class JEnumerationWrapper[A](underlying: ju.Enumeration[A]) extends AbstractIterator[A] with Iterator[A] {
+ def hasNext = underlying.hasMoreElements
+ def next() = underlying.nextElement
+ }
+
+ case class IterableWrapper[A](underlying: Iterable[A]) extends ju.AbstractCollection[A] with IterableWrapperTrait[A] { }
+
+ case class JIterableWrapper[A](underlying: jl.Iterable[A]) extends AbstractIterable[A] with Iterable[A] {
+ def iterator = underlying.iterator
+ def newBuilder[B] = new mutable.ArrayBuffer[B]
+ }
+
+ case class JCollectionWrapper[A](underlying: ju.Collection[A]) extends AbstractIterable[A] with Iterable[A] {
+ def iterator = underlying.iterator
+ override def size = underlying.size
+ override def isEmpty = underlying.isEmpty
+ def newBuilder[B] = new mutable.ArrayBuffer[B]
+ }
+
+ case class SeqWrapper[A](underlying: Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] {
+ def get(i: Int) = underlying(i)
+ }
+
+ case class MutableSeqWrapper[A](underlying: mutable.Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] {
+ def get(i: Int) = underlying(i)
+ override def set(i: Int, elem: A) = {
+ val p = underlying(i)
+ underlying(i) = elem
+ p
+ }
+ }
+
+ case class MutableBufferWrapper[A](underlying: mutable.Buffer[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] {
+ def get(i: Int) = underlying(i)
+ override def set(i: Int, elem: A) = { val p = underlying(i); underlying(i) = elem; p }
+ override def add(elem: A) = { underlying append elem; true }
+ override def remove(i: Int) = underlying remove i
+ }
+
+ case class JListWrapper[A](val underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] {
+ def length = underlying.size
+ override def isEmpty = underlying.isEmpty
+ override def iterator: Iterator[A] = underlying.iterator
+ def apply(i: Int) = underlying.get(i)
+ def update(i: Int, elem: A) = underlying.set(i, elem)
+ def +=:(elem: A) = { underlying.subList(0, 0) add elem; this }
+ def +=(elem: A): this.type = { underlying add elem; this }
+ def insertAll(i: Int, elems: Traversable[A]) = {
+ val ins = underlying.subList(0, i)
+ elems.seq.foreach(ins.add(_))
+ }
+ def remove(i: Int) = underlying.remove(i)
+ def clear() = underlying.clear()
+ def result = this
+ }
+
+ class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] {
+ self =>
+ def size = underlying.size
+ def iterator = new ju.Iterator[A] {
+ val ui = underlying.iterator
+ var prev: Option[A] = None
+ def hasNext = ui.hasNext
+ def next = { val e = ui.next; prev = Some(e); e }
+ def remove = prev match {
+ case Some(e) =>
+ underlying match {
+ case ms: mutable.Set[a] =>
+ ms remove e
+ prev = None
+ case _ =>
+ throw new UnsupportedOperationException("remove")
+ }
+ case _ =>
+ throw new IllegalStateException("next must be called at least once before remove")
+ }
+ }
+ }
+
+ case class MutableSetWrapper[A](underlying: mutable.Set[A]) extends SetWrapper[A](underlying) {
+ override def add(elem: A) = {
+ val sz = underlying.size
+ underlying += elem
+ sz < underlying.size
+ }
+ override def remove(elem: AnyRef) =
+ try underlying remove elem.asInstanceOf[A]
+ catch { case ex: ClassCastException => false }
+ override def clear() = underlying.clear()
+ }
+
+ case class JSetWrapper[A](underlying: ju.Set[A]) extends mutable.AbstractSet[A] with mutable.Set[A] with mutable.SetLike[A, JSetWrapper[A]] {
+
+ override def size = underlying.size
+
+ def iterator = underlying.iterator
+
+ def contains(elem: A): Boolean = underlying.contains(elem)
+
+ def +=(elem: A): this.type = { underlying add elem; this }
+ def -=(elem: A): this.type = { underlying remove elem; this }
+
+ override def add(elem: A): Boolean = underlying add elem
+ override def remove(elem: A): Boolean = underlying remove elem
+ override def clear() = underlying.clear()
+
+ override def empty = JSetWrapper(new ju.HashSet[A])
+ }
+
+ class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] { self =>
+ override def size = underlying.size
+
+ override def get(key: AnyRef): B = try {
+ underlying get key.asInstanceOf[A] match {
+ case None => null.asInstanceOf[B]
+ case Some(v) => v
+ }
+ } catch {
+ case ex: ClassCastException => null.asInstanceOf[B]
+ }
+
+ override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] {
+ def size = self.size
+
+ def iterator = new ju.Iterator[ju.Map.Entry[A, B]] {
+ val ui = underlying.iterator
+ var prev : Option[A] = None
+
+ def hasNext = ui.hasNext
+
+ def next() = {
+ val (k, v) = ui.next
+ prev = Some(k)
+ new ju.Map.Entry[A, B] {
+ def getKey = k
+ def getValue = v
+ def setValue(v1 : B) = self.put(k, v1)
+ override def hashCode = k.hashCode + v.hashCode
+ override def equals(other: Any) = other match {
+ case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue
+ case _ => false
+ }
+ }
+ }
+
+ def remove() {
+ prev match {
+ case Some(k) =>
+ underlying match {
+ case mm: mutable.Map[a, _] =>
+ mm remove k
+ prev = None
+ case _ =>
+ throw new UnsupportedOperationException("remove")
+ }
+ case _ =>
+ throw new IllegalStateException("next must be called at least once before remove")
+ }
+ }
+ }
+ }
+ }
+
+ case class MutableMapWrapper[A, B](underlying: mutable.Map[A, B]) extends MapWrapper[A, B](underlying) {
+ override def put(k: A, v: B) = underlying.put(k, v) match {
+ case Some(v1) => v1
+ case None => null.asInstanceOf[B]
+ }
+
+ override def remove(k: AnyRef): B = try {
+ underlying remove k.asInstanceOf[A] match {
+ case None => null.asInstanceOf[B]
+ case Some(v) => v
+ }
+ } catch {
+ case ex: ClassCastException => null.asInstanceOf[B]
+ }
+
+ override def clear() = underlying.clear()
+ }
+
+ trait JMapWrapperLike[A, B, +Repr <: mutable.MapLike[A, B, Repr] with mutable.Map[A, B]] extends mutable.Map[A, B] with mutable.MapLike[A, B, Repr] {
+ def underlying: ju.Map[A, B]
+
+ override def size = underlying.size
+
+ def get(k: A) = {
+ val v = underlying get k
+ if (v != null)
+ Some(v)
+ else if (underlying containsKey k)
+ Some(null.asInstanceOf[B])
+ else
+ None
+ }
+
+ def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this }
+ def -=(key: A): this.type = { underlying remove key; this }
+
+ override def put(k: A, v: B): Option[B] = {
+ val r = underlying.put(k, v)
+ if (r != null) Some(r) else None
+ }
+
+ override def update(k: A, v: B) { underlying.put(k, v) }
+
+ override def remove(k: A): Option[B] = {
+ val r = underlying remove k
+ if (r != null) Some(r) else None
+ }
+
+ def iterator: Iterator[(A, B)] = new AbstractIterator[(A, B)] {
+ val ui = underlying.entrySet.iterator
+ def hasNext = ui.hasNext
+ def next() = { val e = ui.next(); (e.getKey, e.getValue) }
+ }
+
+ override def clear() = underlying.clear()
+
+ override def empty: Repr = null.asInstanceOf[Repr]
+ }
+
+ case class JMapWrapper[A, B](val underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] {
+ override def empty = JMapWrapper(new ju.HashMap[A, B])
+ }
+
+ class ConcurrentMapWrapper[A, B](override val underlying: mutable.ConcurrentMap[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] {
+
+ def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match {
+ case Some(v) => v
+ case None => null.asInstanceOf[B]
+ }
+
+ def remove(k: AnyRef, v: AnyRef) = try {
+ underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B])
+ } catch {
+ case ex: ClassCastException =>
+ false
+ }
+
+ def replace(k: A, v: B): B = underlying.replace(k, v) match {
+ case Some(v) => v
+ case None => null.asInstanceOf[B]
+ }
+
+ def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval)
+ }
+
+ case class JConcurrentMapWrapper[A, B](val underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with mutable.ConcurrentMap[A, B] {
+ override def get(k: A) = {
+ val v = underlying get k
+ if (v != null) Some(v)
+ else None
+ }
+
+ override def empty = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[A, B])
+
+ def putIfAbsent(k: A, v: B): Option[B] = {
+ val r = underlying.putIfAbsent(k, v)
+ if (r != null) Some(r) else None
+ }
+
+ def remove(k: A, v: B): Boolean = underlying.remove(k, v)
+
+ def replace(k: A, v: B): Option[B] = {
+ val prev = underlying.replace(k, v)
+ if (prev != null) Some(prev) else None
+ }
+
+ def replace(k: A, oldvalue: B, newvalue: B): Boolean =
+ underlying.replace(k, oldvalue, newvalue)
+ }
+
+ case class DictionaryWrapper[A, B](underlying: mutable.Map[A, B]) extends ju.Dictionary[A, B] {
+ def size: Int = underlying.size
+ def isEmpty: Boolean = underlying.isEmpty
+ def keys: ju.Enumeration[A] = asJavaEnumeration(underlying.keysIterator)
+ def elements: ju.Enumeration[B] = asJavaEnumeration(underlying.valuesIterator)
+ def get(key: AnyRef) = try {
+ underlying get key.asInstanceOf[A] match {
+ case None => null.asInstanceOf[B]
+ case Some(v) => v
+ }
+ } catch {
+ case ex: ClassCastException => null.asInstanceOf[B]
+ }
+ def put(key: A, value: B): B = underlying.put(key, value) match {
+ case Some(v) => v
+ case None => null.asInstanceOf[B]
+ }
+ override def remove(key: AnyRef) = try {
+ underlying remove key.asInstanceOf[A] match {
+ case None => null.asInstanceOf[B]
+ case Some(v) => v
+ }
+ } catch {
+ case ex: ClassCastException => null.asInstanceOf[B]
+ }
+ }
+
+ case class JDictionaryWrapper[A, B](underlying: ju.Dictionary[A, B]) extends mutable.AbstractMap[A, B] with mutable.Map[A, B] {
+ override def size: Int = underlying.size
+
+ def get(k: A) = {
+ val v = underlying get k
+ if (v != null) Some(v) else None
+ }
+
+ def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this }
+ def -=(key: A): this.type = { underlying remove key; this }
+
+ override def put(k: A, v: B): Option[B] = {
+ val r = underlying.put(k, v)
+ if (r != null) Some(r) else None
+ }
+
+ override def update(k: A, v: B) { underlying.put(k, v) }
+
+ override def remove(k: A): Option[B] = {
+ val r = underlying remove k
+ if (r != null) Some(r) else None
+ }
+
+ def iterator = enumerationAsScalaIterator(underlying.keys) map (k => (k, underlying get k))
+
+ override def clear() = underlying.clear()
+ }
+
+ case class JPropertiesWrapper(underlying: ju.Properties) extends mutable.AbstractMap[String, String]
+ with mutable.Map[String, String]
+ with mutable.MapLike[String, String, JPropertiesWrapper] {
+
+ override def size = underlying.size
+
+ def get(k: String) = {
+ val v = underlying get k
+ if (v != null) Some(v.asInstanceOf[String]) else None
+ }
+
+ def +=(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this }
+ def -=(key: String): this.type = { underlying remove key; this }
+
+ override def put(k: String, v: String): Option[String] = {
+ val r = underlying.put(k, v)
+ if (r != null) Some(r.asInstanceOf[String]) else None
+ }
+
+ override def update(k: String, v: String) { underlying.put(k, v) }
+
+ override def remove(k: String): Option[String] = {
+ val r = underlying remove k
+ if (r != null) Some(r.asInstanceOf[String]) else None
+ }
+
+ def iterator: Iterator[(String, String)] = new AbstractIterator[(String, String)] {
+ val ui = underlying.entrySet.iterator
+ def hasNext = ui.hasNext
+ def next() = {
+ val e = ui.next()
+ (e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String])
+ }
+ }
+
+ override def clear() = underlying.clear()
+
+ override def empty = JPropertiesWrapper(new ju.Properties)
+
+ def getProperty(key: String) = underlying.getProperty(key)
+
+ def getProperty(key: String, defaultValue: String) =
+ underlying.getProperty(key, defaultValue)
+
+ def setProperty(key: String, value: String) =
+ underlying.setProperty(key, value)
+ }
+}
+
+object Wrappers extends Wrappers
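Because the conversion methods pattern-match on these wrapper case classes, converting a value back and forth returns the original object rather than a wrapper around a wrapper. A small illustration (using the `JavaConverters` decorators built on top of these wrappers):

{{{
import scala.collection.JavaConverters._

object RoundTripDemo extends App {
  val jm = new java.util.HashMap[String, Int]
  jm.put("one", 1)

  val sm  = jm.asScala   // wraps jm in a JMapWrapper
  val jm2 = sm.asJava    // pattern match unwraps back to jm
  assert(jm2 eq jm)      // same underlying Java map
  println(jm2 eq jm)     // true
}
}}}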
diff --git a/src/library/scala/collection/convert/package.scala b/src/library/scala/collection/convert/package.scala
new file mode 100644
index 0000000000..2f8bca1e1f
--- /dev/null
+++ b/src/library/scala/collection/convert/package.scala
@@ -0,0 +1,18 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala.collection
+
+package object convert {
+ val decorateAsJava = new DecorateAsJava { }
+ val decorateAsScala = new DecorateAsScala { }
+ val decorateAll = new DecorateAsJava with DecorateAsScala { }
+ val wrapAsJava = new WrapAsJava { }
+ val wrapAsScala = new WrapAsScala { }
+ val wrapAll = new WrapAsJava with WrapAsScala { }
+}
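These values let callers import a single conversion flavour instead of everything at once. A sketch of pulling in only the explicit `asScala` decorators:

{{{
import scala.collection.convert.decorateAsScala._

object SelectiveImportDemo extends App {
  val jl = java.util.Arrays.asList("a", "b", "c")
  println(jl.asScala.mkString("-"))   // a-b-c
}
}}}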
diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
index 7333074778..6586434924 100644
--- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala
+++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala
@@ -119,22 +119,25 @@ trait GenericTraversableTemplate[+A, +CC[X] <: GenTraversable[X]] extends HasNew
* a $coll formed by the elements of these traversable
* collections.
*
- * The resulting collection's type will be guided by the
- * static type of $coll. For example:
- *
- * {{{
- * val xs = List(Set(1, 2, 3), Set(1, 2, 3))
- * // xs == List(1, 2, 3, 1, 2, 3)
- *
- * val ys = Set(List(1, 2, 3), List(3, 2, 1))
- * // ys == Set(1, 2, 3)
- * }}}
- *
* @tparam B the type of the elements of each traversable collection.
* @param asTraversable an implicit conversion which asserts that the element
* type of this $coll is a `GenTraversable`.
* @return a new $coll resulting from concatenating all element ${coll}s.
+ *
* @usecase def flatten[B]: $Coll[B]
+ *
+ * @inheritdoc
+ *
+ * The resulting collection's type will be guided by the
+ * static type of $coll. For example:
+ *
+ * {{{
+ * val xs = List(Set(1, 2, 3), Set(1, 2, 3))
+ * // xs == List(1, 2, 3, 1, 2, 3)
+ *
+ * val ys = Set(List(1, 2, 3), List(3, 2, 1))
+ * // ys == Set(1, 2, 3)
+ * }}}
*/
def flatten[B](implicit asTraversable: A => /*<:<!!!*/ GenTraversableOnce[B]): CC[B] = {
val b = genericBuilder[B]
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index 381fcf3117..870c179b2d 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -93,8 +93,12 @@ sealed abstract class List[+A] extends AbstractSeq[A]
* @param x the element to prepend.
* @return a list which contains `x` as first element and
* which continues with this list.
- * @example `1 :: List(2, 3) = List(2, 3).::(1) = List(1, 2, 3)`
+ *
* @usecase def ::(x: A): List[A]
+ * @inheritdoc
+ *
+ * Example:
+ * {{{1 :: List(2, 3) = List(2, 3).::(1) = List(1, 2, 3)}}}
*/
def ::[B >: A] (x: B): List[B] =
new scala.collection.immutable.::(x, this)
@@ -103,8 +107,12 @@ sealed abstract class List[+A] extends AbstractSeq[A]
* @param prefix The list elements to prepend.
* @return a list resulting from the concatenation of the given
* list `prefix` and this list.
- * @example `List(1, 2) ::: List(3, 4) = List(3, 4).:::(List(1, 2)) = List(1, 2, 3, 4)`
+ *
* @usecase def :::(prefix: List[A]): List[A]
+ * @inheritdoc
+ *
+ * Example:
+ * {{{List(1, 2) ::: List(3, 4) = List(3, 4).:::(List(1, 2)) = List(1, 2, 3, 4)}}}
*/
def :::[B >: A](prefix: List[B]): List[B] =
if (isEmpty) prefix
@@ -117,7 +125,9 @@ sealed abstract class List[+A] extends AbstractSeq[A]
*
* @param prefix the prefix to reverse and then prepend
* @return the concatenation of the reversed prefix and the current list.
+ *
* @usecase def reverse_:::(prefix: List[A]): List[A]
+ * @inheritdoc
*/
def reverse_:::[B >: A](prefix: List[B]): List[B] = {
var these: List[B] = this
@@ -137,7 +147,9 @@ sealed abstract class List[+A] extends AbstractSeq[A]
* @tparam B the element type of the returned collection.
* @return a list resulting from applying the given function
* `f` to each element of this list and collecting the results.
+ *
* @usecase def mapConserve(f: A => A): List[A]
+ * @inheritdoc
*/
def mapConserve[B >: A <: AnyRef](f: A => B): List[B] = {
@tailrec
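
Editor's note: the point of mapConserve (whose body is truncated in this hunk) is structure sharing: when the function leaves every element unchanged, the original list itself is returned instead of a copy. A small illustration:

    val xs: List[String] = List("a", "b", "c")
    assert(xs.mapConserve(identity) eq xs)                        // no new list allocated
    assert(xs.mapConserve(_.toUpperCase) == List("A", "B", "C"))  // behaves like map otherwise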
diff --git a/src/library/scala/collection/mutable/ConcurrentTrieMap.scala b/src/library/scala/collection/mutable/ConcurrentTrieMap.scala
index 1a44c8e423..cfe1b1950d 100644
--- a/src/library/scala/collection/mutable/ConcurrentTrieMap.scala
+++ b/src/library/scala/collection/mutable/ConcurrentTrieMap.scala
@@ -1027,7 +1027,7 @@ private[collection] class ConcurrentTrieMapIterator[K, V](var level: Int, privat
Seq(this)
}
- def printDebug {
+ def printDebug() {
println("ctrie iterator")
println(stackpos.mkString(","))
println("depth: " + depth)
diff --git a/src/library/scala/collection/mutable/WeakHashMap.scala b/src/library/scala/collection/mutable/WeakHashMap.scala
index 89d7c7a695..4e09755acf 100644
--- a/src/library/scala/collection/mutable/WeakHashMap.scala
+++ b/src/library/scala/collection/mutable/WeakHashMap.scala
@@ -6,14 +6,11 @@
** |/ **
\* */
-
-
package scala.collection
package mutable
-import JavaConversions._
import generic._
-
+import convert.Wrappers._
/** A hash map with references to entries which are weakly reachable. Entries are
* removed from this map when the key is no longer (strongly) referenced. This class wraps
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 7e0fa366ab..5551c04ce2 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -668,8 +668,9 @@ self: ParIterableLike[T, Repr, Sequential] =>
* @return a collection containing the prefix scan of the elements in the original collection
*
* @usecase def scan(z: T)(op: (T, T) => T): $Coll[T]
+ * @inheritdoc
*
- * @return a new $coll containing the prefix scan of the elements in this $coll
+ * @return a new $coll containing the prefix scan of the elements in this $coll
*/
def scan[U >: T, That](z: U)(op: (U, U) => U)(implicit bf: CanBuildFrom[Repr, U, That]): That = if (bf(repr).isCombiner) {
if (tasksupport.parallelismLevel > 1) {
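
Editor's note: a quick reminder of what the documented prefix scan produces — the result starts with z and ends with the running total:

    import scala.collection.parallel.ParSeq

    assert(ParSeq(1, 2, 3).scan(0)(_ + _) == ParSeq(0, 1, 3, 6))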
diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala
index 9f28a286ca..b3c527da84 100644
--- a/src/library/scala/collection/parallel/ParSeqLike.scala
+++ b/src/library/scala/collection/parallel/ParSeqLike.scala
@@ -285,7 +285,6 @@ self =>
}
/** Computes the multiset intersection between this $coll and another sequence.
- * $mayNotTerminateInf
*
* @param that the sequence of elements to intersect with.
* @tparam B the element type of the returned $coll.
@@ -296,12 +295,17 @@ self =>
* If an element value `x` appears
* ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
* in the result, but any following occurrences will be omitted.
+ *
* @usecase def intersect(that: Seq[T]): $Coll[T]
- * @return a new $coll which contains all elements of this $coll
- * which also appear in `that`.
- * If an element value `x` appears
- * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
- * in the result, but any following occurrences will be omitted.
+ * @inheritdoc
+ *
+ * $mayNotTerminateInf
+ *
+ * @return a new $coll which contains all elements of this $coll
+ * which also appear in `that`.
+ * If an element value `x` appears
+ * ''n'' times in `that`, then the first ''n'' occurrences of `x` will be retained
+ * in the result, but any following occurrences will be omitted.
*/
def intersect[U >: T](that: GenSeq[U]) = sequentially {
_ intersect that
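
Editor's note: a worked example of the multiset semantics documented above — an element is kept at most as many times as it occurs in that:

    import scala.collection.parallel.ParSeq

    val left  = ParSeq(1, 1, 2, 3)
    val right = Seq(1, 2, 2)
    // 1 occurs once in right, so only the first 1 from left survives; 3 is dropped.
    assert((left intersect right) == ParSeq(1, 2))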
diff --git a/src/library/scala/concurrent/Awaitable.scala b/src/library/scala/concurrent/Awaitable.scala
index c38e668f30..6c9995eb05 100644
--- a/src/library/scala/concurrent/Awaitable.scala
+++ b/src/library/scala/concurrent/Awaitable.scala
@@ -11,7 +11,7 @@ package scala.concurrent
import scala.annotation.implicitNotFound
-import scala.util.Duration
+import scala.concurrent.util.Duration
diff --git a/src/library/scala/concurrent/ConcurrentPackageObject.scala b/src/library/scala/concurrent/ConcurrentPackageObject.scala
index 7d005838d3..3471095051 100644
--- a/src/library/scala/concurrent/ConcurrentPackageObject.scala
+++ b/src/library/scala/concurrent/ConcurrentPackageObject.scala
@@ -12,7 +12,8 @@ package scala.concurrent
import java.util.concurrent.{ Executors, ExecutorService }
import scala.concurrent.forkjoin.ForkJoinPool
-import scala.util.{ Duration, Try, Success, Failure }
+import scala.util.{ Try, Success, Failure }
+import scala.concurrent.util.Duration
import ConcurrentPackageObject._
@@ -26,8 +27,8 @@ abstract class ConcurrentPackageObject {
new impl.ExecutionContextImpl(getExecutorService)
private[concurrent] def getExecutorService: AnyRef =
- if (util.Properties.isJavaAtLeast("1.6")) {
- val vendor = util.Properties.javaVmVendor
+ if (scala.util.Properties.isJavaAtLeast("1.6")) {
+ val vendor = scala.util.Properties.javaVmVendor
if ((vendor contains "Oracle") || (vendor contains "Sun") || (vendor contains "Apple")) new ForkJoinPool
else Executors.newCachedThreadPool()
} else Executors.newCachedThreadPool()
diff --git a/src/library/scala/concurrent/DelayedLazyVal.scala b/src/library/scala/concurrent/DelayedLazyVal.scala
index a17153bad5..96a66d83b6 100644
--- a/src/library/scala/concurrent/DelayedLazyVal.scala
+++ b/src/library/scala/concurrent/DelayedLazyVal.scala
@@ -40,9 +40,8 @@ class DelayedLazyVal[T](f: () => T, body: => Unit) {
def apply(): T = if (isDone) complete else f()
// TODO replace with scala.concurrent.future { ... }
- ops.future {
+ future {
body
_isDone = true
}
-
}
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index eb1b3355c0..c4a45f9fb5 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -12,7 +12,7 @@ package scala.concurrent
import java.util.concurrent.atomic.{ AtomicInteger }
import java.util.concurrent.{ Executors, Future => JFuture, Callable }
-import scala.util.Duration
+import scala.concurrent.util.Duration
import scala.util.{ Try, Success, Failure }
import scala.concurrent.forkjoin.{ ForkJoinPool, RecursiveTask => FJTask, RecursiveAction, ForkJoinWorkerThread }
import scala.collection.generic.CanBuildFrom
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index eb54b61db0..1dc8e38355 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -17,7 +17,8 @@ import java.util.{ LinkedList => JLinkedList }
import java.{ lang => jl }
import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicBoolean }
-import scala.util.{ Timeout, Duration, Try, Success, Failure }
+import scala.util.{ Try, Success, Failure }
+import scala.concurrent.util.Duration
import scala.Option
import scala.annotation.tailrec
diff --git a/src/library/scala/concurrent/Scheduler.scala b/src/library/scala/concurrent/Scheduler.scala
index 39d798e6b4..e2eb4d69fe 100644
--- a/src/library/scala/concurrent/Scheduler.scala
+++ b/src/library/scala/concurrent/Scheduler.scala
@@ -8,7 +8,7 @@
package scala.concurrent
-import scala.util.Duration
+import scala.concurrent.util.Duration
/** A service for scheduling tasks and thunks for one-time, or periodic execution.
*/
diff --git a/src/library/scala/concurrent/default/SchedulerImpl.scala.disabled b/src/library/scala/concurrent/default/SchedulerImpl.scala.disabled
index 745d2d1a15..241efa8857 100644
--- a/src/library/scala/concurrent/default/SchedulerImpl.scala.disabled
+++ b/src/library/scala/concurrent/default/SchedulerImpl.scala.disabled
@@ -9,7 +9,7 @@
package scala.concurrent
package default
-import scala.util.Duration
+import scala.concurrent.util.Duration
private[concurrent] final class SchedulerImpl extends Scheduler {
private val timer =
diff --git a/src/library/scala/concurrent/default/TaskImpl.scala.disabled b/src/library/scala/concurrent/default/TaskImpl.scala.disabled
index 94e54cb372..50753a7154 100644
--- a/src/library/scala/concurrent/default/TaskImpl.scala.disabled
+++ b/src/library/scala/concurrent/default/TaskImpl.scala.disabled
@@ -7,7 +7,7 @@ import java.util.concurrent.atomic.AtomicReferenceFieldUpdater
import scala.concurrent.forkjoin.{ ForkJoinPool, RecursiveAction, ForkJoinWorkerThread }
import scala.util.Try
import scala.util
-import scala.util.Duration
+import scala.concurrent.util.Duration
import scala.annotation.tailrec
diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
index 6201de14d7..8ac745fd25 100644
--- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
+++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala
@@ -13,7 +13,8 @@ package scala.concurrent.impl
import java.util.concurrent.{Callable, ExecutorService}
import scala.concurrent.forkjoin._
import scala.concurrent.{ExecutionContext, resolver, Awaitable, body2awaitable}
-import scala.util.{ Duration, Try, Success, Failure }
+import scala.util.{ Try, Success, Failure }
+import scala.concurrent.util.{ Duration }
import scala.collection.mutable.Stack
diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala
index 5238bc51db..4a983b5001 100644
--- a/src/library/scala/concurrent/impl/Promise.scala
+++ b/src/library/scala/concurrent/impl/Promise.scala
@@ -14,7 +14,7 @@ import java.util.concurrent.TimeUnit.{ NANOSECONDS, MILLISECONDS }
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater
import scala.concurrent.{Awaitable, ExecutionContext, resolve, resolver, blocking, CanAwait, TimeoutException}
//import scala.util.continuations._
-import scala.util.Duration
+import scala.concurrent.util.Duration
import scala.util.Try
import scala.util
import scala.annotation.tailrec
diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala
index 7cc48c09b2..204b3f2673 100644
--- a/src/library/scala/concurrent/package.scala
+++ b/src/library/scala/concurrent/package.scala
@@ -8,7 +8,8 @@
package scala
-import scala.util.{ Duration, Try, Success, Failure }
+import scala.util.{ Try, Success, Failure }
+import scala.concurrent.util.Duration
/** This package object contains primitives for concurrent and parallel programming.
*/
diff --git a/src/library/scala/concurrent/util/Duration.scala b/src/library/scala/concurrent/util/Duration.scala
new file mode 100644
index 0000000000..33d034da76
--- /dev/null
+++ b/src/library/scala/concurrent/util/Duration.scala
@@ -0,0 +1,578 @@
+/**
+ * Copyright (C) 2009-2012 Typesafe Inc. <http://www.typesafe.com>
+ */
+
+package scala.concurrent.util
+
+import java.util.concurrent.TimeUnit
+import TimeUnit._
+import java.lang.{ Double ⇒ JDouble }
+
+object DurationImplicits {
+ trait Classifier[C] {
+ type R
+ def convert(d: FiniteDuration): R
+ }
+
+ object span
+ implicit object spanConvert extends Classifier[span.type] {
+ type R = FiniteDuration
+ def convert(d: FiniteDuration) = d
+ }
+
+ object fromNow
+ implicit object fromNowConvert extends Classifier[fromNow.type] {
+ type R = Deadline
+ def convert(d: FiniteDuration) = Deadline.now + d
+ }
+
+ implicit def intToDurationInt(n: Int) = new DurationInt(n)
+ implicit def longToDurationLong(n: Long) = new DurationLong(n)
+ implicit def doubleToDurationDouble(d: Double) = new DurationDouble(d)
+
+ implicit def pairIntToDuration(p: (Int, TimeUnit)) = Duration(p._1, p._2)
+ implicit def pairLongToDuration(p: (Long, TimeUnit)) = Duration(p._1, p._2)
+ implicit def durationToPair(d: Duration) = (d.length, d.unit)
+
+ /*
+ * Avoid reflection-based invocation by using concrete wrapper classes rather than structural (duck) types.
+ */
+ class IntMult(i: Int) {
+ def *(d: Duration) = d * i
+ }
+ implicit def intMult(i: Int) = new IntMult(i)
+
+ class LongMult(l: Long) {
+ def *(d: Duration) = d * l
+ }
+ implicit def longMult(l: Long) = new LongMult(l)
+
+ class DoubleMult(f: Double) {
+ def *(d: Duration) = d * f
+ }
+ implicit def doubleMult(f: Double) = new DoubleMult(f)
+}
+
+case class Deadline private (time: Duration) {
+ def +(other: Duration): Deadline = copy(time = time + other)
+ def -(other: Duration): Deadline = copy(time = time - other)
+ def -(other: Deadline): Duration = time - other.time
+ def timeLeft: Duration = this - Deadline.now
+ def hasTimeLeft(): Boolean = !isOverdue()
+ def isOverdue(): Boolean = (time.toNanos - System.nanoTime()) < 0
+}
+
+object Deadline {
+ def now: Deadline = Deadline(Duration(System.nanoTime, NANOSECONDS))
+}
+
+object Duration {
+ implicit def timeLeft(implicit d: Deadline): Duration = d.timeLeft
+
+ def apply(length: Long, unit: TimeUnit): FiniteDuration = new FiniteDuration(length, unit)
+ def apply(length: Double, unit: TimeUnit): FiniteDuration = fromNanos(unit.toNanos(1) * length)
+ def apply(length: Long, unit: String): FiniteDuration = {
+ val (mult, timeUnit) = Duration.timeUnit(unit)
+ new FiniteDuration(length * mult, timeUnit)
+ }
+
+ /**
+ * Construct a Duration by parsing a String. In case of a format error, a
+ * RuntimeException is thrown. See `unapply(String)` for more information.
+ */
+ def apply(s: String): Duration = unapply(s) getOrElse sys.error("format error " + s)
+
+ private val RE = ("""^\s*([\+|-]?\d+(?:\.\d+)?)\s*""" + // length part
+ "(?:" + // units are distinguished in separate match groups
+ "(d|day|days)|" +
+ "(h|hour|hours)|" +
+ "(min|minute|minutes)|" +
+ "(s|sec|second|seconds)|" +
+ "(ms|milli|millis|millisecond|milliseconds)|" +
+ "(µs|micro|micros|microsecond|microseconds)|" +
+ "(ns|nano|nanos|nanosecond|nanoseconds)" +
+ """)\s*$""").r // close the non-capturing group
+ private val REinf = """^\s*(?:\+|Plus)?Inf\s*$""".r
+ private val REminf = """^\s*(?:-|Minus)Inf\s*""".r
+
+ /**
+ * Deconstruct a Duration into `Long` length and [[java.util.concurrent.TimeUnit]] if it is a
+ * [[scala.concurrent.util.FiniteDuration]].
+ *
+ * @param d Duration to be deconstructed.
+ */
+ def unapply(d: Duration): Option[(Long, TimeUnit)] = {
+ if (d.finite_?) {
+ Some((d.length, d.unit))
+ } else {
+ None
+ }
+ }
+
+ /**
+ * Parse String, return None if no match. Format is `"<length><unit>"`, where
+ * whitespace is allowed before, between and after the parts. Infinities are
+ * designated by `"Inf"`, `"PlusInf"` or `"+Inf"`, and by `"-Inf"` or `"MinusInf"`.
+ */
+ def unapply(s: String): Option[Duration] = s match {
+ case RE(length, d, h, m, s, ms, mus, ns) ⇒
+ if (d ne null)
+ Some(Duration(JDouble.parseDouble(length) * 86400, SECONDS))
+ else if (h ne null)
+ Some(Duration(JDouble.parseDouble(length) * 3600, SECONDS))
+ else if (m ne null)
+ Some(Duration(JDouble.parseDouble(length) * 60, SECONDS))
+ else if (s ne null)
+ Some(Duration(JDouble.parseDouble(length), SECONDS))
+ else if (ms ne null)
+ Some(Duration(JDouble.parseDouble(length), MILLISECONDS))
+ else if (mus ne null)
+ Some(Duration(JDouble.parseDouble(length), MICROSECONDS))
+ else if (ns ne null)
+ Some(Duration(JDouble.parseDouble(length), NANOSECONDS))
+ else
+ sys.error("made some error in regex (should not be possible)")
+ case REinf() ⇒ Some(Inf)
+ case REminf() ⇒ Some(MinusInf)
+ case _ ⇒ None
+ }
+
+ def fromNanos(nanos: Double): FiniteDuration =
+ fromNanos((nanos + 0.5).asInstanceOf[Long])
+
+ def fromNanos(nanos: Long): FiniteDuration = {
+ if (nanos % 1000000000L == 0) {
+ Duration(nanos / 1000000000L, SECONDS)
+ } else if (nanos % 1000000L == 0) {
+ Duration(nanos / 1000000L, MILLISECONDS)
+ } else if (nanos % 1000L == 0) {
+ Duration(nanos / 1000L, MICROSECONDS)
+ } else {
+ Duration(nanos, NANOSECONDS)
+ }
+ }
+
+ /**
+ * Parse TimeUnit from string representation.
+ */
+ protected[util] def timeUnit(unit: String): (Long, TimeUnit) = unit.toLowerCase match {
+ case "d" | "day" | "days" ⇒ (86400, SECONDS)
+ case "h" | "hour" | "hours" ⇒ (3600, SECONDS)
+ case "min" | "minute" | "minutes" ⇒ (60, SECONDS)
+ case "s" | "sec" | "second" | "seconds" ⇒ (1, SECONDS)
+ case "ms" | "milli" | "millis" | "millisecond" | "milliseconds" ⇒ (1, MILLISECONDS)
+ case "µs" | "micro" | "micros" | "microsecond" | "microseconds" ⇒ (1, MICROSECONDS)
+ case "ns" | "nano" | "nanos" | "nanosecond" | "nanoseconds" ⇒ (1, NANOSECONDS)
+ }
+
+ val Zero: FiniteDuration = new FiniteDuration(0, NANOSECONDS)
+ val Undefined: Duration = new Duration with Infinite {
+ override def toString = "Duration.Undefined"
+ override def equals(other: Any) = other.asInstanceOf[AnyRef] eq this
+ override def +(other: Duration): Duration = throw new IllegalArgumentException("cannot add Undefined duration")
+ override def -(other: Duration): Duration = throw new IllegalArgumentException("cannot subtract Undefined duration")
+ override def *(factor: Double): Duration = throw new IllegalArgumentException("cannot multiply Undefined duration")
+ override def /(factor: Double): Duration = throw new IllegalArgumentException("cannot divide Undefined duration")
+ override def /(other: Duration): Double = throw new IllegalArgumentException("cannot divide Undefined duration")
+ def compare(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
+ def unary_- : Duration = throw new IllegalArgumentException("cannot negate Undefined duration")
+ }
+
+ trait Infinite {
+ this: Duration ⇒
+
+ def +(other: Duration): Duration =
+ other match {
+ case _: this.type ⇒ this
+ case _: Infinite ⇒ throw new IllegalArgumentException("illegal addition of infinities")
+ case _ ⇒ this
+ }
+ def -(other: Duration): Duration =
+ other match {
+ case _: this.type ⇒ throw new IllegalArgumentException("illegal subtraction of infinities")
+ case _ ⇒ this
+ }
+ def *(factor: Double): Duration = this
+ def /(factor: Double): Duration = this
+ def /(other: Duration): Double =
+ other match {
+ case _: Infinite ⇒ throw new IllegalArgumentException("illegal division of infinities")
+ // maybe questionable but pragmatic: Inf / 0 => Inf
+ case x ⇒ Double.PositiveInfinity * (if ((this > Zero) ^ (other >= Zero)) -1 else 1)
+ }
+
+ def finite_? = false
+
+ def length: Long = throw new IllegalArgumentException("length not allowed on infinite Durations")
+ def unit: TimeUnit = throw new IllegalArgumentException("unit not allowed on infinite Durations")
+ def toNanos: Long = throw new IllegalArgumentException("toNanos not allowed on infinite Durations")
+ def toMicros: Long = throw new IllegalArgumentException("toMicros not allowed on infinite Durations")
+ def toMillis: Long = throw new IllegalArgumentException("toMillis not allowed on infinite Durations")
+ def toSeconds: Long = throw new IllegalArgumentException("toSeconds not allowed on infinite Durations")
+ def toMinutes: Long = throw new IllegalArgumentException("toMinutes not allowed on infinite Durations")
+ def toHours: Long = throw new IllegalArgumentException("toHours not allowed on infinite Durations")
+ def toDays: Long = throw new IllegalArgumentException("toDays not allowed on infinite Durations")
+ def toUnit(unit: TimeUnit): Double = throw new IllegalArgumentException("toUnit not allowed on infinite Durations")
+
+ }
+
+ /**
+ * Infinite duration: greater than any other and not equal to any other,
+ * including itself.
+ */
+ val Inf: Duration = new Duration with Infinite {
+ override def toString = "Duration.Inf"
+ def compare(other: Duration) = if (other eq this) 0 else 1
+ def unary_- : Duration = MinusInf
+ }
+
+ /**
+ * Infinite negative duration: lesser than any other and not equal to any other,
+ * including itself.
+ */
+ val MinusInf: Duration = new Duration with Infinite {
+ override def toString = "Duration.MinusInf"
+ def compare(other: Duration) = if (other eq this) 0 else -1
+ def unary_- : Duration = Inf
+ }
+
+ // Java Factories
+ def create(length: Long, unit: TimeUnit): FiniteDuration = apply(length, unit)
+ def create(length: Double, unit: TimeUnit): FiniteDuration = apply(length, unit)
+ def create(length: Long, unit: String): FiniteDuration = apply(length, unit)
+ def parse(s: String): Duration = unapply(s).get
+
+ implicit object DurationIsOrdered extends Ordering[Duration] {
+ def compare(a: Duration, b: Duration) = a compare b
+ }
+}
+
+/**
+ * Utility for working with java.util.concurrent.TimeUnit durations.
+ *
+ * <p/>
+ * Examples:
+ * <pre>
+ * import scala.concurrent.util.Duration
+ * import java.util.concurrent.TimeUnit._
+ *
+ * val duration = Duration(100, MILLISECONDS)
+ * val duration = Duration(100, "millis")
+ *
+ * duration.toNanos
+ * duration < 1.second
+ * duration <= Duration.Inf
+ * </pre>
+ *
+ * <p/>
+ * Implicits are also provided for Int, Long and Double. Example usage:
+ * <pre>
+ * import scala.concurrent.util.DurationImplicits._
+ *
+ * val duration = 100 millis
+ * </pre>
+ *
+ * Extractors, parsing and arithmetic are also included:
+ * <pre>
+ * val d = Duration("1.2 µs")
+ * val Duration(length, unit) = 5 millis
+ * val d2 = d * 2.5
+ * val d3 = d2 + 1.millisecond
+ * </pre>
+ */
+abstract class Duration extends Serializable with Ordered[Duration] {
+ def length: Long
+ def unit: TimeUnit
+ def toNanos: Long
+ def toMicros: Long
+ def toMillis: Long
+ def toSeconds: Long
+ def toMinutes: Long
+ def toHours: Long
+ def toDays: Long
+ def toUnit(unit: TimeUnit): Double
+
+ def +(other: Duration): Duration
+ def -(other: Duration): Duration
+ def *(factor: Double): Duration
+ def /(factor: Double): Duration
+ def /(other: Duration): Double
+ def unary_- : Duration
+ def finite_? : Boolean
+ def min(other: Duration): Duration = if (this < other) this else other
+ def max(other: Duration): Duration = if (this > other) this else other
+ def fromNow: Deadline = Deadline.now + this
+
+ // Java API
+ def lt(other: Duration) = this < other
+ def lteq(other: Duration) = this <= other
+ def gt(other: Duration) = this > other
+ def gteq(other: Duration) = this >= other
+ def plus(other: Duration) = this + other
+ def minus(other: Duration) = this - other
+ def mul(factor: Double) = this * factor
+ def div(factor: Double) = this / factor
+ def div(other: Duration) = this / other
+ def neg() = -this
+ def isFinite() = finite_?
+}
+
+object FiniteDuration {
+ implicit object FiniteDurationIsOrdered extends Ordering[FiniteDuration] {
+ def compare(a: FiniteDuration, b: FiniteDuration) = a compare b
+ }
+
+ def apply(length: Long, unit: TimeUnit) =
+ new FiniteDuration(length, unit)
+
+ def apply(length: Long, unit: String) = {
+ val (mult, timeUnit) = Duration.timeUnit(unit)
+ new FiniteDuration(length * mult, timeUnit)
+ }
+
+}
+
+class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
+ import Duration._
+
+ def toNanos = unit.toNanos(length)
+ def toMicros = unit.toMicros(length)
+ def toMillis = unit.toMillis(length)
+ def toSeconds = unit.toSeconds(length)
+ def toMinutes = unit.toMinutes(length)
+ def toHours = unit.toHours(length)
+ def toDays = unit.toDays(length)
+ def toUnit(u: TimeUnit) = long2double(toNanos) / NANOSECONDS.convert(1, u)
+
+ override def toString = this match {
+ case Duration(1, SECONDS) ⇒ "1 second"
+ case Duration(x, SECONDS) ⇒ x + " seconds"
+ case Duration(1, MILLISECONDS) ⇒ "1 millisecond"
+ case Duration(x, MILLISECONDS) ⇒ x + " milliseconds"
+ case Duration(1, MICROSECONDS) ⇒ "1 microsecond"
+ case Duration(x, MICROSECONDS) ⇒ x + " microseconds"
+ case Duration(1, NANOSECONDS) ⇒ "1 nanosecond"
+ case Duration(x, NANOSECONDS) ⇒ x + " nanoseconds"
+ }
+
+ def compare(other: Duration) =
+ if (other.finite_?) {
+ val me = toNanos
+ val o = other.toNanos
+ if (me > o) 1 else if (me < o) -1 else 0
+ } else -other.compare(this)
+
+ def +(other: Duration) = {
+ if (!other.finite_?) {
+ other
+ } else {
+ val nanos = toNanos + other.asInstanceOf[FiniteDuration].toNanos
+ fromNanos(nanos)
+ }
+ }
+
+ def -(other: Duration) = {
+ if (!other.finite_?) {
+ other
+ } else {
+ val nanos = toNanos - other.asInstanceOf[FiniteDuration].toNanos
+ fromNanos(nanos)
+ }
+ }
+
+ def *(factor: Double) = fromNanos(long2double(toNanos) * factor)
+
+ def /(factor: Double) = fromNanos(long2double(toNanos) / factor)
+
+ def /(other: Duration) = if (other.finite_?) long2double(toNanos) / other.toNanos else 0
+
+ def unary_- = Duration(-length, unit)
+
+ def finite_? = true
+
+ override def equals(other: Any) =
+ other.isInstanceOf[FiniteDuration] &&
+ toNanos == other.asInstanceOf[FiniteDuration].toNanos
+
+ override def hashCode = toNanos.asInstanceOf[Int]
+}
+
+class DurationInt(n: Int) {
+ import DurationImplicits.Classifier
+
+ def nanoseconds = Duration(n, NANOSECONDS)
+ def nanos = Duration(n, NANOSECONDS)
+ def nanosecond = Duration(n, NANOSECONDS)
+ def nano = Duration(n, NANOSECONDS)
+
+ def microseconds = Duration(n, MICROSECONDS)
+ def micros = Duration(n, MICROSECONDS)
+ def microsecond = Duration(n, MICROSECONDS)
+ def micro = Duration(n, MICROSECONDS)
+
+ def milliseconds = Duration(n, MILLISECONDS)
+ def millis = Duration(n, MILLISECONDS)
+ def millisecond = Duration(n, MILLISECONDS)
+ def milli = Duration(n, MILLISECONDS)
+
+ def seconds = Duration(n, SECONDS)
+ def second = Duration(n, SECONDS)
+
+ def minutes = Duration(n * 60, SECONDS)
+ def minute = Duration(n * 60, SECONDS)
+
+ def hours = Duration(n * 3600, SECONDS)
+ def hour = Duration(n * 3600, SECONDS)
+
+ def days = Duration(n * 86400, SECONDS)
+ def day = Duration(n * 86400, SECONDS)
+
+ def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+
+ def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+
+ def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+
+ def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
+ def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
+
+ def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS))
+ def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS))
+
+ def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS))
+ def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS))
+
+ def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS))
+ def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS))
+}
+
+class DurationLong(n: Long) {
+ import DurationImplicits.Classifier
+
+ def nanoseconds = Duration(n, NANOSECONDS)
+ def nanos = Duration(n, NANOSECONDS)
+ def nanosecond = Duration(n, NANOSECONDS)
+ def nano = Duration(n, NANOSECONDS)
+
+ def microseconds = Duration(n, MICROSECONDS)
+ def micros = Duration(n, MICROSECONDS)
+ def microsecond = Duration(n, MICROSECONDS)
+ def micro = Duration(n, MICROSECONDS)
+
+ def milliseconds = Duration(n, MILLISECONDS)
+ def millis = Duration(n, MILLISECONDS)
+ def millisecond = Duration(n, MILLISECONDS)
+ def milli = Duration(n, MILLISECONDS)
+
+ def seconds = Duration(n, SECONDS)
+ def second = Duration(n, SECONDS)
+
+ def minutes = Duration(n * 60, SECONDS)
+ def minute = Duration(n * 60, SECONDS)
+
+ def hours = Duration(n * 3600, SECONDS)
+ def hour = Duration(n * 3600, SECONDS)
+
+ def days = Duration(n * 86400, SECONDS)
+ def day = Duration(n * 86400, SECONDS)
+
+ def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+ def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS))
+
+ def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+ def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS))
+
+ def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+ def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS))
+
+ def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
+ def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS))
+
+ def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS))
+ def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS))
+
+ def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS))
+ def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS))
+
+ def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS))
+ def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS))
+}
+
+class DurationDouble(d: Double) {
+ import DurationImplicits.Classifier
+
+ def nanoseconds = Duration(d, NANOSECONDS)
+ def nanos = Duration(d, NANOSECONDS)
+ def nanosecond = Duration(d, NANOSECONDS)
+ def nano = Duration(d, NANOSECONDS)
+
+ def microseconds = Duration(d, MICROSECONDS)
+ def micros = Duration(d, MICROSECONDS)
+ def microsecond = Duration(d, MICROSECONDS)
+ def micro = Duration(d, MICROSECONDS)
+
+ def milliseconds = Duration(d, MILLISECONDS)
+ def millis = Duration(d, MILLISECONDS)
+ def millisecond = Duration(d, MILLISECONDS)
+ def milli = Duration(d, MILLISECONDS)
+
+ def seconds = Duration(d, SECONDS)
+ def second = Duration(d, SECONDS)
+
+ def minutes = Duration(d * 60, SECONDS)
+ def minute = Duration(d * 60, SECONDS)
+
+ def hours = Duration(d * 3600, SECONDS)
+ def hour = Duration(d * 3600, SECONDS)
+
+ def days = Duration(d * 86400, SECONDS)
+ def day = Duration(d * 86400, SECONDS)
+
+ def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
+ def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
+ def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
+ def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS))
+
+ def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
+ def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
+ def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
+ def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS))
+
+ def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
+ def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
+ def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
+ def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS))
+
+ def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, SECONDS))
+ def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, SECONDS))
+
+ def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 60, SECONDS))
+ def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 60, SECONDS))
+
+ def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 3600, SECONDS))
+ def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 3600, SECONDS))
+
+ def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 86400, SECONDS))
+ def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 86400, SECONDS))
+}
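
Editor's note: to summarise the API introduced by this file, a usage sketch; note that the literal syntax (2.seconds, 100.millis) comes from DurationImplicits rather than the Duration companion:

    import java.util.concurrent.TimeUnit._
    import scala.concurrent.util.{ Deadline, Duration }
    import scala.concurrent.util.DurationImplicits._

    val d1  = Duration(100, MILLISECONDS)
    val d2  = Duration("1.5 seconds")        // parsed; throws on a format error
    val sum = d1 + d2                        // 1600 milliseconds
    assert(sum < Duration.Inf)

    val deadline: Deadline  = 2.seconds.fromNow
    val remaining: Duration = deadline.timeLeft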
diff --git a/src/library/scala/reflect/api/Positions.scala b/src/library/scala/reflect/api/Positions.scala
index 181e7c1525..4c8c33bec8 100644
--- a/src/library/scala/reflect/api/Positions.scala
+++ b/src/library/scala/reflect/api/Positions.scala
@@ -1,9 +1,21 @@
package scala.reflect
package api
-trait Positions {
+trait Positions { self: Universe =>
+ /** TreeAnnotation is a generalisation of Position.
+ *
+ * TreeAnnotation cannot be an upper bound of Position, since the corresponding classes
+ * must live outside of the universe for backwards compatibility (see scala.tools.nsc.util.Position).
+ * Subtyping is therefore represented as a pair of coercions.
+ *
+ * Typically, positionToAnnotation is the identity and annotationToPosition returns annot.pos.
+ */
+ type TreeAnnotation // <: { def pos: Position }
+ val NoTreeAnnotation: TreeAnnotation
+ implicit def positionToAnnotation(pos: Position): TreeAnnotation // = pos
+ def annotationToPosition(annot: TreeAnnotation): Position // = annot.pos
+ def _checkSetAnnotation(tree: Tree, annot: TreeAnnotation): Unit = () // check that annot may overwrite tree.annot
- type Position
+ type Position // <: TreeAnnotation, but not practical to enforce this (would require moving Position, SourceFile, Reporter,... into the universe)
val NoPosition: Position
-
} \ No newline at end of file
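
Editor's note: a purely hypothetical sketch of the "typical" wiring described in the comment above, for a universe whose annotations are nothing more than positions (trait and member names taken from this hunk, placement alongside scala.reflect.api assumed):

    trait SimplePositions extends Positions { self: Universe =>
      type TreeAnnotation = Position
      val NoTreeAnnotation: TreeAnnotation = NoPosition
      implicit def positionToAnnotation(pos: Position): TreeAnnotation = pos   // identity
      def annotationToPosition(annot: TreeAnnotation): Position = annot        // annot is already a Position
    }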
diff --git a/src/library/scala/reflect/api/TreePrinters.scala b/src/library/scala/reflect/api/TreePrinters.scala
index 21b55e9c0e..43865915d3 100644
--- a/src/library/scala/reflect/api/TreePrinters.scala
+++ b/src/library/scala/reflect/api/TreePrinters.scala
@@ -41,7 +41,7 @@ trait TreePrinters { self: Universe =>
else if (tree.original != null)
print(".setOriginal(", tree.original, ")")
case tree: Tree =>
- print(tree.productPrefix+"(")
+ print(tree.printingPrefix+"(")
val it = tree.productIterator
while (it.hasNext) {
it.next() match {
diff --git a/src/library/scala/reflect/api/Trees.scala b/src/library/scala/reflect/api/Trees.scala
index a355207ff0..4ee13adf52 100644
--- a/src/library/scala/reflect/api/Trees.scala
+++ b/src/library/scala/reflect/api/Trees.scala
@@ -74,11 +74,24 @@ trait Trees { self: Universe =>
val id = nodeCount
nodeCount += 1
- private[this] var rawpos: Position = NoPosition
+ /** Prefix under which to print this tree type. Defaults to product
+ * prefix (e.g. DefTree) but because that is used in reification
+ * it cannot be altered without breaking reflection.
+ */
+ def printingPrefix = productPrefix
+
+ def pos: Position = annotationToPosition(rawannot)
+ def pos_=(pos: Position): Unit = annotation = pos
+ def setPos(newpos: Position): this.type = { pos = newpos; this }
+
+ private[this] var rawannot: TreeAnnotation = NoTreeAnnotation
+ def annotation: TreeAnnotation = rawannot
+ def annotation_=(annot: TreeAnnotation): Unit = {
+ _checkSetAnnotation(this, annot)
+ rawannot = annot
+ }
- def pos = rawpos
- def pos_=(pos: Position) = rawpos = pos
- def setPos(pos: Position): this.type = { rawpos = pos; this }
+ def setAnnotation(annot: TreeAnnotation): this.type = { annotation = annot; this }
private[this] var rawtpe: Type = _
@@ -159,11 +172,12 @@ trait Trees { self: Universe =>
def foreach(f: Tree => Unit) { new ForeachTreeTraverser(f).traverse(this) }
/** Find all subtrees matching predicate `p` */
- def filter(f: Tree => Boolean): List[Tree] = {
+ def withFilter(f: Tree => Boolean): List[Tree] = {
val ft = new FilterTreeTraverser(f)
ft.traverse(this)
ft.hits.toList
}
+ def filter(f: Tree => Boolean): List[Tree] = withFilter(f)
/** Returns optionally first tree (in a preorder traversal) which satisfies predicate `p`,
* or None if none exists.
@@ -216,7 +230,7 @@ trait Trees { self: Universe =>
duplicateTree(this).asInstanceOf[this.type]
private[scala] def copyAttrs(tree: Tree): this.type = {
- pos = tree.pos
+ annotation = tree.annotation
tpe = tree.tpe
if (hasSymbol) symbol = tree.symbol
this
@@ -249,6 +263,7 @@ trait Trees { self: Universe =>
* are in DefTrees.
*/
trait RefTree extends SymTree {
+ def qualifier: Tree // empty for Idents
def name: Name
}
@@ -489,16 +504,14 @@ trait Trees { self: Universe =>
/** Factory method for object creation `new tpt(args_1)...(args_n)`
* A `New(t, as)` is expanded to: `(new t).<init>(as)`
*/
- def New(tpt: Tree, argss: List[List[Tree]]): Tree = {
- // todo. we need to expose names in scala.reflect.api
- val superRef: Tree = Select(New(tpt), nme.CONSTRUCTOR)
- if (argss.isEmpty) Apply(superRef, Nil)
- else (superRef /: argss) (Apply)
+ def New(tpt: Tree, argss: List[List[Tree]]): Tree = argss match {
+ case Nil => new ApplyConstructor(tpt, Nil)
+ case xs :: rest => rest.foldLeft(new ApplyConstructor(tpt, xs): Tree)(Apply)
}
/** 0-1 argument list new, based on a type.
*/
def New(tpe: Type, args: Tree*): Tree =
- New(TypeTree(tpe), List(args.toList))
+ new ApplyConstructor(TypeTree(tpe), args.toList)
/** Type annotation, eliminated by explicit outer */
case class Typed(expr: Tree, tpt: Tree)
@@ -537,6 +550,10 @@ trait Trees { self: Universe =>
class ApplyImplicitView(fun: Tree, args: List[Tree]) extends Apply(fun, args)
+ class ApplyConstructor(tpt: Tree, args: List[Tree]) extends Apply(Select(New(tpt), nme.CONSTRUCTOR), args) {
+ override def printingPrefix = "ApplyConstructor"
+ }
+
/** Dynamic value application.
* In a dynamic application q.f(as)
* - q is stored in qual
@@ -575,7 +592,9 @@ trait Trees { self: Universe =>
Select(qualifier, sym.name) setSymbol sym
/** Identifier <name> */
- case class Ident(name: Name) extends RefTree
+ case class Ident(name: Name) extends RefTree {
+ def qualifier: Tree = EmptyTree
+ }
def Ident(name: String): Ident =
Ident(newTermName(name))
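
Editor's note: with the reworked factory above, a call such as New(tpt, List(List(a, b), List(c))) now expands to

    Apply(new ApplyConstructor(tpt, List(a, b)), List(c))

and, since ApplyConstructor is just an Apply of Select(New(tpt), nme.CONSTRUCTOR), this is the same (new t).<init>(as) shape as before; the wrapper only changes the printingPrefix used by tree printers.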
diff --git a/src/library/scala/util/Duration.scala b/src/library/scala/util/Duration.scala
deleted file mode 100644
index 4c118f8b3b..0000000000
--- a/src/library/scala/util/Duration.scala
+++ /dev/null
@@ -1,485 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Typesafe Inc. <http://www.typesafe.com>
- */
-
-package scala.util
-
-import java.util.concurrent.TimeUnit
-import TimeUnit._
-import java.lang.{ Long ⇒ JLong, Double ⇒ JDouble }
-//import akka.actor.ActorSystem (commented methods)
-
-class TimerException(message: String) extends RuntimeException(message)
-
-/**
- * Simple timer class.
- * Usage:
- * <pre>
- * import akka.util.duration._
- * import akka.util.Timer
- *
- * val timer = Timer(30.seconds)
- * while (timer.isTicking) { ... }
- * </pre>
- */
-case class Timer(duration: Duration, throwExceptionOnTimeout: Boolean = false) {
- val startTimeInMillis = System.currentTimeMillis
- val timeoutInMillis = duration.toMillis
-
- /**
- * Returns true while the timer is ticking. After that it either throws and exception or
- * returns false. Depending on if the 'throwExceptionOnTimeout' argument is true or false.
- */
- def isTicking: Boolean = {
- if (!(timeoutInMillis > (System.currentTimeMillis - startTimeInMillis))) {
- if (throwExceptionOnTimeout) throw new TimerException("Time out after " + duration)
- else false
- } else true
- }
-}
-
-object Duration {
- def apply(length: Long, unit: TimeUnit): Duration = new FiniteDuration(length, unit)
- def apply(length: Double, unit: TimeUnit): Duration = fromNanos(unit.toNanos(1) * length)
- def apply(length: Long, unit: String): Duration = new FiniteDuration(length, timeUnit(unit))
-
- def fromNanos(nanos: Long): Duration = {
- if (nanos % 86400000000000L == 0) {
- Duration(nanos / 86400000000000L, DAYS)
- } else if (nanos % 3600000000000L == 0) {
- Duration(nanos / 3600000000000L, HOURS)
- } else if (nanos % 60000000000L == 0) {
- Duration(nanos / 60000000000L, MINUTES)
- } else if (nanos % 1000000000L == 0) {
- Duration(nanos / 1000000000L, SECONDS)
- } else if (nanos % 1000000L == 0) {
- Duration(nanos / 1000000L, MILLISECONDS)
- } else if (nanos % 1000L == 0) {
- Duration(nanos / 1000L, MICROSECONDS)
- } else {
- Duration(nanos, NANOSECONDS)
- }
- }
-
- def fromNanos(nanos: Double): Duration = fromNanos((nanos + 0.5).asInstanceOf[Long])
-
- /**
- * Construct a Duration by parsing a String. In case of a format error, a
- * RuntimeException is thrown. See `unapply(String)` for more information.
- */
- def apply(s: String): Duration = unapply(s) getOrElse sys.error("format error")
-
- /**
- * Deconstruct a Duration into length and unit if it is finite.
- */
- def unapply(d: Duration): Option[(Long, TimeUnit)] = {
- if (d.finite_?) {
- Some((d.length, d.unit))
- } else {
- None
- }
- }
-
- private val RE = ("""^\s*(\d+(?:\.\d+)?)\s*""" + // length part
- "(?:" + // units are distinguished in separate match groups
- "(d|day|days)|" +
- "(h|hour|hours)|" +
- "(min|minute|minutes)|" +
- "(s|sec|second|seconds)|" +
- "(ms|milli|millis|millisecond|milliseconds)|" +
- "(µs|micro|micros|microsecond|microseconds)|" +
- "(ns|nano|nanos|nanosecond|nanoseconds)" +
- """)\s*$""").r // close the non-capturing group
- private val REinf = """^\s*Inf\s*$""".r
- private val REminf = """^\s*(?:-\s*|Minus)Inf\s*""".r
-
- /**
- * Parse String, return None if no match. Format is `"<length><unit>"`, where
- * whitespace is allowed before, between and after the parts. Infinities are
- * designated by `"Inf"` and `"-Inf"` or `"MinusInf"`.
- */
- def unapply(s: String): Option[Duration] = s match {
- case RE(length, d, h, m, s, ms, mus, ns) ⇒
- if (d ne null) Some(Duration(JDouble.parseDouble(length), DAYS)) else if (h ne null) Some(Duration(JDouble.parseDouble(length), HOURS)) else if (m ne null) Some(Duration(JDouble.parseDouble(length), MINUTES)) else if (s ne null) Some(Duration(JDouble.parseDouble(length), SECONDS)) else if (ms ne null) Some(Duration(JDouble.parseDouble(length), MILLISECONDS)) else if (mus ne null) Some(Duration(JDouble.parseDouble(length), MICROSECONDS)) else if (ns ne null) Some(Duration(JDouble.parseDouble(length), NANOSECONDS)) else
- sys.error("made some error in regex (should not be possible)")
- case REinf() ⇒ Some(Inf)
- case REminf() ⇒ Some(MinusInf)
- case _ ⇒ None
- }
-
- /**
- * Parse TimeUnit from string representation.
- */
- def timeUnit(unit: String) = unit.toLowerCase match {
- case "d" | "day" | "days" ⇒ DAYS
- case "h" | "hour" | "hours" ⇒ HOURS
- case "min" | "minute" | "minutes" ⇒ MINUTES
- case "s" | "sec" | "second" | "seconds" ⇒ SECONDS
- case "ms" | "milli" | "millis" | "millisecond" | "milliseconds" ⇒ MILLISECONDS
- case "µs" | "micro" | "micros" | "microsecond" | "microseconds" ⇒ MICROSECONDS
- case "ns" | "nano" | "nanos" | "nanosecond" | "nanoseconds" ⇒ NANOSECONDS
- }
-
- val Zero: Duration = new FiniteDuration(0, NANOSECONDS)
- val Undefined: Duration = new Duration with Infinite {
- override def toString = "Duration.Undefined"
- override def equals(other: Any) = other.asInstanceOf[AnyRef] eq this
- override def +(other: Duration): Duration = throw new IllegalArgumentException("cannot add Undefined duration")
- override def -(other: Duration): Duration = throw new IllegalArgumentException("cannot subtract Undefined duration")
- override def *(factor: Double): Duration = throw new IllegalArgumentException("cannot multiply Undefined duration")
- override def /(factor: Double): Duration = throw new IllegalArgumentException("cannot divide Undefined duration")
- override def /(other: Duration): Double = throw new IllegalArgumentException("cannot divide Undefined duration")
- def >(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
- def >=(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
- def <(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
- def <=(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration")
- def unary_- : Duration = throw new IllegalArgumentException("cannot negate Undefined duration")
- }
-
- trait Infinite {
- this: Duration ⇒
-
- override def equals(other: Any) = false
-
- def +(other: Duration): Duration =
- other match {
- case _: this.type ⇒ this
- case _: Infinite ⇒ throw new IllegalArgumentException("illegal addition of infinities")
- case _ ⇒ this
- }
- def -(other: Duration): Duration =
- other match {
- case _: this.type ⇒ throw new IllegalArgumentException("illegal subtraction of infinities")
- case _ ⇒ this
- }
- def *(factor: Double): Duration = this
- def /(factor: Double): Duration = this
- def /(other: Duration): Double =
- other match {
- case _: Infinite ⇒ throw new IllegalArgumentException("illegal division of infinities")
- // maybe questionable but pragmatic: Inf / 0 => Inf
- case x ⇒ Double.PositiveInfinity * (if ((this > Zero) ^ (other >= Zero)) -1 else 1)
- }
-
- def finite_? = false
-
- def length: Long = throw new IllegalArgumentException("length not allowed on infinite Durations")
- def unit: TimeUnit = throw new IllegalArgumentException("unit not allowed on infinite Durations")
- def toNanos: Long = throw new IllegalArgumentException("toNanos not allowed on infinite Durations")
- def toMicros: Long = throw new IllegalArgumentException("toMicros not allowed on infinite Durations")
- def toMillis: Long = throw new IllegalArgumentException("toMillis not allowed on infinite Durations")
- def toSeconds: Long = throw new IllegalArgumentException("toSeconds not allowed on infinite Durations")
- def toMinutes: Long = throw new IllegalArgumentException("toMinutes not allowed on infinite Durations")
- def toHours: Long = throw new IllegalArgumentException("toHours not allowed on infinite Durations")
- def toDays: Long = throw new IllegalArgumentException("toDays not allowed on infinite Durations")
- def toUnit(unit: TimeUnit): Double = throw new IllegalArgumentException("toUnit not allowed on infinite Durations")
-
- def printHMS = toString
- }
-
- /**
- * Infinite duration: greater than any other and not equal to any other,
- * including itself.
- */
- val Inf: Duration = new Duration with Infinite {
- override def toString = "Duration.Inf"
- def >(other: Duration) = true
- def >=(other: Duration) = true
- def <(other: Duration) = false
- def <=(other: Duration) = false
- def unary_- : Duration = MinusInf
- }
-
- /**
- * Infinite negative duration: lesser than any other and not equal to any other,
- * including itself.
- */
- val MinusInf: Duration = new Duration with Infinite {
- override def toString = "Duration.MinusInf"
- def >(other: Duration) = false
- def >=(other: Duration) = false
- def <(other: Duration) = true
- def <=(other: Duration) = true
- def unary_- : Duration = Inf
- }
-
- // Java Factories
- def create(length: Long, unit: TimeUnit): Duration = apply(length, unit)
- def create(length: Double, unit: TimeUnit): Duration = apply(length, unit)
- def create(length: Long, unit: String): Duration = apply(length, unit)
- def parse(s: String): Duration = unapply(s).get
-}
-
-/**
- * Utility for working with java.util.concurrent.TimeUnit durations.
- *
- * <p/>
- * Examples of usage from Java:
- * <pre>
- * import akka.util.FiniteDuration;
- * import java.util.concurrent.TimeUnit;
- *
- * Duration duration = new FiniteDuration(100, MILLISECONDS);
- * Duration duration = new FiniteDuration(5, "seconds");
- *
- * duration.toNanos();
- * </pre>
- *
- * <p/>
- * Examples of usage from Scala:
- * <pre>
- * import akka.util.Duration
- * import java.util.concurrent.TimeUnit
- *
- * val duration = Duration(100, MILLISECONDS)
- * val duration = Duration(100, "millis")
- *
- * duration.toNanos
- * duration < 1.second
- * duration <= Duration.Inf
- * </pre>
- *
- * <p/>
- * Implicits are also provided for Int, Long and Double. Example usage:
- * <pre>
- * import akka.util.duration._
- *
- * val duration = 100 millis
- * </pre>
- *
- * Extractors, parsing and arithmetic are also included:
- * <pre>
- * val d = Duration("1.2 µs")
- * val Duration(length, unit) = 5 millis
- * val d2 = d * 2.5
- * val d3 = d2 + 1.millisecond
- * </pre>
- */
-abstract class Duration extends Serializable {
- def length: Long
- def unit: TimeUnit
- def toNanos: Long
- def toMicros: Long
- def toMillis: Long
- def toSeconds: Long
- def toMinutes: Long
- def toHours: Long
- def toDays: Long
- def toUnit(unit: TimeUnit): Double
- def printHMS: String
- def <(other: Duration): Boolean
- def <=(other: Duration): Boolean
- def >(other: Duration): Boolean
- def >=(other: Duration): Boolean
- def +(other: Duration): Duration
- def -(other: Duration): Duration
- def *(factor: Double): Duration
- def /(factor: Double): Duration
- def /(other: Duration): Double
- def unary_- : Duration
- def finite_? : Boolean
-// def dilated(implicit system: ActorSystem): Duration = this * system.settings.TestTimeFactor
- def min(other: Duration): Duration = if (this < other) this else other
- def max(other: Duration): Duration = if (this > other) this else other
- def sleep(): Unit = Thread.sleep(toMillis)
-
- // Java API
- def lt(other: Duration) = this < other
- def lteq(other: Duration) = this <= other
- def gt(other: Duration) = this > other
- def gteq(other: Duration) = this >= other
- def plus(other: Duration) = this + other
- def minus(other: Duration) = this - other
- def mul(factor: Double) = this * factor
- def div(factor: Double) = this / factor
- def div(other: Duration) = this / other
- def neg() = -this
- def isFinite() = finite_?
-}
-
-class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration {
- import Duration._
-
- def this(length: Long, unit: String) = this(length, Duration.timeUnit(unit))
-
- def toNanos = unit.toNanos(length)
- def toMicros = unit.toMicros(length)
- def toMillis = unit.toMillis(length)
- def toSeconds = unit.toSeconds(length)
- def toMinutes = unit.toMinutes(length)
- def toHours = unit.toHours(length)
- def toDays = unit.toDays(length)
- def toUnit(u: TimeUnit) = long2double(toNanos) / NANOSECONDS.convert(1, u)
-
- override def toString = this match {
- case Duration(1, DAYS) ⇒ "1 day"
- case Duration(x, DAYS) ⇒ x + " days"
- case Duration(1, HOURS) ⇒ "1 hour"
- case Duration(x, HOURS) ⇒ x + " hours"
- case Duration(1, MINUTES) ⇒ "1 minute"
- case Duration(x, MINUTES) ⇒ x + " minutes"
- case Duration(1, SECONDS) ⇒ "1 second"
- case Duration(x, SECONDS) ⇒ x + " seconds"
- case Duration(1, MILLISECONDS) ⇒ "1 millisecond"
- case Duration(x, MILLISECONDS) ⇒ x + " milliseconds"
- case Duration(1, MICROSECONDS) ⇒ "1 microsecond"
- case Duration(x, MICROSECONDS) ⇒ x + " microseconds"
- case Duration(1, NANOSECONDS) ⇒ "1 nanosecond"
- case Duration(x, NANOSECONDS) ⇒ x + " nanoseconds"
- }
-
- def printHMS = "%02d:%02d:%06.3f".format(toHours, toMinutes % 60, toMillis / 1000.0 % 60)
-
- def <(other: Duration) = {
- if (other.finite_?) {
- toNanos < other.asInstanceOf[FiniteDuration].toNanos
- } else {
- other > this
- }
- }
-
- def <=(other: Duration) = {
- if (other.finite_?) {
- toNanos <= other.asInstanceOf[FiniteDuration].toNanos
- } else {
- other >= this
- }
- }
-
- def >(other: Duration) = {
- if (other.finite_?) {
- toNanos > other.asInstanceOf[FiniteDuration].toNanos
- } else {
- other < this
- }
- }
-
- def >=(other: Duration) = {
- if (other.finite_?) {
- toNanos >= other.asInstanceOf[FiniteDuration].toNanos
- } else {
- other <= this
- }
- }
-
- def +(other: Duration) = {
- if (!other.finite_?) {
- other
- } else {
- val nanos = toNanos + other.asInstanceOf[FiniteDuration].toNanos
- fromNanos(nanos)
- }
- }
-
- def -(other: Duration) = {
- if (!other.finite_?) {
- other
- } else {
- val nanos = toNanos - other.asInstanceOf[FiniteDuration].toNanos
- fromNanos(nanos)
- }
- }
-
- def *(factor: Double) = fromNanos(long2double(toNanos) * factor)
-
- def /(factor: Double) = fromNanos(long2double(toNanos) / factor)
-
- def /(other: Duration) = if (other.finite_?) long2double(toNanos) / other.toNanos else 0
-
- def unary_- = Duration(-length, unit)
-
- def finite_? = true
-
- override def equals(other: Any) =
- other.isInstanceOf[FiniteDuration] &&
- toNanos == other.asInstanceOf[FiniteDuration].toNanos
-
- override def hashCode = toNanos.asInstanceOf[Int]
-}
-
-class DurationInt(n: Int) {
- def nanoseconds = Duration(n, NANOSECONDS)
- def nanos = Duration(n, NANOSECONDS)
- def nanosecond = Duration(n, NANOSECONDS)
- def nano = Duration(n, NANOSECONDS)
-
- def microseconds = Duration(n, MICROSECONDS)
- def micros = Duration(n, MICROSECONDS)
- def microsecond = Duration(n, MICROSECONDS)
- def micro = Duration(n, MICROSECONDS)
-
- def milliseconds = Duration(n, MILLISECONDS)
- def millis = Duration(n, MILLISECONDS)
- def millisecond = Duration(n, MILLISECONDS)
- def milli = Duration(n, MILLISECONDS)
-
- def seconds = Duration(n, SECONDS)
- def second = Duration(n, SECONDS)
-
- def minutes = Duration(n, MINUTES)
- def minute = Duration(n, MINUTES)
-
- def hours = Duration(n, HOURS)
- def hour = Duration(n, HOURS)
-
- def days = Duration(n, DAYS)
- def day = Duration(n, DAYS)
-}
-
-class DurationLong(n: Long) {
- def nanoseconds = Duration(n, NANOSECONDS)
- def nanos = Duration(n, NANOSECONDS)
- def nanosecond = Duration(n, NANOSECONDS)
- def nano = Duration(n, NANOSECONDS)
-
- def microseconds = Duration(n, MICROSECONDS)
- def micros = Duration(n, MICROSECONDS)
- def microsecond = Duration(n, MICROSECONDS)
- def micro = Duration(n, MICROSECONDS)
-
- def milliseconds = Duration(n, MILLISECONDS)
- def millis = Duration(n, MILLISECONDS)
- def millisecond = Duration(n, MILLISECONDS)
- def milli = Duration(n, MILLISECONDS)
-
- def seconds = Duration(n, SECONDS)
- def second = Duration(n, SECONDS)
-
- def minutes = Duration(n, MINUTES)
- def minute = Duration(n, MINUTES)
-
- def hours = Duration(n, HOURS)
- def hour = Duration(n, HOURS)
-
- def days = Duration(n, DAYS)
- def day = Duration(n, DAYS)
-}
-
-class DurationDouble(d: Double) {
- def nanoseconds = Duration(d, NANOSECONDS)
- def nanos = Duration(d, NANOSECONDS)
- def nanosecond = Duration(d, NANOSECONDS)
- def nano = Duration(d, NANOSECONDS)
-
- def microseconds = Duration(d, MICROSECONDS)
- def micros = Duration(d, MICROSECONDS)
- def microsecond = Duration(d, MICROSECONDS)
- def micro = Duration(d, MICROSECONDS)
-
- def milliseconds = Duration(d, MILLISECONDS)
- def millis = Duration(d, MILLISECONDS)
- def millisecond = Duration(d, MILLISECONDS)
- def milli = Duration(d, MILLISECONDS)
-
- def seconds = Duration(d, SECONDS)
- def second = Duration(d, SECONDS)
-
- def minutes = Duration(d, MINUTES)
- def minute = Duration(d, MINUTES)
-
- def hours = Duration(d, HOURS)
- def hour = Duration(d, HOURS)
-
- def days = Duration(d, DAYS)
- def day = Duration(d, DAYS)
-}
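For context on the DSL removed above: the DurationInt/DurationLong/DurationDouble wrappers were meant to be reached through implicit conversions, so call sites could attach a time-unit suffix to a numeric literal. A minimal sketch under that assumption (the implicit-conversion names below are hypothetical, not taken from the removed file):

  import java.util.concurrent.TimeUnit._

  // Hypothetical lifts into the wrapper classes removed above.
  implicit def intToDurationInt(n: Int): DurationInt = new DurationInt(n)
  implicit def doubleToDurationDouble(d: Double): DurationDouble = new DurationDouble(d)

  val timeout = 5.seconds        // Duration(5, SECONDS)
  val tick    = 250.millis       // Duration(250, MILLISECONDS)
  val sum     = timeout + tick   // arithmetic via FiniteDuration.+
  val ratio   = timeout / tick   // Double division of nanos: 20.0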
diff --git a/src/library/scala/util/Timeout.scala b/src/library/scala/util/Timeout.scala
deleted file mode 100644
index 0190675344..0000000000
--- a/src/library/scala/util/Timeout.scala
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Copyright (C) 2009-2011 Typesafe Inc. <http://www.typesafe.com>
- */
-package scala.util
-
-import java.util.concurrent.TimeUnit
-
-case class Timeout(duration: Duration) {
- def this(timeout: Long) = this(Duration(timeout, TimeUnit.MILLISECONDS))
- def this(length: Long, unit: TimeUnit) = this(Duration(length, unit))
-}
-
-object Timeout {
- /**
- * A timeout with zero duration, will cause most requests to always timeout.
- */
- val zero = new Timeout(Duration.Zero)
-
- /**
- * A Timeout with infinite duration. Will never timeout. Use extreme caution with this
- * as it may cause memory leaks, blocked threads, or may not even be supported by
- * the receiver, which would result in an exception.
- */
- val never = new Timeout(Duration.Inf)
-
- def apply(timeout: Long) = new Timeout(timeout)
- def apply(length: Long, unit: TimeUnit) = new Timeout(length, unit)
-
- implicit def durationToTimeout(duration: Duration) = new Timeout(duration)
- implicit def intToTimeout(timeout: Int) = new Timeout(timeout)
- implicit def longToTimeout(timeout: Long) = new Timeout(timeout)
- //implicit def defaultTimeout(implicit system: ActorSystem) = system.settings.ActorTimeout (have to introduce this in ActorSystem)
-}
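The Timeout wrapper deleted here was a thin case class over Duration with implicit lifts from Int, Long and Duration. A short usage sketch of that pre-removal API (hypothetical call sites, not part of the patch):

  import java.util.concurrent.TimeUnit
  import scala.util.{ Duration, Timeout }

  val explicit: Timeout = Timeout(30, TimeUnit.SECONDS)  // apply(Long, TimeUnit)
  val millis: Timeout   = Timeout(5000)                  // apply(Long), read as milliseconds
  val lifted: Timeout   = Duration(1, TimeUnit.MINUTES)  // via implicit durationToTimeout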
diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala
index cc244a5b88..5b6b9f2bb9 100755
--- a/src/library/scala/xml/Elem.scala
+++ b/src/library/scala/xml/Elem.scala
@@ -23,12 +23,12 @@ object Elem {
* @deprecated This factory method is retained for backward compatibility; please use the other one, with which you
* can specify your own preference for minimizeEmpty.
*/
- @deprecated
- def apply(prefix: String,label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*): Elem =
+ @deprecated("Use the other apply method in this object", "2.10.0")
+ def apply(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*): Elem =
apply(prefix, label, attributes, scope, child.isEmpty, child: _*)
- def apply(prefix: String,label: String, attributes: MetaData, scope: NamespaceBinding, minimizeEmpty: Boolean, child: Node*): Elem =
- new Elem(prefix,label,attributes,scope, minimizeEmpty, child:_*)
+ def apply(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, minimizeEmpty: Boolean, child: Node*): Elem =
+ new Elem(prefix, label, attributes, scope, minimizeEmpty, child: _*)
def unapplySeq(n: Node) = n match {
case _: SpecialNode | _: Group => None
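The Elem change above deprecates the five-argument factory in favour of the overload that takes minimizeEmpty explicitly, so callers state whether childless elements should serialize as <br/> rather than <br></br>. A small sketch of both call forms (illustrative only):

  import scala.xml.{ Elem, Null, Text, TopScope }

  // Old form: still compiles, now with a deprecation warning pointing at the new overload.
  val deprecatedForm = Elem(null, "br", Null, TopScope)

  // New form: minimizeEmpty is chosen by the caller.
  val minimized = Elem(null, "br", Null, TopScope, minimizeEmpty = true)
  val expanded  = Elem(null, "p", Null, TopScope, false, Text("hello"))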
diff --git a/src/library/scala/xml/XML.scala b/src/library/scala/xml/XML.scala
index 4beba91899..f6955c6612 100755
--- a/src/library/scala/xml/XML.scala
+++ b/src/library/scala/xml/XML.scala
@@ -15,8 +15,7 @@ import java.io.{ InputStream, Reader, StringReader, Writer }
import java.nio.channels.Channels
import scala.util.control.Exception.ultimately
-object Source
-{
+object Source {
def fromFile(file: File) = new InputSource(new FileInputStream(file))
def fromFile(fd: FileDescriptor) = new InputSource(new FileInputStream(fd))
def fromFile(name: String) = new InputSource(new FileInputStream(name))
@@ -31,13 +30,18 @@ object Source
* Governs how empty elements (i.e. those without child elements) should be serialized.
*/
object MinimizeMode extends Enumeration {
- /** Minimize empty tags if they were originally empty when parsed, or if they were constructed with [[scala.xml.Elem]]`#minimizeEmpty` == true */
+ /** Minimize empty tags if they were originally empty when parsed, or if they were constructed
+ * with [[scala.xml.Elem]]`#minimizeEmpty` == true
+ */
val Default = Value
- /** Always minimize empty tags. Note that this may be problematic for XHTML, in which case [[scala.xml.Xhtml]]`#toXhtml` should be used instead. */
+ /** Always minimize empty tags. Note that this may be problematic for XHTML, in which
+ * case [[scala.xml.Xhtml]]`#toXhtml` should be used instead.
+ */
val Always = Value
- /** Never minimize empty tags. */
+ /** Never minimize empty tags.
+ */
val Never = Value
}
@@ -50,8 +54,7 @@ import Source._
* @author Burak Emir
* @version 1.0, 25/04/2005
*/
-object XML extends XMLLoader[Elem]
-{
+object XML extends XMLLoader[Elem] {
val xml = "xml"
val xmlns = "xmlns"
val namespace = "http://www.w3.org/XML/1998/namespace"
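MinimizeMode, whose doc comments are reflowed above, is the policy enumeration the XML serializer consults when deciding whether to collapse empty tags. A minimal sketch of picking a policy value (how it is passed to a particular serializer call is omitted, since that API is not shown in this patch):

  import scala.xml.MinimizeMode

  val policy: MinimizeMode.Value = MinimizeMode.Always  // always emit <br/> for empty elements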
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index 32feaa2209..af9b5f47cf 100755
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -134,7 +134,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
//
/** {{{
- * &lt;? prolog ::= xml S ... ?&gt;
+ * <? prolog ::= xml S ... ?>
* }}} */
def xmlProcInstr(): MetaData = {
xToken("xml")
@@ -195,7 +195,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** {{{
- * &lt;? prolog ::= xml S?
+ * <? prolog ::= xml S?
* // this is a bit more lenient than necessary...
* }}} */
def prolog(): (Option[String], Option[String], Option[Boolean]) =
@@ -355,7 +355,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** {{{
- * '&lt;! CharData ::= [CDATA[ ( {char} - {char}"]]&gt;"{char} ) ']]&gt;'
+ * '<! CharData ::= [CDATA[ ( {char} - {char}"]]>"{char} ) ']]>'
*
* see [15]
* }}} */
@@ -369,7 +369,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** {{{
- * Comment ::= '&lt;!--' ((Char - '-') | ('-' (Char - '-')))* '--&gt;'
+ * Comment ::= '<!--' ((Char - '-') | ('-' (Char - '-')))* '-->'
*
* see [15]
* }}} */
@@ -399,7 +399,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** {{{
- * '&lt;' content1 ::= ...
+ * '<' content1 ::= ...
* }}} */
def content1(pscope: NamespaceBinding, ts: NodeBuffer) {
ch match {
@@ -420,7 +420,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** {{{
- * content1 ::= '&lt;' content1 | '&amp;' charref ...
+ * content1 ::= '<' content1 | '&' charref ...
* }}} */
def content(pscope: NamespaceBinding): NodeSeq = {
var ts = new NodeBuffer
@@ -490,7 +490,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
/** parses document type declaration and assigns it to instance variable
* dtd.
* {{{
- * &lt;! parseDTD ::= DOCTYPE name ... >
+ * <! parseDTD ::= DOCTYPE name ... >
* }}} */
def parseDTD() { // dirty but fast
var extID: ExternalID = null
@@ -545,8 +545,8 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** {{{
- * '&lt;' element ::= xmlTag1 '&gt;' { xmlExpr | '{' simpleExpr '}' } ETag
- * | xmlTag1 '/' '&gt;'
+ * '<' element ::= xmlTag1 '>' { xmlExpr | '{' simpleExpr '}' } ETag
+ * | xmlTag1 '/' '>'
* }}} */
def element1(pscope: NamespaceBinding): NodeSeq = {
val pos = this.pos
@@ -778,7 +778,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** {{{
- * &lt;! attlist := ATTLIST
+ * <! attlist := ATTLIST
* }}} */
def attrDecl() = {
xToken("TTLIST")
@@ -824,7 +824,7 @@ trait MarkupParser extends MarkupParserCommon with TokenTests
}
/** {{{
- * &lt;! element := ELEMENT
+ * <! element := ELEMENT
* }}} */
def entityDecl() = {
var isParameterEntity = false
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index 524dc06327..ad2e155182 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -299,6 +299,16 @@ class PartestTask extends Task with CompilationPathProperty {
}
} getOrElse sys.error("Provided classpath does not contain a Scala partest.")
+ val scalaActors = {
+ (classpath.list map { fs => new File(fs) }) find { f =>
+ f.getName match {
+ case "scala-actors.jar" => true
+ case "actors" if (f.getParentFile.getName == "classes") => true
+ case _ => false
+ }
+ }
+ } getOrElse sys.error("Provided classpath does not contain a Scala actors.")
+
def scalacArgsFlat: Option[Seq[String]] = scalacArgs map (_ flatMap { a =>
val parts = a.getParts
if(parts eq null) Seq[String]() else parts.toSeq
@@ -324,6 +334,7 @@ class PartestTask extends Task with CompilationPathProperty {
antFileManager.LATEST_LIB = scalaLibrary.getAbsolutePath
antFileManager.LATEST_COMP = scalaCompiler.getAbsolutePath
antFileManager.LATEST_PARTEST = scalaPartest.getAbsolutePath
+ antFileManager.LATEST_ACTORS = scalaActors.getAbsolutePath
javacmd foreach (x => antFileManager.JAVACMD = x.getAbsolutePath)
javaccmd foreach (x => antFileManager.JAVAC_CMD = x.getAbsolutePath)
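The scalaActors lookup added above mirrors the existing library/compiler/partest lookups: walk the classpath entries and accept either the packed scala-actors.jar or an unpacked actors directory under classes. The same predicate, written standalone as a sketch (name hypothetical):

  import java.io.File

  def looksLikeActorsEntry(f: File): Boolean = f.getName match {
    case "scala-actors.jar"                               => true
    case "actors" if f.getParentFile.getName == "classes" => true
    case _                                                => false
  }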
diff --git a/src/partest/scala/tools/partest/nest/AntRunner.scala b/src/partest/scala/tools/partest/nest/AntRunner.scala
index 4795e5551a..e77385d6e9 100644
--- a/src/partest/scala/tools/partest/nest/AntRunner.scala
+++ b/src/partest/scala/tools/partest/nest/AntRunner.scala
@@ -22,6 +22,7 @@ class AntRunner extends DirectRunner {
var LATEST_LIB: String = _
var LATEST_COMP: String = _
var LATEST_PARTEST: String = _
+ var LATEST_ACTORS: String = _
val testRootPath: String = "test"
val testRootDir: Directory = Directory(testRootPath)
}
diff --git a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
index 3d72227b04..fa533eeb10 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleFileManager.scala
@@ -83,6 +83,7 @@ class ConsoleFileManager extends FileManager {
latestFile = testClassesDir.parent / "bin"
latestLibFile = testClassesDir / "library"
+ latestActorsFile = testClassesDir / "library" / "actors"
latestCompFile = testClassesDir / "compiler"
latestPartestFile = testClassesDir / "partest"
latestFjbgFile = testParent / "lib" / "fjbg.jar"
@@ -92,6 +93,7 @@ class ConsoleFileManager extends FileManager {
NestUI.verbose("Running on "+dir)
latestFile = dir / "bin"
latestLibFile = dir / "lib/scala-library.jar"
+ latestActorsFile = dir / "lib/scala-actors.jar"
latestCompFile = dir / "lib/scala-compiler.jar"
latestPartestFile = dir / "lib/scala-partest.jar"
}
@@ -100,6 +102,7 @@ class ConsoleFileManager extends FileManager {
NestUI.verbose("Running build/quick")
latestFile = prefixFile("build/quick/bin")
latestLibFile = prefixFile("build/quick/classes/library")
+ latestActorsFile = prefixFile("build/quick/classes/library/actors")
latestCompFile = prefixFile("build/quick/classes/compiler")
latestPartestFile = prefixFile("build/quick/classes/partest")
}
@@ -109,6 +112,7 @@ class ConsoleFileManager extends FileManager {
val p = testParent.getParentFile
latestFile = prefixFileWith(p, "bin")
latestLibFile = prefixFileWith(p, "lib/scala-library.jar")
+ latestActorsFile = prefixFileWith(p, "lib/scala-actors.jar")
latestCompFile = prefixFileWith(p, "lib/scala-compiler.jar")
latestPartestFile = prefixFileWith(p, "lib/scala-partest.jar")
}
@@ -117,6 +121,7 @@ class ConsoleFileManager extends FileManager {
NestUI.verbose("Running dists/latest")
latestFile = prefixFile("dists/latest/bin")
latestLibFile = prefixFile("dists/latest/lib/scala-library.jar")
+ latestActorsFile = prefixFile("dists/latest/lib/scala-actors.jar")
latestCompFile = prefixFile("dists/latest/lib/scala-compiler.jar")
latestPartestFile = prefixFile("dists/latest/lib/scala-partest.jar")
}
@@ -125,6 +130,7 @@ class ConsoleFileManager extends FileManager {
NestUI.verbose("Running build/pack")
latestFile = prefixFile("build/pack/bin")
latestLibFile = prefixFile("build/pack/lib/scala-library.jar")
+ latestActorsFile = prefixFile("build/pack/lib/scala-actors.jar")
latestCompFile = prefixFile("build/pack/lib/scala-compiler.jar")
latestPartestFile = prefixFile("build/pack/lib/scala-partest.jar")
}
@@ -159,14 +165,17 @@ class ConsoleFileManager extends FileManager {
LATEST_LIB = latestLibFile.getAbsolutePath
LATEST_COMP = latestCompFile.getAbsolutePath
LATEST_PARTEST = latestPartestFile.getAbsolutePath
+ LATEST_ACTORS = latestActorsFile.getAbsolutePath
}
var LATEST_LIB: String = ""
var LATEST_COMP: String = ""
var LATEST_PARTEST: String = ""
+ var LATEST_ACTORS: String = ""
var latestFile: File = _
var latestLibFile: File = _
+ var latestActorsFile: File = _
var latestCompFile: File = _
var latestPartestFile: File = _
var latestFjbgFile: File = _
diff --git a/src/partest/scala/tools/partest/nest/DirectRunner.scala b/src/partest/scala/tools/partest/nest/DirectRunner.scala
index d3d50ca58c..20f435cfbb 100644
--- a/src/partest/scala/tools/partest/nest/DirectRunner.scala
+++ b/src/partest/scala/tools/partest/nest/DirectRunner.scala
@@ -57,13 +57,14 @@ trait DirectRunner {
// for example, see how it's done in ReflectiveRunner
//val consFM = new ConsoleFileManager
//import consFM.{ latestCompFile, latestLibFile, latestPartestFile }
- val latestCompFile = new File(fileManager.LATEST_COMP);
- val latestLibFile = new File(fileManager.LATEST_LIB);
- val latestPartestFile = new File(fileManager.LATEST_PARTEST);
+ val latestCompFile = new File(fileManager.LATEST_COMP)
+ val latestLibFile = new File(fileManager.LATEST_LIB)
+ val latestPartestFile = new File(fileManager.LATEST_PARTEST)
+ val latestActorsFile = new File(fileManager.LATEST_ACTORS)
val scalacheckURL = PathSettings.scalaCheck.toURL
val scalaCheckParentClassLoader = ScalaClassLoader.fromURLs(
- List(scalacheckURL, latestCompFile.toURI.toURL, latestLibFile.toURI.toURL, latestPartestFile.toURI.toURL)
+ scalacheckURL :: (List(latestCompFile, latestLibFile, latestActorsFile, latestPartestFile).map(_.toURI.toURL))
)
Output.init()
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
index a4a94fe93e..6d9e64730f 100644
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ b/src/partest/scala/tools/partest/nest/FileManager.scala
@@ -62,6 +62,7 @@ trait FileManager extends FileUtil {
var LATEST_LIB: String
var LATEST_COMP: String
var LATEST_PARTEST: String
+ var LATEST_ACTORS: String
var showDiff = false
var updateCheck = false
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
index 5cde63dc81..a0511774a9 100644
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
@@ -48,9 +48,9 @@ class ReflectiveRunner {
new ConsoleFileManager
import fileManager.
- { latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile }
+ { latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile }
val files =
- Array(latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile) map (x => io.File(x))
+ Array(latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile, latestActorsFile) map (x => io.File(x))
val sepUrls = files map (_.toURL)
var sepLoader = new URLClassLoader(sepUrls, null)
diff --git a/src/partest/scala/tools/partest/nest/SBTRunner.scala b/src/partest/scala/tools/partest/nest/SBTRunner.scala
index 4c6f417df5..750e270c18 100644
--- a/src/partest/scala/tools/partest/nest/SBTRunner.scala
+++ b/src/partest/scala/tools/partest/nest/SBTRunner.scala
@@ -15,6 +15,7 @@ object SBTRunner extends DirectRunner {
var LATEST_LIB: String = _
var LATEST_COMP: String = _
var LATEST_PARTEST: String = _
+ var LATEST_ACTORS: String = _
val testRootPath: String = "test"
val testRootDir: Directory = Directory(testRootPath)
}
@@ -60,6 +61,9 @@ object SBTRunner extends DirectRunner {
fileManager.LATEST_COMP = comp getOrElse sys.error("No scala-compiler found! Classpath = " + fileManager.CLASSPATH)
val partest: Option[String] = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches ".*scala-partest.*\\.jar")).headOption
fileManager.LATEST_PARTEST = partest getOrElse sys.error("No scala-partest found! Classpath = " + fileManager.CLASSPATH)
+ val actors: Option[String] = (fileManager.CLASSPATH split File.pathSeparator filter (_ matches ".*scala-actors.*\\.jar")).headOption
+ fileManager.LATEST_ACTORS = actors getOrElse sys.error("No scala-actors found! Classpath = " + fileManager.CLASSPATH)
+
// TODO - Do something useful here!!!
fileManager.JAVAC_CMD = "javac"
fileManager.failed = config.justFailedTests
diff --git a/src/partest/scala/tools/partest/nest/Worker.scala b/src/partest/scala/tools/partest/nest/Worker.scala
index 3f2cb16082..cb6f2a0edc 100644
--- a/src/partest/scala/tools/partest/nest/Worker.scala
+++ b/src/partest/scala/tools/partest/nest/Worker.scala
@@ -55,6 +55,7 @@ class ScalaCheckFileManager(val origmanager: FileManager) extends FileManager {
var LATEST_LIB: String = origmanager.LATEST_LIB
var LATEST_COMP: String = origmanager.LATEST_COMP
var LATEST_PARTEST: String = origmanager.LATEST_PARTEST
+ var LATEST_ACTORS: String = origmanager.LATEST_ACTORS
}
object Output {