author     Antonio Cunei <antonio.cunei@epfl.ch>  2009-11-24 17:30:44 +0000
committer  Antonio Cunei <antonio.cunei@epfl.ch>  2009-11-24 17:30:44 +0000
commit     839d0ee5ec9c27e2e5da6bab6f03c8a95e90a109 (patch)
tree       613ed692570fb5891f57e76854d9ff4af8ed13d1
parent     5f71b9208404015f425f30a0b4b3f4b3df4ec83f (diff)
download   scala-839d0ee5ec9c27e2e5da6bab6f03c8a95e90a109.tar.gz
           scala-839d0ee5ec9c27e2e5da6bab6f03c8a95e90a109.tar.bz2
           scala-839d0ee5ec9c27e2e5da6bab6f03c8a95e90a109.zip
Merged revisions 19624,19629-19630,19645,19651,...
Merged revisions 19624,19629-19630,19645,19651,19655-19660,19666-19668,19670,
19673-19679,19683-19685,19688,19692,19695-19700,19706-19707,19717-19719,
19723-19724,19726,19730,19735-19740,19742-19744,19746-19759,19762-19764,
19767-19769,19773,19776,19781,19787,19789,19792-19793,19798-19800,19803-19804,
19806-19808,19813-19815,19818-19820,19824 via svnmerge from
https://lampsvn.epfl.ch/svn-repos/scala/scala/trunk

........
r19624 | moors | 2009-11-13 13:17:50 +0100 (Fri, 13 Nov 2009) | 2 lines
fixed #1236: another Symbol::tpe bites the dust (should've been tpeHK)
........
r19629 | rytz | 2009-11-13 16:53:11 +0100 (Fri, 13 Nov 2009) | 1 line
review board script
........
r19630 | rytz | 2009-11-13 17:10:35 +0100 (Fri, 13 Nov 2009) | 1 line
updates to review script
........
r19645 | odersky | 2009-11-13 18:31:12 +0100 (Fri, 13 Nov 2009) | 1 line
Fixes #1477 by requiring that abstract types with non-volatile upper bounds cannot be overridden by volatile types.
........
r19651 | extempore | 2009-11-14 19:02:10 +0100 (Sat, 14 Nov 2009) | 5 lines
Fixes and test cases for #2087 and #2400. This required fixing a long-standing bug in fjbg and recompiling fjbg.jar, which had the side effect of revealing that the current fjbg jar had never been recompiled with target 1.5, so now it's smaller and (I imagine) faster.
........
r19655 | odersky | 2009-11-15 12:14:57 +0100 (Sun, 15 Nov 2009) | 2 lines
Fixed #2848 and #2630; improvements in equality speed.
........
r19656 | odersky | 2009-11-15 14:56:21 +0100 (Sun, 15 Nov 2009) | 2 lines
Fixed #1459
........
r19657 | odersky | 2009-11-15 15:17:48 +0100 (Sun, 15 Nov 2009) | 2 lines
Added benchmarks with results for equality.
........
r19658 | milessabin | 2009-11-15 16:46:52 +0100 (Sun, 15 Nov 2009) | 1 line
Fixed #2627. Also ensure that a non-empty dependencies file is created on the first build.
........
r19659 | extempore | 2009-11-15 19:08:45 +0100 (Sun, 15 Nov 2009) | 3 lines
Tweaked a test which has been regularly failing due to heap exhaustion, although what it's supposed to be testing is stack utilization.
........
r19660 | milessabin | 2009-11-15 21:57:46 +0100 (Sun, 15 Nov 2009) | 1 line
Corrected help syntax for -Ybuilder-debug.
........
r19666 | milessabin | 2009-11-16 11:17:01 +0100 (Mon, 16 Nov 2009) | 1 line
Fixed #2627.
........
r19667 | rytz | 2009-11-16 11:45:54 +0100 (Mon, 16 Nov 2009) | 1 line
review requests can now be created outside a checkout
........
r19668 | rytz | 2009-11-16 11:55:59 +0100 (Mon, 16 Nov 2009) | 1 line
another minor change to the review script
........
r19670 | odersky | 2009-11-16 13:31:40 +0100 (Mon, 16 Nov 2009) | 2 lines
Fixed #2323; made Pickler do the right thing.
........
r19673 | moors | 2009-11-16 18:44:25 +0100 (Mon, 16 Nov 2009) | 1 line
test cases in pending
........
r19674 | dragos | 2009-11-16 19:24:40 +0100 (Mon, 16 Nov 2009) | 2 lines
Faster optimizer by caching successors/predecessors in basic blocks, and better lub for icode.
........
r19675 | extempore | 2009-11-16 22:30:22 +0100 (Mon, 16 Nov 2009) | 1 line
Minor pickler organization stemming from optimizer appeasement.
........
r19676 | extempore | 2009-11-16 22:30:36 +0100 (Mon, 16 Nov 2009) | 2 lines
Some organization & duplication removal in GenICode stemming from optimizer appeasement.
........
r19677 | extempore | 2009-11-16 22:30:54 +0100 (Mon, 16 Nov 2009) | 2 lines
A lot of minor code adjustments to ease the burden on the optimizer, and various cleanups encountered along the way.
........
r19678 | extempore | 2009-11-16 22:31:07 +0100 (Mon, 16 Nov 2009) | 2 lines
Some organization & duplication removal in RefChecks stemming from optimizer appeasement.
........
r19679 | extempore | 2009-11-16 22:56:00 +0100 (Mon, 16 Nov 2009) | 1 line
Fix for #2647.
........
r19683 | extempore | 2009-11-17 05:51:43 +0100 (Tue, 17 Nov 2009) | 1 line
Fix and test case for #2636.
........
r19684 | extempore | 2009-11-17 06:25:48 +0100 (Tue, 17 Nov 2009) | 2 lines
Contents of scala.Math moved into the scala.math package object, and scala.Math deprecated. Also a couple of janitorial cleanups.
........
r19685 | milessabin | 2009-11-17 10:04:00 +0100 (Tue, 17 Nov 2009) | 1 line
More String.isEmpty breakage.
........
r19688 | odersky | 2009-11-17 14:12:31 +0100 (Tue, 17 Nov 2009) | 1 line
Allow implicit modifier on single-parameter function literals. Fixes and closes #1492.
........
r19692 | extempore | 2009-11-17 21:51:23 +0100 (Tue, 17 Nov 2009) | 1 line
Partial fix for #2625.
........
r19695 | extempore | 2009-11-18 01:18:58 +0100 (Wed, 18 Nov 2009) | 11 lines
Removing bits from the library which shouldn't make the 2.8 cut.
Removed outright:
  util.Hashable: unused and I have a better plan for this
  net.Utility: class created to accommodate expansion which never materialized
  reflect.Invocation: doesn't go far enough, needs love it won't find right now
  reflect.RichClass: same as Invocation
Moved into compiler:
  util.ScalaClassLoader: too useful to lose, not done enough to ship
........
r19696 | extempore | 2009-11-18 01:36:26 +0100 (Wed, 18 Nov 2009) | 1 line
More minor removals and some cleanups of !!!s and XXXs.
........
r19697 | dcaoyuan | 2009-11-18 05:35:07 +0100 (Wed, 18 Nov 2009) | 1 line
Fixed #2631
........
r19698 | dcaoyuan | 2009-11-18 05:36:59 +0100 (Wed, 18 Nov 2009) | 1 line
Fix various conditions of removed source/class files that were previously recorded in .scala_dependencies.
........
r19699 | extempore | 2009-11-18 06:41:16 +0100 (Wed, 18 Nov 2009) | 3 lines
More deprecation work. Removes most elements which have been deprecated since 2.7.2 (still except for lower case primitive type aliases) and removes every deprecated method which has never shipped in a release.
........
r19700 | rytz | 2009-11-18 11:19:30 +0100 (Wed, 18 Nov 2009) | 1 line
Recompiled msil.jar; it referred to scala.Math$.
........
r19706 | extempore | 2009-11-18 15:45:11 +0100 (Wed, 18 Nov 2009) | 3 lines
Fixed a bug in Range which was causing take and drop to overflow and return empty if the argument was larger than the actual length of the Range and arg * step > MaxInt.
........
r19707 | dcaoyuan | 2009-11-18 15:46:01 +0100 (Wed, 18 Nov 2009) | 1 line
Fixed #2645
........
r19717 | extempore | 2009-11-18 23:06:03 +0100 (Wed, 18 Nov 2009) | 3 lines
Finally completed the incredibly tedious task of removing the lower case primitive aliases from Predef. Had to rebuild msil.jar along the way.
........
r19718 | extempore | 2009-11-18 23:43:54 +0100 (Wed, 18 Nov 2009) | 2 lines
New starr based on r19717 since I'm finding the current starr doesn't have TupleN.zipped fully working.
........
r19719 | extempore | 2009-11-19 00:24:23 +0100 (Thu, 19 Nov 2009) | 2 lines
More deprecation soothing. Soon we'll be down to a double-digit warning count.
........
r19723 | extempore | 2009-11-19 05:59:46 +0100 (Thu, 19 Nov 2009) | 2 lines
Restoring an embarrassingly large quantity of deprecated methods whose time had not yet come.
........
r19724 | dcaoyuan | 2009-11-19 09:24:37 +0100 (Thu, 19 Nov 2009) | 1 line
Path.parent now returns Path instead of Option[Path], and it prefers a relative path.
........
r19726 | rytz | 2009-11-19 14:44:15 +0100 (Thu, 19 Nov 2009) | 1 line
Closes #2670. The target annotations can now be placed on annotation classes as well.
........
r19730 | extempore | 2009-11-19 21:31:46 +0100 (Thu, 19 Nov 2009) | 2 lines
Deprecation patrol exercises the new capabilities in Tuple2.zipped among other exciting no-ops.
........
r19735 | extempore | 2009-11-20 01:49:58 +0100 (Fri, 20 Nov 2009) | 1 line
Fix for infinite loop in StringBuilder pointed out by dpp.
........
r19736 | extempore | 2009-11-20 04:50:12 +0100 (Fri, 20 Nov 2009) | 1 line
Slightly more fixy fix than the previous fix.
........
r19737 | plocinic | 2009-11-20 10:04:04 +0100 (Fri, 20 Nov 2009) | 1 line
closes #1422
........
r19738 | odersky | 2009-11-20 14:57:22 +0100 (Fri, 20 Nov 2009) | 1 line
Simplifications in collections libraries, enabled by introduction of the Self type in TraversableLike.
........
r19739 | odersky | 2009-11-20 14:58:36 +0100 (Fri, 20 Nov 2009) | 1 line
new test
........
r19740 | extempore | 2009-11-20 16:16:45 +0100 (Fri, 20 Nov 2009) | 4 lines
More world-shaking deprecation work. Using the scala.math package object, updating some @deprecated messages to give realistic alternatives, properly resolving the semantic mismatch between List.-- and diff, its once-recommended but inequivalent alternative.
........
r19742 | prokopec | 2009-11-20 17:11:19 +0100 (Fri, 20 Nov 2009) | 1 line
PriorityQueue fixed, should work ok now.
........
r19743 | prokopec | 2009-11-20 17:11:53 +0100 (Fri, 20 Nov 2009) | 1 line
Priority queue test.
........
r19744 | prokopec | 2009-11-20 17:12:48 +0100 (Fri, 20 Nov 2009) | 1 line
Priority queue test, updated.
........
r19746 | prokopec | 2009-11-20 18:25:08 +0100 (Fri, 20 Nov 2009) | 1 line
Changes made in the cleanup phase: symbols now get interned during classload for each symbol literal, and references to them reside in static fields. These static fields get initialized in static constructors; the Java backend will now identify ctors with static flags and generate a static initializer containing the necessary code.
........
r19747 | prokopec | 2009-11-20 18:29:39 +0100 (Fri, 20 Nov 2009) | 1 line
Removed a couple of unneeded comments.
........
r19748 | extempore | 2009-11-20 18:39:49 +0100 (Fri, 20 Nov 2009) | 3 lines
Deprecated the Tuple(...) methods in Predef, but at the same time extended the overload out to 22 and moved them into the scala package object.
........
r19749 | odersky | 2009-11-20 19:02:42 +0100 (Fri, 20 Nov 2009) | 1 line
Closed #2641
........
r19750 | prokopec | 2009-11-20 19:06:41 +0100 (Fri, 20 Nov 2009) | 1 line
Commented out anonymous function invocation for which Hudson was complaining it couldn't find the classdef.
........
r19751 | extempore | 2009-11-20 19:08:57 +0100 (Fri, 20 Nov 2009) | 2 lines
Subtly altered implementation of iterator which does not go into an infinite loop when deprecated "append" is replaced with ++.
........
r19752 | extempore | 2009-11-20 19:09:08 +0100 (Fri, 20 Nov 2009) | 2 lines
Tweak to Iterator.++ to keep it from going into an infinite loop on x ++ x.
........
r19753 | prokopec | 2009-11-20 19:17:13 +0100 (Fri, 20 Nov 2009) | 1 line
Trying to get the test to pass on the server.
........
r19754 | extempore | 2009-11-20 19:35:52 +0100 (Fri, 20 Nov 2009) | 1 line
Eliminated warning about Tuple2 and Tuple3 importing Traversable.
........
r19755 | extempore | 2009-11-20 19:59:30 +0100 (Fri, 20 Nov 2009) | 1 line
More deprecation avoidance and some minor smoothings.
........
r19756 | moors | 2009-11-20 20:25:32 +0100 (Fri, 20 Nov 2009) | 3 lines
Closes #2585: generate more precise Java generic signatures for classes nested in parametric outer classes. Fix based on review by Martin; baseType is your friend.
........
r19757 | michelou | 2009-11-20 21:19:41 +0100 (Fri, 20 Nov 2009) | 2 lines
updated/extended serialization tests
........
r19758 | extempore | 2009-11-20 21:37:12 +0100 (Fri, 20 Nov 2009) | 1 line
Expanding the warning cleansing into -unchecked territory.
........
r19759 | odersky | 2009-11-20 22:02:23 +0100 (Fri, 20 Nov 2009) | 2 lines
Closed #2642
........
r19762 | extempore | 2009-11-21 18:24:29 +0100 (Sat, 21 Nov 2009) | 1 line
Applied performance patch and test case from ijuma; closes #2526.
........
r19763 | extempore | 2009-11-21 20:58:05 +0100 (Sat, 21 Nov 2009) | 2 lines
Partially addresses #2626 - pattern matcher no longer depends on drop(n) behavior if n < 0.
........
r19764 | extempore | 2009-11-22 00:55:06 +0100 (Sun, 22 Nov 2009) | 2 lines
Cleanup of Cleanup. Finally straightening out a bunch of duplicated boxing code in the right location.
........
r19767 | extempore | 2009-11-22 02:26:04 +0100 (Sun, 22 Nov 2009) | 2 lines
Fix (I think) for recently introduced MSIL breakage stemming from eliminating deprecation warnings.
........
r19768 | odersky | 2009-11-22 12:32:26 +0100 (Sun, 22 Nov 2009) | 2 lines
Made implicit resolution compatible with numeric conformance.
........
r19769 | odersky | 2009-11-22 13:28:57 +0100 (Sun, 22 Nov 2009) | 2 lines
Closed #2635
........
r19773 | extempore | 2009-11-22 17:07:46 +0100 (Sun, 22 Nov 2009) | 1 line
Removed all traces of Boxed*Array. New starr.
........
r19776 | odersky | 2009-11-22 19:13:34 +0100 (Sun, 22 Nov 2009) | 2 lines
Moved failing test due to fix of #2635 to pending.
........
r19781 | prokopec | 2009-11-22 23:47:08 +0100 (Sun, 22 Nov 2009) | 2 lines
PriorityQueue reverse is undefined - overridden to throw an exception. A reverse iterator seems to make sense - it is overridden and defined, and some methods in SeqLike are implemented in terms of it.
........
r19787 | extempore | 2009-11-23 13:51:56 +0100 (Mon, 23 Nov 2009) | 1 line
A couple more warning fixes I meant to check in with r19758.
........
r19789 | dragos | 2009-11-23 14:58:56 +0100 (Mon, 23 Nov 2009) | 2 lines
== for specialized types will not cause boxing anymore.
........
r19792 | odersky | 2009-11-23 15:45:44 +0100 (Mon, 23 Nov 2009) | 1 line
Closed #1226. Added new test cases.
........
r19793 | malayeri | 2009-11-23 16:16:25 +0100 (Mon, 23 Nov 2009) | 1 line
Closed #2552. Changed Iterator.takeWhile and Iterator.filter to avoid recomputing the predicate in the next and hasNext methods.
........
r19798 | odersky | 2009-11-23 18:50:54 +0100 (Mon, 23 Nov 2009) | 1 line
Partial fix for #2683
........
r19799 | odersky | 2009-11-23 19:12:34 +0100 (Mon, 23 Nov 2009) | 1 line
Closed #1545
........
r19800 | extempore | 2009-11-23 22:03:51 +0100 (Mon, 23 Nov 2009) | 4 lines
Partitioned scala.Math and scala.math a little bit. ALL_CAP aliases are deprecated and only in Math. Formerly unavailable members of java.lang.Math which were added in 1.5 are now available in scala.math.
........
r19803 | extempore | 2009-11-24 01:08:32 +0100 (Tue, 24 Nov 2009) | 2 lines
Some more XML cleanups. I'm seeing if I can break the compiler dependency on scala.util.automata.
........
r19804 | milessabin | 2009-11-24 02:13:01 +0100 (Tue, 24 Nov 2009) | 1 line
Improved completion for locals and imports.
........
r19806 | moors | 2009-11-24 10:09:49 +0100 (Tue, 24 Nov 2009) | 5 lines
Close #2665 and close #2667: use weak conformance in the polymorphic case of isApplicable.
Reviewed by: odersky
exprTypeArgs now takes a comparison function: isWeaklyCompatible is passed in isApplicable's typesCompatible (to mimic what happens in the monomorphic case).
Martin: please review as this is different from my original proposal (that one broke type inference, this one passes all tests and does not slow down quick.comp).
........
r19807 | moors | 2009-11-24 11:15:58 +0100 (Tue, 24 Nov 2009) | 1 line
pending test: see #2660, #2691
........
r19808 | moors | 2009-11-24 11:19:08 +0100 (Tue, 24 Nov 2009) | 1 line
close #2626 as specified by Martin
........
r19813 | odersky | 2009-11-24 15:40:04 +0100 (Tue, 24 Nov 2009) | 1 line
Relaxed the rule requiring `override` modifiers so that it's OK if the self type contains the overridden symbol.
........
r19814 | odersky | 2009-11-24 16:50:06 +0100 (Tue, 24 Nov 2009) | 1 line
Closed #2629, #2639, #2669
........
r19815 | odersky | 2009-11-24 16:57:17 +0100 (Tue, 24 Nov 2009) | 1 line
Closed #2696
........
r19818 | odersky | 2009-11-24 17:07:49 +0100 (Tue, 24 Nov 2009) | 1 line
Closed #2698
........
r19819 | odersky | 2009-11-24 17:12:18 +0100 (Tue, 24 Nov 2009) | 1 line
Closed #2664
........
r19820 | prokopec | 2009-11-24 17:35:09 +0100 (Tue, 24 Nov 2009) | 1 line
Added reverse capabilities to PriorityQueue. Seems to work well - tests pass.
........
r19824 | phaller | 2009-11-24 18:11:45 +0100 (Tue, 24 Nov 2009) | 1 line
Made mutable.OpenHashMap a MapLike. Closes #2681.
........
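Several of the revisions above (r19684, r19700, r19740, r19800) concern moving the contents of scala.Math into the scala.math package object and deprecating scala.Math. A minimal sketch of the user-facing change, assuming a 2.8 build; MathMigration is an illustrative name, not part of the commit:

import scala.math.{ max, Pi }

object MathMigration {
  def main(args: Array[String]): Unit = {
    // Pre-2.8 code reached these members via scala.Math.max / scala.Math.Pi;
    // after this merge the same members live in the scala.math package object.
    println(max(1, 2))
    println(Pi)
  }
}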
-rw-r--r--build.xml1
-rw-r--r--lib/fjbg.jar.desired.sha12
-rw-r--r--lib/msil.jar.desired.sha12
-rw-r--r--lib/scala-compiler.jar.desired.sha12
-rw-r--r--lib/scala-library-src.jar.desired.sha12
-rw-r--r--lib/scala-library.jar.desired.sha12
-rw-r--r--src/compiler/scala/tools/ant/Scalac.scala70
-rw-r--r--src/compiler/scala/tools/ant/sabbus/Compiler.scala2
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala55
-rw-r--r--src/compiler/scala/tools/nsc/Interpreter.scala10
-rw-r--r--src/compiler/scala/tools/nsc/MainGenericRunner.scala3
-rw-r--r--src/compiler/scala/tools/nsc/ObjectRunner.scala2
-rw-r--r--src/compiler/scala/tools/nsc/PhaseAssembly.scala178
-rw-r--r--src/compiler/scala/tools/nsc/Settings.scala6
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeDSL.scala4
-rw-r--r--src/compiler/scala/tools/nsc/ast/TreeInfo.scala9
-rw-r--r--src/compiler/scala/tools/nsc/ast/Trees.scala70
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala14
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Scanners.scala4
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala12
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala122
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Checkers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/GenICode.scala762
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Members.scala68
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/Printers.scala4
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala36
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala14
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala21
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala26
-rw-r--r--src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala15
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala217
-rw-r--r--src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala35
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala2
-rw-r--r--src/compiler/scala/tools/nsc/backend/opt/Inliners.scala37
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/Changes.scala16
-rw-r--r--src/compiler/scala/tools/nsc/dependencies/Files.scala29
-rw-r--r--src/compiler/scala/tools/nsc/interactive/Global.scala46
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RangePositions.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala6
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala2
-rw-r--r--src/compiler/scala/tools/nsc/interpreter/Completion.scala2
-rw-r--r--src/compiler/scala/tools/nsc/io/File.scala2
-rw-r--r--src/compiler/scala/tools/nsc/io/Path.scala51
-rw-r--r--src/compiler/scala/tools/nsc/io/PlainFile.scala2
-rw-r--r--src/compiler/scala/tools/nsc/io/VirtualFile.scala2
-rw-r--r--src/compiler/scala/tools/nsc/javac/JavaScanners.scala4
-rw-r--r--src/compiler/scala/tools/nsc/matching/ParallelMatching.scala57
-rw-r--r--src/compiler/scala/tools/nsc/models/Signatures.scala10
-rw-r--r--src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala4
-rw-r--r--src/compiler/scala/tools/nsc/symtab/Definitions.scala101
-rw-r--r--src/compiler/scala/tools/nsc/symtab/StdNames.scala2
-rw-r--r--src/compiler/scala/tools/nsc/symtab/SymbolWalker.scala43
-rw-r--r--src/compiler/scala/tools/nsc/symtab/Symbols.scala17
-rw-r--r--src/compiler/scala/tools/nsc/symtab/Types.scala140
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala4
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala40
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala235
-rw-r--r--src/compiler/scala/tools/nsc/transform/CleanUp.scala216
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala36
-rw-r--r--src/compiler/scala/tools/nsc/transform/LazyVals.scala5
-rw-r--r--src/compiler/scala/tools/nsc/transform/Mixin.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/TailCalls.scala7
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala12
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Contexts.scala10
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Duplicators.scala3
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala11
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala108
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Infer.scala31
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala155
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala6
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala420
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala207
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala141
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala63
-rw-r--r--src/compiler/scala/tools/nsc/util/ClassPath.scala2
-rw-r--r--src/compiler/scala/tools/nsc/util/HashSet.scala2
-rw-r--r--src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala (renamed from src/library/scala/util/ScalaClassLoader.scala)17
-rw-r--r--src/compiler/scala/tools/nsc/util/TreeSet.scala10
-rw-r--r--src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java2
-rw-r--r--src/library/scala/Array.scala4
-rw-r--r--src/library/scala/Enumeration.scala2
-rw-r--r--src/library/scala/Math.scala121
-rw-r--r--src/library/scala/Predef.scala21
-rw-r--r--src/library/scala/Responder.scala3
-rw-r--r--src/library/scala/Tuple2.scala2
-rw-r--r--src/library/scala/Tuple3.scala2
-rw-r--r--src/library/scala/annotation/experimental.scala19
-rw-r--r--src/library/scala/annotation/target/beanGetter.scala12
-rw-r--r--src/library/scala/annotation/target/beanSetter.scala12
-rw-r--r--src/library/scala/annotation/target/field.scala12
-rw-r--r--src/library/scala/annotation/target/getter.scala12
-rw-r--r--src/library/scala/annotation/target/setter.scala12
-rw-r--r--src/library/scala/collection/Iterator.scala58
-rw-r--r--src/library/scala/collection/LinearSeqLike.scala5
-rw-r--r--src/library/scala/collection/MapLike.scala4
-rw-r--r--src/library/scala/collection/SeqProxyLike.scala1
-rw-r--r--src/library/scala/collection/SetLike.scala2
-rw-r--r--src/library/scala/collection/TraversableLike.scala3
-rw-r--r--src/library/scala/collection/TraversableViewLike.scala3
-rw-r--r--src/library/scala/collection/immutable/HashMap.scala2
-rw-r--r--src/library/scala/collection/immutable/HashSet.scala2
-rw-r--r--src/library/scala/collection/immutable/List.scala58
-rw-r--r--src/library/scala/collection/immutable/NumericRange.scala15
-rw-r--r--src/library/scala/collection/immutable/PagedSeq.scala2
-rw-r--r--src/library/scala/collection/immutable/Range.scala10
-rw-r--r--src/library/scala/collection/immutable/Set.scala14
-rw-r--r--src/library/scala/collection/immutable/Stack.scala1
-rw-r--r--src/library/scala/collection/immutable/Stream.scala1
-rw-r--r--src/library/scala/collection/immutable/Vector.scala8
-rw-r--r--src/library/scala/collection/interfaces/SeqMethods.scala1
-rw-r--r--src/library/scala/collection/mutable/ArrayBuilder.scala71
-rw-r--r--src/library/scala/collection/mutable/ArrayStack.scala2
-rw-r--r--src/library/scala/collection/mutable/BufferLike.scala8
-rw-r--r--src/library/scala/collection/mutable/BufferProxy.scala6
-rw-r--r--src/library/scala/collection/mutable/DoubleLinkedList.scala24
-rw-r--r--src/library/scala/collection/mutable/HashMap.scala26
-rw-r--r--src/library/scala/collection/mutable/HashTable.scala13
-rw-r--r--src/library/scala/collection/mutable/History.scala12
-rw-r--r--src/library/scala/collection/mutable/IndexedSeqView.scala3
-rw-r--r--src/library/scala/collection/mutable/LinkedHashMap.scala2
-rw-r--r--src/library/scala/collection/mutable/LinkedList.scala2
-rw-r--r--src/library/scala/collection/mutable/LinkedListLike.scala2
-rw-r--r--src/library/scala/collection/mutable/ObservableBuffer.scala7
-rw-r--r--src/library/scala/collection/mutable/ObservableMap.scala8
-rw-r--r--src/library/scala/collection/mutable/ObservableSet.scala8
-rw-r--r--src/library/scala/collection/mutable/OpenHashMap.scala33
-rw-r--r--src/library/scala/collection/mutable/PriorityQueue.scala144
-rw-r--r--src/library/scala/collection/mutable/Publisher.scala38
-rw-r--r--src/library/scala/collection/mutable/ResizableArray.scala3
-rw-r--r--src/library/scala/collection/mutable/RevertibleHistory.scala2
-rw-r--r--src/library/scala/collection/mutable/Stack.scala1
-rw-r--r--src/library/scala/collection/mutable/StringBuilder.scala8
-rw-r--r--src/library/scala/collection/mutable/Subscriber.scala7
-rw-r--r--src/library/scala/collection/mutable/SynchronizedBuffer.scala10
-rw-r--r--src/library/scala/collection/mutable/SynchronizedMap.scala4
-rw-r--r--src/library/scala/concurrent/DelayedLazyVal.scala4
-rw-r--r--src/library/scala/deprecated.scala3
-rw-r--r--src/library/scala/io/UTF8Codec.scala2
-rw-r--r--src/library/scala/math/Ordering.scala6
-rw-r--r--src/library/scala/math/package.scala156
-rw-r--r--src/library/scala/net/Utility.scala23
-rw-r--r--src/library/scala/package.scala45
-rw-r--r--src/library/scala/reflect/Invocation.scala134
-rw-r--r--src/library/scala/reflect/Print.scala5
-rw-r--r--src/library/scala/reflect/RichClass.scala93
-rw-r--r--src/library/scala/runtime/BoxedAnyArray.scala224
-rw-r--r--src/library/scala/runtime/BoxedArray.scala165
-rw-r--r--src/library/scala/runtime/BoxedBooleanArray.scala28
-rw-r--r--src/library/scala/runtime/BoxedByteArray.scala28
-rw-r--r--src/library/scala/runtime/BoxedCharArray.scala28
-rw-r--r--src/library/scala/runtime/BoxedDoubleArray.scala28
-rw-r--r--src/library/scala/runtime/BoxedFloatArray.scala28
-rw-r--r--src/library/scala/runtime/BoxedIntArray.scala28
-rw-r--r--src/library/scala/runtime/BoxedLongArray.scala28
-rw-r--r--src/library/scala/runtime/BoxedObjectArray.scala40
-rw-r--r--src/library/scala/runtime/BoxedShortArray.scala28
-rw-r--r--src/library/scala/runtime/BoxedUnitArray.scala28
-rw-r--r--src/library/scala/runtime/BoxesRunTime.java107
-rw-r--r--src/library/scala/runtime/RichDouble.scala16
-rw-r--r--src/library/scala/runtime/RichException.scala11
-rw-r--r--src/library/scala/runtime/RichFloat.scala16
-rw-r--r--src/library/scala/runtime/ScalaRunTime.scala42
-rw-r--r--src/library/scala/util/Hashable.scala62
-rw-r--r--src/library/scala/util/Random.scala2
-rw-r--r--src/library/scala/util/Sorting.scala16
-rw-r--r--src/library/scala/util/automata/DetWordAutom.scala36
-rw-r--r--src/library/scala/util/parsing/combinator/PackratParsers.scala4
-rw-r--r--src/library/scala/util/parsing/combinator/Parsers.scala4
-rw-r--r--src/library/scala/util/parsing/json/Lexer.scala2
-rw-r--r--src/library/scala/xml/Attribute.scala1
-rw-r--r--src/library/scala/xml/Document.scala1
-rw-r--r--src/library/scala/xml/HasKeyValue.scala1
-rw-r--r--src/library/scala/xml/NamespaceBinding.scala2
-rw-r--r--src/library/scala/xml/Node.scala9
-rw-r--r--src/library/scala/xml/NodeSeq.scala2
-rw-r--r--src/library/scala/xml/PrettyPrinter.scala15
-rw-r--r--src/library/scala/xml/TopScope.scala3
-rw-r--r--src/library/scala/xml/dtd/ContentModel.scala10
-rw-r--r--src/library/scala/xml/dtd/ElementValidator.scala86
-rw-r--r--src/library/scala/xml/include/sax/Main.scala14
-rw-r--r--src/library/scala/xml/parsing/ConstructingParser.scala2
-rw-r--r--src/library/scala/xml/parsing/MarkupParser.scala2
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala2
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala4
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala2
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala2
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala16
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala2
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala2
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala2
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala140
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala2
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala2
-rw-r--r--src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala16
-rw-r--r--src/partest/scala/tools/partest/PartestTask.scala5
-rw-r--r--src/partest/scala/tools/partest/nest/Worker.scala2
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala2
-rw-r--r--src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala6
-rw-r--r--src/swing/scala/swing/Table.scala2
-rwxr-xr-xtest/files/bench/equality/eq.scala34
-rwxr-xr-xtest/files/bench/equality/eqeq.log42
-rwxr-xr-xtest/files/bench/equality/eqeq.scala46
-rw-r--r--test/files/jvm/serialization.check229
-rw-r--r--test/files/jvm/serialization.scala552
-rw-r--r--test/files/jvm/stringbuilder.scala4
-rw-r--r--test/files/jvm/t2585.check0
-rw-r--r--test/files/jvm/t2585/Test.java16
-rw-r--r--test/files/jvm/t2585/genericouter.scala25
-rw-r--r--test/files/neg/bug563.scala2
-rw-r--r--test/files/neg/bug700.check2
-rw-r--r--test/files/neg/bug700.scala2
-rw-r--r--test/files/neg/bug875.check4
-rw-r--r--test/files/neg/bug875.scala6
-rw-r--r--test/files/neg/bug910.check4
-rw-r--r--test/files/neg/bug910.scala2
-rw-r--r--test/files/neg/constrs.check2
-rw-r--r--test/files/neg/constrs.scala2
-rw-r--r--test/files/neg/gadts1.scala8
-rw-r--r--test/files/neg/implicits.check2
-rw-r--r--test/files/neg/implicits.scala4
-rw-r--r--test/files/neg/overload.check2
-rw-r--r--test/files/neg/overload.scala2
-rw-r--r--test/files/neg/t0218.scala2
-rw-r--r--test/files/neg/t1422.check4
-rw-r--r--test/files/neg/t1422.scala1
-rw-r--r--test/files/neg/t1477.check5
-rw-r--r--test/files/neg/t1477.scala25
-rw-r--r--test/files/neg/t2179.check9
-rwxr-xr-xtest/files/neg/t2179.scala3
-rw-r--r--test/files/neg/t2641.check39
-rw-r--r--test/files/neg/t2641.scala31
-rw-r--r--test/files/neg/t771.check4
-rwxr-xr-xtest/files/neg/t771.scala5
-rw-r--r--test/files/neg/viewtest.scala16
-rw-r--r--test/files/pos/bug0091.scala2
-rw-r--r--test/files/pos/bug1075.scala2
-rw-r--r--test/files/pos/bug287.scala2
-rw-r--r--test/files/pos/collections.scala2
-rw-r--r--test/files/pos/depexists.scala5
-rw-r--r--test/files/pos/implicits.scala19
-rw-r--r--test/files/pos/nested2.scala2
-rw-r--r--test/files/pos/switchUnbox.scala2
-rw-r--r--test/files/pos/t1164.scala2
-rw-r--r--test/files/pos/t1226.scala8
-rw-r--r--test/files/pos/t1236.scala14
-rw-r--r--test/files/pos/t1422.scala2
-rwxr-xr-xtest/files/pos/t1459/AbstractBase.java5
-rwxr-xr-xtest/files/pos/t1459/App.scala18
-rwxr-xr-xtest/files/pos/t1459/Caller.java7
-rwxr-xr-xtest/files/pos/t1545.scala (renamed from test/pending/neg/t1545.scala)0
-rwxr-xr-xtest/files/pos/t2484.scala17
-rwxr-xr-xtest/files/pos/t2635.scala16
-rw-r--r--test/files/pos/t2664.scala9
-rw-r--r--test/files/pos/t2665.scala3
-rw-r--r--test/files/pos/t2667.scala6
-rw-r--r--test/files/pos/t2669.scala28
-rw-r--r--test/files/pos/t2698.scala10
-rw-r--r--test/files/run/Course-2002-09.scala8
-rw-r--r--test/files/run/SymbolsTest.scala283
-rw-r--r--test/files/run/bug2552.check49
-rw-r--r--test/files/run/bug2552.scala34
-rw-r--r--test/files/run/bug2636.scala35
-rw-r--r--test/files/run/bug627.scala2
-rw-r--r--test/files/run/bugs2087-and-2400.scala20
-rw-r--r--test/files/run/priorityQueue.scala354
-rw-r--r--test/files/run/randomAccessSeq-apply.scala2
-rw-r--r--test/files/run/t1524.scala2
-rw-r--r--test/files/run/t153.check2
-rw-r--r--test/files/run/t153.scala2
-rw-r--r--test/files/run/t2526.scala54
-rw-r--r--test/files/run/unapply.scala2
-rw-r--r--test/files/run/unapplyArray.scala2
-rw-r--r--test/pending/pos/bug1357.scala (renamed from test/files/pos/bug1357.scala)0
-rw-r--r--test/pending/pos/t2610.scala17
-rw-r--r--test/pending/pos/t2619.scala80
-rw-r--r--test/pending/pos/t2625.scala9
-rw-r--r--test/pending/pos/t2635.scala16
-rw-r--r--test/pending/pos/t2641.scala16
-rw-r--r--test/pending/pos/t2660.scala25
-rw-r--r--test/pending/pos/t2691.scala9
-rw-r--r--test/postreview.py2540
-rwxr-xr-xtest/review44
-rw-r--r--test/simplejson/__init__.py318
-rw-r--r--test/simplejson/decoder.py354
-rw-r--r--test/simplejson/encoder.py440
-rw-r--r--test/simplejson/scanner.py65
-rw-r--r--test/simplejson/tool.py37
292 files changed, 9009 insertions, 3928 deletions
diff --git a/build.xml b/build.xml
index 47e5a679ca..5356fb114c 100644
--- a/build.xml
+++ b/build.xml
@@ -1128,6 +1128,7 @@ LIBRARIES (MSIL, FJBG maybe later)
destdir="${build-libs.dir}/classes/fjbg"
classpath="${build-libs.dir}/classes/fjbg"
includes="**/*.java"
+ debug="true"
target="1.5" source="1.4">
<compilerarg line="${javac.args}"/>
</javac>
diff --git a/lib/fjbg.jar.desired.sha1 b/lib/fjbg.jar.desired.sha1
index f7bb459d8a..059817251c 100644
--- a/lib/fjbg.jar.desired.sha1
+++ b/lib/fjbg.jar.desired.sha1
@@ -1 +1 @@
-3ddd34e6fda096e9624306b9fba8a1ee8e2ba97d ?fjbg.jar
+6ef6a21997d01d64a3ff8447a0e110d04b3d6c7d ?fjbg.jar
diff --git a/lib/msil.jar.desired.sha1 b/lib/msil.jar.desired.sha1
index 843cb568c5..8854c2de16 100644
--- a/lib/msil.jar.desired.sha1
+++ b/lib/msil.jar.desired.sha1
@@ -1 +1 @@
-e28574c5a6ca797c755284a530519254f0abb5e4 ?msil.jar
+df54e06ffe8574a2d45e51227713a76a74945b34 ?msil.jar
diff --git a/lib/scala-compiler.jar.desired.sha1 b/lib/scala-compiler.jar.desired.sha1
index c83384afed..2e408e7b30 100644
--- a/lib/scala-compiler.jar.desired.sha1
+++ b/lib/scala-compiler.jar.desired.sha1
@@ -1 +1 @@
-0b932d9548029d992264f0780fd5e24b2d0b7c61 ?scala-compiler.jar
+ffd0a45a376f604415980cf8891d780edc327c8f ?scala-compiler.jar
diff --git a/lib/scala-library-src.jar.desired.sha1 b/lib/scala-library-src.jar.desired.sha1
index c0ba53e5f4..639779719e 100644
--- a/lib/scala-library-src.jar.desired.sha1
+++ b/lib/scala-library-src.jar.desired.sha1
@@ -1 +1 @@
-6761a76f6314540a9a69f4b45419a1d4d8b71773 ?scala-library-src.jar
+4b4cf49e7d50ec49a0a5c1762f7213c41b8149e2 ?scala-library-src.jar
diff --git a/lib/scala-library.jar.desired.sha1 b/lib/scala-library.jar.desired.sha1
index 0772bdf9d7..d130f5a43f 100644
--- a/lib/scala-library.jar.desired.sha1
+++ b/lib/scala-library.jar.desired.sha1
@@ -1 +1 @@
-6ac2e1d59fdb05902788b1e55ca8dbf391504a32 ?scala-library.jar
+a3dc8a4cf67a2fd2096cd42ab5ee34ea7a45c0d4 ?scala-library.jar
diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index aaa50cdd08..7b28ee9103 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -10,7 +10,7 @@
package scala.tools.ant
-import java.io.{File,PrintWriter,BufferedWriter,FileWriter}
+import java.io.{File,PrintWriter,BufferedWriter,FileWriter, IOException}
import org.apache.tools.ant.{ BuildException, Project, AntClassLoader }
import org.apache.tools.ant.taskdefs.{MatchingTask,Java}
@@ -18,7 +18,7 @@ import org.apache.tools.ant.types.{Path, Reference, FileSet}
import org.apache.tools.ant.util.{FileUtils, GlobPatternMapper,
SourceFileScanner}
-import scala.tools.nsc.{Global, Settings}
+import scala.tools.nsc.{Global, Settings, Properties}
import scala.tools.nsc.reporters.{Reporter, ConsoleReporter}
/** <p>
@@ -105,10 +105,10 @@ class Scalac extends MatchingTask {
/** Defines valid values for the <code>deprecation</code> and
* <code>unchecked</code> properties. */
object Flag extends PermissibleValue {
- val values = List("yes", "no", "on", "off")
+ val values = List("yes", "no", "on", "off", "true", "false")
def toBoolean(flag: String) =
- if (flag == "yes" || flag == "on") Some(true)
- else if (flag == "no" || flag == "off") Some(false)
+ if (flag == "yes" || flag == "on" || flag == "true") Some(true)
+ else if (flag == "no" || flag == "off" || flag == "false") Some(false)
else None
}
@@ -477,6 +477,9 @@ class Scalac extends MatchingTask {
/** Initializes settings and source files */
protected def initialize: (Settings, List[File], Boolean) = {
+ if (scalacDebugging)
+ log("Base directory is `%s`".format(scala.tools.nsc.io.Path("").normalize))
+
// Tests if all mandatory attributes are set and valid.
if (origin.isEmpty) error("Attribute 'srcdir' is not set.")
if (!destination.isEmpty && !destination.get.isDirectory())
@@ -558,11 +561,64 @@ class Scalac extends MatchingTask {
if (!assemrefs.isEmpty) settings.assemrefs.value = assemrefs.get
log("Scalac params = '" + addParams + "'", Project.MSG_DEBUG)
- settings.parseParams(addParams)
+ // todo, process fs from addParams?
+ val fs = processArguments(settings, addParams.trim.split("""\s+""").toList)
+
+ // resolve dependenciesFile path from project's basedir, so <ant antfile ...> call from other project works.
+ // the dependenciesFile may be relative path to basedir or absolute path, in either case, the following code
+ // will return correct answer.
+ settings.dependenciesFile.value match {
+ case "none" =>
+ case x =>
+ val depFilePath = scala.tools.nsc.io.Path(x)
+ settings.dependenciesFile.value = scala.tools.nsc.io.Path(getProject.getBaseDir).normalize resolve depFilePath path
+ }
(settings, sourceFiles, javaOnly)
}
+ /** Process the arguments and update the settings accordingly.
+ * This method is called only once, during initialization.
+ * @return Accumulated files to compile
+ */
+ protected def processArguments(settings: Settings, arguments: List[String]): List[String] = {
+ /** file extensions of files that the compiler can process */
+ lazy val fileEndings = Properties.fileEndings
+
+ // initialization
+ var ok = true
+ var fs: List[String] = Nil
+ var args = arguments
+ def errorAndNotOk(msg: String) = { error(msg) ; ok = false }
+
+ // given a @ argument expands it out
+ def doExpand(x: String) =
+ try { args = scala.tools.nsc.util.ArgumentsExpander.expandArg(x) ::: args.tail }
+ catch { case ex: IOException => errorAndNotOk(ex.getMessage) }
+
+ // true if it's a legit looking source file
+ def isSourceFile(x: String) =
+ (settings.script.value != "") ||
+ (fileEndings exists (x endsWith _))
+
+ // given an option for scalac finds out what it is
+ def doOption(x: String): Unit = {
+ val argsLeft = settings.parseParams(args)
+ if (args != argsLeft) args = argsLeft
+ else errorAndNotOk("bad option: '" + x + "'")
+ }
+
+ // cycle through args until empty or error
+ while (!args.isEmpty && ok) args.head match {
+ case x if x startsWith "@" => doExpand(x)
+ case x if x startsWith "-" => doOption(x)
+ case x if isSourceFile(x) => fs = x :: fs ; args = args.tail
+ case "" => args = args.tail // quick fix [martin: for what?]
+ case x => errorAndNotOk("don't know what to do with " + x)
+ }
+
+ fs
+ }
override def execute() {
val (settings, sourceFiles, javaOnly) = initialize
@@ -586,7 +642,7 @@ class Scalac extends MatchingTask {
if (compilerPath.isDefined) path add compilerPath.get
else getClass.getClassLoader match {
case cl: AntClassLoader => path add new Path(getProject, cl.getClasspath)
- case _ => error("Cannot determine default classpath for sclac, please specify one!")
+ case _ => error("Cannot determine default classpath for scalac, please specify one!")
}
path
}
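The Flag hunk above extends the Ant task's toBoolean to accept "true"/"false" alongside "yes"/"no"/"on"/"off". A standalone sketch of the resulting behaviour; FlagSketch is a made-up wrapper, not the task's own code path:

object FlagSketch {
  def toBoolean(flag: String): Option[Boolean] =
    if (flag == "yes" || flag == "on" || flag == "true") Some(true)
    else if (flag == "no" || flag == "off" || flag == "false") Some(false)
    else None

  def main(args: Array[String]): Unit = {
    println(toBoolean("true"))   // Some(true)  -- newly accepted spelling
    println(toBoolean("off"))    // Some(false)
    println(toBoolean("maybe"))  // None
  }
}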
diff --git a/src/compiler/scala/tools/ant/sabbus/Compiler.scala b/src/compiler/scala/tools/ant/sabbus/Compiler.scala
index 787c6af870..f6372f741a 100644
--- a/src/compiler/scala/tools/ant/sabbus/Compiler.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Compiler.scala
@@ -13,7 +13,7 @@ package scala.tools.ant.sabbus
import java.io.File
import java.net.URL
import java.lang.reflect.InvocationTargetException
-import scala.util.ScalaClassLoader
+import scala.tools.nsc.util.ScalaClassLoader
class Compiler(classpath: Array[URL], val settings: Settings)
{
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index bae0d624c6..a933c13c39 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -10,7 +10,7 @@ import java.io.{File, FileOutputStream, PrintWriter}
import java.io.{IOException, FileNotFoundException}
import java.nio.charset._
import compat.Platform.currentTime
-import scala.tools.nsc.io.{SourceReader, AbstractFile}
+import scala.tools.nsc.io.{SourceReader, AbstractFile, Path}
import scala.tools.nsc.reporters._
import scala.tools.nsc.util.{ClassPath, MsilClassPath, JavaClassPath, SourceFile, BatchSourceFile, OffsetPosition, RangePosition}
@@ -227,28 +227,12 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
settings.dependenciesFile.value match {
case "none" => ()
case x =>
- val jfile = new java.io.File(x)
- if (!jfile.exists) jfile.createNewFile
- else {
- // This logic moved here from scala.tools.nsc.dependencies.File.
- // Note that it will trip an assertion in lookupPathUnchecked
- // if the path being looked at is absolute.
-
- /** The directory where file lookup should start at. */
- val rootDirectory: AbstractFile = {
- AbstractFile.getDirectory(".")
-// val roots = java.io.File.listRoots()
-// assert(roots.length > 0)
-// new PlainFile(roots(0))
- }
-
- def toFile(path: String) = {
- val file = rootDirectory.lookupPathUnchecked(path, false)
- assert(file ne null, path)
- file
- }
-
- dependencyAnalysis.loadFrom(AbstractFile.getFile(jfile), toFile)
+ val depFilePath = Path(x)
+ if (depFilePath.exists) {
+ /** The directory where file lookup should start */
+ val rootPath = depFilePath.parent
+ def toFile(path: String) = AbstractFile.getFile(rootPath resolve Path(path))
+ dependencyAnalysis.loadFrom(AbstractFile.getFile(depFilePath), toFile)
}
}
@@ -391,10 +375,6 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
val runsRightAfter = None
} with TailCalls
- // object checkDefined extends {
- // val global: Global.this.type = Global.this
- // } with CheckDefined
-
// phaseName = "explicitouter"
object explicitOuter extends {
val global: Global.this.type = Global.this
@@ -841,15 +821,20 @@ class Global(var settings: Settings, var reporter: Reporter) extends SymbolTable
informTime("total", startTime)
if (!dependencyAnalysis.off) {
-
- def fromFile(file: AbstractFile): String = {
- val path = file.path
- if (path.startsWith("./"))
- path.substring(2, path.length)
- else path
+ settings.dependenciesFile.value match {
+ case "none" =>
+ case x =>
+ val depFilePath = Path(x)
+ if (!depFilePath.exists)
+ dependencyAnalysis.dependenciesFile = AbstractFile.getFile(depFilePath.createFile())
+
+ /** The directory where file lookup should start */
+ val rootPath = depFilePath.parent.normalize
+ def fromFile(file: AbstractFile): String =
+ rootPath.relativize(Path(file.file).normalize).path
+
+ dependencyAnalysis.saveDependencies(fromFile)
}
-
- dependencyAnalysis.saveDependencies(fromFile)
}
}
diff --git a/src/compiler/scala/tools/nsc/Interpreter.scala b/src/compiler/scala/tools/nsc/Interpreter.scala
index 489ab1a3e0..77436fe55f 100644
--- a/src/compiler/scala/tools/nsc/Interpreter.scala
+++ b/src/compiler/scala/tools/nsc/Interpreter.scala
@@ -15,7 +15,8 @@ import reflect.InvocationTargetException
import scala.collection.immutable.ListSet
import scala.collection.mutable
import scala.collection.mutable.{ ListBuffer, HashSet, ArrayBuffer }
-import scala.util.{ ScalaClassLoader, URLClassLoader }
+import scala.tools.nsc.util.ScalaClassLoader
+import ScalaClassLoader.URLClassLoader
import scala.util.control.Exception.{ Catcher, catching, ultimately, unwrapping }
import io.{ PlainFile, VirtualDirectory }
@@ -119,11 +120,14 @@ class Interpreter(val settings: Settings, out: PrintWriter)
/** the compiler's classpath, as URL's */
val compilerClasspath: List[URL] = {
- import scala.net.Utility.parseURL
+ def parseURL(s: String): Option[URL] =
+ catching(classOf[MalformedURLException]) opt new URL(s)
+
val classpathPart =
ClassPath.expandPath(compiler.settings.classpath.value).map(s => new File(s).toURL)
+ val codebasePart =
+ (compiler.settings.Xcodebase.value.split(" ")).toList flatMap parseURL
- val codebasePart = (compiler.settings.Xcodebase.value.split(" ")).toList flatMap parseURL
classpathPart ::: codebasePart
}
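The Interpreter hunk above replaces the removed scala.net.Utility.parseURL with a local helper built on scala.util.control.Exception.catching. A small self-contained sketch of that idiom, assuming nothing beyond the standard library; ParseUrlSketch is an illustrative name:

import java.net.{ URL, MalformedURLException }
import scala.util.control.Exception.catching

object ParseUrlSketch {
  // Evaluate the expression, turning a MalformedURLException into None.
  def parseURL(s: String): Option[URL] =
    catching(classOf[MalformedURLException]) opt new URL(s)

  def main(args: Array[String]): Unit = {
    println(parseURL("http://www.scala-lang.org"))  // Some(http://www.scala-lang.org)
    println(parseURL("not a url"))                  // None
  }
}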
diff --git a/src/compiler/scala/tools/nsc/MainGenericRunner.scala b/src/compiler/scala/tools/nsc/MainGenericRunner.scala
index 9cd9cdbd43..0365f28dc3 100644
--- a/src/compiler/scala/tools/nsc/MainGenericRunner.scala
+++ b/src/compiler/scala/tools/nsc/MainGenericRunner.scala
@@ -11,9 +11,8 @@ import java.io.{ File, IOException }
import java.lang.{ClassNotFoundException, NoSuchMethodException}
import java.lang.reflect.InvocationTargetException
import java.net.{ URL, MalformedURLException }
-import scala.util.ScalaClassLoader
-import util.ClassPath
+import util.{ ClassPath, ScalaClassLoader }
import File.pathSeparator
import Properties.{ versionString, copyrightString }
diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala
index e4e0826d32..282cff4987 100644
--- a/src/compiler/scala/tools/nsc/ObjectRunner.scala
+++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala
@@ -8,7 +8,7 @@
package scala.tools.nsc
import java.net.URL
-import scala.util.ScalaClassLoader
+import util.ScalaClassLoader
/** An object that runs another object specified by name.
*
diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
index 918b56fcfb..958bef5652 100644
--- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala
+++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala
@@ -42,8 +42,8 @@ trait PhaseAssembly { self: Global =>
var level = 0
def allPhaseNames(): String = phaseobj match {
- case None => phasename
- case Some(lst) => lst.map(_.phaseName).reduceLeft(_+","+_)
+ case None => phasename
+ case Some(lst) => lst.map(_.phaseName).reduceLeft(_+","+_)
}
}
@@ -56,9 +56,9 @@ trait PhaseAssembly { self: Global =>
def getNodeByPhase(phs: SubComponent): Node = {
var node: Node = getNodeByPhase(phs.phaseName)
node.phaseobj match {
- case None =>
- node.phaseobj = Some(List[SubComponent](phs))
- case _ =>
+ case None =>
+ node.phaseobj = Some(List[SubComponent](phs))
+ case _ =>
}
node
}
@@ -107,12 +107,12 @@ trait PhaseAssembly { self: Global =>
var lvl = 1
var nds = nodes.valuesIterator.filter(_.level == lvl).toList
while(nds.size > 0) {
- nds = nds.sort((n1,n2) => (n1.phasename compareTo n2.phasename) < 0)
- for (n <- nds) {
- chain = chain ::: n.phaseobj.get
- }
- lvl += 1
- nds = nodes.valuesIterator.filter(_.level == lvl).toList
+ nds = nds.sortWith((n1,n2) => (n1.phasename compareTo n2.phasename) < 0)
+ for (n <- nds) {
+ chain = chain ::: n.phaseobj.get
+ }
+ lvl += 1
+ nds = nodes.valuesIterator.filter(_.level == lvl).toList
}
chain
}
@@ -122,7 +122,7 @@ trait PhaseAssembly { self: Global =>
*/
def collapseHardLinksAndLevels(node: Node, lvl: Int) {
if (node.visited) {
- throw new FatalError(
+ throw new FatalError(
"Cycle in compiler phase dependencies detected, phase " +
node.phasename + " reacted twice!")
}
@@ -131,19 +131,19 @@ trait PhaseAssembly { self: Global =>
var hls = Nil ++ node.before.filter(_.hard)
while (hls.size > 0) {
- for (hl <- hls) {
- node.phaseobj = Some(node.phaseobj.get ++ hl.frm.phaseobj.get)
- node.before = hl.frm.before
- nodes -= hl.frm.phasename
- edges -= hl
- for (edge <- node.before) edge.to = node
- }
- hls = Nil ++ node.before.filter(_.hard)
+ for (hl <- hls) {
+ node.phaseobj = Some(node.phaseobj.get ++ hl.frm.phaseobj.get)
+ node.before = hl.frm.before
+ nodes -= hl.frm.phasename
+ edges -= hl
+ for (edge <- node.before) edge.to = node
+ }
+ hls = Nil ++ node.before.filter(_.hard)
}
node.visited = true
for (edge <- node.before) {
- collapseHardLinksAndLevels( edge.frm, lvl + 1)
+ collapseHardLinksAndLevels( edge.frm, lvl + 1)
}
node.visited = false
@@ -156,44 +156,44 @@ trait PhaseAssembly { self: Global =>
def validateAndEnforceHardlinks() {
var hardlinks = edges.filter(_.hard)
for (hl <- hardlinks) {
- if (hl.frm.after.size > 1) {
- throw new FatalError("phase " + hl.frm.phasename + " want to run right after " + hl.to.phasename + ", but some phase has declared to run before " + hl.frm.phasename + ". Re-run with -Xgenerate-phase-graph <filename> to better see the problem.")
- }
+ if (hl.frm.after.size > 1) {
+ throw new FatalError("phase " + hl.frm.phasename + " want to run right after " + hl.to.phasename + ", but some phase has declared to run before " + hl.frm.phasename + ". Re-run with -Xgenerate-phase-graph <filename> to better see the problem.")
+ }
}
var rerun = true
while (rerun) {
- rerun = false
- hardlinks = edges.filter(_.hard)
- for (hl <- hardlinks) {
- var sanity = Nil ++ hl.to.before.filter(_.hard)
- if (sanity.length == 0) {
- throw new FatalError("There is no runs right after dependency, where there should be one! This is not supposed to happen!")
- } else if (sanity.length > 1) {
- var msg = "Multiple phases want to run right after the phase " + sanity.head.to.phasename + "\n"
- msg += "Phases: "
- sanity = sanity.sort((e1,e2) => (e1.frm.phasename compareTo e2.frm.phasename) < 0)
- for (edge <- sanity) {
- msg += edge.frm.phasename + ", "
- }
- msg += "\nRe-run with -Xgenerate-phase-graph <filename> to better see the problem."
- throw new FatalError(msg)
-
- } else {
-
- var promote = hl.to.before.filter(e => (!e.hard))
- hl.to.before.clear
- sanity foreach (edge => hl.to.before += edge)
- for (edge <- promote) {
- rerun = true
- informProgress(
+ rerun = false
+ hardlinks = edges.filter(_.hard)
+ for (hl <- hardlinks) {
+ var sanity = Nil ++ hl.to.before.filter(_.hard)
+ if (sanity.length == 0) {
+ throw new FatalError("There is no runs right after dependency, where there should be one! This is not supposed to happen!")
+ } else if (sanity.length > 1) {
+ var msg = "Multiple phases want to run right after the phase " + sanity.head.to.phasename + "\n"
+ msg += "Phases: "
+ sanity = sanity.sortWith((e1,e2) => (e1.frm.phasename compareTo e2.frm.phasename) < 0)
+ for (edge <- sanity) {
+ msg += edge.frm.phasename + ", "
+ }
+ msg += "\nRe-run with -Xgenerate-phase-graph <filename> to better see the problem."
+ throw new FatalError(msg)
+
+ } else {
+
+ var promote = hl.to.before.filter(e => (!e.hard))
+ hl.to.before.clear
+ sanity foreach (edge => hl.to.before += edge)
+ for (edge <- promote) {
+ rerun = true
+ informProgress(
"promote the dependency of " + edge.frm.phasename +
": " + edge.to.phasename + " => " + hl.frm.phasename)
- edge.to = hl.frm
- hl.frm.before += edge
- }
- }
- }
+ edge.to = hl.frm
+ hl.frm.before += edge
+ }
+ }
+ }
}
}
@@ -205,17 +205,17 @@ trait PhaseAssembly { self: Global =>
def removeDanglingNodes() {
var dnodes = nodes.valuesIterator filter (_.phaseobj.isEmpty)
for (node <- dnodes) {
- val msg = "dropping dependency on node with no phase object: "+node.phasename
+ val msg = "dropping dependency on node with no phase object: "+node.phasename
informProgress(msg)
- nodes -= node.phasename
- for (edge <- node.before) {
- edges -= edge
- edge.frm.after -= edge
- edge.frm.phaseobj match {
- case Some(lsc) => if (! lsc.head.internal) warning(msg)
- case _ =>
- }
- }
+ nodes -= node.phasename
+ for (edge <- node.before) {
+ edges -= edge
+ edge.frm.after -= edge
+ edge.frm.phaseobj match {
+ case Some(lsc) => if (! lsc.head.internal) warning(msg)
+ case _ =>
+ }
+ }
}
}
@@ -268,30 +268,30 @@ trait PhaseAssembly { self: Global =>
var fromnode = graph.getNodeByPhase(phs)
phs.runsRightAfter match {
- case None =>
- for (phsname <- phs.runsAfter) {
- if (phsname != "terminal") {
- val tonode = graph.getNodeByPhase(phsname)
- graph.softConnectNodes(fromnode, tonode)
- } else {
- error("[phase assembly, after dependency on terminal phase not allowed: " + fromnode.phasename + " => "+ phsname + "]")
- }
- }
- for (phsname <- phs.runsBefore) {
- if (phsname != "parser") {
- val tonode = graph.getNodeByPhase(phsname)
- graph.softConnectNodes(tonode, fromnode)
- } else {
- error("[phase assembly, before dependency on parser phase not allowed: " + phsname + " => "+ fromnode.phasename + "]")
- }
- }
- case Some(phsname) =>
- if (phsname != "terminal") {
- val tonode = graph.getNodeByPhase(phsname)
+ case None =>
+ for (phsname <- phs.runsAfter) {
+ if (phsname != "terminal") {
+ val tonode = graph.getNodeByPhase(phsname)
+ graph.softConnectNodes(fromnode, tonode)
+ } else {
+ error("[phase assembly, after dependency on terminal phase not allowed: " + fromnode.phasename + " => "+ phsname + "]")
+ }
+ }
+ for (phsname <- phs.runsBefore) {
+ if (phsname != "parser") {
+ val tonode = graph.getNodeByPhase(phsname)
+ graph.softConnectNodes(tonode, fromnode)
+ } else {
+ error("[phase assembly, before dependency on parser phase not allowed: " + phsname + " => "+ fromnode.phasename + "]")
+ }
+ }
+ case Some(phsname) =>
+ if (phsname != "terminal") {
+ val tonode = graph.getNodeByPhase(phsname)
graph.hardConnectNodes(fromnode, tonode)
- } else {
- error("[phase assembly, right after dependency on terminal phase not allowed: " + fromnode.phasename + " => "+ phsname + "]")
- }
+ } else {
+ error("[phase assembly, right after dependency on terminal phase not allowed: " + fromnode.phasename + " => "+ phsname + "]")
+ }
}
}
graph
@@ -309,14 +309,14 @@ trait PhaseAssembly { self: Global =>
for (edge <- graph.edges) {
sbuf.append("\"" + edge.frm.allPhaseNames + "(" + edge.frm.level + ")" + "\"->\"" + edge.to.allPhaseNames + "(" + edge.to.level + ")" + "\"")
if (! edge.frm.phaseobj.get.head.internal) {
- extnodes += edge.frm
+ extnodes += edge.frm
}
edge.frm.phaseobj match { case None => null case Some(ln) => if(ln.size > 1) fatnodes += edge.frm }
edge.to.phaseobj match { case None => null case Some(ln) => if(ln.size > 1) fatnodes += edge.to }
if (edge.hard) {
- sbuf.append(" [color=\"#0000ff\"]\n")
+ sbuf.append(" [color=\"#0000ff\"]\n")
} else {
- sbuf.append(" [color=\"#000000\"]\n")
+ sbuf.append(" [color=\"#000000\"]\n")
}
}
for (node <- extnodes) {
diff --git a/src/compiler/scala/tools/nsc/Settings.scala b/src/compiler/scala/tools/nsc/Settings.scala
index 5db78420ec..50ef58232b 100644
--- a/src/compiler/scala/tools/nsc/Settings.scala
+++ b/src/compiler/scala/tools/nsc/Settings.scala
@@ -426,7 +426,8 @@ object Settings {
// Ordered (so we can use TreeSet)
def compare(that: Setting): Int = name compare that.name
- def compareLists[T <% Ordered[T]](xs: List[T], ys: List[T]): Boolean = xs.sort(_ < _) == ys.sort(_ < _)
+ def compareLists[T <% Ordered[T]](xs: List[T], ys: List[T]): Boolean =
+ xs.sortWith(_ < _) == ys.sortWith(_ < _)
// Equality
def eqValues: List[Any] = List(name, value)
@@ -832,7 +833,8 @@ trait ScalacSettings {
val specialize = BooleanSetting ("-Yspecialize", "Specialize generic code on types.")
val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.")
val Yidedebug = BooleanSetting ("-Yide-debug", "Generate, validate and output trees using the interactive compiler.")
- val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "Compile using the specified build manager", List("none", "refined", "simple"), "none")
+ val Ybuilderdebug = ChoiceSetting ("-Ybuilder-debug", "Compile using the specified build manager", List("none", "refined", "simple"), "none") .
+ withHelpSyntax("-Ybuilder-debug:<method>")
val Ytyperdebug = BooleanSetting ("-Ytyper-debug", "Trace all type assignements")
val Ypmatdebug = BooleanSetting ("-Ypmat-debug", "Trace all pattern matcher activity.")
val Ytailrec = BooleanSetting ("-Ytailrecommend", "Alert methods which would be tail-recursive if private or final.")
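Both the PhaseAssembly and Settings hunks above replace the deprecated List.sort with sortWith, which takes the same "less than" predicate. A trivial, illustrative sketch of the replacement:

object SortSketch {
  def main(args: Array[String]): Unit = {
    val phases = List("typer", "parser", "erasure")
    // 2.8 style; previously written as phases.sort(_ < _)
    println(phases.sortWith(_ < _))   // List(erasure, parser, typer)
  }
}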
diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
index 569b0f7b37..5d0b69e0da 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala
@@ -199,6 +199,10 @@ trait TreeDSL {
if (guards.isEmpty) EmptyTree
else guards reduceLeft gen.mkAnd
+ def OR(guards: Tree*) =
+ if (guards.isEmpty) EmptyTree
+ else guards reduceLeft gen.mkOr
+
def IF(tree: Tree) = new IfStart(tree, EmptyTree)
def TRY(tree: Tree) = new TryStart(tree, Nil, EmptyTree)
def BLOCK(xs: Tree*) = Block(xs.init.toList, xs.last)
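The TreeDSL hunk above adds an OR combinator mirroring AND: fold the guard trees with gen.mkOr, returning EmptyTree when there are none. The same fold shape on plain Booleans, purely as an illustration (the real combinators build compiler Trees, and their empty case is EmptyTree rather than a Boolean identity):

object GuardFoldSketch {
  def AND(guards: Boolean*) = if (guards.isEmpty) true  else guards reduceLeft (_ && _)
  def OR (guards: Boolean*) = if (guards.isEmpty) false else guards reduceLeft (_ || _)

  def main(args: Array[String]): Unit =
    println((AND(true, true, false), OR(false, false, true)))   // (false,true)
}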
diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
index e128b4e12f..be00805732 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala
@@ -193,15 +193,6 @@ abstract class TreeInfo {
reserved addEntry nme.false_
reserved addEntry nme.true_
reserved addEntry nme.null_
- reserved addEntry newTypeName("byte")
- reserved addEntry newTypeName("char")
- reserved addEntry newTypeName("short")
- reserved addEntry newTypeName("int")
- reserved addEntry newTypeName("long")
- reserved addEntry newTypeName("float")
- reserved addEntry newTypeName("double")
- reserved addEntry newTypeName("boolean")
- reserved addEntry newTypeName("unit")
/** Is name a variable name? */
def isVariableName(name: Name): Boolean = {
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index bff4bd51c3..dbbb306130 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -131,13 +131,24 @@ trait Trees {
this
}
+ /** Set tpe to given `tp` and return this.
+ */
def setType(tp: Type): this.type = {
/*assert(kindingIrrelevant(tp) || !kindStar || !tp.isHigherKinded,
- tp+" should not be higher-kinded");*/
+ tp+" should not be higher-kinded"); */
tpe = tp
this
}
+ /** Like `setType`, but if this is a previously empty TypeTree
+ * that fact is remembered so that resetType will snap back.
+ */
+ def defineType(tp: Type): this.type = setType(tp)
+
+ /** Reset type to `null`, with special handling of TypeTrees and the EmptyType
+ */
+ def resetType() { tpe = null }
+
def symbol: Symbol = null
def symbol_=(sym: Symbol) {
throw new Error("symbol_= inapplicable for " + this)
@@ -323,6 +334,7 @@ trait Trees {
super.tpe_=(NoType)
override def tpe_=(t: Type) =
if (t != NoType) throw new Error("tpe_=("+t+") inapplicable for <empty>")
+ override def resetType() {}
override def isEmpty = true
}
@@ -618,17 +630,16 @@ trait Trees {
}})
val (edefs, rest) = body span treeInfo.isEarlyDef
val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef
- val (lvdefs, gvdefs) = List.unzip {
- evdefs map {
- case vdef @ ValDef(mods, name, tpt, rhs) =>
- val fld = treeCopy.ValDef(
- vdef.duplicate, mods, name,
- atPos(vdef.pos.focus) { TypeTree() setOriginal tpt setPos tpt.pos.focus }, // atPos in case
- EmptyTree)
- val local = treeCopy.ValDef(vdef, Modifiers(PRESUPER), name, tpt, rhs)
- (local, fld)
- }
- }
+ val (lvdefs, gvdefs) = evdefs map {
+ case vdef @ ValDef(mods, name, tpt, rhs) =>
+ val fld = treeCopy.ValDef(
+ vdef.duplicate, mods, name,
+ atPos(vdef.pos.focus) { TypeTree() setOriginal tpt setPos tpt.pos.focus }, // atPos in case
+ EmptyTree)
+ val local = treeCopy.ValDef(vdef, Modifiers(PRESUPER), name, tpt, rhs)
+ (local, fld)
+ } unzip
+
val constrs = {
if (constrMods.isTrait) {
if (body forall treeInfo.isInterfaceMember) List()
@@ -868,12 +879,25 @@ trait Trees {
case class TypeTree() extends TypTree {
override def symbol = if (tpe == null) null else tpe.typeSymbol
- private var orig: Tree = null // should be EmptyTree?
+ private var orig: Tree = null
+ private var wasEmpty: Boolean = false
def original: Tree = orig
def setOriginal(tree: Tree): this.type = { orig = tree; setPos(tree.pos); this }
+ override def defineType(tp: Type): this.type = {
+ wasEmpty = isEmpty
+ setType(tp)
+ }
+
+ /** Reset type to null, unless type original was empty and then
+ * got its type via a defineType
+ */
+ override def resetType() {
+ if (wasEmpty) tpe = null
+ }
+
override def isEmpty = (tpe eq null) || tpe == NoType
}
@@ -1817,18 +1841,16 @@ trait Trees {
protected def isLocal(sym: Symbol): Boolean = true
protected def resetDef(tree: Tree) {
tree.symbol = NoSymbol
- tree.tpe = null
- super.traverse(tree)
}
- override def traverse(tree: Tree): Unit = tree match {
- case EmptyTree | TypeTree() =>
- ;
- case _: DefTree | Function(_, _) | Template(_, _, _) =>
- resetDef(tree)
- case _ =>
- if (tree.hasSymbol && isLocal(tree.symbol)) tree.symbol = NoSymbol
- tree.tpe = null
- super.traverse(tree)
+ override def traverse(tree: Tree): Unit = {
+ tree match {
+ case _: DefTree | Function(_, _) | Template(_, _, _) =>
+ resetDef(tree)
+ case _ =>
+ if (tree.hasSymbol && isLocal(tree.symbol)) tree.symbol = NoSymbol
+ }
+ tree.resetType()
+ super.traverse(tree)
}
}
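The wasEmpty bookkeeping on TypeTree means resetType only clears types that were inferred into an originally empty tree; explicitly written types survive a reset. A minimal standalone sketch of the idea, using a hypothetical class rather than the real Tree hierarchy:

    class TT {
      private var tpe: String = null
      private var wasEmpty = false
      def isEmpty = tpe == null
      def setType(tp: String): this.type = { tpe = tp; this }
      // remember whether the tree was empty before it got a type
      def defineType(tp: String): this.type = { wasEmpty = isEmpty; setType(tp) }
      // snap back only if the type was filled in via defineType on an empty tree
      def resetType() { if (wasEmpty) tpe = null }
    }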
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index a4f228cffb..ccb46c0500 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -957,7 +957,7 @@ self =>
*/
def statement(location: Int): Tree = expr(location) // !!! still needed?
- /** Expr ::= (Bindings | Id | `_') `=>' Expr
+ /** Expr ::= (Bindings | [`implicit'] Id | `_') `=>' Expr
* | Expr1
* ResultExpr ::= (Bindings | Id `:' CompoundType) `=>' Block
* | Expr1
@@ -1057,6 +1057,14 @@ self =>
atPos(in.skipToken()) {
Throw(expr())
}
+ case IMPLICIT =>
+ val start = in.skipToken()
+ val param0 = convertToParam(atPos(in.offset)(Ident(ident())))
+ val param = treeCopy.ValDef(param0, param0.mods | Flags.IMPLICIT, param0.name, param0.tpt, param0.rhs)
+ atPos(start, in.offset) {
+ accept(ARROW)
+ Function(List(param), if (location != InBlock) expr() else block())
+ }
case _ =>
var t = postfixExpr()
if (in.token == EQUALS) {
@@ -1555,9 +1563,9 @@ self =>
*/
private def normalize(mods: Modifiers): Modifiers =
if ((mods hasFlag Flags.PRIVATE) && mods.privateWithin != nme.EMPTY.toTypeName)
- mods &~ Flags.PRIVATE
+ normalize(mods &~ Flags.PRIVATE)
else if ((mods hasFlag Flags.ABSTRACT) && (mods hasFlag Flags.OVERRIDE))
- mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE
+ normalize(mods &~ (Flags.ABSTRACT | Flags.OVERRIDE) | Flags.ABSOVERRIDE)
else
mods
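The new IMPLICIT case lets a function literal mark its single parameter implicit, as in implicit x => body. A hedged usage example (not part of the patch, and assuming a compiler build that includes this change):

    object ImplicitClosureDemo {
      def run(body: Int => String) = body(41)
      def main(args: Array[String]) {
        // x carries the IMPLICIT flag, so implicitly[Int] resolves to it inside the body
        println(run { implicit x => implicitly[Int].toString })   // prints 41
      }
    }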
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 267d5bd17f..9648735a15 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -680,7 +680,7 @@ trait Scanners {
var value: Long = 0
val divider = if (base == 10) 1 else 2
val limit: Long =
- if (token == LONGLIT) Math.MAX_LONG else Math.MAX_INT
+ if (token == LONGLIT) Long.MaxValue else Int.MaxValue
var i = 0
val len = strVal.length
while (i < len) {
@@ -709,7 +709,7 @@ trait Scanners {
*/
def floatVal(negated: Boolean): Double = {
val limit: Double =
- if (token == DOUBLELIT) Math.MAX_DOUBLE else Math.MAX_FLOAT
+ if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue
try {
val value: Double = java.lang.Double.valueOf(strVal).doubleValue()
if (value > limit)
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 29a9599744..da864ef706 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -378,7 +378,7 @@ abstract class TreeBuilder {
val rhss = valeqs map { case ValEq(_, _, rhs) => rhs }
val defpat1 = makeBind(pat)
val defpats = pats map makeBind
- val pdefs = (List.map2(defpats, rhss)(makePatDef)).flatten
+ val pdefs = (defpats, rhss).zipped flatMap makePatDef
val ids = (defpat1 :: defpats) map makeValue
val rhs1 = makeForYield(
List(ValFrom(pos, defpat1, rhs)),
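(defpats, rhss).zipped flatMap makePatDef replaces the deprecated List.map2/flatten pairing. The same shape in a standalone example with made-up data:

    object ZippedDemo {
      def main(args: Array[String]) {
        val defpats = List("x", "y")
        val rhss    = List(1, 2)
        // pairwise flatMap over two lists without building an intermediate list of lists
        val pdefs = (defpats, rhss).zipped flatMap { (p, r) => List(p + " = " + r) }
        println(pdefs)   // List(x = 1, y = 2)
      }
    }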
diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
index 290d90b3e9..88e867e487 100644
--- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
+++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala
@@ -469,6 +469,18 @@ abstract class ScalaPrimitives {
def isCoercion(code: Int): Boolean = (code >= B2B) && (code <= D2D)
+ final val typeOfArrayOp: Map[Int, TypeKind] = Map(
+ (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++
+ (List(BARRAY_LENGTH, BARRAY_GET, BARRAY_SET) map (_ -> BYTE)) ++
+ (List(SARRAY_LENGTH, SARRAY_GET, SARRAY_SET) map (_ -> SHORT)) ++
+ (List(CARRAY_LENGTH, CARRAY_GET, CARRAY_SET) map (_ -> CHAR)) ++
+ (List(IARRAY_LENGTH, IARRAY_GET, IARRAY_SET) map (_ -> INT)) ++
+ (List(LARRAY_LENGTH, LARRAY_GET, LARRAY_SET) map (_ -> LONG)) ++
+ (List(FARRAY_LENGTH, FARRAY_GET, FARRAY_SET) map (_ -> FLOAT)) ++
+ (List(DARRAY_LENGTH, DARRAY_GET, DARRAY_SET) map (_ -> DOUBLE)) ++
+ (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> REFERENCE(AnyRefClass))) : _*
+ )
+
/** Check whether the given operation code is an array operation. */
def isArrayOp(code: Int): Boolean =
isArrayNew(code) | isArrayLength(code) | isArrayGet(code) | isArraySet(code)
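typeOfArrayOp collapses the per-opcode dispatch on array operations into a single lookup table, built by splicing lists of (code -> kind) pairs into Map(...: _*). The construction in miniature, with hypothetical opcodes and kinds:

    object ArrayOpTable {
      sealed trait Kind
      case object BOOL extends Kind
      case object BYTE extends Kind

      // each list contributes (opcode -> element kind) pairs; the whole sequence is spliced
      val typeOf: Map[Int, Kind] = Map(
        (List(1, 2, 3) map (_ -> BOOL)) ++
        (List(4, 5, 6) map (_ -> BYTE)) : _*
      )

      def main(args: Array[String]) {
        println(typeOf get 5)    // Some(BYTE)
        println(typeOf get 99)   // None
      }
    }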
diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
index a774473167..b708d4df80 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala
@@ -40,8 +40,8 @@ trait BasicBlocks {
def hasFlag(flag: Int): Boolean = (flags & flag) != 0
/** Set the given flag. */
- def setFlag(flag: Int): Unit = flags |= flag
- def resetFlag(flag: Int) {
+ private def setFlag(flag: Int): Unit = flags |= flag
+ private def resetFlag(flag: Int) {
flags &= ~flag
}
@@ -63,9 +63,16 @@ trait BasicBlocks {
def exceptionHandlerStart_=(b: Boolean) =
if (b) setFlag(EX_HEADER) else resetFlag(EX_HEADER)
- /** Has this basic block been modified since the last call to 'toList'? */
- private def touched = hasFlag(TOUCHED)
- private def touched_=(b: Boolean) = if (b) setFlag(TOUCHED) else resetFlag(TOUCHED)
+ /** Has this basic block been modified since the last call to 'successors'? */
+ def touched = hasFlag(DIRTYSUCCS)
+ def touched_=(b: Boolean) = if (b) {
+ setFlag(DIRTYSUCCS | DIRTYPREDS)
+ } else {
+ resetFlag(DIRTYSUCCS | DIRTYPREDS)
+ }
+
+ // basic blocks start in a dirty state
+ setFlag(DIRTYSUCCS | DIRTYPREDS)
/** Cached predecessors. */
var preds: List[BasicBlock] = null
@@ -85,9 +92,9 @@ trait BasicBlocks {
private var instrs: Array[Instruction] = _
override def toList: List[Instruction] = {
- if (closed && touched)
- instructionList = instrs.toList
- instructionList
+ if (closed)
+ instrs.toList
+ else instructionList
}
/** Return an iterator over the instructions in this basic block. */
@@ -101,12 +108,11 @@ trait BasicBlocks {
}
def fromList(is: List[Instruction]) {
+ code.touched = true
instrs = toInstructionArray(is)
closed = true
}
- // public:
-
/** Return the index of inst. Uses reference equality.
* Returns -1 if not found.
*/
@@ -166,9 +172,9 @@ trait BasicBlocks {
*/
def replaceInstruction(pos: Int, instr: Instruction): Boolean = {
assert(closed, "Instructions can be replaced only after the basic block is closed")
-
instr.setPos(instrs(pos).pos)
instrs(pos) = instr
+ code.touched = true
true
}
@@ -187,6 +193,7 @@ trait BasicBlocks {
newInstr.setPos(oldInstr.pos)
instrs(i) = newInstr
changed = true
+ code.touched = true
}
i += 1
}
@@ -213,6 +220,8 @@ trait BasicBlocks {
if (i < instrs.length) {
val newInstrs = new Array[Instruction](instrs.length + is.length - 1);
changed = true
+ code.touched = true
+
Array.copy(instrs, 0, newInstrs, 0, i)
var j = i
for (x <- is) {
@@ -244,6 +253,7 @@ trait BasicBlocks {
Array.copy(instrs, i + 1, newInstrs, j, instrs.length - i)
instrs = newInstrs;
}
+ code.touched = true
}
/** Removes instructions found at the given positions.
@@ -264,6 +274,7 @@ trait BasicBlocks {
i += 1
}
instrs = newInstrs
+ code.touched = true
}
/** Remove the last instruction of this basic block. It is
@@ -274,7 +285,7 @@ trait BasicBlocks {
removeInstructionsAt(size)
else {
instructionList = instructionList.tail
- touched = true
+ code.touched = true
}
}
@@ -287,7 +298,9 @@ trait BasicBlocks {
var i = 0
while (i < instrs.length) {
map get instrs(i) match {
- case Some(instr) => touched = replaceInstruction(i, instr)
+ case Some(instr) =>
+ val changed = replaceInstruction(i, instr)
+ code.touched |= changed
case None => ()
}
i += 1
@@ -321,6 +334,10 @@ trait BasicBlocks {
emit(instr, NoPosition)
}
+ /** Emitting does not set touched to true. During code generation this is a hotspot and
+ * setting the flag for each emit is a waste. Caching should happen only after a block
+ * is closed, which sets the DIRTYSUCCS flag.
+ */
def emit(instr: Instruction, pos: Position) {
if (closed) {
print()
@@ -329,7 +346,6 @@ trait BasicBlocks {
assert(!closed || ignore, "BasicBlock closed")
if (!ignore) {
- touched = true
instr.setPos(pos)
instructionList = instr :: instructionList
_lastInstruction = instr
@@ -357,6 +373,7 @@ trait BasicBlocks {
def close {
assert(instructionList.length > 0, "Empty block.")
closed = true
+ setFlag(DIRTYSUCCS)
instructionList = instructionList.reverse
instrs = toInstructionArray(instructionList)
}
@@ -365,6 +382,7 @@ trait BasicBlocks {
assert(closed)
closed = false
ignore = false
+ touched = true
instructionList = instructionList.reverse // prepare for appending to the head
}
@@ -409,25 +427,37 @@ trait BasicBlocks {
array
}
- def successors : List[BasicBlock] = if (isEmpty) Nil else {
- var res = lastInstruction match {
- case JUMP (whereto) => List(whereto)
- case CJUMP(success, failure, _, _) => failure :: success :: Nil
- case CZJUMP(success, failure, _, _) => failure :: success :: Nil
- case SWITCH(_,labels) => labels
- case RETURN(_) => Nil
- case THROW() => Nil
- case _ =>
- if (closed) {
- dump
- global.abort("The last instruction is not a control flow instruction: " + lastInstruction)
+ /** Cached value of successors. Must be recomputed whenever a block in the current method is changed. */
+ private var succs: List[BasicBlock] = Nil
+
+ def successors : List[BasicBlock] = {
+ if (touched) {
+ resetFlag(DIRTYSUCCS)
+ succs = if (isEmpty) Nil else {
+ var res = lastInstruction match {
+ case JUMP(whereto) => List(whereto)
+ case CJUMP(success, failure, _, _) => failure :: success :: Nil
+ case CZJUMP(success, failure, _, _) => failure :: success :: Nil
+ case SWITCH(_, labels) => labels
+ case RETURN(_) => Nil
+ case THROW() => Nil
+ case _ =>
+ if (closed) {
+ dump
+ global.abort("The last instruction is not a control flow instruction: " + lastInstruction)
+ }
+ else Nil
}
- else Nil
- }
- method.exh.foreach { e: ExceptionHandler =>
- if (e.covers(this)) res = e.startBlock :: res
+ method.exh.foreach {
+ e: ExceptionHandler =>
+ if (e.covers(this)) res = e.startBlock :: res
+ }
+ val res1 = res ++ exceptionalSucc(this, res)
+ res1
+ }
}
- res ++ exceptionalSucc(this, res)
+// println("reusing cached successors for " + this + " in method " + method)
+ succs
}
/** Return a list of successors for 'b' that come from exception handlers
@@ -446,12 +476,12 @@ trait BasicBlocks {
succs.flatMap(findSucc).removeDuplicates
}
- /** Returns the precessors of this block, in the current 'code' chunk.
- * This is signifficant only if there are exception handlers, which live
- * in different code 'chunks' than the rest of the method.
- */
+ /** Returns the predecessors of this block. */
def predecessors: List[BasicBlock] = {
- preds = code.blocks.iterator.filter (_.successors.contains(this)).toList
+ if (hasFlag(DIRTYPREDS)) {
+ resetFlag(DIRTYPREDS)
+ preds = code.blocks.iterator.filter (_.successors.contains(this)).toList
+ }
preds
}
@@ -467,7 +497,7 @@ trait BasicBlocks {
def print(out: java.io.PrintStream) {
out.println("block #"+label+" :")
- toList.foreach(i => out.println(" " + i))
+ foreach(i => out.println(" " + i))
out.print("Successors: ")
successors.foreach((x: BasicBlock) => out.print(" "+x.label.toString()))
out.println()
@@ -482,6 +512,17 @@ trait BasicBlocks {
}
override def toString(): String = "" + label
+
+ def flagsString: String =
+ ("block " + label + (
+ if (hasFlag(LOOP_HEADER)) " <loopheader> "
+ else if (hasFlag(IGNORING)) " <ignore> "
+ else if (hasFlag(EX_HEADER)) " <exheader> "
+ else if (hasFlag(CLOSED)) " <closed> "
+ else if (hasFlag(DIRTYSUCCS)) " <dirtysuccs> "
+ else if (hasFlag(DIRTYPREDS)) " <dirtypreds> "
+ else ""
+ ))
}
}
@@ -499,6 +540,9 @@ object BBFlags {
/** This block is closed. No new instructions can be added. */
final val CLOSED = 0x00000008
- /** This block has been changed, cached results are recomputed. */
- final val TOUCHED = 0x00000010
+ /** Code has been changed, recompute successors. */
+ final val DIRTYSUCCS = 0x00000010
+
+ /** Code has been changed, recompute predecessors. */
+ final val DIRTYPREDS = 0x00000020
}
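DIRTYSUCCS/DIRTYPREDS turn successor/predecessor computation into an invalidate-on-write cache: mutations mark the block dirty, and the next query recomputes and clears the flag. A standalone sketch of the pattern (hypothetical Node class, not BasicBlock):

    class Node(computeSuccs: Node => List[Node]) {
      private var dirty = true                 // start dirty, like a freshly created block
      private var cached: List[Node] = Nil
      def touch() { dirty = true }             // any mutation invalidates the cache
      def successors: List[Node] = {
        if (dirty) { cached = computeSuccs(this); dirty = false }
        cached                                 // otherwise reuse the cached list
      }
    }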
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Checkers.scala b/src/compiler/scala/tools/nsc/backend/icode/Checkers.scala
index ab32e69944..31abf6c18a 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Checkers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Checkers.scala
@@ -152,7 +152,7 @@ abstract class Checkers {
else {
if (s1.length != s2.length)
throw new CheckerError("Incompatible stacks: " + s1 + " and " + s2 + " in " + method + " at entry to block: " + bl);
- new TypeStack(List.map2(s1.types, s2.types) (lub))
+ new TypeStack((s1.types, s2.types).zipped map lub)
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index f0c44bb227..22934a78a7 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -12,6 +12,7 @@ package icode
import scala.collection.mutable.{Map, HashMap, ListBuffer, Buffer, HashSet}
import scala.tools.nsc.symtab._
import scala.tools.nsc.util.Position
+import scala.annotation.switch
import PartialFunction._
/** This class ...
@@ -27,7 +28,8 @@ abstract class GenICode extends SubComponent {
import icodes.opcodes._
import definitions.{
ArrayClass, ObjectClass, ThrowableClass,
- Object_equals
+ Object_equals, Object_isInstanceOf, Object_asInstanceOf,
+ isMaybeBoxed
}
import scalaPrimitives.{
isArrayOp, isComparisonOp, isLogicalOp,
@@ -38,6 +40,9 @@ abstract class GenICode extends SubComponent {
override def newPhase(prev: Phase) = new ICodePhase(prev)
+ private def debugLog(msg: => String): Unit =
+ if (settings.debug.value) log(msg)
+
class ICodePhase(prev: Phase) extends StdPhase(prev) {
override def description = "Generate ICode from the AST"
@@ -150,14 +155,8 @@ abstract class GenICode extends SubComponent {
abort("Illegal tree in gen: " + tree)
}
- private def genStat(trees: List[Tree], ctx: Context): Context = {
- var currentCtx = ctx
-
- for (t <- trees)
- currentCtx = genStat(t, currentCtx)
-
- currentCtx
- }
+ private def genStat(trees: List[Tree], ctx: Context): Context =
+ trees.foldLeft(ctx)((currentCtx, t) => genStat(t, currentCtx))
/**
* Generate code for the given tree. The trees should contain statements
@@ -169,30 +168,282 @@ abstract class GenICode extends SubComponent {
* @return a new context. This is necessary for control flow instructions
* which may change the current basic block.
*/
- private def genStat(tree: Tree, ctx: Context): Context = {
+ private def genStat(tree: Tree, ctx: Context): Context = tree match {
+ case Assign(lhs @ Select(_, _), rhs) =>
+ val isStatic = lhs.symbol.isStaticMember
+ var ctx1 = if (isStatic) ctx else genLoadQualifier(lhs, ctx)
- tree match {
- case Assign(lhs @ Select(_, _), rhs) =>
- if (lhs.symbol.isStaticMember) {
- val ctx1 = genLoad(rhs, ctx, toTypeKind(lhs.symbol.info))
- ctx1.bb.emit(STORE_FIELD(lhs.symbol, true), tree.pos)
- ctx1
- } else {
- var ctx1 = genLoadQualifier(lhs, ctx)
- ctx1 = genLoad(rhs, ctx1, toTypeKind(lhs.symbol.info))
- ctx1.bb.emit(STORE_FIELD(lhs.symbol, false), tree.pos)
- ctx1
+ ctx1 = genLoad(rhs, ctx1, toTypeKind(lhs.symbol.info))
+ ctx1.bb.emit(STORE_FIELD(lhs.symbol, isStatic), tree.pos)
+ ctx1
+
+ case Assign(lhs, rhs) =>
+ val ctx1 = genLoad(rhs, ctx, toTypeKind(lhs.symbol.info))
+ val Some(l) = ctx.method.lookupLocal(lhs.symbol)
+ ctx1.bb.emit(STORE_LOCAL(l), tree.pos)
+ ctx1
+
+ case _ =>
+ genLoad(tree, ctx, UNIT)
+ }
+ /**
+ * Generate code for primitive arithmetic operations.
+ * Returns (Context, Generated Type)
+ */
+ private def genArithmeticOp(tree: Tree, ctx: Context, code: Int): (Context, TypeKind) = {
+ val Apply(fun @ Select(larg, _), args) = tree
+ var ctx1 = ctx
+ var resKind = toTypeKind(larg.tpe)
+
+ if (settings.debug.value) {
+ assert(args.length <= 1,
+ "Too many arguments for primitive function: " + fun.symbol)
+ assert(resKind.isNumericType | resKind == BOOL,
+ resKind.toString() + " is not a numeric or boolean type " +
+ "[operation: " + fun.symbol + "]")
+ }
+
+ args match {
+ // unary operation
+ case Nil =>
+ ctx1 = genLoad(larg, ctx1, resKind)
+ code match {
+ case scalaPrimitives.POS =>
+ () // nothing
+ case scalaPrimitives.NEG =>
+ ctx1.bb.emit(CALL_PRIMITIVE(Negation(resKind)), larg.pos)
+ case scalaPrimitives.NOT =>
+ ctx1.bb.emit(CALL_PRIMITIVE(Arithmetic(NOT, resKind)), larg.pos)
+ case _ =>
+ abort("Unknown unary operation: " + fun.symbol.fullNameString +
+ " code: " + code)
}
- case Assign(lhs, rhs) =>
- val ctx1 = genLoad(rhs, ctx, toTypeKind(lhs.symbol.info))
- val Some(l) = ctx.method.lookupLocal(lhs.symbol)
- ctx1.bb.emit(STORE_LOCAL(l), tree.pos)
- ctx1
+ // binary operation
+ case rarg :: Nil =>
+ resKind = getMaxType(larg.tpe :: rarg.tpe :: Nil);
+ if (scalaPrimitives.isShiftOp(code) || scalaPrimitives.isBitwiseOp(code))
+ assert(resKind.isIntType | resKind == BOOL,
+ resKind.toString() + " incompatible with arithmetic modulo operation: " + ctx1);
+
+ ctx1 = genLoad(larg, ctx1, resKind)
+ ctx1 = genLoad(rarg,
+ ctx1, // check .NET size of shift arguments!
+ if (scalaPrimitives.isShiftOp(code)) INT else resKind)
+
+ val primitiveOp = code match {
+ case scalaPrimitives.ADD => Arithmetic(ADD, resKind)
+ case scalaPrimitives.SUB => Arithmetic(SUB, resKind)
+ case scalaPrimitives.MUL => Arithmetic(MUL, resKind)
+ case scalaPrimitives.DIV => Arithmetic(DIV, resKind)
+ case scalaPrimitives.MOD => Arithmetic(REM, resKind)
+ case scalaPrimitives.OR => Logical(OR, resKind)
+ case scalaPrimitives.XOR => Logical(XOR, resKind)
+ case scalaPrimitives.AND => Logical(AND, resKind)
+ case scalaPrimitives.LSL => Shift(LSL, resKind)
+ case scalaPrimitives.LSR => Shift(LSR, resKind)
+ case scalaPrimitives.ASR => Shift(ASR, resKind)
+ case _ => abort("Unknown primitive: " + fun.symbol + "[" + code + "]")
+ }
+ ctx1.bb.emit(CALL_PRIMITIVE(primitiveOp), tree.pos)
case _ =>
- genLoad(tree, ctx, UNIT)
+ abort("Too many arguments for primitive function: " + tree)
}
+ (ctx1, resKind)
+ }
+
+ /** Generate primitive array operations.
+ *
+ * @param tree ...
+ * @param ctx ...
+ * @param code ...
+ * @return ...
+ */
+ private def genArrayOp(tree: Tree, ctx: Context, code: Int, expectedType: TypeKind): (Context, TypeKind) = {
+ import scalaPrimitives._
+ val Apply(Select(arrayObj, _), args) = tree
+ val k = toTypeKind(arrayObj.tpe)
+ val ARRAY(elem) = k
+ var ctx1 = genLoad(arrayObj, ctx, k)
+ val elementType = (typeOfArrayOp get code) getOrElse abort("Unknown operation on arrays: " + tree + " code: " + code)
+
+ var generatedType = expectedType
+
+ if (scalaPrimitives.isArrayGet(code)) {
+ // load argument on stack
+ if (settings.debug.value)
+ assert(args.length == 1,
+ "Too many arguments for array get operation: " + tree);
+ ctx1 = genLoad(args.head, ctx1, INT)
+ generatedType = elem
+ ctx1.bb.emit(LOAD_ARRAY_ITEM(elementType), tree.pos)
+ }
+ else if (scalaPrimitives.isArraySet(code)) {
+ if (settings.debug.value)
+ assert(args.length == 2,
+ "Too many arguments for array set operation: " + tree);
+ ctx1 = genLoad(args.head, ctx1, INT)
+ ctx1 = genLoad(args.tail.head, ctx1, toTypeKind(args.tail.head.tpe))
+ // the following line should really be here, but because of bugs in erasure
+ // we pretend we generate whatever type is expected from us.
+ //generatedType = UNIT
+
+ ctx1.bb.emit(STORE_ARRAY_ITEM(elementType), tree.pos)
+ }
+ else {
+ generatedType = INT
+ ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(elementType)), tree.pos)
+ }
+
+ (ctx1, generatedType)
+ }
+ private def genSynchronized(tree: Apply, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
+ val Apply(fun, args) = tree
+ val monitor = ctx.makeLocal(tree.pos, ObjectClass.tpe, "monitor")
+ var ctx1 = genLoadQualifier(fun, ctx)
+ ctx1.bb.emit(Seq(
+ DUP(ANY_REF_CLASS),
+ STORE_LOCAL(monitor),
+ MONITOR_ENTER() setPos tree.pos
+ ))
+ ctx1.enterSynchronized(monitor)
+ debugLog("synchronized block start")
+
+ ctx1 = ctx1.Try(
+ bodyCtx => {
+ val ctx2 = genLoad(args.head, bodyCtx, expectedType /* toTypeKind(tree.tpe.resultType) */)
+ ctx2.bb.emit(Seq(
+ LOAD_LOCAL(monitor),
+ MONITOR_EXIT() setPos tree.pos
+ ))
+ ctx2
+ }, List(
+ // tree.tpe / fun.tpe is object, which is no longer true after this transformation
+ (NoSymbol, expectedType, exhCtx => {
+ exhCtx.bb.emit(Seq(
+ LOAD_LOCAL(monitor),
+ MONITOR_EXIT() setPos tree.pos,
+ THROW()
+ ))
+ exhCtx.bb.enterIgnoreMode
+ exhCtx
+ })), EmptyTree, tree)
+
+ debugLog("synchronized block end with block %s closed=%s".format(ctx1.bb, ctx1.bb.closed))
+ ctx1.exitSynchronized(monitor)
+
+ (ctx1, expectedType)
+ }
+
+ private def genLoadIf(tree: If, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
+ val If(cond, thenp, elsep) = tree
+
+ var thenCtx = ctx.newBlock
+ var elseCtx = ctx.newBlock
+ val contCtx = ctx.newBlock
+
+ genCond(cond, ctx, thenCtx, elseCtx)
+
+ val ifKind = toTypeKind(tree.tpe)
+ val thenKind = toTypeKind(thenp.tpe)
+ val elseKind = if (elsep == EmptyTree) UNIT else toTypeKind(elsep.tpe)
+
+ // we need to drop unneeded results, if one branch gives
+ // unit and the other gives something on the stack, because
+ // the type of 'if' is scala.Any, and its erasure would be Object.
+ // But unboxed units are not Objects...
+ def hasUnitBranch = thenKind == UNIT || elseKind == UNIT
+ val resKind = if (hasUnitBranch) UNIT else ifKind
+
+ if (hasUnitBranch)
+ debugLog("Will drop result from an if branch")
+
+ thenCtx = genLoad(thenp, thenCtx, resKind)
+ elseCtx = genLoad(elsep, elseCtx, resKind)
+
+ assert(!settings.debug.value || expectedType == UNIT,
+ "I produce UNIT in a context where " + expectedType + " is expected!")
+
+ thenCtx.bb.emitOnly(JUMP(contCtx.bb))
+ elseCtx.bb.emitOnly(
+ if (elsep == EmptyTree) JUMP(contCtx.bb)
+ else JUMP(contCtx.bb) setPos tree.pos
+ )
+
+ (contCtx, resKind)
+ }
+ private def genLoadTry(tree: Try, ctx: Context, setGeneratedType: TypeKind => Unit): Context = {
+ val Try(block, catches, finalizer) = tree
+ val kind = toTypeKind(tree.tpe)
+
+ val caseHandlers =
+ for (CaseDef(pat, _, body) <- catches.reverse) yield {
+ def genWildcardHandler(sym: Symbol): (Symbol, TypeKind, Context => Context) =
+ (sym, kind, ctx => {
+ ctx.bb.emit(DROP(REFERENCE(sym)))
+ genLoad(body, ctx, kind)
+ })
+
+ pat match {
+ case Typed(Ident(nme.WILDCARD), tpt) => genWildcardHandler(tpt.tpe.typeSymbol)
+ case Ident(nme.WILDCARD) => genWildcardHandler(ThrowableClass)
+ case Bind(name, _) =>
+ val exception = ctx.method addLocal new Local(pat.symbol, toTypeKind(pat.symbol.tpe), false)
+
+ (pat.symbol.tpe.typeSymbol, kind, {
+ ctx: Context =>
+ ctx.bb.emit(STORE_LOCAL(exception), pat.pos);
+ genLoad(body, ctx, kind);
+ })
+ }
+ }
+
+ ctx.Try(
+ bodyCtx => {
+ setGeneratedType(kind)
+ genLoad(block, bodyCtx, kind)
+ },
+ caseHandlers,
+ finalizer,
+ tree)
+ }
+
+ private def genPrimitiveOp(tree: Apply, ctx: Context, expectedType: TypeKind): (Context, TypeKind) = {
+ val sym = tree.symbol
+ val Apply(fun @ Select(receiver, _), args) = tree
+ val code = scalaPrimitives.getPrimitive(sym, receiver.tpe)
+
+ if (scalaPrimitives.isArithmeticOp(code))
+ genArithmeticOp(tree, ctx, code)
+ else if (code == scalaPrimitives.CONCAT)
+ (genStringConcat(tree, ctx), STRING)
+ else if (isArrayOp(code))
+ genArrayOp(tree, ctx, code, expectedType)
+ else if (isLogicalOp(code) || isComparisonOp(code)) {
+ val trueCtx = ctx.newBlock
+ val falseCtx = ctx.newBlock
+ val afterCtx = ctx.newBlock
+ genCond(tree, ctx, trueCtx, falseCtx)
+ trueCtx.bb.emitOnly(
+ CONSTANT(Constant(true)) setPos tree.pos,
+ JUMP(afterCtx.bb)
+ )
+ falseCtx.bb.emitOnly(
+ CONSTANT(Constant(false)) setPos tree.pos,
+ JUMP(afterCtx.bb)
+ )
+ (afterCtx, BOOL)
+ }
+ else if (code == scalaPrimitives.SYNCHRONIZED)
+ genSynchronized(tree, ctx, expectedType)
+ else if (scalaPrimitives.isCoercion(code)) {
+ val ctx1 = genLoad(receiver, ctx, toTypeKind(receiver.tpe))
+ genCoercion(tree, ctx1, code)
+ (ctx1, scalaPrimitives.generatedKind(code))
+ }
+ else abort("Primitive operation not handled yet: " + sym.fullNameString + "(" +
+ fun.symbol.simpleName + ") " + " at: " + (tree.pos))
}
/**
@@ -210,186 +461,6 @@ abstract class GenICode extends SubComponent {
if (settings.debug.value)
log("at line: " + (if (tree.pos.isDefined) tree.pos.line else tree.pos))
- /**
- * Generate code for primitive arithmetic operations.
- */
- def genArithmeticOp(tree: Tree, ctx: Context, code: Int): Context = {
- val Apply(fun @ Select(larg, _), args) = tree
- var ctx1 = ctx
- var resKind = toTypeKind(larg.tpe)
-
- if (settings.debug.value) {
- assert(args.length <= 1,
- "Too many arguments for primitive function: " + fun.symbol)
- assert(resKind.isNumericType | resKind == BOOL,
- resKind.toString() + " is not a numeric or boolean type " +
- "[operation: " + fun.symbol + "]")
- }
-
- args match {
- // unary operation
- case Nil =>
- ctx1 = genLoad(larg, ctx1, resKind)
- code match {
- case scalaPrimitives.POS =>
- () // nothing
- case scalaPrimitives.NEG =>
- ctx1.bb.emit(CALL_PRIMITIVE(Negation(resKind)), larg.pos)
- case scalaPrimitives.NOT =>
- ctx1.bb.emit(CALL_PRIMITIVE(Arithmetic(NOT, resKind)), larg.pos)
- case _ =>
- abort("Unknown unary operation: " + fun.symbol.fullNameString +
- " code: " + code)
- }
- generatedType = resKind
-
- // binary operation
- case rarg :: Nil =>
- resKind = getMaxType(larg.tpe :: rarg.tpe :: Nil);
- if (scalaPrimitives.isShiftOp(code) || scalaPrimitives.isBitwiseOp(code))
- assert(resKind.isIntType | resKind == BOOL,
- resKind.toString() + " incompatible with arithmetic modulo operation: " + ctx1);
-
- ctx1 = genLoad(larg, ctx1, resKind);
- ctx1 = genLoad(rarg,
- ctx1, // check .NET size of shift arguments!
- if (scalaPrimitives.isShiftOp(code)) INT else resKind)
-
- generatedType = resKind
- code match {
- case scalaPrimitives.ADD =>
- ctx1.bb.emit(CALL_PRIMITIVE(Arithmetic(ADD, resKind)), tree.pos)
- case scalaPrimitives.SUB =>
- ctx1.bb.emit(CALL_PRIMITIVE(Arithmetic(SUB, resKind)), tree.pos)
- case scalaPrimitives.MUL =>
- ctx1.bb.emit(CALL_PRIMITIVE(Arithmetic(MUL, resKind)), tree.pos)
- case scalaPrimitives.DIV =>
- ctx1.bb.emit(CALL_PRIMITIVE(Arithmetic(DIV, resKind)), tree.pos)
- case scalaPrimitives.MOD =>
- ctx1.bb.emit(CALL_PRIMITIVE(Arithmetic(REM, resKind)), tree.pos)
- case scalaPrimitives.OR =>
- ctx1.bb.emit(CALL_PRIMITIVE(Logical(OR, resKind)), tree.pos)
- case scalaPrimitives.XOR =>
- ctx1.bb.emit(CALL_PRIMITIVE(Logical(XOR, resKind)), tree.pos)
- case scalaPrimitives.AND =>
- ctx1.bb.emit(CALL_PRIMITIVE(Logical(AND, resKind)), tree.pos)
- case scalaPrimitives.LSL =>
- ctx1.bb.emit(CALL_PRIMITIVE(Shift(LSL, resKind)), tree.pos)
- generatedType = resKind
- case scalaPrimitives.LSR =>
- ctx1.bb.emit(CALL_PRIMITIVE(Shift(LSR, resKind)), tree.pos)
- generatedType = resKind
- case scalaPrimitives.ASR =>
- ctx1.bb.emit(CALL_PRIMITIVE(Shift(ASR, resKind)), tree.pos)
- generatedType = resKind
- case _ =>
- abort("Unknown primitive: " + fun.symbol + "[" + code + "]")
- }
-
- case _ =>
- abort("Too many arguments for primitive function: " + tree)
- }
- ctx1
- }
-
- /** Generate primitive array operations.
- *
- * @param tree ...
- * @param ctx ...
- * @param code ...
- * @return ...
- */
- def genArrayOp(tree: Tree, ctx: Context, code: Int): Context = {
- import scalaPrimitives._
- val Apply(Select(arrayObj, _), args) = tree
- val k = toTypeKind(arrayObj.tpe)
- val ARRAY(elem) = k
- var ctx1 = genLoad(arrayObj, ctx, k)
-
- if (scalaPrimitives.isArrayGet(code)) {
- // load argument on stack
- if (settings.debug.value)
- assert(args.length == 1,
- "Too many arguments for array get operation: " + tree);
- ctx1 = genLoad(args.head, ctx1, INT)
- generatedType = elem
- } else if (scalaPrimitives.isArraySet(code)) {
- if (settings.debug.value)
- assert(args.length == 2,
- "Too many arguments for array set operation: " + tree);
- ctx1 = genLoad(args.head, ctx1, INT)
- ctx1 = genLoad(args.tail.head, ctx1, toTypeKind(args.tail.head.tpe))
- // the following line should really be here, but because of bugs in erasure
- // we pretend we generate whatever type is expected from us.
- //generatedType = UNIT
- } else
- generatedType = INT
-
- code match {
- case ZARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(BOOL)), tree.pos)
- case BARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(BYTE)), tree.pos)
- case SARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(SHORT)), tree.pos)
- case CARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(CHAR)), tree.pos)
- case IARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(INT)), tree.pos)
- case LARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(LONG)), tree.pos)
- case FARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(FLOAT)), tree.pos)
- case DARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(DOUBLE)), tree.pos)
- case OARRAY_LENGTH =>
- ctx1.bb.emit(CALL_PRIMITIVE(ArrayLength(ANY_REF_CLASS)), tree.pos)
-
- case ZARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(BOOL), tree.pos)
- case BARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(BYTE), tree.pos)
- case SARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(SHORT), tree.pos)
- case CARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(CHAR), tree.pos)
- case IARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(INT), tree.pos)
- case LARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(LONG), tree.pos)
- case FARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(FLOAT), tree.pos)
- case DARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(DOUBLE), tree.pos)
- case OARRAY_GET =>
- ctx1.bb.emit(LOAD_ARRAY_ITEM(ANY_REF_CLASS), tree.pos)
-
- case ZARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(BOOL), tree.pos)
- case BARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(BYTE), tree.pos)
- case SARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(SHORT), tree.pos)
- case CARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(CHAR), tree.pos)
- case IARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(INT), tree.pos)
- case LARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(LONG), tree.pos)
- case FARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(FLOAT), tree.pos)
- case DARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(DOUBLE), tree.pos)
- case OARRAY_SET =>
- ctx1.bb.emit(STORE_ARRAY_ITEM(ANY_REF_CLASS), tree.pos)
-
- case _ =>
- abort("Unknown operation on arrays: " + tree + " code: " + code)
- }
- ctx1
- }
-
- // genLoad
val resCtx: Context = tree match {
case LabelDef(name, params, rhs) =>
val ctx1 = ctx.newBlock
@@ -436,46 +507,10 @@ abstract class GenICode extends SubComponent {
generatedType = UNIT
ctx1
- case If(cond, thenp, elsep) =>
- var thenCtx = ctx.newBlock
- var elseCtx = ctx.newBlock
- val contCtx = ctx.newBlock
- genCond(cond, ctx, thenCtx, elseCtx)
- val ifKind = toTypeKind(tree.tpe)
-
- val thenKind = toTypeKind(thenp.tpe)
- val elseKind = if (elsep == EmptyTree) UNIT else toTypeKind(elsep.tpe)
-
- generatedType = ifKind
-
- // we need to drop unneeded results, if one branch gives
- // unit and the other gives something on the stack, because
- // the type of 'if' is scala.Any, and its erasure would be Object.
- // But unboxed units are not Objects...
- if (thenKind == UNIT || elseKind == UNIT) {
- if (settings.debug.value)
- log("Will drop result from an if branch");
- thenCtx = genLoad(thenp, thenCtx, UNIT)
- elseCtx = genLoad(elsep, elseCtx, UNIT)
- if (settings.debug.value)
- assert(expectedType == UNIT,
- "I produce UNIT in a context where " +
- expectedType + " is expected!")
- generatedType = UNIT
- } else {
- thenCtx = genLoad(thenp, thenCtx, ifKind)
- elseCtx = genLoad(elsep, elseCtx, ifKind)
- }
-
- thenCtx.bb.emit(JUMP(contCtx.bb))
- thenCtx.bb.close
- if (elsep == EmptyTree)
- elseCtx.bb.emit(JUMP(contCtx.bb), tree.pos)
- else
- elseCtx.bb.emit(JUMP(contCtx.bb))
- elseCtx.bb.close
-
- contCtx
+ case t @ If(cond, thenp, elsep) =>
+ val (newCtx, resKind) = genLoadIf(t, ctx, expectedType)
+ generatedType = resKind
+ newCtx
case Return(expr) =>
val returnedKind = toTypeKind(expr.tpe)
@@ -510,41 +545,7 @@ abstract class GenICode extends SubComponent {
generatedType = expectedType
ctx1
- case Try(block, catches, finalizer) =>
- val kind = toTypeKind(tree.tpe)
-
- var handlers = for (CaseDef(pat, _, body) <- catches.reverse)
- yield pat match {
- case Typed(Ident(nme.WILDCARD), tpt) => (tpt.tpe.typeSymbol, kind, {
- ctx: Context =>
- ctx.bb.emit(DROP(REFERENCE(tpt.tpe.typeSymbol)));
- genLoad(body, ctx, kind);
- })
-
- case Ident(nme.WILDCARD) => (ThrowableClass, kind, {
- ctx: Context =>
- ctx.bb.emit(DROP(REFERENCE(ThrowableClass)))
- genLoad(body, ctx, kind)
- })
-
- case Bind(name, _) =>
- val exception = ctx.method.addLocal(new Local(pat.symbol, toTypeKind(pat.symbol.tpe), false))
-
- (pat.symbol.tpe.typeSymbol, kind, {
- ctx: Context =>
- ctx.bb.emit(STORE_LOCAL(exception), pat.pos);
- genLoad(body, ctx, kind);
- })
- }
-
- ctx.Try(
- bodyCtx => {
- generatedType = kind; //toTypeKind(block.tpe);
- genLoad(block, bodyCtx, generatedType);
- },
- handlers,
- finalizer,
- tree)
+ case t @ Try(_, _, _) => genLoadTry(t, ctx, (x: TypeKind) => generatedType = x)
case Throw(expr) =>
val ctx1 = genLoad(expr, ctx, THROWABLE)
@@ -558,30 +559,27 @@ abstract class GenICode extends SubComponent {
case Apply(TypeApply(fun, targs), _) =>
val sym = fun.symbol
- var ctx1 = ctx
- var cast = false
-
- if (sym == definitions.Object_isInstanceOf)
- cast = false
- else if (sym == definitions.Object_asInstanceOf)
- cast = true
- else
- abort("Unexpected type application " + fun + "[sym: " + sym.fullNameString + "]" + " in: " + tree)
+ val cast = sym match {
+ case Object_isInstanceOf => false
+ case Object_asInstanceOf => true
+ case _ => abort("Unexpected type application " + fun + "[sym: " + sym.fullNameString + "]" + " in: " + tree)
+ }
val Select(obj, _) = fun
val l = toTypeKind(obj.tpe)
val r = toTypeKind(targs.head.tpe)
-
- ctx1 = genLoadQualifier(fun, ctx)
+ val ctx1 = genLoadQualifier(fun, ctx)
if (l.isValueType && r.isValueType)
genConversion(l, r, ctx1, cast)
else if (l.isValueType) {
ctx1.bb.emit(DROP(l), fun.pos)
if (cast) {
- ctx1.bb.emit(NEW(REFERENCE(definitions.getClass("ClassCastException"))))
- ctx1.bb.emit(DUP(ANY_REF_CLASS))
- ctx1.bb.emit(THROW())
+ ctx1.bb.emit(Seq(
+ NEW(REFERENCE(definitions.getClass("ClassCastException"))),
+ DUP(ANY_REF_CLASS),
+ THROW()
+ ))
} else
ctx1.bb.emit(CONSTANT(Constant(false)))
}
@@ -632,11 +630,9 @@ abstract class GenICode extends SubComponent {
assert(generatedType.isReferenceType || generatedType.isArrayType,
"Non reference type cannot be instantiated: " + generatedType)
- var ctx1 = ctx
-
generatedType match {
case arr @ ARRAY(elem) =>
- ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
+ val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
val dims = arr.dimensions
var elemKind = arr.elementKind
if (args.length > dims)
@@ -645,24 +641,25 @@ abstract class GenICode extends SubComponent {
if (args.length != dims)
for (i <- args.length until dims) elemKind = ARRAY(elemKind)
ctx1.bb.emit(CREATE_ARRAY(elemKind, args.length), tree.pos)
+ ctx1
case rt @ REFERENCE(cls) =>
if (settings.debug.value)
assert(ctor.owner == cls,
"Symbol " + ctor.owner.fullNameString + " is different than " + tpt)
val nw = NEW(rt)
- ctx1.bb.emit(nw, tree.pos)
- ctx1.bb.emit(DUP(generatedType))
- ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
+ ctx.bb.emit(nw, tree.pos)
+ ctx.bb.emit(DUP(generatedType))
+ val ctx1 = genLoadArguments(args, ctor.info.paramTypes, ctx)
val init = CALL_METHOD(ctor, Static(true))
nw.init = init
ctx1.bb.emit(init, tree.pos)
+ ctx1
case _ =>
abort("Cannot instantiate " + tpt + "of kind: " + generatedType)
}
- ctx1
case Apply(fun @ _, List(expr)) if (definitions.isBox(fun.symbol)) =>
if (settings.debug.value)
@@ -690,7 +687,7 @@ abstract class GenICode extends SubComponent {
ctx1.bb.emit(UNBOX(boxType), expr.pos)
ctx1
- case Apply(fun, args) =>
+ case app @ Apply(fun, args) =>
val sym = fun.symbol
if (sym.isLabel) { // jump to a label
@@ -711,78 +708,12 @@ abstract class GenICode extends SubComponent {
}
}
val ctx1 = genLoadLabelArguments(args, label, ctx)
- if (label.anchored)
- ctx1.bb.emit(JUMP(label.block), tree.pos)
- else
- ctx1.bb.emit(PJUMP(label), tree.pos)
-
- ctx1.bb.close
+ ctx1.bb.emitOnly(if (label.anchored) JUMP(label.block) else PJUMP(label))
ctx1.newBlock
} else if (isPrimitive(sym)) { // primitive method call
- val Select(receiver, _) = fun
-
- val code = scalaPrimitives.getPrimitive(sym, receiver.tpe)
- var ctx1 = ctx
-
- if (scalaPrimitives.isArithmeticOp(code)) {
- ctx1 = genArithmeticOp(tree, ctx1, code)
- } else if (code == scalaPrimitives.CONCAT) {
- ctx1 = genStringConcat(tree, ctx1)
- generatedType = STRING
- } else if (isArrayOp(code)) {
- ctx1 = genArrayOp(tree, ctx1, code)
- } else if (isLogicalOp(code) || isComparisonOp(code)) {
-
- val trueCtx = ctx1.newBlock
- val falseCtx = ctx1.newBlock
- val afterCtx = ctx1.newBlock
- genCond(tree, ctx1, trueCtx, falseCtx)
- trueCtx.bb.emit(CONSTANT(Constant(true)), tree.pos)
- trueCtx.bb.emit(JUMP(afterCtx.bb))
- trueCtx.bb.close
- falseCtx.bb.emit(CONSTANT(Constant(false)), tree.pos)
- falseCtx.bb.emit(JUMP(afterCtx.bb))
- falseCtx.bb.close
- generatedType = BOOL
- ctx1 = afterCtx
- } else if (code == scalaPrimitives.SYNCHRONIZED) {
- val monitor = ctx.makeLocal(tree.pos, ObjectClass.tpe, "monitor")
- ctx1 = genLoadQualifier(fun, ctx1)
- ctx1.bb.emit(DUP(ANY_REF_CLASS))
- ctx1.bb.emit(STORE_LOCAL(monitor))
- ctx1.bb.emit(MONITOR_ENTER(), tree.pos)
- ctx1.enterSynchronized(monitor)
-
- if (settings.debug.value)
- log("synchronized block start");
-
- ctx1 = ctx1.Try(
- bodyCtx => {
- val ctx1 = genLoad(args.head, bodyCtx, expectedType /* toTypeKind(tree.tpe.resultType) */)
- ctx1.bb.emit(LOAD_LOCAL(monitor))
- ctx1.bb.emit(MONITOR_EXIT(), tree.pos)
- ctx1
- }, List(
- // tree.tpe / fun.tpe is object, which is no longer true after this transformation
- (NoSymbol, expectedType, exhCtx => {
- exhCtx.bb.emit(LOAD_LOCAL(monitor))
- exhCtx.bb.emit(MONITOR_EXIT(), tree.pos)
- exhCtx.bb.emit(THROW())
- exhCtx.bb.enterIgnoreMode
- exhCtx
- })), EmptyTree, tree);
- if (settings.debug.value)
- log("synchronized block end with block " + ctx1.bb +
- " closed=" + ctx1.bb.closed);
- ctx1.exitSynchronized(monitor)
- } else if (scalaPrimitives.isCoercion(code)) {
- ctx1 = genLoad(receiver, ctx1, toTypeKind(receiver.tpe))
- genCoercion(tree, ctx1, code)
- generatedType = scalaPrimitives.generatedKind(code)
- } else
- abort("Primitive operation not handled yet: " + sym.fullNameString + "(" +
- fun.symbol.simpleName + ") " + " at: " + (tree.pos));
- ctx1
+ val (newCtx, resKind) = genPrimitiveOp(app, ctx, expectedType)
+ generatedType = resKind
+ newCtx
} else { // normal method call
if (settings.debug.value)
log("Gen CALL_METHOD with sym: " + sym + " isStaticSymbol: " + sym.isStaticMember);
@@ -837,10 +768,9 @@ abstract class GenICode extends SubComponent {
generatedType = REFERENCE(tree.symbol)
} else {
ctx.bb.emit(THIS(ctx.clazz.symbol), tree.pos)
- if (tree.symbol == ArrayClass)
- generatedType = REFERENCE(ObjectClass)
- else
- generatedType = REFERENCE(ctx.clazz.symbol)
+ generatedType = REFERENCE(
+ if (tree.symbol == ArrayClass) ObjectClass else ctx.clazz.symbol
+ )
}
ctx
@@ -961,31 +891,26 @@ abstract class GenICode extends SubComponent {
var tags: List[Int] = Nil
var default: BasicBlock = afterCtx.bb
- for (caze <- cases) caze match {
- case CaseDef(Literal(value), EmptyTree, body) =>
- tags = value.intValue :: tags
- val tmpCtx = ctx1.newBlock
- targets = tmpCtx.bb :: targets
-
- caseCtx = genLoad(body, tmpCtx , generatedType)
- caseCtx.bb.emit(JUMP(afterCtx.bb), caze.pos)
- caseCtx.bb.close
-
- case CaseDef(Ident(nme.WILDCARD), EmptyTree, body) =>
- val tmpCtx = ctx1.newBlock
- default = tmpCtx.bb
-
- caseCtx = genLoad(body, tmpCtx , generatedType)
- caseCtx.bb.emit(JUMP(afterCtx.bb), caze.pos)
- caseCtx.bb.close
+ for (caze @ CaseDef(pat, guard, body) <- cases) {
+ assert(guard == EmptyTree)
+ val tmpCtx = ctx1.newBlock
+ pat match {
+ case Literal(value) =>
+ tags = value.intValue :: tags
+ targets = tmpCtx.bb :: targets
+ case Ident(nme.WILDCARD) =>
+ default = tmpCtx.bb
+ case _ =>
+ abort("Invalid case statement in switch-like pattern match: " +
+ tree + " at: " + (tree.pos))
+ }
- case _ =>
- abort("Invalid case statement in switch-like pattern match: " +
- tree + " at: " + (tree.pos))
+ caseCtx = genLoad(body, tmpCtx, generatedType)
+ caseCtx.bb.emitOnly(JUMP(afterCtx.bb) setPos caze.pos)
}
- ctx1.bb.emit(SWITCH(tags.reverse map (x => List(x)),
- (default :: targets).reverse), tree.pos)
- ctx1.bb.close
+ ctx1.bb.emitOnly(
+ SWITCH(tags.reverse map (x => List(x)), (default :: targets).reverse) setPos tree.pos
+ )
afterCtx
case EmptyTree =>
@@ -994,13 +919,12 @@ abstract class GenICode extends SubComponent {
ctx
case _ =>
- abort("Unexpected tree in genLoad: " + tree + " at: " +
- (tree.pos))
+ abort("Unexpected tree in genLoad: " + tree + " at: " + tree.pos)
}
// emit conversion
if (generatedType != expectedType)
- adapt(generatedType, expectedType, resCtx, tree.pos);
+ adapt(generatedType, expectedType, resCtx, tree.pos)
resCtx
}
@@ -1143,7 +1067,7 @@ abstract class GenICode extends SubComponent {
*/
def genCoercion(tree: Tree, ctx: Context, code: Int) = {
import scalaPrimitives._
- code match {
+ (code: @switch) match {
case B2B => ()
case B2C => ctx.bb.emit(CALL_PRIMITIVE(Conversion(BYTE, CHAR)), tree.pos)
case B2S => ctx.bb.emit(CALL_PRIMITIVE(Conversion(BYTE, SHORT)), tree.pos)
@@ -1409,23 +1333,8 @@ abstract class GenICode extends SubComponent {
* comparison might have a run-time type subtype of java.lang.Number or java.lang.Character.
* When it is statically known that both sides are equal and subtypes of Number of Character,
* not using the rich equality is possible (their own equals method will do ok.)*/
- def mustUseAnyComparator: Boolean = {
- import definitions._
-
- /** The various ways a boxed primitive might materialize at runtime. */
- def isJavaBoxed(sym: Symbol) =
- (sym == ObjectClass) ||
- (sym == SerializableClass) ||
- (sym == ComparableClass) ||
- (sym isNonBottomSubClass BoxedNumberClass) ||
- (sym isNonBottomSubClass BoxedCharacterClass)
-
- def isBoxed(sym: Symbol): Boolean =
- if (forMSIL) (sym isNonBottomSubClass BoxedNumberClass)
- else isJavaBoxed(sym)
-
- isBoxed(l.tpe.typeSymbol) && isBoxed(r.tpe.typeSymbol)
- }
+ def mustUseAnyComparator: Boolean =
+ isMaybeBoxed(l.tpe.typeSymbol) && isMaybeBoxed(r.tpe.typeSymbol)
if (mustUseAnyComparator) {
// when -optimise is on we call the @inline-version of equals, found in ScalaRunTime
@@ -1501,9 +1410,8 @@ abstract class GenICode extends SubComponent {
assert(ctx.clazz.symbol eq cls,
"Classes are not the same: " + ctx.clazz.symbol + ", " + cls)
- for (f <- cls.info.decls.iterator)
- if (!f.isMethod && f.isTerm)
- ctx.clazz.addField(new IField(f));
+ for (f <- cls.info.decls ; if !f.isMethod && f.isTerm)
+ ctx.clazz addField new IField(f)
}
/**
@@ -1624,7 +1532,7 @@ abstract class GenICode extends SubComponent {
do {
changed = false
n += 1
- method.code traverse prune0
+ method.code.blocks foreach prune0
} while (changed)
if (settings.debug.value)
@@ -2061,7 +1969,7 @@ abstract class GenICode extends SubComponent {
}
val map = substMap
- code traverse (_.subst(map))
+ code.blocks foreach (_.subst(map))
}
/**
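The genStat(trees, ctx) rewrite above threads the context through foldLeft instead of mutating a var in a loop, and the extracted genArithmeticOp/genArrayOp/genLoadIf helpers return the resulting (Context, TypeKind) pair instead of assigning generatedType as a side effect. The foldLeft shape in a standalone toy (hypothetical names):

    object FoldCtxDemo {
      case class Ctx(emitted: List[String]) { def emit(s: String) = Ctx(emitted :+ s) }
      def genStat(t: String, ctx: Ctx): Ctx = ctx emit t
      // each statement is generated against the context produced by the previous one
      def genStats(trees: List[String], ctx: Ctx): Ctx =
        trees.foldLeft(ctx)((currentCtx, t) => genStat(t, currentCtx))
      def main(args: Array[String]) {
        println(genStats(List("a", "b", "c"), Ctx(Nil)).emitted)   // List(a, b, c)
      }
    }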
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index 19f78626e9..993ce62140 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -36,6 +36,14 @@ trait Members { self: ICodes =>
var producedStack: TypeStack = null
private var currentLabel: Int = 0
+ private var _touched = false
+
+ def touched = _touched
+ def touched_=(b: Boolean): Unit = if (b) {
+ blocks foreach (_.touched = true)
+ _touched = true
+ } else
+ _touched = false
// Constructor code
startBlock = newBlock
@@ -52,52 +60,11 @@ trait Members { self: ICodes =>
if (b == startBlock)
startBlock = b.successors.head;
blocks -= b
- }
-
- /**
- * Apply a function to all basic blocks, for side-effects. It starts at
- * the given startBlock and checks that are no predecessors of the given node.
- * Only blocks that are reachable via a path from startBlock are ever visited.
- */
- def traverseFrom(startBlock: BasicBlock, f: BasicBlock => Unit) = {
- val visited: Set[BasicBlock] = new HashSet();
-
- def traverse0(toVisit: List[BasicBlock]): Unit = toVisit match {
- case Nil => ();
- case b :: bs => if (!visited.contains(b)) {
- f(b);
- visited += b;
- traverse0(bs ::: b.successors);
- } else
- traverse0(bs);
- }
- assert(startBlock.predecessors == Nil,
- "Starting traverse from a block with predecessors: " + this);
- traverse0(startBlock :: Nil)
- }
+ assert(!blocks.contains(b))
+ for (handler <- method.exh if handler.covers(b))
+ handler.covered -= b
- def traverse(f: BasicBlock => Unit) = blocks.toList foreach f;
-
- /* This method applies the given function to each basic block. */
- def traverseFeedBack(f: (BasicBlock, HashMap[BasicBlock, Boolean]) => Unit) = {
- val visited : HashMap[BasicBlock, Boolean] = new HashMap;
- visited ++= blocks.iterator.map(x => (x, false));
-
- var blockToVisit: List[BasicBlock] = List(startBlock)
-
- while (!blockToVisit.isEmpty) {
- blockToVisit match {
- case b::xs =>
- if (!visited(b)) {
- f(b, visited);
- blockToVisit = b.successors ::: xs;
- visited += (b -> true)
- } else
- blockToVisit = xs
- case _ =>
- error("impossible match")
- }
- }
+ touched = true
}
/** This methods returns a string representation of the ICode */
@@ -112,6 +79,7 @@ trait Members { self: ICodes =>
/* Create a new block and append it to the list
*/
def newBlock: BasicBlock = {
+ touched = true
val block = new BasicBlock(nextLabel, method);
blocks += block;
block
@@ -145,6 +113,11 @@ trait Members { self: ICodes =>
def lookupField(s: Symbol) = fields find (_.symbol == s)
def lookupMethod(s: Symbol) = methods find (_.symbol == s)
def lookupMethod(s: Name) = methods find (_.symbol.name == s)
+
+ /* determines whether or not this class contains a static ctor. */
+ def containsStaticCtor: Boolean = methods.exists(_.isStaticCtor)
+ /* returns this method's static ctor if it has one. */
+ def lookupStaticCtor: Option[IMethod] = methods.find(_.isStaticCtor)
}
/** Represent a field in ICode */
@@ -225,12 +198,15 @@ trait Members { self: ICodes =>
def isStatic: Boolean = symbol.isStaticMember
+ /* determines whether or not this method is the class static constructor. */
+ def isStaticCtor: Boolean = isStatic && symbol.rawname == nme.CONSTRUCTOR
+
override def toString() = symbol.fullNameString
import opcodes._
def checkLocals: Unit = if (code ne null) {
Console.println("[checking locals of " + this + "]")
- for (bb <- code.blocks; i <- bb.toList) i match {
+ for (bb <- code.blocks; i <- bb) i match {
case LOAD_LOCAL(l) =>
if (!this.locals.contains(l))
Console.println("Local " + l + " is not declared in " + this)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
index f03b84a50e..e50260b651 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Printers.scala
@@ -119,9 +119,9 @@ trait Printers { self: ICodes =>
print(bb.label)
if (bb.loopHeader) print("[loop header]")
print(": ");
- if (settings.debug.value) print("pred: " + bb.predecessors + " succs: " + bb.successors)
+ if (settings.debug.value) print("pred: " + bb.predecessors + " succs: " + bb.successors + " flags: " + bb.flagsString)
indent; println
- bb.toList foreach printInstruction
+ bb foreach printInstruction
undent; println
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
index 8ed2b04045..81461c1bf7 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeKinds.scala
@@ -108,6 +108,8 @@ trait TypeKinds { self: ICodes =>
def dimensions: Int = 0
}
+ var lubs0 = 0
+
/**
* The least upper bound of two typekinds. They have to be either
* REFERENCE or ARRAY kinds.
@@ -116,16 +118,11 @@ trait TypeKinds { self: ICodes =>
*/
def lub(a: TypeKind, b: TypeKind): TypeKind = {
def lub0(t1: Type, t2: Type): Type = {
- val lubTpe = global.lub(t1 :: t2 :: Nil)
- assert(lubTpe.typeSymbol.isClass,
- "Least upper bound of " + t1 + " and " + t2 + " is not a class: " + lubTpe)
- lubTpe
+ //lubs0 += 1
+ global.lub(t1 :: t2 :: Nil)
}
- if ((a.isReferenceType || a.isArrayType) &&
- (b.isReferenceType || b.isArrayType))
- toTypeKind(lub0(a.toType, b.toType))
- else if (a == b) a
+ if (a == b) a
else if (a == REFERENCE(NothingClass)) b
else if (b == REFERENCE(NothingClass)) a
else (a, b) match {
@@ -136,7 +133,12 @@ trait TypeKinds { self: ICodes =>
case (SHORT, INT) | (INT, SHORT) => INT
case (CHAR, INT) | (INT, CHAR) => INT
case (BOOL, INT) | (INT, BOOL) => INT
- case _ => throw new CheckerError("Incompatible types: " + a + " with " + b)
+ case _ =>
+ if ((a.isReferenceType || a.isArrayType) &&
+ (b.isReferenceType || b.isArrayType))
+ toTypeKind(lub0(a.toType, b.toType))
+ else
+ throw new CheckerError("Incompatible types: " + a + " with " + b)
}
}
@@ -162,28 +164,36 @@ trait TypeKinds { self: ICodes =>
case object BYTE extends TypeKind {
override def maxType(other: TypeKind): TypeKind =
other match {
- case BYTE | SHORT | CHAR | INT | LONG | FLOAT | DOUBLE => other
+ case CHAR => INT
+ case BYTE | SHORT | INT | LONG | FLOAT | DOUBLE => other
case REFERENCE(NothingClass) => BYTE
case _ => abort("Uncomparable type kinds: BYTE with " + other)
}
}
+ /** Note that the max of Char/Byte and Char/Short is Int, because
+ * neither strictly encloses the other due to unsignedness.
+ * See ticket #2087 for a consequence.
+ */
+
/** A 2-byte signed integer */
case object SHORT extends TypeKind {
override def maxType(other: TypeKind): TypeKind =
other match {
- case BYTE | SHORT | CHAR => SHORT
+ case CHAR => INT
+ case BYTE | SHORT => SHORT
case REFERENCE(NothingClass) => SHORT
case INT | LONG | FLOAT | DOUBLE => other
case _ => abort("Uncomparable type kinds: SHORT with " + other)
}
}
- /** A 2-byte signed integer */
+ /** A 2-byte UNSIGNED integer */
case object CHAR extends TypeKind {
override def maxType(other: TypeKind): TypeKind =
other match {
- case BYTE | SHORT | CHAR => CHAR
+ case CHAR => CHAR
+ case BYTE | SHORT => INT
case REFERENCE(NothingClass) => CHAR
case INT | LONG | FLOAT | DOUBLE => other
case _ => abort("Uncomparable type kinds: CHAR with " + other)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
index 6e7a81ac1e..eef1863374 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/TypeStacks.scala
@@ -18,7 +18,7 @@ trait TypeStacks { self: ICodes =>
import opcodes._
import global.{Symbol, Type, definitions}
- /* This class simulates the type of the opperand
+ /* This class simulates the type of the operand
* stack of the ICode.
*/
type Rep = List[TypeKind]
@@ -71,20 +71,22 @@ trait TypeStacks { self: ICodes =>
def apply(n: Int): TypeKind = types(n)
/**
- * A TypeStack aggress with another one if they have the same
+ * A TypeStack agrees with another one if they have the same
* length and each type kind agrees position-wise. Two
* types agree if one is a subtype of the other.
*/
def agreesWith(other: TypeStack): Boolean =
(types.length == other.types.length) &&
- List.forall2(types, other.types) ((t1, t2) => t1 <:< t2 || t2 <:< t1)
+ ((types, other.types).zipped forall ((t1, t2) => t1 <:< t2 || t2 <:< t1))
/* This method returns a String representation of the stack */
override def toString() = types.mkString("\n", "\n", "\n")
- override def equals(other: Any): Boolean =
- other.isInstanceOf[TypeStack] &&
- List.forall2(other.asInstanceOf[TypeStack].types, types)((a, b) => a == b)
+ override def hashCode() = types.hashCode()
+ override def equals(other: Any): Boolean = other match {
+ case x: TypeStack => x.types sameElements types
+ case _ => false
+ }
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
index 9ee628ef19..fe114e3faa 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
@@ -66,8 +66,8 @@ abstract class CopyPropagation {
if ((other eq bottom) || (this eq bottom))
(this eq other)
else {
- this.bindings == other.bindings &&
- List.forall2(this.stack, other.stack) { (a, b) => a == b }
+ (this.bindings == other.bindings) &&
+ ((this.stack, other.stack).zipped forall (_ == _))
}
}
@@ -188,7 +188,7 @@ abstract class CopyPropagation {
else {
// if (a.stack.length != b.stack.length)
// throw new LubError(a, b, "Invalid stacks in states: ");
- List.map2(a.stack, b.stack) { (v1, v2) =>
+ (a.stack, b.stack).zipped map { (v1, v2) =>
if (v1 == v2) v1 else Unknown
}
}
@@ -245,10 +245,15 @@ abstract class CopyPropagation {
}
def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem =
- b.toList.foldLeft(in)(interpret)
+ b.foldLeft(in)(interpret)
import opcodes._
+ private def retain[A, B](map: Map[A, B])(p: (A, B) => Boolean) = {
+ for ((k, v) <- map ; if !p(k, v)) map -= k
+ map
+ }
+
/** Abstract interpretation for one instruction. */
def interpret(in: copyLattice.Elem, i: Instruction): copyLattice.Elem = {
var out = in.dup
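
The `retain` helper above stands in for the old two-argument `Map.retain((k, v) => ...)`, whose shape changed with the 2.8 collections; the patch deletes keys while looping over the map, which the compiler's mutable maps tolerate. A slightly more defensive variant (illustrative, not what the patch does) collects the doomed keys first:

    import scala.collection.mutable

    def retainSafe[A, B](map: mutable.Map[A, B])(p: (A, B) => Boolean): mutable.Map[A, B] = {
      val doomed = for ((k, v) <- map.toList; if !p(k, v)) yield k
      doomed foreach (map -= _)
      map
    }

    val m = mutable.Map("a" -> 1, "b" -> 2, "c" -> 3)
    retainSafe(m) { (_, v) => v % 2 == 1 }   // only the odd-valued entries remain
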
@@ -458,7 +463,7 @@ abstract class CopyPropagation {
*/
final def cleanReferencesTo(s: copyLattice.State, target: Location) {
def cleanRecord(r: Record): Record = {
- r.bindings retain { (loc, value) =>
+ retain(r.bindings) { (loc, value) =>
(value match {
case Deref(loc1) if (loc1 == target) => false
case Boxed(loc1) if (loc1 == target) => false
@@ -478,7 +483,7 @@ abstract class CopyPropagation {
case _ => v
}}
- s.bindings retain { (loc, value) =>
+ retain(s.bindings) { (loc, value) =>
(value match {
case Deref(loc1) if (loc1 == target) => false
case Boxed(loc1) if (loc1 == target) => false
@@ -531,12 +536,12 @@ abstract class CopyPropagation {
}
state.stack = state.stack map { v => v match {
case Record(cls, bindings) =>
- bindings.retain { (sym: Symbol, v: Value) => shouldRetain(sym) }
+ retain(bindings) { (sym, _) => shouldRetain(sym) }
Record(cls, bindings)
case _ => v
}}
- state.bindings retain {(loc, value) =>
+ retain(state.bindings) { (loc, value) =>
value match {
case Deref(Field(rec, sym)) => shouldRetain(sym)
case Boxed(Field(rec, sym)) => shouldRetain(sym)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
index 09a39f4280..8e14159197 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/DataFlowAnalysis.scala
@@ -70,6 +70,8 @@ trait DataFlowAnalysis[L <: CompleteLattice] {
succs foreach { p =>
if (!worklist.contains(p))
worklist += p;
+ if (!in.isDefinedAt(p))
+ assert(false, "Invalid successor for: " + point + " successor " + p + " does not exist")
// if (!p.exceptionHandlerHeader) {
// println("lubbing " + p.predecessors + " outs: " + p.predecessors.map(out.apply).mkString("\n", "\n", ""))
in(p) = lattice.lub(/*in(p) :: */(p.predecessors map out.apply), p.exceptionHandlerStart)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
index 46e24c18ec..0d301347b1 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/Liveness.scala
@@ -71,7 +71,7 @@ abstract class Liveness {
def genAndKill(b: BasicBlock): (Set[Local], Set[Local]) = {
var genSet = new ListSet[Local]
var killSet = new ListSet[Local]
- for (i <- b.toList) i match {
+ for (i <- b) i match {
case LOAD_LOCAL(local) if (!killSet(local)) => genSet = genSet + local
case STORE_LOCAL(local) if (!genSet(local)) => killSet = killSet + local
case _ => ()
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
index 65065fe0d1..1c714cbd5d 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/ReachingDefinitions.scala
@@ -43,19 +43,19 @@ abstract class ReachingDefinitions {
else if (bottom == b) a
else {
val locals = a.vars ++ b.vars
- val stack = if (a.stack == Nil)
- b.stack
- else if (b.stack == Nil) a.stack
- else List.map2(a.stack, b.stack) (_ ++ _)
-
- val res = IState(locals, stack)
-
-// Console.println("\tlub2: " + a + ", " + b)
-// Console.println("\tis: " + res)
-
-// if (res._1 eq bottom._1) (new ListSet[Definition], Nil)
-// else res
- res
+ val stack =
+ if (a.stack == Nil) b.stack
+ else if (b.stack == Nil) a.stack
+ else (a.stack, b.stack).zipped map (_ ++ _)
+
+ IState(locals, stack)
+
+ // val res = IState(locals, stack)
+ // Console.println("\tlub2: " + a + ", " + b)
+ // Console.println("\tis: " + res)
+ // if (res._1 eq bottom._1) (new ListSet[Definition], Nil)
+ // else res
+ // res
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
index 32a6037d41..eecae5e578 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala
@@ -53,7 +53,7 @@ abstract class TypeFlowAnalysis {
else {
// if (s1.length != s2.length)
// throw new CheckerError("Incompatible stacks: " + s1 + " and " + s2);
- new TypeStack(List.map2(s1.types, s2.types) (icodes.lub))
+ new TypeStack((s1.types, s2.types).zipped map icodes.lub)
}
}
}
@@ -81,10 +81,14 @@ abstract class TypeFlowAnalysis {
override val top = new Elem(new VarBinding, typeStackLattice.top)
override val bottom = new Elem(new VarBinding, typeStackLattice.bottom)
+// var lubs = 0
+
def lub2(exceptional: Boolean)(a: Elem, b: Elem) = {
val IState(env1, s1) = a
val IState(env2, s2) = b
+// lubs += 1
+
val resultingLocals = new VarBinding
for (binding1 <- env1.iterator) {
@@ -118,7 +122,7 @@ abstract class TypeFlowAnalysis {
/** Initialize the in/out maps for the analysis of the given method. */
def init(m: icodes.IMethod) {
this.method = m
-
+ //typeFlowLattice.lubs = 0
init {
worklist += m.code.startBlock
worklist ++= (m.exh map (_.startBlock))
@@ -168,14 +172,17 @@ abstract class TypeFlowAnalysis {
def run = {
timer.start
+// icodes.lubs0 = 0
forwardAnalysis(blockTransfer)
- timer.stop
+ val t = timer.stop
if (settings.debug.value) {
linearizer.linearize(method).foreach(b => if (b != method.code.startBlock)
assert(visited.contains(b),
"Block " + b + " in " + this.method + " has input equal to bottom -- not visited? .." + visited));
}
- //println("iterations: " + iterations + " for " + method.code.blocks.size)
+// log("" + method.symbol.fullNameString + " [" + method.code.blocks.size + " blocks] "
+// + "\n\t" + iterations + " iterations: " + t + " ms."
+// + "\n\tlubs: " + typeFlowLattice.lubs + " out of which " + icodes.lubs0 + " typer lubs")
}
def blockTransfer(b: BasicBlock, in: lattice.Elem): lattice.Elem = {
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index 67f3f7f8b2..c4408c661a 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -47,7 +47,8 @@ abstract class GenJVM extends SubComponent {
override def run {
if (settings.debug.value) inform("[running phase " + name + " on icode]")
if (settings.Xdce.value)
- icodes.classes.retain { (sym: Symbol, cls: IClass) => !inliner.isClosureClass(sym) || deadCode.liveClosures(sym) }
+ for ((sym, cls) <- icodes.classes ; if inliner.isClosureClass(sym) && !deadCode.liveClosures(sym))
+ icodes.classes -= sym
classes.valuesIterator foreach apply
}
@@ -202,7 +203,7 @@ abstract class GenJVM extends SubComponent {
if (isStaticModule(c.symbol) || serialVUID != None || clasz.bootstrapClass.isDefined) {
if (isStaticModule(c.symbol))
addModuleInstanceField;
- addStaticInit(jclass)
+ addStaticInit(jclass, c.lookupStaticCtor)
if (isTopLevelModule(c.symbol)) {
if (c.symbol.linkedClassOfModule == NoSymbol)
@@ -213,6 +214,8 @@ abstract class GenJVM extends SubComponent {
}
}
else {
+ if (c.containsStaticCtor) addStaticInit(jclass, c.lookupStaticCtor)
+
// it must be a top level class (name contains no $s)
def isCandidateForForwarders(sym: Symbol): Boolean =
atPhase (currentRun.picklerPhase.next) {
@@ -431,9 +434,11 @@ abstract class GenJVM extends SubComponent {
def addGenericSignature(jmember: JMember, sym: Symbol, owner: Symbol) {
if (!sym.hasFlag(Flags.EXPANDEDNAME | Flags.SYNTHETIC)
- && !(sym.isMethod && sym.hasFlag(Flags.LIFTED))) {
+ && !(sym.isMethod && sym.hasFlag(Flags.LIFTED))
+ && !(sym.ownerChain exists (_.isImplClass))) { // @M don't generate java generics sigs for (members of) implementation classes, as they are monomorphic (TODO: ok?)
val memberTpe = atPhase(currentRun.erasurePhase)(owner.thisType.memberInfo(sym))
-// println("sym: " + sym.fullNameString + " : " + memberTpe + " sym.info: " + sym.info)
+ // println("addGenericSignature sym: " + sym.fullNameString + " : " + memberTpe + " sym.info: " + sym.info)
+ // println("addGenericSignature: "+ (sym.ownerChain map (x => (x.name, x.isImplClass))))
erasure.javaSig(sym, memberTpe) match {
case Some(sig) =>
val index = jmember.getConstantPool().addUtf8(sig).toShort
@@ -505,7 +510,7 @@ abstract class GenJVM extends SubComponent {
val innerClassesAttr = jclass.getInnerClasses()
// sort them so inner classes succeed their enclosing class
// to satisfy the Eclipse Java compiler
- for (innerSym <- innerClasses.toList.sort(_.name.length < _.name.length)) {
+ for (innerSym <- innerClasses.toList sortBy (_.name.length)) {
var outerName = javaName(innerSym.rawowner)
// remove the trailing '$'
if (outerName.endsWith("$") && isTopLevelModule(innerSym.rawowner))
@@ -556,6 +561,8 @@ abstract class GenJVM extends SubComponent {
}
def genMethod(m: IMethod) {
+ if (m.isStaticCtor) return
+
log("Generating method " + m.symbol.fullNameString)
method = m
endPC.clear
@@ -667,7 +674,7 @@ abstract class GenJVM extends SubComponent {
jclass.getType())
}
- def addStaticInit(cls: JClass) {
+ def addStaticInit(cls: JClass, mopt: Option[IMethod]) {
import JAccessFlags._
val clinitMethod = cls.addNewMethod(ACC_PUBLIC | ACC_STATIC,
"<clinit>",
@@ -675,6 +682,53 @@ abstract class GenJVM extends SubComponent {
JType.EMPTY_ARRAY,
new Array[String](0))
val clinit = clinitMethod.getCode().asInstanceOf[JExtendedCode]
+
+ mopt match {
+ case Some(m) =>
+ if (clasz.bootstrapClass.isDefined) legacyEmitBootstrapMethodInstall(clinit)
+
+ val oldLastBlock = m.code.blocks.last
+ val lastBlock = m.code.newBlock
+ oldLastBlock.replaceInstruction(oldLastBlock.length - 1, JUMP(lastBlock))
+
+ if (isStaticModule(clasz.symbol)) {
+ // call object's private ctor from static ctor
+ lastBlock.emit(NEW(REFERENCE(m.symbol.enclClass)))
+ lastBlock.emit(CALL_METHOD(m.symbol.enclClass.primaryConstructor, Static(true)))
+ }
+
+ // add serialVUID code
+ serialVUID match {
+ case Some(value) =>
+ import Flags._
+ import definitions._
+ val fieldName = "serialVersionUID"
+ val fieldSymbol = clasz.symbol.newValue(NoPosition, newTermName(fieldName))
+ .setFlag(STATIC | FINAL)
+ .setInfo(longType)
+ clasz.addField(new IField(fieldSymbol))
+ lastBlock.emit(CONSTANT(Constant(value)))
+ lastBlock.emit(STORE_FIELD(fieldSymbol, true))
+ case None => ()
+ }
+
+ if (clasz.bootstrapClass.isDefined) {
+ // emit bootstrap method install
+ //emitBootstrapMethodInstall(block)
+ }
+
+ lastBlock.emit(RETURN(UNIT))
+ lastBlock.close
+
+ method = m
+ jmethod = clinitMethod
+ genCode(m)
+ case None =>
+ legacyStaticInitializer(cls, clinit)
+ }
+ }
+
+ private def legacyStaticInitializer(cls: JClass, clinit: JExtendedCode) {
if (isStaticModule(clasz.symbol)) {
clinit.emitNEW(cls.getName())
clinit.emitINVOKESPECIAL(cls.getName(),
@@ -693,7 +747,7 @@ abstract class GenJVM extends SubComponent {
case None => ()
}
- if (clasz.bootstrapClass.isDefined) emitBootstrapMethodInstall(clinit)
+ if (clasz.bootstrapClass.isDefined) legacyEmitBootstrapMethodInstall(clinit)
clinit.emitRETURN()
}
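
addStaticInit now receives the user-written static constructor as an Option[IMethod]: Some(m) means the <clinit> is synthesized by appending module-init and serialVersionUID code to m's blocks, while None falls back to legacyStaticInitializer. The control shape in isolation, with made-up names standing in for the compiler's types:

    case class Method(name: String)

    def staticInitFor(className: String, staticCtor: Option[Method]): String =
      staticCtor match {
        case Some(m) => "extend " + m.name + " into the <clinit> of " + className
        case None    => "emit the legacy default <clinit> for " + className
      }

    println(staticInitFor("Foo$", Some(Method("Foo$.<clinit>"))))
    println(staticInitFor("Bar",  None))
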
@@ -701,7 +755,7 @@ abstract class GenJVM extends SubComponent {
/** Emit code that installs a boostrap method for invoke dynamic. It installs the default
* method, found in scala.runtime.DynamicDispatch.
*/
- def emitBootstrapMethodInstall(jcode: JExtendedCode) {
+ def legacyEmitBootstrapMethodInstall(jcode: JExtendedCode) {
jcode.emitPUSH(jclass.getType.asInstanceOf[JReferenceType])
jcode.emitPUSH(new JObjectType("scala.runtime.DynamicDispatch"))
jcode.emitPUSH("bootstrapInvokeDynamic")
@@ -822,9 +876,38 @@ abstract class GenJVM extends SubComponent {
}
var linearization: List[BasicBlock] = Nil
-
var isModuleInitialized = false
+ private def genConstant(jcode: JExtendedCode, const: Constant) {
+ const.tag match {
+ case UnitTag => ()
+ case BooleanTag => jcode.emitPUSH(const.booleanValue)
+ case ByteTag => jcode.emitPUSH(const.byteValue)
+ case ShortTag => jcode.emitPUSH(const.shortValue)
+ case CharTag => jcode.emitPUSH(const.charValue)
+ case IntTag => jcode.emitPUSH(const.intValue)
+ case LongTag => jcode.emitPUSH(const.longValue)
+ case FloatTag => jcode.emitPUSH(const.floatValue)
+ case DoubleTag => jcode.emitPUSH(const.doubleValue)
+ case StringTag => jcode.emitPUSH(const.stringValue)
+ case NullTag => jcode.emitACONST_NULL()
+ case ClassTag =>
+ val kind = toTypeKind(const.typeValue)
+ val toPush =
+ if (kind.isValueType) classLiteral(kind)
+ else javaType(kind).asInstanceOf[JReferenceType]
+
+ jcode emitPUSH toPush
+
+ case EnumTag =>
+ val sym = const.symbolValue
+ jcode.emitGETSTATIC(javaName(sym.owner),
+ javaName(sym),
+ javaType(sym.tpe.underlying))
+ case _ => abort("Unknown constant value: " + const);
+ }
+ }
+
/**
* @param m ...
*/
@@ -835,7 +918,7 @@ abstract class GenJVM extends SubComponent {
if (settings.debug.value)
log("Making labels for: " + method)
- HashMap(bs map (b => b -> jcode.newLabel) : _*)
+ HashMap(bs map (_ -> jcode.newLabel) : _*)
}
isModuleInitialized = false
@@ -847,12 +930,11 @@ abstract class GenJVM extends SubComponent {
var nextBlock: BasicBlock = linearization.head
- def genBlocks(l: List[BasicBlock]): Unit = l match {
- case Nil => ()
- case x :: Nil => nextBlock = null; genBlock(x)
- case x :: y :: ys => nextBlock = y; genBlock(x); genBlocks(y :: ys)
- }
-
+ def genBlocks(l: List[BasicBlock]): Unit = l match {
+ case Nil => ()
+ case x :: Nil => nextBlock = null; genBlock(x)
+ case x :: y :: ys => nextBlock = y; genBlock(x); genBlocks(y :: ys)
+ }
/** Generate exception handlers for the current method. */
def genExceptionHandlers {
@@ -867,30 +949,28 @@ abstract class GenJVM extends SubComponent {
var start = -1
var end = -1
- linearization foreach ((b) => {
+ linearization foreach { b =>
if (! (covered contains b) ) {
if (start >= 0) { // we're inside a handler range
end = labels(b).getAnchor()
- ranges = (start, end) :: ranges
+ ranges ::= (start, end)
start = -1
}
} else {
- if (start >= 0) { // we're inside a handler range
- end = endPC(b)
- } else {
+ if (start < 0) // we're not inside a handler range
start = labels(b).getAnchor()
- end = endPC(b)
- }
- covered = covered - b
+
+ end = endPC(b)
+ covered -= b
}
- });
+ }
/* Add the last interval. Note that since the intervals are
* open-ended to the right, we have to give a number past the actual
* code!
*/
if (start >= 0) {
- ranges = (start, jcode.getPC()) :: ranges;
+ ranges ::= (start, jcode.getPC())
}
if (!covered.isEmpty)
@@ -900,19 +980,16 @@ abstract class GenJVM extends SubComponent {
ranges
}
- this.method.exh foreach { e =>
- ranges(e).sort({ (p1, p2) => p1._1 < p2._1 })
- .foreach { p =>
- if (p._1 < p._2) {
- if (settings.debug.value)
- log("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
- " from: " + p._1 + " to: " + p._2 + " catching: " + e.cls);
- jcode.addExceptionHandler(p._1, p._2,
- labels(e.startBlock).getAnchor(),
- if (e.cls == NoSymbol) null else javaName(e.cls))
- } else
- log("Empty exception range: " + p)
- }
+ for (e <- this.method.exh ; p <- ranges(e).sortBy(_._1)) {
+ if (p._1 < p._2) {
+ if (settings.debug.value)
+ log("Adding exception handler " + e + "at block: " + e.startBlock + " for " + method +
+ " from: " + p._1 + " to: " + p._2 + " catching: " + e.cls);
+ jcode.addExceptionHandler(p._1, p._2,
+ labels(e.startBlock).getAnchor(),
+ if (e.cls == NoSymbol) null else javaName(e.cls))
+ } else
+ log("Empty exception range: " + p)
}
}
@@ -944,31 +1021,7 @@ abstract class GenJVM extends SubComponent {
jcode.emitALOAD_0()
case CONSTANT(const) =>
- const.tag match {
- case UnitTag => ();
- case BooleanTag => jcode.emitPUSH(const.booleanValue)
- case ByteTag => jcode.emitPUSH(const.byteValue)
- case ShortTag => jcode.emitPUSH(const.shortValue)
- case CharTag => jcode.emitPUSH(const.charValue)
- case IntTag => jcode.emitPUSH(const.intValue)
- case LongTag => jcode.emitPUSH(const.longValue)
- case FloatTag => jcode.emitPUSH(const.floatValue)
- case DoubleTag => jcode.emitPUSH(const.doubleValue)
- case StringTag => jcode.emitPUSH(const.stringValue)
- case NullTag => jcode.emitACONST_NULL()
- case ClassTag =>
- val kind = toTypeKind(const.typeValue);
- if (kind.isValueType)
- jcode.emitPUSH(classLiteral(kind));
- else
- jcode.emitPUSH(javaType(kind).asInstanceOf[JReferenceType]);
- case EnumTag =>
- val sym = const.symbolValue
- jcode.emitGETSTATIC(javaName(sym.owner),
- javaName(sym),
- javaType(sym.tpe.underlying))
- case _ => abort("Unknown constant value: " + const);
- }
+ genConstant(jcode, const)
case LOAD_ARRAY_ITEM(kind) =>
jcode.emitALOAD(javaType(kind))
@@ -1029,46 +1082,36 @@ abstract class GenJVM extends SubComponent {
genPrimitive(primitive, instr.pos)
case call @ CALL_METHOD(method, style) =>
- val owner: String = javaName(method.owner);
- //reference the type of the receiver instead of the method owner (if not an interface!)
+ val owner: String = javaName(method.owner)
+ // reference the type of the receiver instead of the method owner (if not an interface!)
val dynamicOwner =
if (needsInterfaceCall(call.hostClass)) owner
else javaName(call.hostClass)
+ val jname = javaName(method)
+ val jtype = javaType(method).asInstanceOf[JMethodType]
style match {
case InvokeDynamic =>
- jcode.emitINVOKEINTERFACE("java.dyn.Dynamic",
- javaName(method),
- javaType(method).asInstanceOf[JMethodType])
+ jcode.emitINVOKEINTERFACE("java.dyn.Dynamic", jname, jtype)
case Dynamic =>
if (needsInterfaceCall(method.owner))
- jcode.emitINVOKEINTERFACE(owner,
- javaName(method),
- javaType(method).asInstanceOf[JMethodType])
+ jcode.emitINVOKEINTERFACE(owner, jname, jtype)
else
- jcode.emitINVOKEVIRTUAL(dynamicOwner,
- javaName(method),
- javaType(method).asInstanceOf[JMethodType]);
+ jcode.emitINVOKEVIRTUAL(dynamicOwner, jname, jtype)
case Static(instance) =>
- if (instance) {
- jcode.emitINVOKESPECIAL(owner,
- javaName(method),
- javaType(method).asInstanceOf[JMethodType]);
- } else
- jcode.emitINVOKESTATIC(owner,
- javaName(method),
- javaType(method).asInstanceOf[JMethodType]);
+ if (instance)
+ jcode.emitINVOKESPECIAL(owner, jname, jtype)
+ else
+ jcode.emitINVOKESTATIC(owner, jname, jtype)
case SuperCall(_) =>
- jcode.emitINVOKESPECIAL(owner,
- javaName(method),
- javaType(method).asInstanceOf[JMethodType]);
+ jcode.emitINVOKESPECIAL(owner, jname, jtype)
// we initialize the MODULE$ field immediately after the super ctor
if (isStaticModule(clasz.symbol) && !isModuleInitialized &&
jmethod.getName() == JMethod.INSTANCE_CONSTRUCTOR_NAME &&
- javaName(method) == JMethod.INSTANCE_CONSTRUCTOR_NAME) {
+ jname == JMethod.INSTANCE_CONSTRUCTOR_NAME) {
isModuleInitialized = true;
jcode.emitALOAD_0();
jcode.emitPUTSTATIC(jclass.getName(),
diff --git a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
index bbc11037ce..6bc87d082d 100644
--- a/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
+++ b/src/compiler/scala/tools/nsc/backend/msil/GenMSIL.scala
@@ -561,7 +561,7 @@ abstract class GenMSIL extends SubComponent {
}
if (mcode != null) {
- for (local <- m.locals -- m.params) {
+ for (local <- m.locals ; if !(m.params contains local)) {
if (settings.debug.value)
log("add local var: " + local + ", of kind " + local.kind)
val t: MsilType = msilType(local.kind)
@@ -828,6 +828,15 @@ abstract class GenMSIL extends SubComponent {
// covering the same blocks
def orderBlocksForExh(blocks: List[BasicBlock], exH: List[ExceptionHandler]): List[BasicBlock] = {
+ def moveToFront[T](xs: List[T], x: T) = (xs indexOf x) match {
+ case -1 => x :: xs
+ case idx => x :: (xs take idx) ::: (xs drop (idx + 1))
+ }
+ def moveToEnd[T](xs: List[T], x: T) = (xs indexOf x) match {
+ case -1 => xs ::: List(x)
+ case idx => (xs take idx) ::: (xs drop (idx + 1)) ::: List(x)
+ }
+
var blocksToPut: List[BasicBlock] = blocks
var nextBlock: BasicBlock = null
var untreatedHandlers: List[ExceptionHandler] = exH
@@ -841,7 +850,7 @@ abstract class GenMSIL extends SubComponent {
// problem: block may already be added, and and needs to be moved.
// if nextblock NOT in b: check if nextblock in blocksToPut, if NOT, check if movable, else don't put
if (nextBlock != null && b.contains(nextBlock)) {
- val blocksToAdd = nextBlock :: (b - nextBlock)
+ val blocksToAdd = moveToFront(b, nextBlock)
nextBlock = null
addBlocks(blocksToAdd)
}
@@ -854,7 +863,7 @@ abstract class GenMSIL extends SubComponent {
{
// the block is not part of some catch or finally code
currentBlock.addBasicBlock(x)
- blocksToPut = blocksToPut - x
+ blocksToPut = moveToFront(blocksToPut, x)
if (settings.debug.value) log(" -> addBlocks(" + xs + ")")
addBlocks(xs)
} else {
@@ -865,7 +874,7 @@ abstract class GenMSIL extends SubComponent {
// is optimized by compiler (no try left)
if(untreatedHandlers.forall(h =>
(!h.blocks.contains(x) || h.covered.isEmpty))) {
- blocksToPut = blocksToPut - x
+ blocksToPut = moveToFront(blocksToPut, x)
addBlocks(xs)
} else
addBlocks(xs ::: List(x))
@@ -960,8 +969,9 @@ abstract class GenMSIL extends SubComponent {
firstBlockAfter(exh) = outside(0)
//else ()
//assert(firstBlockAfter(exh) == outside(0), "try/catch leaving to multiple targets: " + firstBlockAfter(exh) + ", new: " + outside(0))
+
val last = leaving(0)._1
- ((blocks - last) ::: List(last), None)
+ (moveToEnd(blocks, last), None)
} else {
val outside = leaving.flatMap(p => p._2)
//assert(outside.forall(b => b == outside(0)), "exception-block leaving to multiple targets")
@@ -981,9 +991,9 @@ abstract class GenMSIL extends SubComponent {
})
// shorter try-catch-finally last (the ones contained in another)
- affectedHandlers = affectedHandlers.sort({(h1, h2) => h1.covered.size > h2.covered.size})
+ affectedHandlers = affectedHandlers.sortWith(_.covered.size > _.covered.size)
affectedHandlers = affectedHandlers.filter(h => {h.covered.size == affectedHandlers(0).covered.size})
- untreatedHandlers = untreatedHandlers -- affectedHandlers
+ untreatedHandlers = untreatedHandlers filterNot (affectedHandlers contains)
// more than one catch produces more than one exh, but we only need one
var singleAffectedHandler: ExceptionHandler = affectedHandlers(0) // List[ExceptionHandler] = Nil
@@ -997,7 +1007,7 @@ abstract class GenMSIL extends SubComponent {
h1.addBlock(block)
case None => ()
}
- val orderedCatchBlocks = h1.startBlock :: (adaptedBlocks - h1.startBlock)
+ val orderedCatchBlocks = moveToFront(adaptedBlocks, h1.startBlock)
exceptionBlock match {
case Some(excBlock) =>
@@ -1028,7 +1038,7 @@ abstract class GenMSIL extends SubComponent {
singleAffectedHandler.finalizer.addBlock(block)
case None => ()
}
- val blocks = singleAffectedHandler.finalizer.startBlock :: (blocks0 - singleAffectedHandler.finalizer.startBlock)
+ val blocks = moveToFront(blocks0, singleAffectedHandler.finalizer.startBlock)
currentBlock = excBlock.finallyBlock
addBlocks(blocks)
}
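
List's `-` and `--` operators go away in 2.8, so the block-ordering code above gains explicit helpers that move an element to the front or the back of a list, prepending or appending it if absent. The two helpers are self-contained; a usage sketch with throwaway values:

    def moveToFront[T](xs: List[T], x: T): List[T] = (xs indexOf x) match {
      case -1  => x :: xs
      case idx => x :: (xs take idx) ::: (xs drop (idx + 1))
    }
    def moveToEnd[T](xs: List[T], x: T): List[T] = (xs indexOf x) match {
      case -1  => xs ::: List(x)
      case idx => (xs take idx) ::: (xs drop (idx + 1)) ::: List(x)
    }

    moveToFront(List(1, 2, 3, 4), 3)   // List(3, 1, 2, 4)
    moveToEnd(List(1, 2, 3, 4), 2)     // List(1, 3, 4, 2)
    moveToFront(List(1, 2), 9)         // List(9, 1, 2): absent elements are prepended
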
@@ -1128,15 +1138,14 @@ abstract class GenMSIL extends SubComponent {
"untreated exception handlers left: " + untreatedHandlers)
// remove catch blocks from empty handlers (finally-blocks remain)
untreatedHandlers.foreach((h) => {
- orderedBlocks = orderedBlocks -- h.blocks
+ orderedBlocks = orderedBlocks filterNot (h.blocks contains)
})
// take care of order in which exHInstructions are executed (BeginExceptionBlock as last)
bb2exHInstructions.keysIterator.foreach((b) => {
- bb2exHInstructions(b).sort((i1, i2) => (!i1.isInstanceOf[BeginExceptionBlock]))
+ bb2exHInstructions(b).sortBy(x => x.isInstanceOf[BeginExceptionBlock])
})
-
if (settings.debug.value) {
log("after: " + orderedBlocks)
log(" exhInstr: " + bb2exHInstructions)
@@ -1791,7 +1800,7 @@ abstract class GenMSIL extends SubComponent {
idx += 1 // sizeOf(l.kind)
}
- val locvars = m.locals -- params
+ val locvars = m.locals filterNot (params contains)
idx = 0
for (l <- locvars) {
diff --git a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
index 248b24bc43..1e53627273 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/ClosureElimination.scala
@@ -105,7 +105,7 @@ abstract class ClosureElimination extends SubComponent {
var info = cpp.in(bb)
log("Cpp info at entry to block " + bb + ": " + info)
- for (i <- bb.toList) {
+ for (i <- bb) {
i match {
case LOAD_LOCAL(l) if (info.bindings.isDefinedAt(LocalVar(l))) =>
val t = info.getBinding(l)
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index 321b27b030..194c99f800 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -23,6 +23,18 @@ abstract class Inliners extends SubComponent {
val phaseName = "inliner"
+ /** Debug - for timing the inliner. */
+ private def timed[T](s: String, body: => T): T = {
+ val t1 = System.currentTimeMillis()
+ val res = body
+ val t2 = System.currentTimeMillis()
+ val ms = (t2 - t1).toInt
+ if (ms >= 2000)
+ println("%s: %d milliseconds".format(s, ms))
+
+ res
+ }
+
/** The maximum size in basic blocks of methods considered for inlining. */
final val MAX_INLINE_SIZE = 16
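
The `timed` helper above is a plain wall-clock wrapper: it runs the by-name body and only prints when the body took two seconds or more. Roughly how it is used (the label and workload below are illustrative):

    def timed[T](s: String, body: => T): T = {
      val t1  = System.currentTimeMillis()
      val res = body
      val ms  = (System.currentTimeMillis() - t1).toInt
      if (ms >= 2000)
        println("%s: %d milliseconds".format(s, ms))
      res
    }

    // prints nothing unless the fold takes two seconds or more
    val sum = timed("summing", (1 to 10000000).foldLeft(0L)(_ + _))
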
@@ -269,9 +281,9 @@ abstract class Inliners extends SubComponent {
def analyzeClass(cls: IClass): Unit = if (settings.inline.value) {
if (settings.debug.value)
log("Analyzing " + cls);
- cls.methods.foreach { m => if (!m.symbol.isConstructor) analyzeMethod(m)
- }}
+ cls.methods filterNot (_.symbol.isConstructor) foreach analyzeMethod
+ }
val tfa = new analysis.MethodTFA();
tfa.stat = settings.Ystatistics.value
@@ -281,7 +293,7 @@ abstract class Inliners extends SubComponent {
override def default(k: Symbol) = 0
}
- def analyzeMethod(m: IMethod): Unit = {//try {
+ def analyzeMethod(m: IMethod): Unit = {
var retry = false
var count = 0
fresh.clear
@@ -290,13 +302,12 @@ abstract class Inliners extends SubComponent {
do {
retry = false;
if (m.code ne null) {
- if (settings.debug.value)
- log("Analyzing " + m + " count " + count + " with " + m.code.blocks.length + " blocks");
+ log("Analyzing " + m + " count " + count + " with " + m.code.blocks.length + " blocks");
tfa.init(m)
tfa.run
for (bb <- linearizer.linearize(m)) {
var info = tfa.in(bb);
- for (i <- bb.toList) {
+ for (i <- bb) {
if (!retry) {
i match {
case CALL_METHOD(msym, Dynamic) =>
@@ -308,11 +319,11 @@ abstract class Inliners extends SubComponent {
if (receiver != msym.owner && receiver != NoSymbol) {
if (settings.debug.value)
log("" + i + " has actual receiver: " + receiver);
- }
- if (!concreteMethod.isFinal && receiver.isFinal) {
- concreteMethod = lookupImpl(concreteMethod, receiver)
- if (settings.debug.value)
- log("\tlooked up method: " + concreteMethod.fullNameString)
+ if (!concreteMethod.isFinal && receiver.isFinal) {
+ concreteMethod = lookupImpl(concreteMethod, receiver)
+ if (settings.debug.value)
+ log("\tlooked up method: " + concreteMethod.fullNameString)
+ }
}
if (shouldLoad(receiver, concreteMethod)) {
@@ -372,7 +383,7 @@ abstract class Inliners extends SubComponent {
// e.printStackTrace();
// m.dump
// throw e
- }
+ }
def isMonadMethod(method: Symbol): Boolean =
@@ -427,7 +438,7 @@ abstract class Inliners extends SubComponent {
callsNonPublic = b
case None =>
breakable {
- for (b <- callee.code.blocks; i <- b.toList)
+ for (b <- callee.code.blocks; i <- b)
i match {
case CALL_METHOD(m, style) =>
if (m.hasFlag(Flags.PRIVATE) ||
diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
index 80a068dcdf..3c6184bc24 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
@@ -92,15 +92,9 @@ abstract class Changes {
tp1.isInstanceOf[ImplicitMethodType] == tp2.isInstanceOf[ImplicitMethodType])
case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
- (tparams1.length == tparams2.length &&
- List.forall2(tparams1, tparams2)
- ((p1, p2) => sameType(p1.info, p2.info)) &&
- sameType(res1, res2))
+ sameTypeParams(tparams1, tparams2) && sameType(res1, res2)
case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
- (tparams1.length == tparams2.length &&
- List.forall2(tparams1, tparams2)
- ((p1, p2) => sameType(p1.info, p2.info)) &&
- sameType(res1, res2))
+ sameTypeParams(tparams1, tparams2) && sameType(res1, res2)
case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) =>
sameType(lo1, lo2) && sameType(hi1, hi2)
case (BoundedWildcardType(bounds), _) =>
@@ -133,9 +127,11 @@ abstract class Changes {
((tp1n ne tp1) || (tp2n ne tp2)) && sameType(tp1n, tp2n)
}
+ private def sameTypeParams(tparams1: List[Symbol], tparams2: List[Symbol]) =
+ sameTypes(tparams1 map (_.info), tparams2 map (_.info))
+
def sameTypes(tps1: List[Type], tps2: List[Type]): Boolean =
- (tps1.length == tps2.length
- && List.forall2(tps1, tps2)(sameType))
+ (tps1.length == tps2.length) && ((tps1, tps2).zipped forall sameType)
/** Return the list of changes between 'from' and 'to'.
*/
diff --git a/src/compiler/scala/tools/nsc/dependencies/Files.scala b/src/compiler/scala/tools/nsc/dependencies/Files.scala
index 501936ee4e..2165855ac9 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Files.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Files.scala
@@ -4,11 +4,13 @@ package dependencies;
import java.io.{InputStream, OutputStream, PrintStream, InputStreamReader, BufferedReader}
import io.{AbstractFile, PlainFile}
-import scala.collection._;
+import scala.collection._;import scala.tools.nsc.io.VirtualFile
+
trait Files { self : SubComponent =>
class FileDependencies(val classpath : String) {
+ import FileDependencies._
class Tracker extends mutable.OpenHashMap[AbstractFile, mutable.Set[AbstractFile]]{
override def default(key: AbstractFile) = {
@@ -30,8 +32,10 @@ trait Files { self : SubComponent =>
def reset(file: AbstractFile) = dependencies -= file;
def cleanEmpty() = {
- dependencies.foreach({case (key, value) => value.retain(_.exists)})
+ dependencies foreach {case (key, value) => value.retain(x => x.exists && (x ne RemovedFile))}
dependencies.retain((key, value) => key.exists && !value.isEmpty)
+ targets foreach {case (key, value) => value.retain(_.exists)}
+ targets.retain((key, value) => key.exists && !value.isEmpty)
}
def containsFile(f: AbstractFile) = targets.contains(f.absolute)
@@ -50,7 +54,7 @@ trait Files { self : SubComponent =>
val indirect = dependentFiles(maxDepth, direct)
for ((source, targets) <- targets;
- if direct(source) || indirect(source)){
+ if direct(source) || indirect(source) || (source eq RemovedFile)){
targets.foreach(_.delete);
targets -= source;
}
@@ -65,7 +69,7 @@ trait Files { self : SubComponent =>
val indirect = new mutable.HashSet[AbstractFile];
val newInvalidations = new mutable.HashSet[AbstractFile];
- def invalid(file: AbstractFile) = indirect(file) || changed(file);
+ def invalid(file: AbstractFile) = indirect(file) || changed(file) || (file eq RemovedFile)
def go(i : Int) : Unit = if(i > 0){
newInvalidations.clear;
@@ -108,6 +112,7 @@ trait Files { self : SubComponent =>
object FileDependencies{
val Separator = "-------";
+ private val RemovedFile = new VirtualFile("removed")
def readFrom(file: AbstractFile, toFile : String => AbstractFile): Option[FileDependencies] = readFromFile(file) { in =>
val reader = new BufferedReader(new InputStreamReader(in))
@@ -116,14 +121,26 @@ trait Files { self : SubComponent =>
var line : String = null
while ({line = reader.readLine; (line != null) && (line != Separator)}){
line.split(" -> ") match {
- case Array(from, on) => it.depends(toFile(from), toFile(on));
+ case Array(from, on) =>
+ (toFile(from), toFile(on)) match {
+ case (null, _) => // fromFile is removed, it's ok
+ case (fromFile, null) => it.depends(fromFile, RemovedFile) // onFile is removed, should recompile fromFile
+ case (fromFile, onFile) => it.depends(fromFile, onFile)
+ }
case x => global.inform("Parse error: Unrecognised string " + line); return None
}
}
while ({line = reader.readLine; (line != null) && (line != Separator)}){
line.split(" -> ") match {
- case Array(source, target) => it.emits(toFile(source), toFile(target));
+ case Array(source, target) =>
+ val targetFile = toFile(target)
+ (toFile(source), toFile(target)) match {
+ case (null, null) => // source and target are all removed, it's ok
+ case (null, targetFile) => it.emits(RemovedFile, targetFile) // source is removed, should remove relative target later
+ case (_, null) => // it may have been cleaned outside, or removed during the last phase
+ case (sourceFile, targetFile) => it.emits(sourceFile, targetFile)
+ }
case x => global.inform("Parse error: Unrecognised string " + line); return None
}
}
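
The new `RemovedFile` sentinel (a VirtualFile) marks dependency edges whose endpoint no longer exists on disk, so a later pass can tell "depends on something that was deleted" apart from an ordinary edge and force recompilation or target cleanup. The idea in miniature, with plain strings instead of AbstractFile (all names made up):

    // Sentinel marking files that have disappeared since the last build.
    val Removed = "<removed>"

    def resolve(name: String, stillThere: Set[String]): String =
      if (stillThere(name)) name else Removed

    val stillThere = Set("A.scala", "B.scala")
    val deps = List("A.scala" -> "B.scala", "A.scala" -> "C.scala") map {
      case (from, on) => (resolve(from, stillThere), resolve(on, stillThere))
    }
    // A.scala -> C.scala became A.scala -> <removed>, so A.scala must be recompiled
    val mustRecompile = deps collect { case (from, Removed) => from }
    println(mustRecompile)   // List(A.scala)
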
diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala
index be3af86d53..b8a219fb4d 100644
--- a/src/compiler/scala/tools/nsc/interactive/Global.scala
+++ b/src/compiler/scala/tools/nsc/interactive/Global.scala
@@ -334,6 +334,16 @@ self =>
def stabilizedType(tree: Tree): Type = tree match {
case Ident(_) if tree.symbol.isStable => singleType(NoPrefix, tree.symbol)
case Select(qual, _) if tree.symbol.isStable => singleType(qual.tpe, tree.symbol)
+ case Import(expr, selectors) =>
+ tree.symbol.info match {
+ case analyzer.ImportType(expr) => expr match {
+ case s@Select(qual, name) => singleType(qual.tpe, s.symbol)
+ case i : Ident => i.tpe
+ case _ => tree.tpe
+ }
+ case _ => tree.tpe
+ }
+
case _ => tree.tpe
}
@@ -388,7 +398,16 @@ self =>
}
def typeMembers(pos: Position): List[TypeMember] = {
- val tree = typedTreeAt(pos)
+ val tree1 = typedTreeAt(pos)
+ val tree0 = tree1 match {
+ case tt : TypeTree => tt.original
+ case t => t
+ }
+ val tree = tree0 match {
+ case s@Select(qual, name) if s.tpe == ErrorType => qual
+ case t => t
+ }
+
println("typeMembers at "+tree+" "+tree.tpe)
val context = doLocateContext(pos)
val superAccess = tree.isInstanceOf[Super]
@@ -415,12 +434,13 @@ self =>
}
}
val pre = stabilizedType(tree)
- for (sym <- tree.tpe.decls)
+ val ownerTpe = if (tree.tpe != null) tree.tpe else pre
+ for (sym <- ownerTpe.decls)
addTypeMember(sym, pre, false, NoSymbol)
- for (sym <- tree.tpe.members)
+ for (sym <- ownerTpe.members)
addTypeMember(sym, pre, true, NoSymbol)
val applicableViews: List[SearchResult] =
- new ImplicitSearch(tree, functionType(List(tree.tpe), AnyClass.tpe), true, context.makeImplicit(false))
+ new ImplicitSearch(tree, functionType(List(ownerTpe), AnyClass.tpe), true, context.makeImplicit(false))
.allImplicits
for (view <- applicableViews) {
val vtree = viewApply(view)
@@ -443,24 +463,6 @@ self =>
}
}
- /** A traverser that resets all type and symbol attributes in a tree
- object ResetAttrs extends Transformer {
- override def transform(t: Tree): Tree = {
- if (t.hasSymbol) t.symbol = NoSymbol
- t match {
- case EmptyTree =>
- t
- case tt: TypeTree =>
- if (tt.original != null) tt.original
- else t
- case _ =>
- t.tpe = null
- super.transform(t)
- }
- }
- }
- */
-
/** The typer run */
class TyperRun extends Run {
// units is always empty
diff --git a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
index 2e6041f4c4..78d3b55a3b 100644
--- a/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RangePositions.scala
@@ -71,7 +71,7 @@ self: scala.tools.nsc.Global =>
Range(new RangePosition(null, lo, lo, hi), EmptyTree)
/** The maximal free range */
- private lazy val maxFree: Range = free(0, Math.MAX_INT)
+ private lazy val maxFree: Range = free(0, Int.MaxValue)
/** A singleton list of a non-empty range from `lo` to `hi`, or else the empty List */
private def maybeFree(lo: Int, hi: Int) =
diff --git a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
index fdc2fd3a24..6af23f8fde 100644
--- a/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
+++ b/src/compiler/scala/tools/nsc/interactive/RefinedBuildManager.scala
@@ -118,7 +118,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
}
}
// Create a change for the top level classes that were removed
- val removed = definitions(src) remove ((s: Symbol) =>
+ val removed = definitions(src) filterNot ((s: Symbol) =>
syms.find(_.fullNameString == s.fullNameString) match {
case None => false
case _ => true
@@ -131,7 +131,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
println("Changes: " + changesOf)
updateDefinitions(files)
val compiled = updated ++ files
- val invalid = invalidated(files, changesOf, additionalDefs ++ compiled)
+ val invalid = invalidated(files, changesOf, additionalDefs.clone() ++= compiled)
update0(invalid -- compiled, compiled)
}
@@ -224,7 +224,7 @@ class RefinedBuildManager(val settings: Settings) extends Changes with BuildMana
if (buf.isEmpty)
processed
else
- invalidated(buf -- processed, newChangesOf, processed ++ buf)
+ invalidated(buf.clone() --= processed, newChangesOf, processed ++ buf)
}
/** Update the map of definitions per source file */
diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
index cf41852652..02df1d7318 100644
--- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala
@@ -7,7 +7,7 @@ package scala.tools.nsc
package interpreter
import scala.tools.nsc.io.AbstractFile
-import scala.util.ScalaClassLoader
+import util.ScalaClassLoader
/**
* A class loader that loads files from a {@link scala.tools.nsc.io.AbstractFile}.
diff --git a/src/compiler/scala/tools/nsc/interpreter/Completion.scala b/src/compiler/scala/tools/nsc/interpreter/Completion.scala
index fed2a6c5c1..7c8c6e6ef4 100644
--- a/src/compiler/scala/tools/nsc/interpreter/Completion.scala
+++ b/src/compiler/scala/tools/nsc/interpreter/Completion.scala
@@ -62,7 +62,7 @@ class Completion(val interpreter: Interpreter) extends Completor {
def filt(xs: List[String]) = xs filter (_ startsWith stub)
case class Result(candidates: List[String], position: Int) {
- def getCandidates() = (candidates map (_.trim) removeDuplicates) sort (_ < _)
+ def getCandidates() = (candidates map (_.trim) removeDuplicates) sortWith (_ < _)
}
// work out completion candidates and position
diff --git a/src/compiler/scala/tools/nsc/io/File.scala b/src/compiler/scala/tools/nsc/io/File.scala
index 294139ba44..1fbe384bfa 100644
--- a/src/compiler/scala/tools/nsc/io/File.scala
+++ b/src/compiler/scala/tools/nsc/io/File.scala
@@ -90,7 +90,7 @@ with Streamable.Chars
val dest = destPath.toFile
if (!isValid) fail("Source %s is not a valid file." format name)
if (this.normalize == dest.normalize) fail("Source and destination are the same.")
- if (!dest.parent.map(_.exists).getOrElse(false)) fail("Destination cannot be created.")
+ if (!dest.parent.exists) fail("Destination cannot be created.")
if (dest.exists && !dest.canWrite) fail("Destination exists but is not writable.")
if (dest.isDirectory) fail("Destination exists but is a directory.")
diff --git a/src/compiler/scala/tools/nsc/io/Path.scala b/src/compiler/scala/tools/nsc/io/Path.scala
index 37cc64cf75..64a313b00f 100644
--- a/src/compiler/scala/tools/nsc/io/Path.scala
+++ b/src/compiler/scala/tools/nsc/io/Path.scala
@@ -66,6 +66,7 @@ import Path._
class Path private[io] (val jfile: JFile)
{
val separator = JFile.separatorChar
+ val separatorStr = JFile.separator
// Validation: this verifies that the type of this object and the
// contents of the filesystem are in agreement. All objects are
@@ -83,7 +84,7 @@ class Path private[io] (val jfile: JFile)
/** Creates a new Path with the specified path appended. Assumes
* the type of the new component implies the type of the result.
*/
- def /(child: Path): Path = new Path(new JFile(jfile, child.path))
+ def /(child: Path): Path = if (isEmpty) child else new Path(new JFile(jfile, child.path))
def /(child: Directory): Directory = /(child: Path).toDirectory
def /(child: File): File = /(child: Path).toFile
@@ -91,17 +92,42 @@ class Path private[io] (val jfile: JFile)
def name: String = jfile.getName()
def path: String = jfile.getPath()
def normalize: Path = Path(jfile.getCanonicalPath())
- // todo -
- // def resolve(other: Path): Path
- // def relativize(other: Path): Path
+
+ def resolve(other: Path) = if (other.isAbsolute || isEmpty) other else /(other)
+ def relativize(other: Path) = {
+ assert(isAbsolute == other.isAbsolute, "Paths not of same type: "+this+", "+other)
+
+ def createRelativePath(baseSegs: List[String], otherSegs: List[String]) : String = {
+ (baseSegs, otherSegs) match {
+ case (b :: bs, o :: os) if b == o => createRelativePath(bs, os)
+ case (bs, os) => ((".."+separator)*bs.length)+os.mkString(separatorStr)
+ }
+ }
+
+ Path(createRelativePath(segments, other.segments))
+ }
// derived from identity
def root: Option[Path] = roots find (this startsWith _)
- def segments: List[String] = (path split separator).toList filterNot (_.isEmpty)
- def parent: Option[Path] = Option(jfile.getParent()) map Path.apply
- def parents: List[Path] = parent match {
- case None => Nil
- case Some(p) => p :: p.parents
+ def segments: List[String] = (path split separator).toList filterNot (_.length == 0)
+ /**
+ * @return The path of the parent directory, or root if path is already root
+ */
+ def parent: Path = {
+ val p = path match {
+ case "" | "." => ".."
+ case _ if path endsWith ".." => path + separator + ".." // the only solution
+ case _ => jfile.getParent match {
+ case null if isAbsolute => path // it should be a root. BTW, don't need to worry about relative pathed root
+ case null => "." // a file ot dir under pwd
+ case x => x
+ }
+ }
+ new Directory(new JFile(p))
+ }
+ def parents: List[Path] = {
+ val p = parent
+ if (p isSame this) Nil else p :: p.parents
}
// if name ends with an extension (e.g. "foo.jpg") returns the extension ("jpg"), otherwise ""
def extension: String = (name lastIndexOf '.') match {
@@ -119,10 +145,11 @@ class Path private[io] (val jfile: JFile)
def isDirectory = jfile.isDirectory()
def isAbsolute = jfile.isAbsolute()
def isHidden = jfile.isHidden()
- def isSymlink = parent.isDefined && {
- val x = parent.get / name
+ def isSymlink = {
+ val x = parent / name
x.normalize != x.toAbsolute
}
+ def isEmpty = path.length == 0
// Information
def lastModified = jfile.lastModified()
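
`relativize`, added above, works purely on the segment lists: it drops the common prefix, then climbs out of whatever remains of the base with one ".." per leftover base segment. The same algorithm on bare strings (separator handling simplified; a sketch, not the io.Path API):

    def segments(p: String): List[String] =
      p.split('/').toList filterNot (_.isEmpty)

    def relativize(base: String, other: String): String = {
      def loop(bs: List[String], os: List[String]): String = (bs, os) match {
        case (b :: bt, o :: ot) if b == o => loop(bt, ot)
        case (bs1, os1)                   => ("../" * bs1.length) + os1.mkString("/")
      }
      loop(segments(base), segments(other))
    }

    println(relativize("/a/b/c", "/a/x/y"))   // ../../x/y
    println(relativize("/a/b",   "/a/b/c"))   // c
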
@@ -132,7 +159,7 @@ class Path private[io] (val jfile: JFile)
// Boolean path comparisons
def endsWith(other: Path) = segments endsWith other.segments
def startsWith(other: Path) = segments startsWith other.segments
- def isSame(other: Path) = toAbsolute == other.toAbsolute
+ def isSame(other: Path) = normalize == other.normalize
def isFresher(other: Path) = lastModified > other.lastModified
// creations
diff --git a/src/compiler/scala/tools/nsc/io/PlainFile.scala b/src/compiler/scala/tools/nsc/io/PlainFile.scala
index 926f5ee042..ef10b6d6bd 100644
--- a/src/compiler/scala/tools/nsc/io/PlainFile.scala
+++ b/src/compiler/scala/tools/nsc/io/PlainFile.scala
@@ -38,7 +38,7 @@ class PlainFile(val givenPath: Path) extends AbstractFile {
/** The absolute file. */
def absolute = new PlainFile(givenPath.normalize)
- override def container: AbstractFile = new PlainFile(givenPath.parent.get)
+ override def container: AbstractFile = new PlainFile(givenPath.parent)
override def input = givenPath.toFile.inputStream()
override def output = givenPath.toFile.outputStream()
override def sizeOption = Some(givenPath.length.toInt)
diff --git a/src/compiler/scala/tools/nsc/io/VirtualFile.scala b/src/compiler/scala/tools/nsc/io/VirtualFile.scala
index 90769d7086..65e7e34d88 100644
--- a/src/compiler/scala/tools/nsc/io/VirtualFile.scala
+++ b/src/compiler/scala/tools/nsc/io/VirtualFile.scala
@@ -68,7 +68,7 @@ class VirtualFile(val name: String, _path: String) extends AbstractFile
def isDirectory: Boolean = false
/** Returns the time that this abstract file was last modified. */
- def lastModified: Long = Math.MIN_LONG
+ def lastModified: Long = Long.MinValue
/** Returns all abstract subfiles of this abstract directory. */
def iterator: Iterator[AbstractFile] = {
diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
index 2ffea32307..9d7889ccc2 100644
--- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
+++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala
@@ -836,7 +836,7 @@ trait JavaScanners {
var value: Long = 0
val divider = if (base == 10) 1 else 2
val limit: Long =
- if (token == LONGLIT) Math.MAX_LONG else Math.MAX_INT
+ if (token == LONGLIT) Long.MaxValue else Int.MaxValue
var i = 0
val len = name.length
while (i < len) {
@@ -864,7 +864,7 @@ trait JavaScanners {
*/
def floatVal(negated: Boolean): Double = {
val limit: Double =
- if (token == DOUBLELIT) Math.MAX_DOUBLE else Math.MAX_FLOAT
+ if (token == DOUBLELIT) Double.MaxValue else Float.MaxValue
try {
val value: Double = java.lang.Double.valueOf(name.toString()).doubleValue()
if (value > limit)
diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
index d87eb29cf1..57a49cd461 100644
--- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
+++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala
@@ -53,20 +53,19 @@ trait ParallelMatching extends ast.TreeDSL
-shortCuts.length
}
- // XXX transitional.
- final def requestBody(bx: Int, subst: Bindings): Tree =
- requestBody(bx, PatternVarGroup.fromBindings(subst.get(), targets(bx).freeVars))
-
/** first time bx is requested, a LabelDef is returned. next time, a jump.
* the function takes care of binding
*/
- final def requestBody(bx: Int, pvgroup: PatternVarGroup): Tree = {
+ final def requestBody(bx: Int, subst: Bindings): Tree = {
+ // shortcut
+ if (bx < 0)
+ return Apply(ID(shortCuts(-bx-1)), Nil)
+
+ val pvgroup = PatternVarGroup.fromBindings(subst.get(), targets(bx).freeVars)
val target = targets(bx)
- // shortcut
- if (bx < 0) Apply(ID(shortCuts(-bx-1)), Nil)
// first time this bx is requested - might be bound elsewhere
- else if (target.isNotReached) target.createLabelBody(bx, pvgroup)
+ if (target.isNotReached) target.createLabelBody(bx, pvgroup)
// call label "method" if possible
else target.getLabelBody(pvgroup)
}
@@ -454,17 +453,17 @@ trait ParallelMatching extends ast.TreeDSL
private lazy val rowsplit = {
require(scrut.tpe <:< head.tpe)
- List.unzip(
- for ((c, rows) <- pmatch pzip rest.rows) yield {
- def canSkip = pivot canSkipSubsequences c
- def passthrough(skip: Boolean) = if (skip) None else Some(rows insert c)
+ val res = for ((c, rows) <- pmatch pzip rest.rows) yield {
+ def canSkip = pivot canSkipSubsequences c
+ def passthrough(skip: Boolean) = if (skip) None else Some(rows insert c)
- pivot.subsequences(c, scrut.seqType) match {
- case Some(ps) => (Some(rows insert ps), passthrough(canSkip))
- case None => (None, passthrough(false))
- }
+ pivot.subsequences(c, scrut.seqType) match {
+ case Some(ps) => (Some(rows insert ps), passthrough(canSkip))
+ case None => (None, passthrough(false))
}
- ) match { case (l1, l2) => (l1.flatten, l2.flatten) }
+ }
+
+ res.unzip match { case (l1, l2) => (l1.flatten, l2.flatten) }
}
lazy val cond = (pivot precondition pmatch).get // length check
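
The rowsplit rewrite above keeps the same result but drops the removed `List.unzip(...)` static in favour of the instance method: build a list of (Option, Option) pairs, `unzip` it, then flatten each side. The shape in isolation, on toy data:

    val pairs: List[(Option[Int], Option[String])] =
      List((Some(1), None), (None, Some("b")), (Some(3), Some("c")))

    val (lefts, rights) =
      pairs.unzip match { case (l1, l2) => (l1.flatten, l2.flatten) }

    println(lefts)    // List(1, 3)
    println(rights)   // List(b, c)
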
@@ -477,7 +476,7 @@ trait ParallelMatching extends ast.TreeDSL
// @todo: equals test for same constant
class MixEquals(val pmatch: PatternMatch, val rest: Rep) extends RuleApplication {
private lazy val labelBody =
- remake(List.map2(rest.rows.tail, pmatch.tail)(_ insert _)).toTree
+ remake((rest.rows.tail, pmatch.tail).zipped map (_ insert _)).toTree
private lazy val rhs =
decodedEqualsType(head.tpe) match {
@@ -511,8 +510,11 @@ trait ParallelMatching extends ast.TreeDSL
case class Yes(bx: Int, moreSpecific: Pattern, subsumed: List[Pattern])
case class No(bx: Int, remaining: Pattern)
- val (yeses, noes) : (List[Yes], List[No]) = List.unzip(
- for ((pattern, j) <- pmatch.pzip()) yield {
+ val (yeses, noes) = {
+ val _ys = new ListBuffer[Yes]
+ val _ns = new ListBuffer[No]
+
+ for ((pattern, j) <- pmatch.pzip()) {
// scrutinee, head of pattern group
val (s, p) = (pattern.tpe, head.necessaryType)
@@ -530,7 +532,7 @@ trait ParallelMatching extends ast.TreeDSL
def typed(pp: Tree) = passl(ifEquiv(Pattern(pp)))
def subs() = passl(ifEquiv(NoPattern), pattern subpatterns pmatch)
- (pattern match {
+ val (oneY, oneN) = pattern match {
case Pattern(LIT(null), _) if !(p =:= s) => (None, passr) // (1)
case x if isObjectTest => (passl(), None) // (2)
case Pattern(Typed(pp, _), _) if sMatchesP => (typed(pp), None) // (4)
@@ -538,9 +540,12 @@ trait ParallelMatching extends ast.TreeDSL
case x if !x.isDefault && sMatchesP => (subs(), None)
case x if x.isDefault || pMatchesS => (passl(), passr)
case _ => (None, passr)
- }) : (Option[Yes], Option[No])
+ }
+ oneY map (_ys +=)
+ oneN map (_ns +=)
}
- ) match { case (x,y) => (x.flatten, y.flatten) }
+ (_ys.toList, _ns.toList)
+ }
val moreSpecific = yeses map (_.moreSpecific)
val subsumed = yeses map (x => (x.bx, x.subsumed))
@@ -739,12 +744,10 @@ trait ParallelMatching extends ast.TreeDSL
/** Cut out the column containing the non-default pattern. */
class Cut(index: Int) {
/** The first two separate out the 'i'th pattern in each row from the remainder. */
- private val (_column, _rows) =
- List.unzip(rows map (_ extractColumn index))
+ private val (_column, _rows) = rows map (_ extractColumn index) unzip
/** Now the 'i'th tvar is separated out and used as a new Scrutinee. */
- private val (_pv, _tvars) =
- tvars extractIndex index
+ private val (_pv, _tvars) = tvars extractIndex index
/** The non-default pattern (others.head) replaces the column head. */
private val (_ncol, _nrep) =
diff --git a/src/compiler/scala/tools/nsc/models/Signatures.scala b/src/compiler/scala/tools/nsc/models/Signatures.scala
index 98bfa142ff..0d75cf7101 100644
--- a/src/compiler/scala/tools/nsc/models/Signatures.scala
+++ b/src/compiler/scala/tools/nsc/models/Signatures.scala
@@ -25,14 +25,10 @@ class Signatures(val compiler: Compiler) {
def asString: String = name + "[" + asString0(children) + "]"
}
- def sort(sigs: List[Signature]) =
- sigs.sort((l0,l1) => l0.name.compareTo(l1.name) > 0)
+ def sort(sigs: List[Signature]) = sigs sortBy (_.name) reverse
- def asString0(sigs: List[Signature]): String = {
- var ret = ""
- for (sig <- sort(sigs)) ret = ret + sig.asString
- ret
- }
+ def asString0(sigs: List[Signature]): String =
+ sort(sigs) map (_.asString) mkString
def signature(unit: CompilationUnit): String =
asString0(signature(unit.body, Nil))
diff --git a/src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala b/src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala
index 6c29ab5cf3..9bd2a79449 100644
--- a/src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala
+++ b/src/compiler/scala/tools/nsc/symtab/BaseTypeSeqs.scala
@@ -8,7 +8,7 @@ package symtab
// todo implement in terms of BitSet
import scala.collection.mutable.ListBuffer
import scala.collection.immutable.Map
-import Math.max
+import math.max
/** A base type sequence (BaseTypeSeq) is an ordered sequence spanning all the base types
* of a type. It characterized by the following two laws:
@@ -137,7 +137,7 @@ trait BaseTypeSeqs {
protected def maxDepthOfElems = {
var d = 0
- for (i <- 0 until length) d = Math.max(d, maxDpth(elems(i)))
+ for (i <- 0 until length) d = max(d, maxDpth(elems(i)))
d
}
diff --git a/src/compiler/scala/tools/nsc/symtab/Definitions.scala b/src/compiler/scala/tools/nsc/symtab/Definitions.scala
index 25c60c32b8..c32a6fffd8 100644
--- a/src/compiler/scala/tools/nsc/symtab/Definitions.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Definitions.scala
@@ -51,6 +51,25 @@ trait Definitions {
lazy val anyvalparam = List(AnyValClass.typeConstructor)
lazy val anyrefparam = List(AnyRefClass.typeConstructor)
+ // private parameter conveniences
+ private def booltype = BooleanClass.typeConstructor
+ private def boolparam = List(booltype)
+ private def bytetype = ByteClass.typeConstructor
+ private def byteparam = List(bytetype)
+ private def shorttype = ShortClass.typeConstructor
+ private def shortparam = List(shorttype)
+ private def inttype = IntClass.typeConstructor
+ private def intparam = List(inttype)
+ private def longtype = LongClass.typeConstructor
+ private def longparam = List(longtype)
+ private def floattype = FloatClass.typeConstructor
+ private def floatparam = List(floattype)
+ private def doubletype = DoubleClass.typeConstructor
+ private def doubleparam = List(doubletype)
+ private def chartype = CharClass.typeConstructor
+ private def charparam = List(chartype)
+ private def stringtype = StringClass.typeConstructor
+
// top types
lazy val AnyClass = newClass(ScalaPackageClass, nme.Any, Nil) setFlag (ABSTRACT)
lazy val AnyValClass = newClass(ScalaPackageClass, nme.AnyVal, anyparam) setFlag (ABSTRACT | SEALED)
@@ -76,6 +95,11 @@ trait Definitions {
def Boolean_and = getMember(BooleanClass, nme.ZAND)
def Boolean_or = getMember(BooleanClass, nme.ZOR)
+ def ScalaValueClasses = List(
+ UnitClass, ByteClass, ShortClass, IntClass, LongClass,
+ CharClass, FloatClass, DoubleClass, BooleanClass
+ )
+
// exceptions and other throwables
lazy val ThrowableClass = getClass(sn.Throwable)
lazy val NullPointerExceptionClass = getClass(sn.NPException)
@@ -106,6 +130,7 @@ trait Definitions {
// fundamental reference classes
lazy val ScalaObjectClass = getClass("scala.ScalaObject")
lazy val PartialFunctionClass = getClass("scala.PartialFunction")
+ lazy val SymbolClass = getClass("scala.Symbol")
lazy val StringClass = getClass(sn.String)
lazy val ClassClass = getClass(sn.Class)
def Class_getMethod = getMember(ClassClass, nme.getMethod_)
@@ -122,9 +147,8 @@ trait Definitions {
def Predef_conforms = getMember(PredefModule, nme.conforms)
lazy val ConsoleModule: Symbol = getModule("scala.Console")
lazy val ScalaRunTimeModule: Symbol = getModule("scala.runtime.ScalaRunTime")
+ lazy val SymbolModule: Symbol = getModule("scala.Symbol")
def SeqFactory = getMember(ScalaRunTimeModule, nme.Seq)
- def checkDefinedMethod = getMember(ScalaRunTimeModule, "checkDefined")
- def isArrayMethod = getMember(ScalaRunTimeModule, "isArray")
def arrayApplyMethod = getMember(ScalaRunTimeModule, "array_apply")
def arrayUpdateMethod = getMember(ScalaRunTimeModule, "array_update")
def arrayLengthMethod = getMember(ScalaRunTimeModule, "array_length")
@@ -227,6 +251,8 @@ trait Definitions {
def optionType(tp: Type) = typeRef(OptionClass.typeConstructor.prefix, OptionClass, List(tp))
def someType(tp: Type) = typeRef(SomeClass.typeConstructor.prefix, SomeClass, List(tp))
+ def symbolType = typeRef(SymbolClass.typeConstructor.prefix, SymbolClass, List())
+ def longType = typeRef(LongClass.typeConstructor.prefix, LongClass, List())
// Product, Tuple, Function
private def mkArityArray(name: String, arity: Int, countFrom: Int = 1) = {
@@ -351,8 +377,13 @@ trait Definitions {
var Object_== : Symbol = _
var Object_!= : Symbol = _
var Object_synchronized: Symbol = _
- var Object_isInstanceOf: Symbol = _
- var Object_asInstanceOf: Symbol = _
+ lazy val Object_isInstanceOf = newPolyMethod(
+ ObjectClass, "$isInstanceOf",
+ tparam => MethodType(List(), booltype)) setFlag FINAL
+ lazy val Object_asInstanceOf = newPolyMethod(
+ ObjectClass, "$asInstanceOf",
+ tparam => MethodType(List(), tparam.typeConstructor)) setFlag FINAL
+
def Object_getClass = getMember(ObjectClass, nme.getClass_)
def Object_clone = getMember(ObjectClass, nme.clone_)
def Object_finalize = getMember(ObjectClass, nme.finalize_)
@@ -367,9 +398,6 @@ trait Definitions {
// boxed classes
lazy val ObjectRefClass = getClass("scala.runtime.ObjectRef")
lazy val BoxesRunTimeClass = getModule("scala.runtime.BoxesRunTime")
- lazy val BoxedArrayClass = getClass("scala.runtime.BoxedArray")
- lazy val BoxedAnyArrayClass = getClass("scala.runtime.BoxedAnyArray")
- lazy val BoxedObjectArrayClass = getClass("scala.runtime.BoxedObjectArray")
lazy val BoxedNumberClass = getClass(sn.BoxedNumber)
lazy val BoxedCharacterClass = getClass(sn.BoxedCharacter)
lazy val BoxedBooleanClass = getClass(sn.BoxedBoolean)
@@ -380,6 +408,18 @@ trait Definitions {
lazy val BoxedFloatClass = getClass("java.lang.Float")
lazy val BoxedDoubleClass = getClass("java.lang.Double")
+ /** The various ways a boxed primitive might materialize at runtime. */
+ def isMaybeBoxed(sym: Symbol) =
+ if (forMSIL)
+ sym isNonBottomSubClass BoxedNumberClass
+ else {
+ (sym == ObjectClass) ||
+ (sym == SerializableClass) ||
+ (sym == ComparableClass) ||
+ (sym isNonBottomSubClass BoxedNumberClass) ||
+ (sym isNonBottomSubClass BoxedCharacterClass)
+ }
+
lazy val BoxedUnitClass = getClass("scala.runtime.BoxedUnit")
lazy val BoxedUnitModule = getModule("scala.runtime.BoxedUnit")
def BoxedUnit_UNIT = getMember(BoxedUnitModule, "UNIT")
@@ -525,7 +565,6 @@ trait Definitions {
val boxedModule = new HashMap[Symbol, Symbol]
val unboxMethod = new HashMap[Symbol, Symbol] // Type -> Method
val boxMethod = new HashMap[Symbol, Symbol] // Type -> Method
- val boxedArrayClass = new HashMap[Symbol, Symbol]
def isUnbox(m: Symbol) = (m.name == nme.unbox) && cond(m.tpe) {
case MethodType(_, restpe) => cond(unboxMethod get restpe.typeSymbol) {
@@ -550,7 +589,6 @@ trait Definitions {
val clazz = newClass(ScalaPackageClass, name, anyvalparam) setFlag (ABSTRACT | FINAL)
boxedClass(clazz) = getClass(boxedName)
boxedModule(clazz) = getModule(boxedName)
- boxedArrayClass(clazz) = getClass("scala.runtime.Boxed" + name + "Array")
refClass(clazz) = getClass("scala.runtime." + name + "Ref")
abbrvTag(clazz) = tag
if (width > 0) numericWidth(clazz) = width
@@ -572,26 +610,6 @@ trait Definitions {
/** Sets-up symbols etc. for value classes, and their boxed versions. This
* method is called once from within the body of init. */
private def initValueClasses() {
- val booltype = BooleanClass.typeConstructor
- val boolparam = List(booltype)
- val bytetype = ByteClass.typeConstructor
- val byteparam = List(bytetype)
- val chartype = CharClass.typeConstructor
- val charparam = List(chartype)
- val shorttype = ShortClass.typeConstructor
- val shortparam = List(shorttype)
- val inttype = IntClass.typeConstructor
- val intparam = List(inttype)
- val longtype = LongClass.typeConstructor
- val longparam = List(longtype)
-
- val floattype = FloatClass.typeConstructor
- val floatparam = List(floattype)
- val doubletype = DoubleClass.typeConstructor
- val doubleparam = List(doubletype)
-
- val stringtype = StringClass.typeConstructor
-
// init scala.Boolean
newParameterlessMethod(BooleanClass, nme.UNARY_!, booltype)
List(nme.EQ, nme.NE, nme.ZOR, nme.ZAND, nme.OR, nme.AND, nme.XOR) foreach {
@@ -619,7 +637,6 @@ trait Definitions {
newParameterlessMethod(clazz, nme.toChar, chartype)
newParameterlessMethod(clazz, nme.toInt, inttype)
newParameterlessMethod(clazz, nme.toLong, longtype)
-
newParameterlessMethod(clazz, nme.toFloat, floattype)
newParameterlessMethod(clazz, nme.toDouble, doubletype)
@@ -730,7 +747,7 @@ trait Definitions {
if (isInitialized) return
isInitialized = true
- EmptyPackageClass.setInfo(ClassInfoType(List(), new Scope, EmptyPackageClass))
+ EmptyPackageClass.setInfo(ClassInfoType(Nil, new Scope, EmptyPackageClass))
EmptyPackage.setInfo(EmptyPackageClass.tpe)
RootClass.info.decls.enter(EmptyPackage)
RootClass.info.decls.enter(RootPackage)
@@ -738,16 +755,13 @@ trait Definitions {
abbrvTag(UnitClass) = 'V'
initValueClasses()
- val booltype = BooleanClass.typeConstructor
// members of class scala.Any
Any_== = newMethod(AnyClass, nme.EQ, anyparam, booltype) setFlag FINAL
Any_!= = newMethod(AnyClass, nme.NE, anyparam, booltype) setFlag FINAL
Any_equals = newMethod(AnyClass, nme.equals_, anyparam, booltype)
- Any_hashCode = newMethod(
- AnyClass, nme.hashCode_, List(), IntClass.typeConstructor)
- Any_toString = newMethod(
- AnyClass, nme.toString_, List(), StringClass.typeConstructor)
+ Any_hashCode = newMethod(AnyClass, nme.hashCode_, Nil, inttype)
+ Any_toString = newMethod(AnyClass, nme.toString_, Nil, stringtype)
Any_isInstanceOf = newPolyMethod(
AnyClass, nme.isInstanceOf_, tparam => booltype) setFlag FINAL
@@ -762,16 +776,11 @@ trait Definitions {
Object_synchronized = newPolyMethodCon(
ObjectClass, nme.synchronized_,
tparam => msym => MethodType(msym.newSyntheticValueParams(List(tparam.typeConstructor)), tparam.typeConstructor)) setFlag FINAL
- Object_isInstanceOf = newPolyMethod(
- ObjectClass, "$isInstanceOf",
- tparam => MethodType(List(), booltype)) setFlag FINAL
- Object_asInstanceOf = newPolyMethod(
- ObjectClass, "$asInstanceOf",
- tparam => MethodType(List(), tparam.typeConstructor)) setFlag FINAL
+
String_+ = newMethod(
- StringClass, "+", anyparam, StringClass.typeConstructor) setFlag FINAL
+ StringClass, "+", anyparam, stringtype) setFlag FINAL
- val forced = List( // force initialization of every symbol that is enetred as a side effect
+ val forced = List( // force initialization of every symbol that is entered as a side effect
AnnotationDefaultAttr,
RepeatedParamClass,
JavaRepeatedParamClass,
@@ -791,7 +800,9 @@ trait Definitions {
NullClass,
NothingClass,
SingletonClass,
- EqualsPatternClass
+ EqualsPatternClass,
+ Object_isInstanceOf,
+ Object_asInstanceOf
)
// #2264
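
A minimal, hypothetical Scala sketch (names are illustrative, not from the patch) of why the isMaybeBoxed helper added above must accept Object, Serializable and Comparable in addition to the boxed classes: a boxed primitive can reach a reflective call site under any of these static types.

// Hypothetical sketch only, not part of the patch.
object BoxedReceivers {
  val asObject: AnyRef                       = java.lang.Integer.valueOf(42)  // runtime class: java.lang.Integer
  val asComparable: java.lang.Comparable[_]  = java.lang.Integer.valueOf(1)   // Integer seen as Comparable
  val asSerializable: java.io.Serializable   = java.lang.Long.valueOf(2L)     // Long seen as Serializable
}
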
diff --git a/src/compiler/scala/tools/nsc/symtab/StdNames.scala b/src/compiler/scala/tools/nsc/symtab/StdNames.scala
index 3c5b866ba8..990d6dc463 100644
--- a/src/compiler/scala/tools/nsc/symtab/StdNames.scala
+++ b/src/compiler/scala/tools/nsc/symtab/StdNames.scala
@@ -268,8 +268,6 @@ trait StdNames {
val assume_ = newTermName("assume")
val asInstanceOf_ = newTermName("asInstanceOf")
val box = newTermName("box")
- val boxArray = newTermName("boxArray")
- val forceBoxedArray = newTermName("forceBoxedArray")
val canEqual_ = newTermName("canEqual")
val checkInitialized = newTermName("checkInitialized")
val classOf = newTermName("classOf")
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolWalker.scala b/src/compiler/scala/tools/nsc/symtab/SymbolWalker.scala
index 5f0574b525..f2e16b537a 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolWalker.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolWalker.scala
@@ -20,32 +20,35 @@ trait SymbolWalker {
def apply(pos : Position) : Symbol = map.apply(pos)
}
*/
+ private def validSym(t: Tree) = t.symbol != NoSymbol && t.symbol != null
+ private def validSym(tp: Type) = tp != null && tp.typeSymbol != NoSymbol && tp.typeSymbol != null
+ private def notNull(tp: Type) = tp.typeSymbol != null
+ private def isNoSymbol(t: Tree) = t.symbol eq NoSymbol
+
def walk(tree: Tree, visitor : Visitor)(fid : (util.Position) => Option[String]) : Unit = {
val visited = new LinkedHashSet[Tree]
def f(t : Tree) : Unit = {
if (visited.add(t)) return
- def fs(l : List[Tree]) : Unit = {
- val i = l.iterator
- while (i.hasNext) f(i.next)
- }
- def fss(l : List[List[Tree]]) : Unit = {
- val i = l.iterator
- while (i.hasNext) fs(i.next)
- }
+
+ def fs(l: List[Tree]) = l foreach f
+ def fss(l: List[List[Tree]]) = l foreach fs
+
if (t.isInstanceOf[StubTree]) return
- def asTypeRef = t.tpe.asInstanceOf[TypeRef]
- val sym = (t,t.tpe) match {
- case (Super(_,_),SuperType(_,supertp)) if supertp.typeSymbol != NoSymbol && supertp.typeSymbol != null => supertp.typeSymbol
- case _ if t.symbol != NoSymbol && t.symbol != null => t.symbol
- case (t : TypeTree, tp) if tp != null && tp.typeSymbol != null && tp.typeSymbol != NoSymbol => tp.typeSymbol
- case (t : TypeTree, tp) if tp != null && tp.resultType != null && tp.resultType.typeSymbol != null => tp.resultType.typeSymbol
- case (t, tpe : Type) if tpe != null && (t.symbol eq NoSymbol) && t.isTerm && tpe.termSymbol != null =>
- tpe.termSymbol
- case (t, tpe : Type) if tpe != null && (t.symbol eq NoSymbol) && tpe.typeSymbol != null =>
- if (t.tpe.isInstanceOf[TypeRef]) asTypeRef.sym // XXX: looks like a bug
- else tpe.typeSymbol
- case _ => NoSymbol
+
+ val sym = (t, t.tpe) match {
+ case (Super(_,_),SuperType(_,supertp)) if validSym(supertp) => supertp.typeSymbol
+ case _ if validSym(t) => t.symbol
+ case (t: TypeTree, tp) if validSym(tp) => tp.typeSymbol
+ case (t: TypeTree, tp) if validSym(tp.resultType) => tp.resultType.typeSymbol
+ case (t, tpe: Type) if isNoSymbol(t) && tpe.termSymbol != null =>
+ if (t.isTerm) tpe.termSymbol
+ else t.tpe match {
+ case x: TypeRef => x.sym // XXX: looks like a bug
+ case _ => tpe.typeSymbol
+ }
+ case _ => NoSymbol
}
+
if (sym != null && sym != NoSymbol /* && !sym.hasFlag(SYNTHETIC) */) {
var id = fid(t.pos)
val doAdd = if (id.isDefined) {
diff --git a/src/compiler/scala/tools/nsc/symtab/Symbols.scala b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
index e4cbe159a1..1634e04025 100644
--- a/src/compiler/scala/tools/nsc/symtab/Symbols.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Symbols.scala
@@ -112,7 +112,7 @@ trait Symbols {
* the annotations attached to a member definition (class, method, type, field).
*/
def annotations: List[AnnotationInfo] = {
- // .initialize: the type completer of the symbol parses the annotations,
+ // .initialize: the type completer of the symbol parses the annotations,
// see "def typeSig" in Namers
val annots1 = initialize.rawannots map {
case LazyAnnotationInfo(annot) => annot()
@@ -563,14 +563,14 @@ trait Symbols {
final def isStaticOwner: Boolean =
isPackageClass || isModuleClass && isStatic
- /** Is this symbol final?*/
+ /** Is this symbol final? */
final def isFinal: Boolean = (
hasFlag(FINAL) ||
isTerm && (
hasFlag(PRIVATE) || isLocal || owner.isClass && owner.hasFlag(FINAL | MODULE))
)
- /** Is this symbol a sealed class?*/
+ /** Is this symbol a sealed class? */
final def isSealed: Boolean =
isClass && (hasFlag(SEALED) || isValueClass(this))
@@ -1166,9 +1166,10 @@ trait Symbols {
def renamedGetter = accessors find (_.originalName startsWith (getterName + "$"))
val accessorName = origGetter orElse renamedGetter
- accessorName getOrElse {
- throw new Error("Could not find case accessor for %s in %s".format(field, this))
- }
+ // This fails more gracefully, returning NoSymbol rather than throwing an Error as it used to,
+ // because, as seen in #2625, we can reach this point with an already erroneous tree.
+ accessorName getOrElse NoSymbol
+ // throw new Error("Could not find case accessor for %s in %s".format(field, this))
}
fields map findAccessor
@@ -1369,8 +1370,8 @@ trait Symbols {
/** The non-private member of `site' whose type and name match the type of this symbol
*/
- final def matchingSymbol(site: Type): Symbol =
- site.nonPrivateMember(name).filter(sym =>
+ final def matchingSymbol(site: Type, admit: Long = 0L): Symbol =
+ site.nonPrivateMemberAdmitting(name, admit).filter(sym =>
!sym.isTerm || (site.memberType(this) matches site.memberType(sym)))
/** The symbol overridden by this symbol in given class `ofclazz'.
diff --git a/src/compiler/scala/tools/nsc/symtab/Types.scala b/src/compiler/scala/tools/nsc/symtab/Types.scala
index 6145c4a0d7..668f7149a9 100644
--- a/src/compiler/scala/tools/nsc/symtab/Types.scala
+++ b/src/compiler/scala/tools/nsc/symtab/Types.scala
@@ -1507,6 +1507,7 @@ trait Types {
private var baseTypeSeqPeriod = NoPeriod
override def isStable: Boolean = {
+ sym == NothingClass ||
sym == SingletonClass ||
sym.isAliasType && normalize.isStable ||
sym.isAbstractType && (bounds.hi.typeSymbol isSubClass SingletonClass)
@@ -1564,6 +1565,8 @@ A type's typeSymbol should never be inspected directly.
parentsPeriod = currentPeriod
if (!isValidForBaseClasses(period)) {
parentsCache = thisInfo.parents map transform
+ } else if (parentsCache == null) { // seems this can happen if things are corrupted enough, see #2641
+ parentsCache = List(AnyClass.tpe)
}
}
parentsCache
@@ -1988,16 +1991,17 @@ A type's typeSymbol should never be inspected directly.
case class AntiPolyType(pre: Type, targs: List[Type]) extends Type {
override def safeToString =
pre.toString + targs.mkString("(with type arguments ", ",", ")");
- override def memberType(sym: Symbol) = pre.memberType(sym) match {
- case PolyType(tparams, restp) =>
- restp.subst(tparams, targs)
-/* I don't think this is needed, as existential types close only over value types
- case ExistentialType(tparams, qtpe) =>
- existentialAbstraction(tparams, qtpe.memberType(sym))
-*/
- case ErrorType =>
- ErrorType
- }
+ override def memberType(sym: Symbol) = appliedType(pre.memberType(sym), targs)
+// override def memberType(sym: Symbol) = pre.memberType(sym) match {
+// case PolyType(tparams, restp) =>
+// restp.subst(tparams, targs)
+// /* I don't think this is needed, as existential types close only over value types
+// case ExistentialType(tparams, qtpe) =>
+// existentialAbstraction(tparams, qtpe.memberType(sym))
+// */
+// case ErrorType =>
+// ErrorType
+// }
override def kind = "AntiPolyType"
}
@@ -2337,9 +2341,7 @@ A type's typeSymbol should never be inspected directly.
if (phase.erasedTypes)
if (parents.isEmpty) ObjectClass.tpe else parents.head
else {
- // having $anonfun as owner causes the pickler to break upon unpickling; see ticket #2323
- val nonAnonOwner = (owner.ownerChain dropWhile (_.isAnonymousFunction)).headOption getOrElse NoSymbol
- val clazz = nonAnonOwner.newRefinementClass(NoPosition)
+ val clazz = owner.newRefinementClass(NoPosition)
val result = refinementOfClass(clazz, parents, decls)
clazz.setInfo(result)
result
@@ -2520,16 +2522,19 @@ A type's typeSymbol should never be inspected directly.
else {
var occurCount = emptySymCount ++ (tparams map (_ -> 0))
val tpe = deAlias(tpe0)
- for (t <- tpe) {
- t match {
- case TypeRef(_, sym, _) =>
- occurCount get sym match {
- case Some(count) => occurCount += (sym -> (count + 1))
- case None =>
- }
- case _ =>
+ def countOccs(tp: Type) =
+ for (t <- tp) {
+ t match {
+ case TypeRef(_, sym, _) =>
+ occurCount get sym match {
+ case Some(count) => occurCount += (sym -> (count + 1))
+ case None =>
+ }
+ case _ =>
+ }
}
- }
+ countOccs(tpe)
+ for (tparam <- tparams) countOccs(tparam.info)
val extrapolate = new TypeMap {
variance = 1
@@ -2862,9 +2867,8 @@ A type's typeSymbol should never be inspected directly.
val clonedSyms = origSyms map (_.cloneSymbol)
val clonedInfos = clonedSyms map (_.info.substSym(origSyms, clonedSyms))
val transformedInfos = clonedInfos mapConserve (this)
- List.map2(clonedSyms, transformedInfos) {
- ((newSym, newInfo) => newSym.setInfo(newInfo))
- }
+ (clonedSyms, transformedInfos).zipped map (_ setInfo _)
+
clonedSyms
}
}
@@ -3287,8 +3291,10 @@ A type's typeSymbol should never be inspected directly.
class SubstWildcardMap(from: List[Symbol]) extends TypeMap {
def apply(tp: Type): Type = try {
tp match {
- case TypeRef(_, sym, _) if (from contains sym) => WildcardType
- case _ => mapOver(tp)
+ case TypeRef(_, sym, _) if (from contains sym) =>
+ BoundedWildcardType(sym.info.bounds)
+ case _ =>
+ mapOver(tp)
}
} catch {
case ex: MalformedType =>
@@ -3628,7 +3634,7 @@ A type's typeSymbol should never be inspected directly.
*/
def lubDepth(ts: List[Type]) = {
var d = 0
- for (tp <- ts) d = Math.max(d, tp.baseTypeSeqDepth)
+ for (tp <- ts) d = math.max(d, tp.baseTypeSeqDepth)
d + LubGlbMargin
}
@@ -3664,7 +3670,7 @@ A type's typeSymbol should never be inspected directly.
case (TypeRef(pre1, sym1, args1), TypeRef(pre2, sym2, args2)) =>
assert(sym1 == sym2)
pre1 =:= pre2 &&
- !(List.map3(args1, args2, sym1.typeParams) {
+ ((args1, args2, sym1.typeParams).zipped forall {
(arg1, arg2, tparam) =>
//if (tparam.variance == 0 && !(arg1 =:= arg2)) Console.println("inconsistent: "+arg1+"!="+arg2)//DEBUG
if (tparam.variance == 0) arg1 =:= arg2
@@ -3675,7 +3681,7 @@ A type's typeSymbol should never be inspected directly.
// also: think what happens if there are embedded typevars?
if (tparam.variance < 0) arg1 <:< arg2 else arg2 <:< arg1
else true
- } contains false)
+ })
case (et: ExistentialType, _) =>
et.withTypeVars(isConsistent(_, tp2))
case (_, et: ExistentialType) =>
@@ -3846,12 +3852,12 @@ A type's typeSymbol should never be inspected directly.
case (PolyType(tparams1, res1), PolyType(tparams2, res2)) =>
// assert((tparams1 map (_.typeParams.length)) == (tparams2 map (_.typeParams.length)))
(tparams1.length == tparams2.length &&
- List.forall2(tparams1, tparams2)
+ (tparams1, tparams2).zipped.forall
((p1, p2) => p1.info =:= p2.info.substSym(tparams2, tparams1)) && //@M looks like it might suffer from same problem as #2210
res1 =:= res2.substSym(tparams2, tparams1))
case (ExistentialType(tparams1, res1), ExistentialType(tparams2, res2)) =>
(tparams1.length == tparams2.length &&
- List.forall2(tparams1, tparams2)
+ (tparams1, tparams2).zipped.forall
((p1, p2) => p1.info =:= p2.info.substSym(tparams2, tparams1)) && //@M looks like it might suffer from same problem as #2210
res1 =:= res2.substSym(tparams2, tparams1))
case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) =>
@@ -3892,8 +3898,7 @@ A type's typeSymbol should never be inspected directly.
* types?
*/
def isSameTypes(tps1: List[Type], tps2: List[Type]): Boolean =
- tps1.length == tps2.length &&
- List.forall2(tps1, tps2)((tp1, tp2) => tp1 =:= tp2)
+ tps1.length == tps2.length && ((tps1, tps2).zipped forall (_ =:= _))
private var pendingSubTypes = new collection.mutable.HashSet[SubTypePair]
private var basetypeRecursions: Int = 0
@@ -3975,16 +3980,16 @@ A type's typeSymbol should never be inspected directly.
tparams1.length == tparams2.length && {
if(tparams1.isEmpty) res1 <:< res2 // fast-path: monomorphic nullary method type
else if(tparams1.head.owner.isMethod) { // fast-path: polymorphic method type -- type params cannot be captured
- List.forall2(tparams1, tparams2)((p1, p2) =>
- p2.info.substSym(tparams2, tparams1) <:< p1.info) &&
+ ((tparams1, tparams2).zipped forall ((p1, p2) =>
+ p2.info.substSym(tparams2, tparams1) <:< p1.info)) &&
res1 <:< res2.substSym(tparams2, tparams1)
} else { // normalized higher-kinded type
//@M for an example of why we need to generate fresh symbols, see neg/tcpoly_ticket2101.scala
val tpsFresh = cloneSymbols(tparams1) // @M cloneSymbols(tparams2) should be equivalent -- TODO: check
- (List.forall2(tparams1, tparams2)((p1, p2) =>
- p2.info.substSym(tparams2, tpsFresh) <:< p1.info.substSym(tparams1, tpsFresh)) &&
- res1.substSym(tparams1, tpsFresh) <:< res2.substSym(tparams2, tpsFresh))
+ ((tparams1, tparams2).zipped forall ((p1, p2) =>
+ p2.info.substSym(tparams2, tpsFresh) <:< p1.info.substSym(tparams1, tpsFresh))) &&
+ res1.substSym(tparams1, tpsFresh) <:< res2.substSym(tparams2, tpsFresh)
//@M the forall in the previous test could be optimised to the following,
// but not worth the extra complexity since it only shaves 1s from quick.comp
@@ -4030,7 +4035,7 @@ A type's typeSymbol should never be inspected directly.
/** First try, on the right:
* - unwrap Annotated types, BoundedWildcardTypes,
- * - bind TypeVars on the right, if lhs is not Annotated nor BoundedWildcard
+ * - bind TypeVars on the right, if lhs is not Annotated nor BoundedWildcard
* - handle common cases for first-kind TypeRefs on both sides as a fast path.
*/
def firstTry = tp2 match {
@@ -4201,8 +4206,7 @@ A type's typeSymbol should never be inspected directly.
* of `tps2'?
*/
def isSubTypes(tps1: List[Type], tps2: List[Type]): Boolean =
- tps1.length == tps2.length &&
- List.forall2(tps1, tps2)((tp1, tp2) => tp1 <:< tp2)
+ tps1.length == tps2.length && ((tps1, tps2).zipped forall (_ <:< _))
/** Does type `tp' implement symbol `sym' with same or
* stronger type? Exact only if `sym' is a member of some
@@ -4263,11 +4267,11 @@ A type's typeSymbol should never be inspected directly.
/** Are `tps1' and `tps2' lists of pairwise equivalent types? */
private def matchingParams(tps1: List[Type], tps2: List[Type], tps1isJava: Boolean, tps2isJava: Boolean): Boolean =
- tps1.length == tps2.length &&
- List.forall2(tps1, tps2)((tp1, tp2) =>
+ (tps1.length == tps2.length) &&
+ ((tps1, tps2).zipped forall ((tp1, tp2) =>
(tp1 =:= tp2) ||
tps1isJava && tp2.typeSymbol == ObjectClass && tp1.typeSymbol == AnyClass ||
- tps2isJava && tp1.typeSymbol == ObjectClass && tp2.typeSymbol == AnyClass)
+ tps2isJava && tp1.typeSymbol == ObjectClass && tp2.typeSymbol == AnyClass))
/** like map2, but returns list `xs' itself - instead of a copy - if function
* `f' maps all elements to themselves.
@@ -4318,15 +4322,21 @@ A type's typeSymbol should never be inspected directly.
if (bound.typeSymbol != AnyClass)
tvar addHiBound bound.instantiateTypeParams(tparams, tvars)
for (tparam2 <- tparams)
- if (tparam2.info.bounds.lo =:= tparam.tpe) // declaration tp2 :> tparam implies ?tparam <: tp2
- tvar addHiBound tparam2.tpe.instantiateTypeParams(tparams, tvars)
+ tparam2.info.bounds.lo.dealias match {
+ case TypeRef(_, `tparam`, _) =>
+ tvar addHiBound tparam2.tpe.instantiateTypeParams(tparams, tvars)
+ case _ =>
+ }
} else {
if (bound.typeSymbol != NothingClass && bound.typeSymbol != tparam) {
tvar addLoBound bound.instantiateTypeParams(tparams, tvars)
}
for (tparam2 <- tparams)
- if (tparam2.info.bounds.hi =:= tparam.tpe)
- tvar addLoBound tparam2.tpe.instantiateTypeParams(tparams, tvars)
+ tparam2.info.bounds.hi.dealias match {
+ case TypeRef(_, `tparam`, _) =>
+ tvar addLoBound tparam2.tpe.instantiateTypeParams(tparams, tvars)
+ case _ =>
+ }
}
}
tvar.constr.inst = NoType // necessary because hibounds/lobounds may contain tvar
@@ -4342,6 +4352,7 @@ A type's typeSymbol should never be inspected directly.
}
}
+ // println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info)))
for ((tvar, (tparam, variance)) <- config)
solveOne(tvar, tparam, variance)
@@ -4357,7 +4368,7 @@ A type's typeSymbol should never be inspected directly.
*/
def isWithinBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): Boolean = {
val bounds = instantiatedBounds(pre, owner, tparams, targs)
- !(List.map2(bounds, targs)((bound, targ) => bound containsType targ) contains false)
+ (bounds, targs).zipped forall (_ containsType _)
}
def instantiatedBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): List[TypeBounds] =
@@ -4481,9 +4492,9 @@ A type's typeSymbol should never be inspected directly.
(DoubleClass.tpe /: ts) ((t1, t2) => if (isNumericSubType(t1, t2)) t1 else t2)
def isWeakSubType(tp1: Type, tp2: Type) =
- tp1 match {
+ tp1.deconst.normalize match {
case TypeRef(_, sym1, _) if isNumericValueClass(sym1) =>
- tp2 match {
+ tp2.deconst.normalize match {
case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
sym1 == sym2 || numericWidth(sym1) < numericWidth(sym2)
case tv2 @ TypeVar(_, _) =>
@@ -4492,7 +4503,7 @@ A type's typeSymbol should never be inspected directly.
isSubType(tp1, tp2)
}
case tv1 @ TypeVar(_, _) =>
- tp2 match {
+ tp2.deconst.normalize match {
case TypeRef(_, sym2, _) if isNumericValueClass(sym2) =>
tv1.registerBound(tp2, isLowerBound = false, numBound = true)
case _ =>
@@ -4515,7 +4526,7 @@ A type's typeSymbol should never be inspected directly.
case List(t) => t
case ts @ PolyType(tparams, _) :: _ =>
PolyType(
- List.map2(tparams, List.transpose(matchingBounds(ts, tparams)))
+ (tparams, matchingBounds(ts, tparams).transpose).zipped map
((tparam, bounds) => tparam.cloneSymbol.setInfo(glb(bounds, depth))),
lub0(matchingInstTypes(ts, tparams)))
case ts @ MethodType(params, _) :: rest =>
@@ -4543,8 +4554,7 @@ A type's typeSymbol should never be inspected directly.
if (syms contains NoSymbol) NoSymbol
else {
val symtypes =
- (List.map2(narrowts, syms)
- ((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType)));
+ (narrowts, syms).zipped map ((t, sym) => t.memberInfo(sym).substThis(t.typeSymbol, lubThisType))
if (proto.isTerm) // possible problem: owner of info is still the old one, instead of new refinement class
proto.cloneSymbol(lubRefined.typeSymbol).setInfo(lub(symtypes, decr(depth)))
else if (symtypes.tail forall (symtypes.head =:=))
@@ -4616,7 +4626,7 @@ A type's typeSymbol should never be inspected directly.
case List(t) => t
case ts @ PolyType(tparams, _) :: _ =>
PolyType(
- List.map2(tparams, List.transpose(matchingBounds(ts, tparams)))
+ (tparams, matchingBounds(ts, tparams).transpose).zipped map
((tparam, bounds) => tparam.cloneSymbol.setInfo(lub(bounds, depth))),
glb0(matchingInstTypes(ts, tparams)))
case ts @ MethodType(params, _) :: rest =>
@@ -4699,15 +4709,9 @@ A type's typeSymbol should never be inspected directly.
else NothingClass.tpe
}
}
- if (settings.debug.value) {
- println(indent + "glb of " + ts + " at depth "+depth)//debug
- indent = indent + " "
- }
+ // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG
val res = if (depth < 0) NothingClass.tpe else glb0(ts)
- if (settings.debug.value) {
- indent = indent.substring(0, indent.length() - 2)
- log(indent + "glb of " + ts + " is " + res)//debug
- }
+ // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG
if (ts exists (_.isNotNull)) res.notNull else res
}
@@ -4725,7 +4729,7 @@ A type's typeSymbol should never be inspected directly.
* of types.
*/
private def commonOwner(tps: List[Type]): Symbol = {
- if (settings.debug.value) log("computing common owner of types " + tps)//debug
+ // if (settings.debug.value) log("computing common owner of types " + tps)//DEBUG
commonOwnerMap.init
tps foreach { tp => commonOwnerMap.apply(tp); () }
commonOwnerMap.result
@@ -4745,7 +4749,7 @@ A type's typeSymbol should never be inspected directly.
val pre = if (variance == 1) lub(pres, depth) else glb(pres, depth)
val argss = tps map (_.typeArgs)
val capturedParams = new ListBuffer[Symbol]
- val args = List.map2(sym.typeParams, List.transpose(argss)) {
+ val args = (sym.typeParams, argss.transpose).zipped map {
(tparam, as) =>
if (depth == 0)
if (tparam.variance == variance) AnyClass.tpe
@@ -4792,7 +4796,7 @@ A type's typeSymbol should never be inspected directly.
*/
def addMember(thistp: Type, tp: Type, sym: Symbol) {
assert(sym != NoSymbol)
- if (settings.debug.value) log("add member " + sym+":"+sym.info+" to "+thistp)
+ // if (settings.debug.value) log("add member " + sym+":"+sym.info+" to "+thistp) //DEBUG
if (!(thistp specializes sym)) {
if (sym.isTerm)
for (alt <- tp.nonPrivateDecl(sym.name).alternatives)
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
index 3e0681ccdf..694fc9fe91 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala
@@ -34,8 +34,8 @@ abstract class ICodeReader extends ClassfileParser {
var method: IMethod = _ // the current IMethod
val OBJECT: TypeKind = REFERENCE(definitions.ObjectClass)
- val nothingName = newTermName("scala.runtime.Nothing$")
- val nullName = newTermName("scala.runtime.Null$")
+ val nothingName = newTermName(SCALA_NOTHING)
+ val nullName = newTermName(SCALA_NULL)
var isScalaModule = false
/** Read back bytecode for the given class symbol. It returns
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 84b08f1258..fb5e24b23d 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -72,12 +72,15 @@ abstract class Pickler extends SubComponent {
// private var boundSyms: List[Symbol] = Nil
+ private def isRootSym(sym: Symbol) =
+ sym.name.toTermName == rootName && sym.owner == rootOwner
+
/** Returns usually symbol's owner, but picks classfile root instead
* for existentially bound variables that have a non-local owner.
* Question: Should this be done for refinement class symbols as well?
*/
private def localizedOwner(sym: Symbol) =
- if (sym.isAbstractType && sym.hasFlag(EXISTENTIAL) && !isLocal(sym.owner)) root
+ if (isLocal(sym) && !isRootSym(sym) && !isLocal(sym.owner)) root
else sym.owner
/** Is root in symbol.owner*, or should it be treated as a local symbol
@@ -85,12 +88,12 @@ abstract class Pickler extends SubComponent {
* an existentially bound variable, or a higher-order type parameter.
*/
private def isLocal(sym: Symbol): Boolean =
- !sym.isPackageClass &&
- (sym.name.toTermName == rootName && sym.owner == rootOwner ||
- sym != NoSymbol && isLocal(sym.owner) ||
+ !sym.isPackageClass && sym != NoSymbol &&
+ (isRootSym(sym) ||
sym.isRefinementClass ||
sym.isAbstractType && sym.hasFlag(EXISTENTIAL) || // existential param
- (locals contains sym)) // higher-order type param
+ (locals contains sym) || // higher-order type param
+ isLocal(sym.owner))
private def staticAnnotations(annots: List[AnnotationInfo]) =
annots filter(ann =>
@@ -402,11 +405,8 @@ abstract class Pickler extends SubComponent {
}
}
- private def putTrees(trees: List[Tree]) =
- trees.foreach(putTree _)
-
- private def putTreess(treess: List[List[Tree]]) =
- treess.foreach(putTrees _)
+ private def putTrees(trees: List[Tree]) = trees foreach putTree
+ private def putTreess(treess: List[List[Tree]]) = treess foreach putTrees
/** only used when pickling trees, i.e. in an
* argument of some Annotation */
@@ -486,6 +486,10 @@ abstract class Pickler extends SubComponent {
*/
private def writeRef(ref: AnyRef) { writeNat(index(ref)) }
private def writeRefs(refs: List[AnyRef]) { refs foreach writeRef }
+ private def writeRefsWithLength(refs: List[AnyRef]) {
+ writeNat(refs.length)
+ writeRefs(refs)
+ }
/** Write name, owner, flags, and info of a symbol.
*/
@@ -623,9 +627,9 @@ abstract class Pickler extends SubComponent {
args foreach writeClassfileAnnotArg
ANNOTARGARRAY
- case (target: Symbol, children: List[Symbol]) =>
+ case (target: Symbol, children: List[_]) =>
writeRef(target)
- for (c <- children) writeRef(c.asInstanceOf[Symbol])
+ writeRefs(children.asInstanceOf[List[Symbol]])
CHILDREN
case EmptyTree =>
@@ -676,13 +680,9 @@ abstract class Pickler extends SubComponent {
writeRef(tree.symbol)
writeRef(mods)
writeRef(name)
- writeNat(tparams.length)
- writeRefs(tparams)
+ writeRefsWithLength(tparams)
writeNat(vparamss.length)
- for(vparams <- vparamss) {
- writeNat(vparams.length)
- writeRefs(vparams)
- }
+ vparamss foreach writeRefsWithLength
writeRef(tpt)
writeRef(rhs)
TREE
@@ -728,8 +728,7 @@ abstract class Pickler extends SubComponent {
writeNat(TEMPLATEtree)
writeRef(tree.tpe)
writeRef(tree.symbol)
- writeNat(parents.length)
- writeRefs(parents)
+ writeRefsWithLength(parents)
writeRef(self)
writeRefs(body)
TREE
@@ -783,7 +782,6 @@ abstract class Pickler extends SubComponent {
writeRefs(trees)
TREE
-
case tree@Function(vparams, body) =>
writeNat(FUNCTIONtree)
writeRef(tree.tpe)
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
index 909ecae77a..b6fdb28090 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/UnPickler.scala
@@ -132,7 +132,8 @@ abstract class UnPickler {
val savedIndex = readIndex
readIndex = index(i)
val tag = readByte().toInt
- if (tag != CLASSsym) assert(false)
+ assert(tag == CLASSsym)
+
readNat(); // read length
val result = readNameRef() == nme.REFINE_CLASS_NAME.toTypeName
readIndex = savedIndex
@@ -350,7 +351,7 @@ abstract class UnPickler {
val len = readNat()
(tag: @switch) match {
case LITERALunit => Constant(())
- case LITERALboolean => Constant(if (readLong(len) == 0L) false else true)
+ case LITERALboolean => Constant(readLong(len) != 0L)
case LITERALbyte => Constant(readLong(len).toByte)
case LITERALshort => Constant(readLong(len).toShort)
case LITERALchar => Constant(readLong(len).toChar)
@@ -390,17 +391,15 @@ abstract class UnPickler {
/** Read a ClassfileAnnotArg (argument to a classfile annotation)
*/
- private def readClassfileAnnotArg(): ClassfileAnnotArg = {
- val b = peekByte()
- if (peekByte() == ANNOTINFO) {
+ private def readClassfileAnnotArg(): ClassfileAnnotArg = peekByte() match {
+ case ANNOTINFO =>
NestedAnnotArg(readAnnotation())
- } else if (peekByte() == ANNOTARGARRAY) {
+ case ANNOTARGARRAY =>
readByte()
val end = readNat() + readIndex
ArrayAnnotArg(until(end, readClassfileAnnotArgRef).toArray)
- } else {
+ case _ =>
LiteralAnnotArg(readConstant())
- }
}
/** Read an AnnotationInfo. Not to be called directly, use
@@ -420,7 +419,6 @@ abstract class UnPickler {
AnnotationInfo(atp, args.toList, assocs.toList)
}
-
/** Read an annotation and as a side effect store it into
* the symbol it requests. Called at top-level, for all
* (symbol, annotInfo) entries. */
@@ -450,212 +448,184 @@ abstract class UnPickler {
errorBadSignature("tree expected (" + outerTag + ")")
val end = readNat() + readIndex
val tag = readByte()
- val tpe =
- if (tag != EMPTYtree)
- readTypeRef()
- else
- NoType
+ val tpe = if (tag == EMPTYtree) NoType else readTypeRef()
+
+ // Set by the three functions to follow. If symbol is non-null
+ // after the new tree 't' has been created, t has its Symbol
+ // set to symbol; and it always has its Type set to tpe.
+ var symbol: Symbol = null
+ var mods: Modifiers = null
+ var name: Name = null
+
+ /** Read a Symbol, Modifiers, and a Name */
+ def setSymModsName() {
+ symbol = readSymbolRef()
+ mods = readModifiersRef()
+ name = readNameRef()
+ }
+ /** Read a Symbol and a Name */
+ def setSymName() {
+ symbol = readSymbolRef()
+ name = readNameRef()
+ }
+ /** Read a Symbol */
+ def setSym() {
+ symbol = readSymbolRef()
+ }
- tag match {
+ val t = tag match {
case EMPTYtree =>
EmptyTree
case PACKAGEtree =>
- val symbol = readSymbolRef()
+ setSym()
+ // val discardedSymbol = readSymbolRef() // XXX is symbol intentionally not set?
val pid = readTreeRef().asInstanceOf[RefTree]
val stats = until(end, readTreeRef)
- PackageDef(pid, stats) setType tpe
+ PackageDef(pid, stats)
case CLASStree =>
- val symbol = readSymbolRef()
- val mods = readModifiersRef()
- val name = readNameRef()
+ setSymModsName()
val impl = readTemplateRef()
val tparams = until(end, readTypeDefRef)
- (ClassDef(mods, name, tparams, impl).
- setSymbol(symbol).
- setType(tpe))
+ ClassDef(mods, name, tparams, impl)
case MODULEtree =>
- val symbol = readSymbolRef()
- val mods = readModifiersRef()
- val name = readNameRef()
- val impl = readTemplateRef()
- (ModuleDef(mods, name, impl).
- setSymbol(symbol).
- setType(tpe))
+ setSymModsName()
+ ModuleDef(mods, name, readTemplateRef())
case VALDEFtree =>
- val symbol = readSymbolRef()
- val mods = readModifiersRef()
- val name = readNameRef()
+ setSymModsName()
val tpt = readTreeRef()
val rhs = readTreeRef()
-
- (ValDef(mods, name, tpt, rhs).
- setSymbol(symbol).
- setType(tpe))
+ ValDef(mods, name, tpt, rhs)
case DEFDEFtree =>
- val symbol = readSymbolRef()
- val mods = readModifiersRef()
- val name = readNameRef()
- val numTparams = readNat()
- val tparams = times(numTparams, readTypeDefRef)
- val numVparamss = readNat
- val vparamss = times(numVparamss, () => {
- val len = readNat()
- times(len, readValDefRef)})
+ setSymModsName()
+ val tparams = times(readNat(), readTypeDefRef)
+ val vparamss = times(readNat(), () => times(readNat(), readValDefRef))
val tpt = readTreeRef()
val rhs = readTreeRef()
- (DefDef(mods, name, tparams, vparamss, tpt, rhs).
- setSymbol(symbol).
- setType(tpe))
+ DefDef(mods, name, tparams, vparamss, tpt, rhs)
case TYPEDEFtree =>
- val symbol = readSymbolRef()
- val mods = readModifiersRef()
- val name = readNameRef()
+ setSymModsName()
val rhs = readTreeRef()
val tparams = until(end, readTypeDefRef)
-
- (TypeDef(mods, name, tparams, rhs).
- setSymbol(symbol).
- setType(tpe))
+ TypeDef(mods, name, tparams, rhs)
case LABELtree =>
- val symbol = readSymbolRef()
- val name = readNameRef()
+ setSymName()
val rhs = readTreeRef()
val params = until(end, readIdentRef)
- (LabelDef(name, params, rhs).
- setSymbol(symbol).
- setType(tpe))
+ LabelDef(name, params, rhs)
case IMPORTtree =>
- val symbol = readSymbolRef()
+ setSym()
val expr = readTreeRef()
val selectors = until(end, () => {
val from = readNameRef()
val to = readNameRef()
ImportSelector(from, -1, to, -1)
})
- (Import(expr, selectors).
- setSymbol(symbol).
- setType(tpe))
+
+ Import(expr, selectors)
case DOCDEFtree =>
val comment = readConstantRef match {
- case Constant(com: String) => com
- case other =>
- errorBadSignature("Document comment not a string (" + other + ")")
+ case Constant(com: String) => com
+ case other => errorBadSignature("Document comment not a string (" + other + ")")
}
val definition = readTreeRef()
- (DocDef(comment, definition).setType(tpe))
+ DocDef(comment, definition)
case TEMPLATEtree =>
- val symbol = readSymbolRef()
- val numParents = readNat()
- val parents = times(numParents, readTreeRef)
+ setSym()
+ val parents = times(readNat(), readTreeRef)
val self = readValDefRef()
val body = until(end, readTreeRef)
- (Template(parents, self, body).
- setSymbol(symbol).
- setType(tpe))
+ Template(parents, self, body)
case BLOCKtree =>
val expr = readTreeRef()
val stats = until(end, readTreeRef)
- Block(stats, expr).setType(tpe)
+ Block(stats, expr)
case CASEtree =>
val pat = readTreeRef()
val guard = readTreeRef()
val body = readTreeRef()
- CaseDef(pat, guard, body).setType(tpe)
+ CaseDef(pat, guard, body)
case ALTERNATIVEtree =>
- val trees = until(end, readTreeRef)
- Alternative(trees).setType(tpe)
+ Alternative(until(end, readTreeRef))
case STARtree =>
- val elem = readTreeRef()
- Star(elem).setType(tpe)
+ Star(readTreeRef())
case BINDtree =>
- val symbol = readSymbolRef()
- val name = readNameRef()
- val body = readTreeRef()
- (Bind(name, body).
- setSymbol(symbol).
- setType(tpe))
+ setSymName()
+ Bind(name, readTreeRef())
case UNAPPLYtree =>
val fun = readTreeRef()
val args = until(end, readTreeRef)
- (UnApply(fun: Tree, args).setType(tpe))
+ UnApply(fun, args)
case ARRAYVALUEtree =>
val elemtpt = readTreeRef()
val trees = until(end, readTreeRef)
- (ArrayValue(elemtpt, trees).setType(tpe))
+ ArrayValue(elemtpt, trees)
case FUNCTIONtree =>
- val symbol = readSymbolRef()
+ setSym()
val body = readTreeRef()
val vparams = until(end, readValDefRef)
- (Function(vparams, body).
- setSymbol(symbol).
- setType(tpe))
+ Function(vparams, body)
case ASSIGNtree =>
val lhs = readTreeRef()
val rhs = readTreeRef()
- Assign(lhs, rhs).setType(tpe)
+ Assign(lhs, rhs)
case IFtree =>
val cond = readTreeRef()
val thenp = readTreeRef()
val elsep = readTreeRef()
- If(cond, thenp, elsep).setType(tpe)
+ If(cond, thenp, elsep)
case MATCHtree =>
val selector = readTreeRef()
val cases = until(end, readCaseDefRef)
- Match(selector, cases).setType(tpe)
+ Match(selector, cases)
case RETURNtree =>
- val symbol = readSymbolRef()
- val expr = readTreeRef()
- (Return(expr).
- setSymbol(symbol).
- setType(tpe))
+ setSym()
+ Return(readTreeRef())
case TREtree =>
val block = readTreeRef()
val finalizer = readTreeRef()
val catches = until(end, readCaseDefRef)
- Try(block, catches, finalizer).setType(tpe)
+ Try(block, catches, finalizer)
case THROWtree =>
- val expr = readTreeRef()
- Throw(expr).setType(tpe)
+ Throw(readTreeRef())
case NEWtree =>
- val tpt = readTreeRef()
- New(tpt).setType(tpe)
+ New(readTreeRef())
case TYPEDtree =>
val expr = readTreeRef()
val tpt = readTreeRef()
- Typed(expr, tpt).setType(tpe)
+ Typed(expr, tpt)
case TYPEAPPLYtree =>
val fun = readTreeRef()
val args = until(end, readTreeRef)
- TypeApply(fun, args).setType(tpe)
+ TypeApply(fun, args)
case APPLYtree =>
val fun = readTreeRef()
@@ -664,79 +634,77 @@ abstract class UnPickler {
fun.setType(fun.symbol.info)
typer.infer.inferMethodAlternative(fun, Nil, args map (_.tpe), tpe)
}
- Apply(fun, args).setType(tpe)
+ Apply(fun, args)
case APPLYDYNAMICtree =>
- val symbol = readSymbolRef()
+ setSym()
val qual = readTreeRef()
val args = until(end, readTreeRef)
- ApplyDynamic(qual, args).setSymbol(symbol).setType(tpe)
+ ApplyDynamic(qual, args)
case SUPERtree =>
- val symbol = readSymbolRef()
+ setSym()
val qual = readNameRef()
val mix = readNameRef()
- Super(qual, mix).setSymbol(symbol).setType(tpe)
+ Super(qual, mix)
case THIStree =>
- val symbol = readSymbolRef()
- val qual = readNameRef()
- This(qual).setSymbol(symbol).setType(tpe)
+ setSym()
+ This(readNameRef())
case SELECTtree =>
- val symbol = readSymbolRef()
+ setSym()
val qualifier = readTreeRef()
val selector = readNameRef()
- Select(qualifier, selector).setSymbol(symbol).setType(tpe)
+ Select(qualifier, selector)
case IDENTtree =>
- val symbol = readSymbolRef()
- val name = readNameRef()
- Ident(name).setSymbol(symbol).setType(tpe)
+ setSymName()
+ Ident(name)
case LITERALtree =>
- val value = readConstantRef()
- Literal(value).setType(tpe)
+ Literal(readConstantRef())
case TYPEtree =>
- TypeTree().setType(tpe)
+ TypeTree()
case ANNOTATEDtree =>
val annot = readTreeRef()
val arg = readTreeRef()
- Annotated(annot, arg).setType(tpe)
+ Annotated(annot, arg)
case SINGLETONTYPEtree =>
- val ref = readTreeRef()
- SingletonTypeTree(ref).setType(tpe)
+ SingletonTypeTree(readTreeRef())
case SELECTFROMTYPEtree =>
val qualifier = readTreeRef()
val selector = readNameRef()
- SelectFromTypeTree(qualifier, selector).setType(tpe)
+ SelectFromTypeTree(qualifier, selector)
case COMPOUNDTYPEtree =>
- val templ = readTemplateRef()
- CompoundTypeTree(templ: Template).setType(tpe)
+ CompoundTypeTree(readTemplateRef())
case APPLIEDTYPEtree =>
val tpt = readTreeRef()
val args = until(end, readTreeRef)
- AppliedTypeTree(tpt, args).setType(tpe)
+ AppliedTypeTree(tpt, args)
case TYPEBOUNDStree =>
val lo = readTreeRef()
val hi = readTreeRef()
- TypeBoundsTree(lo, hi).setType(tpe)
+ TypeBoundsTree(lo, hi)
case EXISTENTIALTYPEtree =>
val tpt = readTreeRef()
val whereClauses = until(end, readTreeRef)
- ExistentialTypeTree(tpt, whereClauses).setType(tpe)
+ ExistentialTypeTree(tpt, whereClauses)
case _ =>
errorBadSignature("unknown tree type (" + tag + ")")
}
+
+ if (symbol == null) t setType tpe
+ else t setSymbol symbol setType tpe
}
def readModifiers(): Modifiers = {
@@ -799,7 +767,6 @@ abstract class UnPickler {
errorBadSignature("expected an TypeDef (" + other + ")")
}
-
private def errorBadSignature(msg: String) =
throw new RuntimeException("malformed Scala signature of " + classRoot.name + " at " + readIndex + "; " + msg)
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index dafec5e1c3..9bd8337106 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -1,5 +1,5 @@
/* NSC -- new Scala compiler
- * Copyright 2005-2009 LAMP/EPFL
+ * Copyright 2005-2009 LAMP/EPFL
* @author Martin Odersky
*/
// $Id$
@@ -28,6 +28,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
private val newInits = new ListBuffer[Tree]
private val classConstantMeth = new HashMap[String, Symbol]
+ private val symbolStaticFields = new HashMap[String, (Symbol, Tree, Tree)]
private var localTyper: analyzer.Typer = null
@@ -52,7 +53,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
localTyper typed { atPos(pos)(tree) }
private def classConstantMethod(pos: Position, sig: String): Symbol =
- (classConstantMeth get sig) getOrElse {
+ classConstantMeth.getOrElseUpdate(sig, {
val forName = getMember(ClassClass.linkedModuleOfClass, nme.forName)
val owner = currentOwner.enclClass
@@ -69,9 +70,8 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
)
newDefs.append(cdef, mdef)
- classConstantMeth.update(sig, meth)
meth
- }
+ })
override def transformUnit(unit: CompilationUnit) =
unit.body = transform(unit.body)
@@ -79,7 +79,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/** A value class is defined to be only Java-compatible values: unit is
* not part of it, as opposed to isValueClass in definitions. scala.Int is
* a value class, java.lang.Integer is not. */
- def isValueClass(sym: Symbol) = boxedClass contains sym
+ def isJavaValueClass(sym: Symbol) = boxedClass contains sym
override def transform(tree: Tree): Tree = tree match {
@@ -349,13 +349,16 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
val qualSym = qual.tpe.typeSymbol
val methSym = ad.symbol
+ def args = qual :: params
+
+ /** Normal non-Array call */
def defaultCall = {
// reflective method call machinery
- val invokeName = MethodClass.tpe member nme.invoke_ // reflect.Method.invoke(...)
- def cache = REF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes)) // cache Symbol
- def lookup = Apply(cache, List(qual GETCLASS)) // get Method object from cache
- def args = ArrayValue(TypeTree(ObjectClass.tpe), params) // args for invocation
- def invocation = (lookup DOT invokeName)(qual, args) // .invoke(qual, ...)
+ val invokeName = MethodClass.tpe member nme.invoke_ // reflect.Method.invoke(...)
+ def cache = REF(reflectiveMethodCache(ad.symbol.name.toString, paramTypes)) // cache Symbol
+ def lookup = Apply(cache, List(qual GETCLASS)) // get Method object from cache
+ def invokeArgs = ArrayValue(TypeTree(ObjectClass.tpe), params) // args for invocation
+ def invocation = (lookup DOT invokeName)(qual, invokeArgs) // .invoke(qual, ...)
// exception catching machinery
val invokeExc = currentOwner.newValue(ad.pos, mkTerm()) setInfo InvocationTargetExceptionClass.tpe
@@ -363,50 +366,43 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
def catchBody = Throw(Apply(Select(Ident(invokeExc), nme.getCause), Nil))
// try { method.invoke } catch { case e: InvocationTargetExceptionClass => throw e.getCause() }
- TRY (invocation) CATCH { CASE (catchVar) ==> catchBody } ENDTRY
+ fixResult( TRY (invocation) CATCH { CASE (catchVar) ==> catchBody } ENDTRY )
}
- def useValueOperator = {
- def isBoxed(qualSym: Symbol): Boolean =
- (qualSym isNonBottomSubClass BoxedNumberClass) ||
- (!forMSIL && (qualSym isNonBottomSubClass BoxedCharacterClass))
- ((qualSym == definitions.ObjectClass) || isBoxed(qualSym)) && // may be a boxed value class
+
+ def useValueOperator =
+ isMaybeBoxed(qualSym) && // may be a boxed value class
(getPrimitiveReplacementForStructuralCall isDefinedAt methSym.name) &&
- ((resType :: paramTypes) forall (x => isValueClass(x.typeSymbol))) // issue #1110
- }
- def useArrayOperator =
- ((qualSym == definitions.ObjectClass) || (qualSym == definitions.ArrayClass)) &&
- ((methSym.name == nme.length) || (methSym.name == nme.update) || (methSym.name == nme.apply))
- val callCode = if (useValueOperator) {
- val (operator, test) = getPrimitiveReplacementForStructuralCall(methSym.name)
- def args = qual :: params
- fixResult((IF (test) THEN (REF(operator) APPLY args) ELSE defaultCall))
+ ((resType :: paramTypes) forall (x => isJavaValueClass(x.typeSymbol))) // issue #1110
+
+ def isArrayMethodSignature =
+ (methSym.name == nme.length && params.isEmpty) ||
+ (methSym.name == nme.update && (structResType.typeSymbol eq UnitClass)) ||
+ (methSym.name == nme.apply && params.size == 1)
+
+ def isDefinitelyArray = isArrayMethodSignature && (qualSym == ArrayClass)
+ def isMaybeArray = isArrayMethodSignature && (qualSym == ObjectClass) // precondition: !isDefinitelyArray
+
+ def genArrayCall = methSym.name match {
+ case nme.length => REF(boxMethod(IntClass)) APPLY (REF(arrayLengthMethod) APPLY args)
+ case nme.update => REF(arrayUpdateMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1)), args(2))
+ case nme.apply => REF(arrayApplyMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1)))
}
- else if (useArrayOperator) {
- val args = qual :: params
- val operatorCall = // what follows is incredibly ugly. this dirty fix should be deal with at the next cleanup of cleanup.
- if (methSym.name == nme.length)
- (REF(boxMethod(IntClass)) APPLY (REF(arrayLengthMethod) APPLY args))
- else if (methSym.name == nme.update)
- (REF(arrayUpdateMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1)), args(2)))
- else
- (REF(arrayApplyMethod) APPLY List(args(0), (REF(unboxMethod(IntClass)) APPLY args(1))))
- (IF (qual IS_OBJ arrayType(ObjectClass.tpe)) THEN operatorCall
- ELSE (IF (qual IS_OBJ arrayType(ByteClass.tpe)) THEN operatorCall
- ELSE (IF (qual IS_OBJ arrayType(ShortClass.tpe)) THEN operatorCall
- ELSE (IF (qual IS_OBJ arrayType(IntClass.tpe)) THEN operatorCall
- ELSE (IF (qual IS_OBJ arrayType(LongClass.tpe)) THEN operatorCall
- ELSE (IF (qual IS_OBJ arrayType(FloatClass.tpe)) THEN operatorCall
- ELSE (IF (qual IS_OBJ arrayType(DoubleClass.tpe)) THEN operatorCall
- ELSE (IF (qual IS_OBJ arrayType(CharClass.tpe)) THEN operatorCall
- ELSE (IF (qual IS_OBJ arrayType(BooleanClass.tpe)) THEN operatorCall
- ELSE fixResult(defaultCall)
- )))))))))
+ def genArrayTest = {
+ def oneTest(s: Symbol) = qual IS_OBJ arrayType(s.tpe)
+ OR((ObjectClass :: ScalaValueClasses filterNot (_ eq UnitClass)) map oneTest: _*)
}
- else fixResult(defaultCall)
- localTyper.typed(callCode)
- }
- def getClass(q: Tree): Tree = (q DOT nme.getClass_)()
+ val callCode =
+ if (useValueOperator) {
+ val (operator, test) = getPrimitiveReplacementForStructuralCall(methSym.name)
+ IF (test) THEN fixResult(REF(operator) APPLY args) ELSE defaultCall
+ }
+ else if (isDefinitelyArray) genArrayCall
+ else if (isMaybeArray) IF (genArrayTest) THEN genArrayCall ELSE defaultCall
+ else defaultCall
+
+ localTyper typed callCode
+ }
if (settings.refinementMethodDispatch.value == "invoke-dynamic") {
/* val guardCallSite: Tree = {
@@ -452,7 +448,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
val sym = currentOwner.newValue(ad.pos, mkTerm("qual")) setInfo qual0.tpe
qual = REF(sym)
- def structResType = if (isValueClass(resType.typeSymbol)) boxedClass(resType.typeSymbol).tpe else resType
+ def structResType = if (isJavaValueClass(resType.typeSymbol)) boxedClass(resType.typeSymbol).tpe else resType
BLOCK(
VAL(sym) === qual0,
callAsReflective(mparams map (_.tpe), resType, structResType)
@@ -498,7 +494,7 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
* constructor. */
case Template(parents, self, body) =>
localTyper = typer.atOwner(tree, currentClass)
- if (!forMSIL) {
+ val transformedTemplate = if (!forMSIL) {
classConstantMeth.clear
newDefs.clear
newInits.clear
@@ -522,11 +518,12 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
treeCopy.Template(tree, parents, self, newBody)
}
else super.transform(tree)
+ applySymbolFieldInitsToStaticCtor(transformedTemplate.asInstanceOf[Template]) // postprocess to include static ctors
case Literal(c) if (c.tag == ClassTag) && !forMSIL=>
val tpe = c.typeValue
typedWithPos(tree.pos) {
- if (isValueClass(tpe.typeSymbol) || tpe.typeSymbol == definitions.UnitClass) {
+ if (isValueClass(tpe.typeSymbol)) {
if (tpe.typeSymbol == UnitClass)
Select(REF(BoxedUnit_TYPE), BoxedUnit_TYPE)
else
@@ -566,9 +563,124 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
}
super.transform(tree)
+ /*
+ * This transformation should identify Scala symbol invocations in the tree and replace them
+ * with references to a static member. Also, whenever a class has at least a single symbol invocation
+ * somewhere in its methods, a new static member should be created and initialized for that symbol.
+ * For instance, say we have a Scala class:
+ *
+ * class Cls {
+ * // ...
+ * def someSymbol = 'symbolic
+ * // ...
+ * }
+ *
+ * After transformation, this class looks like this:
+ *
+ * class Cls {
+ * private "static" val <some_name>$symbolic = Symbol("symbolic")
+ * // ...
+ * def someSymbol = <some_name>$symbolic
+ * // ...
+ * }
+ *
+ * The reasoning behind this transformation is the following. Symbols get interned: they are stored
+ * in a global map which is protected with a lock, which is what makes equality checks quick. But
+ * calling Symbol.apply, although it does return a unique symbol, accesses that locked object,
+ * making symbol access slow. To solve this, the unique symbol from the global symbol map in Symbol
+ * is accessed only once during class loading, and after that the unique symbol lives in the static
+ * member. Hence, it is cheap both to reach the unique symbol and to do equality checks on it.
+ *
+ * And, finally, be advised: Scala symbol literals and the compiler's Symbol class have little
+ * in common.
+ */
+ case symapp @ Apply(Select(Select(a @ Ident(nme.scala_), b @ nme.Symbol), nme.apply),
+ List(Literal(Constant(symname: String)))) =>
+ // add the symbol name to a map if it's not there already
+ val rhs = gen.mkCast(Apply(gen.scalaDot(nme.Symbol), List(Literal(Constant(symname)))), symbolType)
+ val (staticFieldSym, sfdef, sfinit) = getSymbolStaticField(symapp.pos, symname, rhs, symapp)
+
+ // create a reference to a static field
+ val ntree = typedWithPos(symapp.pos)(REF(staticFieldSym))
+
+ super.transform(ntree)
case _ =>
super.transform(tree)
}
+
+ /* Returns the symbol and the trees for the static field interning a reference to the symbol 'symname'.
+ * If it doesn't exist yet, i.e. the symbol is encountered for the first time,
+ * it creates a new static field definition and initialization and returns them.
+ */
+ private def getSymbolStaticField(pos: Position, symname: String, rhs: Tree, tree: Tree): (Symbol, Tree, Tree) =
+ symbolStaticFields.getOrElseUpdate(symname, {
+ val freshname = unit.fresh.newName(pos, "symbol$")
+ val theTyper = typer.atOwner(tree, currentClass)
+
+ // create a symbol for the static field
+ val stfieldSym = currentClass.newVariable(pos, freshname)
+ .setFlag(PRIVATE | STATIC | SYNTHETIC | FINAL)
+ .setInfo(symbolType)
+ currentClass.info.decls enter stfieldSym
+
+ // create field definition and initialization
+ val stfieldDef = theTyper.typed { atPos(pos)(VAL(stfieldSym) === rhs) }
+ val stfieldInit = theTyper.typed { atPos(pos)(REF(stfieldSym) === rhs) }
+
+ // add field definition to new defs
+ newDefs append stfieldDef
+
+ (stfieldSym, stfieldDef, stfieldInit)
+ })
+
+ /* Returns a list of all the initializer trees for the symbol static fields, and clears the list. */
+ private def flushSymbolFieldsInitializations: List[Tree] = {
+ val fields = (symbolStaticFields.valuesIterator map (_._3)).toList
+ symbolStaticFields.clear
+ fields
+ }
+
+ /* finds the static ctor DefDef tree within the template if it exists. */
+ def findStaticCtor(template: Template): Option[Tree] =
+ template.body find {
+ case defdef @ DefDef(mods, nme.CONSTRUCTOR, tparam, vparam, tp, rhs) => defdef.symbol hasFlag STATIC
+ case _ => false
+ }
+
+ /* Changes the template for the class so that it contains a static constructor with the symbol field
+ * initializations, augmenting an existing static ctor if one is already present.
+ */
+ def applySymbolFieldInitsToStaticCtor(template: Template): Template = {
+ val symbolInitTrees = flushSymbolFieldsInitializations
+ if (symbolInitTrees.isEmpty) template
+ else {
+ val theTyper = typer.atOwner(template, currentClass)
+ val newCtor = findStaticCtor(template) match {
+ // in case there already were static ctors - augment existing ones
+ // currently, however, static ctors aren't being generated anywhere else
+ case Some(ctor @ DefDef(mods, name, tparams, vparamss, tpt, rhs)) =>
+ // modify existing static ctor
+ val newBlock = rhs match {
+ case block @ Block(stats, expr) =>
+ // need to add inits to existing block
+ treeCopy.Block(block, symbolInitTrees ::: stats, expr)
+ case term: TermTree =>
+ // need to create a new block with inits and the old term
+ treeCopy.Block(term, symbolInitTrees, term)
+ }
+ treeCopy.DefDef(ctor, mods, name, tparams, vparamss, tpt, newBlock)
+ case None =>
+ // create new static ctor
+ val staticCtorSym = currentClass.newConstructor(template.pos)
+ .setFlag(STATIC)
+ .setInfo(UnitClass.tpe)
+ val rhs = Block(symbolInitTrees, Literal(()))
+ val staticCtorTree = DefDef(staticCtorSym, rhs)
+ theTyper.typed { atPos(template.pos)(staticCtorTree) }
+ }
+ treeCopy.Template(template, template.parents, template.self, newCtor :: template.body)
+ }
+ }
} // CleanUpTransformer
}
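
To make the motivation in the CleanUp comment above concrete, here is a small, hypothetical sketch of the interning trade-off; SymCache is an illustrative stand-in and is not scala.Symbol's real implementation.

// Hypothetical sketch only -- NOT scala.Symbol's implementation.
object SymCache {
  private val cache = scala.collection.mutable.Map[String, Symbol]()
  // every lookup takes the lock, so calling this on each symbol access is slow
  def intern(name: String): Symbol = cache.synchronized {
    cache.getOrElseUpdate(name, Symbol(name))
  }
}

class Cls {
  // what the transformation effectively arranges: pay the locked lookup once,
  // at class initialization, then read a plain (static, in bytecode) field
  private val someSymbol$cached: Symbol = SymCache.intern("symbolic")
  def someSymbol: Symbol = someSymbol$cached
}
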
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 7c57b2e16f..af435c8c83 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -75,6 +75,13 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
case _ => 0
}
+ // @M #2585 when generating a java generic signature that includes a selection of an inner class p.I (p = `pre`, I = `cls`),
+ // we must rewrite it to p'.I, where p' refers to the class that directly defines the nested class I
+ // see also #2585 marker in javaSig: there, type arguments must be included (use pre.baseType(cls.owner))
+ // requires cls.isClass
+ @inline private def rebindInnerClass(pre: Type, cls: Symbol): Type =
+ if(cls.owner.isClass) cls.owner.tpe else pre // why not cls.isNestedClass?
+
/** <p>
* The erasure <code>|T|</code> of a type <code>T</code>. This is:
* </p>
@@ -90,7 +97,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
* - For a typeref scala.Array+[T] where T is not an abstract type, scala.Array+[|T|].
* - For a typeref scala.Any or scala.AnyVal, java.lang.Object.
* - For a typeref scala.Unit, scala.runtime.BoxedUnit.
- * - For a typeref P.C[Ts] where C refers to a class, |P|.C.
+ * - For a typeref P.C[Ts] where C refers to a class, |P|.C. (Where P is first rebound to the class that directly defines C.)
* - For a typeref P.C[Ts] where C refers to an alias type, the erasure of C's alias.
* - For a typeref P.C[Ts] where C refers to an abstract type, the
* erasure of C's upper bound.
@@ -122,9 +129,8 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
else typeRef(apply(pre), sym, args map this)
else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass) erasedTypeRef(ObjectClass)
else if (sym == UnitClass) erasedTypeRef(BoxedUnitClass)
- else if (sym.isClass)
- typeRef(apply(if (sym.owner.isClass) sym.owner.tpe else pre), sym, List())
- else apply(sym.info)
+ else if (sym.isClass) typeRef(apply(rebindInnerClass(pre, sym)), sym, List()) // #2585
+ else apply(sym.info) // alias type or abstract type
case PolyType(tparams, restpe) =>
apply(restpe)
case ExistentialType(tparams, restpe) =>
@@ -164,6 +170,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
case TypeRef(pre, sym, args) =>
if (sym == ArrayClass) args foreach traverse
else if (sym.isTypeParameterOrSkolem || sym.isExistential || !args.isEmpty) result = true
+ else if (sym.isClass) traverse(rebindInnerClass(pre, sym)) // #2585
else if (!sym.owner.isPackageClass) traverse(pre)
case PolyType(_, _) | ExistentialType(_, _) =>
result = true
@@ -243,8 +250,9 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
tagOfClass(sym).toString
else if (sym.isClass)
{
- if (needsJavaSig(pre)) {
- val s = jsig(pre)
+ val preRebound = pre.baseType(sym.owner) // #2585
+ if (needsJavaSig(preRebound)) {
+ val s = jsig(preRebound)
if (s.charAt(0) == 'L') s.substring(0, s.length - 1) + classSigSuffix
else classSig
} else classSig
@@ -277,6 +285,9 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
(parents map jsig).mkString
case AnnotatedType(_, atp, _) =>
jsig(atp)
+ case BoundedWildcardType(bounds) =>
+ println("something's wrong: "+sym+":"+sym.tpe+" has a bounded wildcard type")
+ jsig(bounds.hi)
case _ =>
val etp = erasure(tp)
if (etp eq tp) throw new UnknownSig
@@ -417,17 +428,6 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
})
}
- /** generate ScalaRuntime.boxArray(tree)
- * !!! todo: optimize this in case the runtime type is known
- */
- private def boxArray(tree: Tree): Tree = tree match {
- case LabelDef(name, params, rhs) =>
- val rhs1 = boxArray(rhs)
- treeCopy.LabelDef(tree, name, params, rhs1) setType rhs1.tpe
- case _ =>
- typedPos(tree.pos) { gen.mkRuntimeCall(nme.boxArray, List(tree)) }
- }
-
/** Unbox <code>tree</code> of boxed type to expected type <code>pt</code>.
*
* @param tree the given tree
@@ -914,7 +914,7 @@ abstract class Erasure extends AddInterfaces with typechecker.Analyzer with ast.
List()),
isArrayTest(qual1()))
}
- }
+ }
case TypeApply(fun, args) if (fun.symbol.owner != AnyClass &&
fun.symbol != Object_asInstanceOf &&
fun.symbol != Object_isInstanceOf) =>
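
The #2585 fix rebinds the prefix of a nested-class reference to the class that actually declares the nested class before erasing it or emitting a Java generic signature; otherwise the signature can name a prefix under which javac cannot resolve the inner class. A small source-level illustration (class names invented for the example):

    class Outer { class Inner }
    class Sub extends Outer

    object RebindDemo {
      val s = new Sub
      // The source type of `i` uses the prefix `s` (of type Sub), but Inner is
      // declared in Outer. Erasure and javaSig rebind the prefix to Inner's
      // defining class, so the emitted signature refers to Outer.Inner
      // (with Outer's type arguments, if any) rather than Sub.Inner.
      val i: s.Inner = new s.Inner
    }
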
diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
index 16c5f8754d..b2f9489480 100644
--- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala
+++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala
@@ -94,10 +94,11 @@ abstract class LazyVals extends Transform with ast.TreeDSL {
}
val bmps = bitmaps(methSym) map (ValDef(_, ZERO))
+ def isMatch(params: List[Ident]) = (params.tail, methSym.tpe.paramTypes).zipped forall (_.tpe == _)
+
if (bmps.isEmpty) rhs else rhs match {
case Block(assign, l @ LabelDef(name, params, rhs1))
- if (name.toString.equals("_" + methSym.name)
- && List.forall2(params.tail, methSym.tpe.paramTypes) { (ident, tpe) => ident.tpe == tpe }) =>
+ if name.toString == ("_" + methSym.name) && isMatch(params) =>
val sym = l.symbol
Block(assign, treeCopy.LabelDef(l, name, params, typed(prependStats(bmps, rhs1))))
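
Several hunks in this commit, including the isMatch helper above, replace the deprecated List.forall2/List.map2 combinators with the 2.8 `zipped` view over a pair of collections. A tiny self-contained illustration of the idiom:

    object ZippedDemo {
      def main(args: Array[String]) {
        val params = List("x", "y")
        val types  = List("Int", "String")

        // forall over corresponding elements, replacing List.forall2
        val allMatch = (params, types).zipped forall ((p, t) => p.nonEmpty && t.nonEmpty)

        // map over corresponding elements, replacing List.map2
        val rendered = (params, types).zipped map ((p, t) => p + ": " + t)

        println(allMatch)  // true
        println(rendered)  // List(x: Int, y: String)
      }
    }
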
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index 0daad7f2a7..fa436a4305 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -671,7 +671,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
val bitmapSym = bitmapFor(clazz, offset)
val mask = LIT(1 << (offset % FLAGS_PER_WORD))
def cond = mkTest(clazz, mask, bitmapSym, true)
- val nulls = (lazyValNullables(lzyVal).toList.sort(_.id < _.id) map nullify)
+ val nulls = (lazyValNullables(lzyVal).toList sortBy (_.id) map nullify)
def syncBody = init ::: List(mkSetFlag(clazz, offset), UNIT)
log("nulling fields inside " + lzyVal + ": " + nulls)
diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
index 93cb5baefa..3e4cbf8f24 100644
--- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala
+++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala
@@ -344,12 +344,7 @@ abstract class TailCalls extends Transform
typed(t)
}
- private def isSameTypes(ts1: List[Symbol], ts2: List[Symbol]): Boolean = {
- def isSameType(t1: Symbol, t2: Symbol) = {
- t1 == t2
- }
- List.forall2(ts1, ts2)(isSameType)
- }
+ private def isSameTypes(ts1: List[Symbol], ts2: List[Symbol]) = ts1 sameElements ts2
/** Returns <code>true</code> if the fun tree refers to the same method as
* the one saved in <code>ctx</code>.
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index 67c963acf5..5409a0fe45 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -297,11 +297,11 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
* new $anon()
*
* transform a function node (x => body) of type PartialFunction[T, R] where
- * body = x match { case P_i if G_i => E_i }_i=1..n
+ * body = expr match { case P_i if G_i => E_i }_i=1..n
* to:
*
* class $anon() extends Object() with PartialFunction[T, R] with ScalaObject {
- * def apply(x: T): R = (x: @unchecked) match {
+ * def apply(x: T): R = (expr: @unchecked) match {
* { case P_i if G_i => E_i }_i=1..n
* def isDefinedAt(x: T): boolean = (x: @unchecked) match {
* case P_1 if G_1 => true
@@ -361,8 +361,8 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
BooleanClass.tpe))
anonClass.info.decls enter isDefinedAtMethod
def idbody(idparam: Symbol) = fun.body match {
- case Match(_, cases) =>
- val substParam = new TreeSymSubstituter(List(fun.vparams.head.symbol), List(idparam));
+ case Match(selector, cases) =>
+ val substParam = new TreeSymSubstituter(List(fun.vparams.head.symbol), List(idparam))
def transformCase(cdef: CaseDef): CaseDef =
substParam(
resetLocalAttrs(
@@ -370,7 +370,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
if (cases exists treeInfo.isDefaultCase) Literal(true)
else
Match(
- Ident(idparam),
+ substParam(resetLocalAttrs(selector.duplicate)),
(cases map transformCase) :::
List(CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(false))))
}
@@ -467,7 +467,7 @@ abstract class UnCurry extends InfoTransform with TypingTransformers {
case _ =>
args
}
- List.map2(formals, args1) { (formal, arg) =>
+ (formals, args1).zipped map { (formal, arg) =>
if (formal.typeSymbol != ByNameParamClass) {
arg
} else if (isByNameRef(arg)) {
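
The UnCurry hunks above change how isDefinedAt is synthesized for a partial-function literal: rather than re-matching on the fresh parameter, the generated match reuses a duplicate of the original selector with the parameter substituted in. The expansion below is only a source-level sketch of the overall shape (the real transform builds an anonymous class directly as trees):

    object PartialFunSketch {
      val pf: PartialFunction[Any, String] = {
        case i: Int    => "int: " + i
        case s: String => "string: " + s
      }

      // Conceptual expansion of the literal above: apply and isDefinedAt both
      // match on the (substituted) selector; isDefinedAt adds a default case.
      class AnonSketch extends PartialFunction[Any, String] {
        def apply(x: Any): String = (x: @unchecked) match {
          case i: Int    => "int: " + i
          case s: String => "string: " + s
        }
        def isDefinedAt(x: Any): Boolean = (x: @unchecked) match {
          case _: Int    => true
          case _: String => true
          case _         => false
        }
      }
    }
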
diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
index ba328b9f48..8d9cee7f03 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala
@@ -154,7 +154,7 @@ abstract class ConstantFolder {
private def foldBinop(op: Name, x: Constant, y: Constant): Constant = {
val optag =
if (x.tag == y.tag) x.tag
- else if (isNumeric(x.tag) && isNumeric(y.tag)) Math.max(x.tag, y.tag)
+ else if (isNumeric(x.tag) && isNumeric(y.tag)) math.max(x.tag, y.tag)
else NoTag
try optag match {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index b3e4666f35..596c11bcac 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -481,8 +481,7 @@ trait Contexts { self: Analyzer =>
}
impls
}
- if (settings.debug.value)
- log("collect implicit imports " + imp + "=" + collect(imp.tree.selectors))//debug
+ //if (settings.debug.value) log("collect implicit imports " + imp + "=" + collect(imp.tree.selectors))//DEBUG
collect(imp.tree.selectors)
}
@@ -499,17 +498,14 @@ trait Contexts { self: Analyzer =>
val newImplicits: List[ImplicitInfo] =
if (owner != nextOuter.owner && owner.isClass && !owner.isPackageClass && !inSelfSuperCall) {
if (!owner.isInitialized) return nextOuter.implicitss
- if (settings.debug.value)
- log("collect member implicits " + owner + ", implicit members = " +
- owner.thisType.implicitMembers)//debug
+ // if (settings.debug.value) log("collect member implicits " + owner + ", implicit members = " + owner.thisType.implicitMembers)//DEBUG
val savedEnclClass = enclClass
this.enclClass = this
val res = collectImplicits(owner.thisType.implicitMembers, owner.thisType)
this.enclClass = savedEnclClass
res
} else if (scope != nextOuter.scope && !owner.isPackageClass) {
- if (settings.debug.value)
- log("collect local implicits " + scope.toList)//debug
+ if (settings.debug.value) log("collect local implicits " + scope.toList)//DEBUG
collectImplicits(scope.toList, NoPrefix)
} else if (imports != nextOuter.imports) {
assert(imports.tail == nextOuter.imports)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index c9a2a377c1..796d6f8134 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -241,6 +241,9 @@ abstract class Duplicators extends Analyzer {
super.typed(atPos(tree.pos)(tree1))
case _ =>
+ if (tree.hasSymbol && tree.symbol != NoSymbol && (tree.symbol.owner == definitions.AnyClass)) {
+ tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any
+ }
tree.tpe = null
super.typed(tree, mode, pt)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
index 7d75994ef3..375dd5a4a5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala
@@ -20,12 +20,13 @@ trait EtaExpansion { self: Analyzer =>
import global._
object etaExpansion {
+ private def isMatch(vparam: ValDef, arg: Tree) = arg match {
+ case Ident(name) => vparam.name == name
+ case _ => false
+ }
+
def unapply(tree: Tree): Option[(List[ValDef], Tree, List[Tree])] = tree match {
- case Function(vparams, Apply(fn, args))
- if (List.forall2(vparams, args) {
- case (vparam, Ident(name)) => vparam.name == name
- case _ => false
- }) =>
+ case Function(vparams, Apply(fn, args)) if (vparams, args).zipped forall isMatch =>
Some((vparams, fn, args))
case _ =>
None
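
For context, the etaExpansion extractor refactored here recognizes function literals that merely forward their parameters to another application, i.e. the explicit spelling of an eta-expansion:

    object EtaDemo {
      def add(x: Int, y: Int) = x + y

      def main(args: Array[String]) {
        // The shape the extractor matches: (x, y) => add(x, y)
        val f: (Int, Int) => Int = (x, y) => add(x, y)
        // ...which is equivalent to the shorthand eta-expansion:
        val g: (Int, Int) => Int = add _
        println(f(1, 2) == g(1, 2))  // true
      }
    }
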
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index 6bbacfa311..f577042024 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -368,21 +368,28 @@ self: Analyzer =>
* correspond to the HasMethodMatching type,
* or otherwise if `tp' is compatible with `pt'.
*/
- def matchesPt(tp: Type, pt: Type, undet: List[Symbol]) =
- isCompatible(tp, pt) || {
+ def matchesPt(tp: Type, pt: Type, undet: List[Symbol]) = {
+ isCompatible(tp, pt) ||
+ isView && {
pt match {
- case Function1(arg, HasMethodMatching(name, argtpes, restpe)) =>
+ case Function1(arg, res) =>
normalize(tp) match {
case Function1(arg1, res1) =>
- (arg <:< arg1) &&
- (res1.member(name) filter (m => isApplicableSafe(undet, m.tpe, argtpes, restpe))) != NoSymbol
- case _ =>
- false
+ (arg.deconst weak_<:< arg1) && {
+ res match {
+ case HasMethodMatching(name, argtpes, restpe) =>
+ (res1.member(name) filter (m =>
+ isApplicableSafe(undet, m.tpe, argtpes, restpe))) != NoSymbol
+ case _ =>
+ res1 <:< res
+ }
+ }
+ case _ => false
}
- case _ =>
- false
+ case _ => false
}
}
+ }
//if (traceImplicits) println("typed impl for "+wildPt+"? "+info.name+":"+depoly(info.tpe)+"/"+undetParams+"/"+isPlausiblyCompatible(info.tpe, wildPt)+"/"+matchesPt(depoly(info.tpe), wildPt, List()))
if (isPlausiblyCompatible(info.tpe, wildPt) &&
@@ -734,47 +741,50 @@ self: Analyzer =>
def findSubManifest(tp: Type) = findManifest(tp, if (full) FullManifestClass else OptManifestClass)
- def mot(tp0: Type): Tree = tp0.normalize match {
- case ThisType(_) | SingleType(_, _) =>
- manifestFactoryCall("singleType", tp, gen.mkAttributedQualifier(tp0))
- case ConstantType(value) =>
- manifestOfType(tp0.deconst, full)
- case TypeRef(pre, sym, args) =>
- if (isValueClass(sym) || isPhantomClass(sym)) {
- typed { atPos(tree.pos.focus) {
- Select(gen.mkAttributedRef(FullManifestModule), sym.name.toString)
- }}
- } else if (sym == ArrayClass && args.length == 1) {
- manifestFactoryCall("arrayType", args.head, findSubManifest(args.head))
- } else if (sym.isClass) {
- val suffix = gen.mkClassOf(tp0) :: (args map findSubManifest)
- manifestFactoryCall(
- "classType", tp,
- (if ((pre eq NoPrefix) || pre.typeSymbol.isStaticOwner) suffix
- else findSubManifest(pre) :: suffix): _*)
- } else if (sym.isAbstractType) {
- if (sym.isExistential)
- EmptyTree // todo: change to existential parameter manifest
- else if (sym.isTypeParameterOrSkolem)
- EmptyTree // a manifest should have been found by normal searchImplicit
- else
+ def mot(tp0: Type): Tree = {
+ val tp1 = tp0.normalize
+ tp1 match {
+ case ThisType(_) | SingleType(_, _) =>
+ manifestFactoryCall("singleType", tp, gen.mkAttributedQualifier(tp1))
+ case ConstantType(value) =>
+ manifestOfType(tp1.deconst, full)
+ case TypeRef(pre, sym, args) =>
+ if (isValueClass(sym) || isPhantomClass(sym)) {
+ typed { atPos(tree.pos.focus) {
+ Select(gen.mkAttributedRef(FullManifestModule), sym.name.toString)
+ }}
+ } else if (sym == ArrayClass && args.length == 1) {
+ manifestFactoryCall("arrayType", args.head, findSubManifest(args.head))
+ } else if (sym.isClass) {
+ val suffix = gen.mkClassOf(tp1) :: (args map findSubManifest)
manifestFactoryCall(
- "abstractType", tp,
- findSubManifest(pre) :: Literal(sym.name.toString) :: findManifest(tp0.bounds.hi) :: (args map findSubManifest): _*)
- } else {
- EmptyTree // a manifest should have been found by normal searchImplicit
- }
- case RefinedType(parents, decls) =>
- // refinement is not generated yet
- if (parents.length == 1) findManifest(parents.head)
- else manifestFactoryCall("intersectionType", tp, parents map (findSubManifest(_)): _*)
- case ExistentialType(tparams, result) =>
- existentialAbstraction(tparams, result) match {
- case ExistentialType(_, _) => mot(result)
- case t => mot(t)
- }
- case _ =>
- EmptyTree
+ "classType", tp,
+ (if ((pre eq NoPrefix) || pre.typeSymbol.isStaticOwner) suffix
+ else findSubManifest(pre) :: suffix): _*)
+ } else if (sym.isAbstractType) {
+ if (sym.isExistential)
+ EmptyTree // todo: change to existential parameter manifest
+ else if (sym.isTypeParameterOrSkolem)
+ EmptyTree // a manifest should have been found by normal searchImplicit
+ else
+ manifestFactoryCall(
+ "abstractType", tp,
+ findSubManifest(pre) :: Literal(sym.name.toString) :: findManifest(tp1.bounds.hi) :: (args map findSubManifest): _*)
+ } else {
+ EmptyTree // a manifest should have been found by normal searchImplicit
+ }
+ case RefinedType(parents, decls) =>
+ // refinement is not generated yet
+ if (parents.length == 1) findManifest(parents.head)
+ else manifestFactoryCall("intersectionType", tp, parents map (findSubManifest(_)): _*)
+ case ExistentialType(tparams, result) =>
+ existentialAbstraction(tparams, result) match {
+ case ExistentialType(_, _) => mot(result)
+ case t => mot(t)
+ }
+ case _ =>
+ EmptyTree
+ }
}
mot(tp)
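
The mot rewrite above is largely a reshaping (normalize once into tp1, then match on it), but it lives in the code that synthesizes Manifest values via manifestFactoryCall. The snippet below spells out by hand roughly which factory calls such a synthesized manifest corresponds to; the exact printed representation may vary between versions:

    object ManifestDemo {
      import scala.reflect.Manifest

      def main(args: Array[String]) {
        // Roughly what the compiler builds for Manifest[Array[List[Int]]]:
        // an arrayType call wrapping a classType call whose type arguments
        // are themselves manifests.
        val byHand: Manifest[Array[List[Int]]] =
          Manifest.arrayType(Manifest.classType(classOf[List[_]], Manifest.Int))

        val synthesized = implicitly[Manifest[Array[List[Int]]]]
        println(byHand)
        println(synthesized)
      }
    }
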
diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
index 0c3e34f8c5..d11f263677 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala
@@ -404,7 +404,7 @@ trait Infer {
val l = args.length - 1
l == formals.length &&
sym == FunctionClass(l) &&
- List.forall2(args, formals) (isPlausiblySubType) &&
+ ((args, formals).zipped forall isPlausiblySubType) &&
isPlausiblySubType(tp.resultApprox, args.last)
}
case _ =>
@@ -418,7 +418,8 @@ trait Infer {
case TypeRef(_, sym1, _) =>
!sym1.isClass || {
tp2.normalize match {
- case TypeRef(_, sym2, _) => !sym2.isClass || (sym1 isSubClass sym2)
+ case TypeRef(_, sym2, _) =>
+ !sym2.isClass || (sym1 isSubClass sym2) || isNumericSubType(tp1, tp2)
case _ => true
}
}
@@ -458,7 +459,7 @@ trait Infer {
def isCoercible(tp: Type, pt: Type): Boolean = false
def isCompatibleArgs(tps: List[Type], pts: List[Type]): Boolean =
- List.map2(tps, pts)((tp, pt) => isCompatibleArg(tp, pt)) forall (x => x)
+ (tps, pts).zipped forall isCompatibleArg
/* -- Type instantiation------------------------------------------------ */
@@ -495,9 +496,9 @@ trait Infer {
* @param pt ...
* @return ...
*/
- private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type): List[Type] = {
+ private def exprTypeArgs(tparams: List[Symbol], restpe: Type, pt: Type, checkCompat: (Type, Type) => Boolean = isCompatible): List[Type] = {
val tvars = tparams map freshVar
- if (isCompatible(restpe.instantiateTypeParams(tparams, tvars), pt)) {
+ if (checkCompat(restpe.instantiateTypeParams(tparams, tvars), pt)) {
try {
// If the restpe is an implicit method, and the expected type is fully defined
// optimize type variables wrt the implicit formals only; ignore the result type.
@@ -562,7 +563,7 @@ trait Infer {
}
val tvars = tparams map freshVar
if (isConservativelyCompatible(restpe.instantiateTypeParams(tparams, tvars), pt))
- List.map2(tparams, tvars) ((tparam, tvar) =>
+ (tparams, tvars).zipped map ((tparam, tvar) =>
instantiateToBound(tvar, varianceInTypes(formals)(tparam)))
else
tvars map (tvar => WildcardType)
@@ -582,7 +583,7 @@ trait Infer {
@inline def notCovariantIn(tparam: Symbol, restpe: Type) =
(varianceInType(restpe)(tparam) & COVARIANT) == 0 // tparam occurred non-covariantly (in invariant or contravariant position)
- List.map2(tparams, targs) {(tparam, targ) =>
+ (tparams, targs).zipped map { (tparam, targ) =>
if (targ.typeSymbol == NothingClass && (restpe == WildcardType || notCovariantIn(tparam, restpe))) {
uninstantiated += tparam
tparam.tpeHK //@M tparam.tpe was wrong: we only want the type constructor,
@@ -659,7 +660,7 @@ trait Infer {
if (!isFullyDefined(tvar)) tvar.constr.inst = NoType
// Then define remaining type variables from argument types.
- List.map2(argtpes, formals) {(argtpe, formal) =>
+ (argtpes, formals).zipped map { (argtpe, formal) =>
//@M isCompatible has side-effect: isSubtype0 will register subtype checks in the tvar's bounds
if (!isCompatibleArg(argtpe.deconst.instantiateTypeParams(tparams, tvars),
formal.instantiateTypeParams(tparams, tvars))) {
@@ -787,7 +788,8 @@ trait Infer {
try {
val uninstantiated = new ListBuffer[Symbol]
val targs = methTypeArgs(undetparams, formals, restpe, argtpes, pt, uninstantiated)
- (exprTypeArgs(uninstantiated.toList, restpe.instantiateTypeParams(undetparams, targs), pt) ne null) &&
+ // #2665: must use weak conformance, not regular one (follow the monomorphic case above)
+ (exprTypeArgs(uninstantiated.toList, restpe.instantiateTypeParams(undetparams, targs), pt, isWeaklyCompatible) ne null) &&
isWithinBounds(NoPrefix, NoSymbol, undetparams, targs)
} catch {
case ex: NoInstance => false
@@ -1032,8 +1034,8 @@ trait Infer {
(tparams map (_.defString)).mkString("[", ",", "]"))
if (settings.explaintypes.value) {
val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, targs).bounds)
- List.map2(targs, bounds)((targ, bound) => explainTypes(bound.lo, targ))
- List.map2(targs, bounds)((targ, bound) => explainTypes(targ, bound.hi))
+ (targs, bounds).zipped foreach ((targ, bound) => explainTypes(bound.lo, targ))
+ (targs, bounds).zipped foreach ((targ, bound) => explainTypes(targ, bound.hi))
()
}
}
@@ -1620,7 +1622,7 @@ trait Infer {
* assignment expression.
*/
def inferMethodAlternative(tree: Tree, undetparams: List[Symbol],
- argtpes: List[Type], pt0: Type): Unit = tree.tpe match {
+ argtpes: List[Type], pt0: Type, varArgsOnly: Boolean = false): Unit = tree.tpe match {
case OverloadedType(pre, alts) =>
val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0
tryTwice {
@@ -1631,6 +1633,9 @@ trait Infer {
var allApplicable = alts filter (alt =>
isApplicable(undetparams, followApply(pre.memberType(alt)), argtpes, pt))
+ if (varArgsOnly)
+ allApplicable = allApplicable filter (alt => isVarArgs(alt.tpe.paramTypes))
+
// if there are multiple, drop those that use a default
// (keep those that use vararg / tupling conversion)
val applicable =
@@ -1736,7 +1741,7 @@ trait Infer {
if (sym.hasFlag(OVERLOADED)) {
val tparams = new AsSeenFromMap(pre, sym.alternatives.head.owner).mapOver(
sym.alternatives.head.typeParams)
- val bounds = tparams map (_.tpe) //@M TODO: might be affected by change to tpe in Symbol
+ val bounds = tparams map (_.tpeHK) // see e.g., #1236
val tpe =
PolyType(tparams,
OverloadedType(AntiPolyType(pre, bounds), sym.alternatives))
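
The #2665 change makes applicability checking for polymorphic methods use weak conformance, matching the monomorphic case above it. Weak conformance is what allows the primitive numeric types to be unified during inference; a simple, ticket-independent illustration:

    object WeakConformanceDemo {
      def pick[T](a: T, b: T): T = a

      def main(args: Array[String]) {
        // Under weak conformance the arguments unify at Double rather than
        // AnyVal, so the Int literal is adapted and the result is a Double.
        val x = pick(1, 2.0)
        println(x)  // 1.0
      }
    }
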
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index d4a7b9e1e7..f4216f7958 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -290,52 +290,55 @@ trait Namers { self: Analyzer =>
if (m.isModule && inCurrentScope(m) && currentRun.compiles(m)) m
else enterSyntheticSym(creator)
}
-
- def enterSym(tree: Tree): Context = try {
-
- def finishWith(tparams: List[TypeDef]) {
- val sym = tree.symbol
- if (settings.debug.value) log("entered " + sym + " in " + context.owner + ", scope-id = " + context.scope.hashCode());
- var ltype = namerOf(sym).typeCompleter(tree)
- if (!tparams.isEmpty) {
- //@M! TypeDef's type params are handled differently
- //@M e.g., in [A[x <: B], B], A and B are entered first as both are in scope in the definition of x
- //@M x is only in scope in `A[x <: B]'
- if(!sym.isAbstractType) //@M TODO: change to isTypeMember ?
- newNamer(context.makeNewScope(tree, sym)).enterSyms(tparams)
-
- ltype = new PolyTypeCompleter(tparams, ltype, tree, sym, context) //@M
- if (sym.isTerm) skolemize(tparams)
- }
- def copyIsSynthetic() = sym.owner.info.member(nme.copy).hasFlag(SYNTHETIC)
- if (sym.name == nme.copy && sym.hasFlag(SYNTHETIC) ||
- sym.name.startsWith(nme.copy + "$default$") && copyIsSynthetic()){
- // the 'copy' method of case classes needs a special type completer to make bug0054.scala (and others)
- // work. the copy method has to take exactly the same parameter types as the primary constructor.
- setInfo(sym)(mkTypeCompleter(tree)(copySym => {
- val constrType = copySym.owner.primaryConstructor.tpe
- val subst = new SubstSymMap(copySym.owner.typeParams, tparams map (_.symbol))
- for ((params, cparams) <- tree.asInstanceOf[DefDef].vparamss.zip(constrType.paramss);
- (param, cparam) <- params.zip(cparams)) {
- // need to clone the type cparam.tpe??? problem is: we don't have the new owner yet (the new param symbol)
- param.tpt.setType(subst(cparam.tpe))
- () // @LUC TODO workaround for #1996
- }
- ltype.complete(sym)
- }))
- } else setInfo(sym)(ltype)
+ private def enterSymFinishWith(tree: Tree, tparams: List[TypeDef]) {
+ val sym = tree.symbol
+ if (settings.debug.value) log("entered " + sym + " in " + context.owner + ", scope-id = " + context.scope.hashCode());
+ var ltype = namerOf(sym).typeCompleter(tree)
+ if (!tparams.isEmpty) {
+ //@M! TypeDef's type params are handled differently
+ //@M e.g., in [A[x <: B], B], A and B are entered first as both are in scope in the definition of x
+ //@M x is only in scope in `A[x <: B]'
+ if(!sym.isAbstractType) //@M TODO: change to isTypeMember ?
+ newNamer(context.makeNewScope(tree, sym)).enterSyms(tparams)
+
+ ltype = new PolyTypeCompleter(tparams, ltype, tree, sym, context) //@M
+ if (sym.isTerm) skolemize(tparams)
}
- def finish = finishWith(List())
+ def copyIsSynthetic() = sym.owner.info.member(nme.copy).hasFlag(SYNTHETIC)
+ if (sym.name == nme.copy && sym.hasFlag(SYNTHETIC) ||
+ sym.name.startsWith(nme.copy + "$default$") && copyIsSynthetic()){
+ // the 'copy' method of case classes needs a special type completer to make bug0054.scala (and others)
+ // work. the copy method has to take exactly the same parameter types as the primary constructor.
+ setInfo(sym)(mkTypeCompleter(tree)(copySym => {
+ val constrType = copySym.owner.primaryConstructor.tpe
+ val subst = new SubstSymMap(copySym.owner.typeParams, tparams map (_.symbol))
+ for ((params, cparams) <- tree.asInstanceOf[DefDef].vparamss.zip(constrType.paramss);
+ (param, cparam) <- params.zip(cparams)) {
+ // need to clone the type cparam.tpe??? problem is: we don't have the new owner yet (the new param symbol)
+ param.tpt.setType(subst(cparam.tpe))
+ () // @LUC TODO workaround for #1996
+ }
+ ltype.complete(sym)
+ }))
+ } else setInfo(sym)(ltype)
+ }
+
+ def enterSym(tree: Tree): Context = {
+ def finishWith(tparams: List[TypeDef]) { enterSymFinishWith(tree, tparams) }
+ def finish = finishWith(Nil)
+ def sym = tree.symbol
+ if (sym != NoSymbol)
+ return this.context
- if (tree.symbol == NoSymbol) {
+ try {
val owner = context.owner
tree match {
case PackageDef(pid, stats) =>
tree.symbol = enterPackageSymbol(tree.pos, pid,
if (context.owner == EmptyPackageClass) RootClass else context.owner)
- val namer = newNamer(
- context.make(tree, tree.symbol.moduleClass, tree.symbol.info.decls))
- namer.enterSyms(stats)
+ val namer = newNamer(context.make(tree, sym.moduleClass, sym.info.decls))
+ namer enterSyms stats
+
case tree @ ClassDef(mods, name, tparams, impl) =>
tree.symbol = enterClassSymbol(tree)
finishWith(tparams)
@@ -343,26 +346,23 @@ trait Namers { self: Analyzer =>
val m = ensureCompanionObject(tree, caseModuleDef(tree))
caseClassOfModuleClass(m.moduleClass) = tree
}
- val constrs = impl.body filter {
- case DefDef(_, name, _, _, _, _) => name == nme.CONSTRUCTOR
- case _ => false
- }
- val hasDefault = constrs.exists(c => {
- val DefDef(_, _, _, vparamss, _, _) = c
- vparamss.exists(_.exists(_.mods hasFlag DEFAULTPARAM))
- })
+ val hasDefault = impl.body flatMap {
+ case DefDef(_, nme.CONSTRUCTOR, _, vparamss, _, _) => vparamss.flatten
+ case _ => Nil
+ } exists (_.mods hasFlag DEFAULTPARAM)
+
if (hasDefault) {
val m = ensureCompanionObject(tree, companionModuleDef(tree, List(gen.scalaScalaObjectConstr)))
classAndNamerOfModule(m) = (tree, null)
}
case tree @ ModuleDef(mods, name, _) =>
tree.symbol = enterModuleSymbol(tree)
- tree.symbol.moduleClass.setInfo(namerOf(tree.symbol).moduleClassTypeCompleter((tree)))
+ sym.moduleClass setInfo namerOf(sym).moduleClassTypeCompleter(tree)
finish
case vd @ ValDef(mods, name, tp, rhs) =>
if ((!context.owner.isClass ||
- (mods.flags & (PRIVATE | LOCAL)) == (PRIVATE | LOCAL).toLong ||
+ (mods.flags & (PRIVATE | LOCAL | CASEACCESSOR)) == (PRIVATE | LOCAL) ||
name.endsWith(nme.OUTER, nme.OUTER.length) ||
context.unit.isJava) &&
!mods.isLazy) {
@@ -370,38 +370,44 @@ trait Namers { self: Analyzer =>
.setFlag(mods.flags))
finish
} else {
+ val mods1 =
+ if (mods.hasFlag(PRIVATE) && mods.hasFlag(LOCAL) && !mods.isLazy) {
+ context.error(tree.pos, "private[this] not allowed for case class parameters")
+ mods &~ LOCAL
+ } else mods
// add getter and possibly also setter
val accflags: Long = ACCESSOR |
- (if ((mods.flags & MUTABLE) != 0L) mods.flags & ~MUTABLE & ~PRESUPER
- else mods.flags & ~PRESUPER | STABLE)
+ (if (mods1.isVariable) mods1.flags & ~MUTABLE & ~PRESUPER
+ else mods1.flags & ~PRESUPER | STABLE)
if (nme.isSetterName(name))
context.error(tree.pos, "Names of vals or vars may not end in `_='")
// .isInstanceOf[..]: probably for (old) IDE hook. is this obsolete?
- val getter = enterAliasMethod(tree, name, accflags, mods)
+ val getter = enterAliasMethod(tree, name, accflags, mods1)
setInfo(getter)(namerOf(getter).getterTypeCompleter(vd))
- if ((mods.flags & MUTABLE) != 0L) {
+ if (mods1.isVariable) {
val setter = enterAliasMethod(tree, nme.getterToSetter(name),
accflags & ~STABLE & ~CASEACCESSOR,
- mods)
+ mods1)
setInfo(setter)(namerOf(setter).setterTypeCompleter(vd))
}
+
tree.symbol =
- if (mods.isDeferred) {
+ if (mods1.isDeferred) {
getter setPos tree.pos // unfocus getter position, because there won't be a separate value
} else {
val vsym =
if (!context.owner.isClass) {
- assert(mods.isLazy) // if not a field, it has to be a lazy val
- owner.newValue(tree.pos, name + "$lzy" ).setFlag(mods.flags | MUTABLE)
+ assert(mods1.isLazy) // if not a field, it has to be a lazy val
+ owner.newValue(tree.pos, name + "$lzy" ).setFlag(mods1.flags | MUTABLE)
} else {
- val mflag = if (mods.isLazy) MUTABLE else 0
- val newflags = mods.flags & FieldFlags | PRIVATE | LOCAL | mflag
-
+ val mFlag = if (mods1.isLazy) MUTABLE else 0
+ val lFlag = if (mods.hasFlag(PRIVATE) && mods.hasFlag(LOCAL)) 0 else LOCAL
+ val newflags = mods1.flags & FieldFlags | PRIVATE | lFlag | mFlag
owner.newValue(tree.pos, nme.getterToLocal(name)) setFlag newflags
}
enterInScope(vsym)
setInfo(vsym)(namerOf(vsym).typeCompleter(tree))
- if (mods.isLazy)
+ if (mods1.isLazy)
vsym.setLazyAccessor(getter)
vsym
@@ -427,17 +433,18 @@ trait Namers { self: Analyzer =>
enterSym(defn)
case imp @ Import(_, _) =>
tree.symbol = NoSymbol.newImport(tree.pos)
- setInfo(tree.symbol)(namerOf(tree.symbol).typeCompleter(tree))
+ setInfo(sym)(namerOf(sym).typeCompleter(tree))
return (context.makeNewImport(imp))
case _ =>
}
}
+ catch {
+ case ex: TypeError =>
+ //Console.println("caught " + ex + " in enterSym")//DEBUG
+ typer.reportTypeError(tree.pos, ex)
+ this.context
+ }
this.context
- } catch {
- case ex: TypeError =>
- //Console.println("caught " + ex + " in enterSym")//DEBUG
- typer.reportTypeError(tree.pos, ex)
- this.context
}
def enterSyntheticSym(tree: Tree): Symbol = {
@@ -615,7 +622,7 @@ trait Namers { self: Analyzer =>
clazz.typeOfThis = selfTypeCompleter(self.tpt)
self.symbol = clazz.thisSym.setPos(self.pos)
} else {
- self.tpt.tpe = NoType
+ self.tpt defineType NoType
if (self.name != nme.WILDCARD) {
clazz.typeOfThis = clazz.tpe
self.symbol = clazz.thisSym
@@ -747,7 +754,7 @@ trait Namers { self: Analyzer =>
var vparamSymss = enterValueParams(meth, vparamss)
if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) {
- tpt.tpe = context.enclClass.owner.tpe
+ tpt defineType context.enclClass.owner.tpe
tpt setPos meth.pos.focus
}
@@ -873,7 +880,7 @@ trait Namers { self: Analyzer =>
var pfs = resultPt.paramTypes
for (vparam <- vparams) {
if (vparam.tpt.isEmpty) {
- vparam.tpt.tpe = pfs.head
+ vparam.tpt defineType pfs.head
vparam.tpt setPos vparam.pos.focus
vparam.symbol setInfo pfs.head
}
@@ -900,7 +907,7 @@ trait Namers { self: Analyzer =>
}
for (vparams <- vparamss; vparam <- vparams if vparam.tpt.isEmpty) {
context.error(vparam.pos, "missing parameter type")
- vparam.tpt.tpe = ErrorType
+ vparam.tpt defineType ErrorType
}
addDefaultGetters(meth, vparamss, tparams, overriddenSymbol)
@@ -910,7 +917,7 @@ trait Namers { self: Analyzer =>
// replace deSkolemized symbols with skolemized ones (for resultPt computed by looking at overridden symbol, right?)
val pt = resultPt.substSym(tparamSyms, tparams map (_.symbol))
// compute result type from rhs
- tpt.tpe = widenIfNotFinal(meth, typer.computeType(rhs, pt), pt)
+ tpt defineType widenIfNotFinal(meth, typer.computeType(rhs, pt), pt)
tpt setPos meth.pos.focus
tpt.tpe
} else typer.typedType(tpt).tpe
@@ -1148,7 +1155,7 @@ trait Namers { self: Analyzer =>
context.error(tpt.pos, "missing parameter type");
ErrorType
} else {
- tpt.tpe = widenIfNotFinal(
+ tpt defineType widenIfNotFinal(
sym,
newTyper(typer1.context.make(vdef, sym)).computeType(rhs, WildcardType),
WildcardType)
@@ -1268,7 +1275,7 @@ trait Namers { self: Analyzer =>
context.error(sym.pos, "`lazy' definitions may not be initialized early")
if (sym.info.typeSymbol == FunctionClass(0) &&
sym.isValueParameter && sym.owner.isClass && sym.owner.hasFlag(CASE))
- context.error(sym.pos, "pass-by-name arguments not allowed for case class parameters");
+ context.error(sym.pos, "pass-by-name arguments not allowed for case class parameters")
if (sym hasFlag DEFERRED) { // virtual classes count, too
if (sym.hasAnnotation(definitions.NativeAttr))
sym.resetFlag(DEFERRED)
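
One behavioral change buried in the Namers hunk: private[this] is now rejected on case-class parameters; such a parameter would get no accessor at all, only the local field. For example:

    // Accepted: a private case-class parameter (its accessor is simply private).
    case class P1(private val x: Int)

    // Rejected after this change with
    // "private[this] not allowed for case class parameters":
    // case class P2(private[this] val x: Int)
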
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index 773b2cf561..d36d68163f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -223,7 +223,7 @@ trait NamesDefaults { self: Analyzer =>
*/
def argValDefs(args: List[Tree], paramTypes: List[Type], blockTyper: Typer): List[ValDef] = {
val context = blockTyper.context
- val symPs = List.map2(args, paramTypes)((arg, tpe) => {
+ val symPs = (args, paramTypes).zipped map ((arg, tpe) => {
val byName = tpe.typeSymbol == ByNameParamClass
val s = context.owner.newValue(arg.pos, unit.fresh.newName(arg.pos, "x$"))
val valType = if (byName) functionType(List(), arg.tpe)
@@ -231,7 +231,7 @@ trait NamesDefaults { self: Analyzer =>
s.setInfo(valType)
(context.scope.enter(s), byName)
})
- List.map2(symPs, args)((symP, arg) => {
+ (symPs, args).zipped map ((symP, arg) => {
val (sym, byName) = symP
// resetAttrs required for #2290. given a block { val x = 1; x }, when wrapping into a function
// () => { val x = 1; x }, the owner of symbol x must change (to the apply method of the function).
@@ -270,7 +270,7 @@ trait NamesDefaults { self: Analyzer =>
reorderArgsInv(formals, argPos),
blockTyper)
// refArgs: definition-site order again
- val refArgs = List.map2(reorderArgs(valDefs, argPos), formals)((vDef, tpe) => {
+ val refArgs = (reorderArgs(valDefs, argPos), formals).zipped map ((vDef, tpe) => {
val ref = gen.mkAttributedRef(vDef.symbol)
atPos(vDef.pos.focus) {
// for by-name parameters, the local value is a nullary function returning the argument
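
The NamesDefaults hunks only swap List.map2 for the zipped view, but the surrounding argValDefs code is the machinery that lowers a named-argument call into a block of temporaries evaluated in call-site order and then passed in definition-site order. Conceptually (the temporary names are invented):

    object NamedArgsDemo {
      def resize(width: Int, height: Int) = width * height

      def main(args: Array[String]) {
        // A call with reordered named arguments ...
        val a = resize(height = 2, width = 3)

        // ... is conceptually rewritten by the typer into something like:
        val b = {
          val x$1 = 2        // height argument, evaluated first (call-site order)
          val x$2 = 3        // width argument
          resize(x$2, x$1)   // passed back in definition-site order
        }

        println(a == b)      // true
      }
    }
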
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 9b8be2aaec..c4a3981a51 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -368,6 +368,9 @@ abstract class RefChecks extends InfoTransform {
"The kind of the right-hand side "+memberTp.normalize+" of "+member.keyString+" "+
member.varianceString + member.nameString+ " does not conform to its expected kind."+
kindErrors.toList.mkString("\n", ", ", ""))
+ } else if (member.isAbstractType) {
+ if (memberTp.isVolatile && !otherTp.bounds.hi.isVolatile)
+ overrideError("is a volatile type; cannot override a type with non-volatile upper bound")
}
} else if (other.isTerm) {
other.cookJavaRawInfo() // #2454
@@ -398,9 +401,7 @@ abstract class RefChecks extends InfoTransform {
else clazz.toString() + " needs to be abstract") + ", since " + msg);
clazz.setFlag(ABSTRACT)
}
- // Find a concrete Java method that overrides `sym' under the erasure model.
- // Bridge symbols qualify.
- // Used as a fall back if no overriding symbol of a Java abstract method can be found
+
def javaErasedOverridingSym(sym: Symbol): Symbol =
clazz.tpe.nonPrivateMemberAdmitting(sym.name, BRIDGE).filter(other =>
!other.isDeferred &&
@@ -410,10 +411,12 @@ abstract class RefChecks extends InfoTransform {
atPhase(currentRun.erasurePhase.next)(tp1 matches tp2)
})
+ def ignoreDeferred(member: Symbol) =
+ isAbstractTypeWithoutFBound(member) ||
+ ((member hasFlag JAVA) && javaErasedOverridingSym(member) != NoSymbol)
+
for (member <- clazz.tpe.nonPrivateMembersAdmitting(VBRIDGE))
- if (member.isDeferred && !(clazz hasFlag ABSTRACT) &&
- !isAbstractTypeWithoutFBound(member) &&
- !((member hasFlag JAVA) && javaErasedOverridingSym(member) != NoSymbol)) {
+ if (member.isDeferred && !(clazz hasFlag ABSTRACT) && !ignoreDeferred(member)) {
abstractClassError(
false, infoString(member) + " is not defined" + analyzer.varNotice(member))
} else if ((member hasFlag ABSOVERRIDE) && member.isIncompleteIn(clazz)) {
@@ -435,8 +438,8 @@ abstract class RefChecks extends InfoTransform {
// (3) is violated but not (2).
def checkNoAbstractDecls(bc: Symbol) {
for (decl <- bc.info.decls.iterator) {
- if (decl.isDeferred && !isAbstractTypeWithoutFBound(decl)) {
- val impl = decl.matchingSymbol(clazz.thisType)
+ if (decl.isDeferred && !ignoreDeferred(decl)) {
+ val impl = decl.matchingSymbol(clazz.thisType, admit = VBRIDGE)
if (impl == NoSymbol || (decl.owner isSubClass impl.owner)) {
abstractClassError(false, "there is a deferred declaration of "+infoString(decl)+
" which is not implemented in a subclass"+analyzer.varNotice(decl))
@@ -450,21 +453,25 @@ abstract class RefChecks extends InfoTransform {
if (!(clazz hasFlag ABSTRACT)) checkNoAbstractDecls(clazz)
}
- /** Does there exists a symbol declared in class `inclazz` with name `name` and
- * whose type seen as a member of `class.thisType` matches `tpe`?
+ /** Returns whether there is a symbol declared in class `inclazz`
+ * (which must be different from `clazz`) whose name and type
+ * seen as a member of `class.thisType` matches `member`'s.
*/
- def hasMatchingSym(inclazz: Symbol, name: Name, tpe: Type): Boolean =
- inclazz.info.nonPrivateDecl(name).filter(sym =>
- !sym.isTerm || (tpe matches clazz.thisType.memberType(sym))) != NoSymbol
+ def hasMatchingSym(inclazz: Symbol, member: Symbol): Boolean =
+ inclazz != clazz && {
+ val isVarargs = hasRepeatedParam(member.tpe)
+ inclazz.info.nonPrivateDecl(member.name).filter { sym =>
+ !sym.isTerm || {
+ val symtpe = clazz.thisType.memberType(sym)
+ (member.tpe matches symtpe) || isVarargs && (toJavaRepeatedParam(member.tpe) matches symtpe)
+ }
+ } != NoSymbol
+ }
// 4. Check that every defined member with an `override' modifier overrides some other member.
for (member <- clazz.info.decls.toList)
if ((member hasFlag (OVERRIDE | ABSOVERRIDE)) &&
- !(clazz.ancestors exists { bc =>
- hasMatchingSym(bc, member.name, member.tpe) ||
- hasRepeatedParam(member.tpe) &&
- hasMatchingSym(bc, member.name, toJavaRepeatedParam(member.tpe))
- })) {
+ !(clazz.thisType.baseClasses exists (hasMatchingSym(_, member)))) {
// for (bc <- clazz.info.baseClasses.tail) Console.println("" + bc + " has " + bc.info.decl(member.name) + ":" + bc.info.decl(member.name).tpe);//DEBUG
unit.error(member.pos, member.toString() + " overrides nothing");
member resetFlag OVERRIDE
@@ -665,7 +672,7 @@ abstract class RefChecks extends InfoTransform {
class LevelInfo(val outer: LevelInfo) {
val scope: Scope = if (outer eq null) new Scope else new Scope(outer.scope)
- var maxindex: Int = Math.MIN_INT
+ var maxindex: Int = Int.MinValue
var refpos: Position = _
var refsym: Symbol = _
}
@@ -839,7 +846,7 @@ abstract class RefChecks extends InfoTransform {
if (tree.symbol.hasFlag(LAZY)) {
assert(tree.symbol.isTerm, tree.symbol)
val vsym = tree.symbol
- val hasUnitType = (tree.symbol.tpe.typeSymbol == definitions.UnitClass)
+ val hasUnitType = (tree.symbol.tpe.typeSymbol == UnitClass)
val lazyDefSym = vsym.lazyAccessor
assert(lazyDefSym != NoSymbol, vsym)
val ownerTransformer = new ChangeOwnerTraverser(vsym, lazyDefSym)
@@ -870,135 +877,210 @@ abstract class RefChecks extends InfoTransform {
List(transform(tree))
}
- override def transform(tree: Tree): Tree = try {
+ /******** Begin transform inner function section ********/
- /* Check whether argument types conform to bounds of type parameters */
- def checkBounds(pre: Type, owner: Symbol, tparams: List[Symbol], argtps: List[Type]): Unit =
- checkBoundsWithPos(pre, owner, tparams, argtps, tree.pos)
- def checkBoundsWithPos(pre: Type, owner: Symbol, tparams: List[Symbol], argtps: List[Type], pos: Position): Unit = try {
- typer.infer.checkBounds(pos, pre, owner, tparams, argtps, "");
- } catch {
+ /** The private functions between here and 'transform' are conceptually
+ * inner functions to that method, but have been moved outside of it to
+ * ease the burden on the optimizer.
+ */
+
+ /* Check whether argument types conform to bounds of type parameters */
+ private def checkBounds(pre: Type, owner: Symbol, tparams: List[Symbol], argtps: List[Type], pos: Position): Unit =
+ try typer.infer.checkBounds(pos, pre, owner, tparams, argtps, "")
+ catch {
case ex: TypeError =>
unit.error(pos, ex.getMessage());
if (settings.explaintypes.value) {
val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, argtps).bounds)
- List.map2(argtps, bounds)((targ, bound) => explainTypes(bound.lo, targ))
- List.map2(argtps, bounds)((targ, bound) => explainTypes(targ, bound.hi))
+ (argtps, bounds).zipped map ((targ, bound) => explainTypes(bound.lo, targ))
+ (argtps, bounds).zipped map ((targ, bound) => explainTypes(targ, bound.hi))
()
}
}
+ private def isIrrefutable(pat: Tree, seltpe: Type): Boolean = {
+ val result = pat match {
+ case Apply(_, args) =>
+ val clazz = pat.tpe.typeSymbol;
+ clazz == seltpe.typeSymbol &&
+ clazz.isClass && (clazz hasFlag CASE) &&
+ ((args, clazz.primaryConstructor.tpe.asSeenFrom(seltpe, clazz).paramTypes).zipped forall isIrrefutable)
+ case Typed(pat, tpt) =>
+ seltpe <:< tpt.tpe
+ case Ident(nme.WILDCARD) =>
+ true
+ case Bind(_, pat) =>
+ isIrrefutable(pat, seltpe)
+ case _ =>
+ false
+ }
+ //Console.println("is irefutable? " + pat + ":" + pat.tpe + " against " + seltpe + ": " + result);//DEBUG
+ result
+ }
+ /** If symbol is deprecated and is not contained in a deprecated definition,
+ * issue a deprecated warning
+ */
+ private def checkDeprecated(sym: Symbol, pos: Position) {
+ if (sym.isDeprecated && !currentOwner.ownerChain.exists(_.isDeprecated)) {
+ val dmsg = sym.deprecationMessage
+ val msg = sym.toString + sym.locationString +" is deprecated"+
+ (if (dmsg.isDefined) ": "+ dmsg.get
+ else "")
+ unit.deprecationWarning(pos, msg)
+ }
+ }
+ /** Check that a deprecated val or def does not override a
+ * concrete, non-deprecated method. If it does, then
+ * deprecation is meaningless.
+ */
+ private def checkDeprecatedOvers(tree: Tree) {
+ val symbol = tree.symbol
+ if (symbol.isDeprecated) {
+ val concrOvers =
+ symbol.allOverriddenSymbols.filter(sym =>
+ !sym.isDeprecated && !sym.isDeferred)
+ if(!concrOvers.isEmpty)
+ unit.deprecationWarning(
+ tree.pos,
+ symbol.toString + " overrides concrete, non-deprecated symbol(s):" +
+ concrOvers.map(_.name.decode).mkString(" ", ", ", ""))
+ }
+ }
+ private def isRepeatedParamArg(tree: Tree) = currentApplication match {
+ case Apply(fn, args) =>
+ !args.isEmpty && (args.last eq tree) &&
+ fn.tpe.paramTypes.length == args.length && isRepeatedParamType(fn.tpe.paramTypes.last)
+ case _ =>
+ false
+ }
+ private def checkTypeRef(tp: Type, pos: Position) = tp match {
+ case TypeRef(pre, sym, args) =>
+ checkDeprecated(sym, pos)
+ if (!tp.isHigherKinded)
+ checkBounds(pre, sym.owner, sym.typeParams, args, pos)
+ case _ =>
+ }
- def isIrrefutable(pat: Tree, seltpe: Type): Boolean = {
- val result = pat match {
- case Apply(_, args) =>
- val clazz = pat.tpe.typeSymbol;
- clazz == seltpe.typeSymbol &&
- clazz.isClass && (clazz hasFlag CASE) &&
- List.forall2(
- args,
- clazz.primaryConstructor.tpe.asSeenFrom(seltpe, clazz).paramTypes)(isIrrefutable)
- case Typed(pat, tpt) =>
- seltpe <:< tpt.tpe
- case Ident(nme.WILDCARD) =>
- true
- case Bind(_, pat) =>
- isIrrefutable(pat, seltpe)
- case _ =>
- false
- }
- //Console.println("is irefutable? " + pat + ":" + pat.tpe + " against " + seltpe + ": " + result);//DEBUG
- result
+ private def checkAnnotations(tpes: List[Type], pos: Position) = tpes foreach (tp => checkTypeRef(tp, pos))
+ private def doTypeTraversal(tree: Tree)(f: Type => Unit) = if (!inPattern) tree.tpe foreach f
+
+ private def applyRefchecksToAnnotations(tree: Tree) = tree match {
+ case m: MemberDef =>
+ checkAnnotations(m.symbol.annotations map (_.atp), tree.pos)
+ transformTrees(m.symbol.annotations.flatMap(_.args))
+ case TypeTree() => doTypeTraversal(tree) {
+ case AnnotatedType(annots, _, _) =>
+ checkAnnotations(annots map (_.atp), tree.pos)
+ transformTrees(annots.flatMap(_.args))
+ case _ =>
}
+ case _ =>
+ }
- /** If symbol is deprecated and is not contained in a deprecated definition,
- * issue a deprecated warning
- */
- def checkDeprecated(sym: Symbol, pos: Position) {
- if (sym.isDeprecated && !currentOwner.ownerChain.exists(_.isDeprecated)) {
- val dmsg = sym.deprecationMessage
- val msg = sym.toString + sym.locationString +" is deprecated"+
- (if (dmsg.isDefined) ": "+ dmsg.get
- else "")
- unit.deprecationWarning(pos, msg)
- }
+ private def transformCaseApply(tree: Tree, ifNot: => Unit) = {
+ val sym = tree.symbol
+
+ if (sym.isSourceMethod && sym.hasFlag(CASE) && sym.name == nme.apply)
+ toConstructor(tree.pos, tree.tpe)
+ else {
+ ifNot
+ tree
}
+ }
- /** Check that a deprecated val or def does not override a
- * concrete, non-deprecated method. If it does, then
- * deprecation is meaningless.
- */
- def checkDeprecatedOvers() {
- val symbol = tree.symbol
- if (symbol.isDeprecated) {
- val concrOvers =
- symbol.allOverriddenSymbols.filter(sym =>
- !sym.isDeprecated && !sym.isDeferred)
- if(!concrOvers.isEmpty)
- unit.deprecationWarning(
- tree.pos,
- symbol.toString + " overrides concrete, non-deprecated symbol(s):" +
- concrOvers.map(_.name.decode).mkString(" ", ", ", ""))
+ private def transformApply(tree: Apply): Tree = tree match {
+ case Apply(
+ Select(qual, nme.filter),
+ List(Function(
+ List(ValDef(_, pname, tpt, _)),
+ Match(_, CaseDef(pat1, _, _) :: _))))
+ if ((pname startsWith nme.CHECK_IF_REFUTABLE_STRING) &&
+ isIrrefutable(pat1, tpt.tpe) && (qual.tpe <:< tree.tpe)) =>
+
+ qual
+
+ case Apply(Select(New(tpt), name), args)
+ if (tpt.tpe.typeSymbol == ArrayClass && args.length >= 2) =>
+ unit.deprecationWarning(tree.pos,
+ "new Array(...) with multiple dimensions has been deprecated; use Array.ofDim(...) instead")
+ val manif = {
+ var etpe = tpt.tpe
+ for (_ <- args) { etpe = etpe.typeArgs.headOption.getOrElse(NoType) }
+ if (etpe == NoType) {
+ unit.error(tree.pos, "too many dimensions for array creation")
+ Literal(Constant(null))
+ } else {
+ localTyper.getManifestTree(tree.pos, etpe, false)
+ }
}
- }
+ val newResult = localTyper.typedPos(tree.pos) {
+ Apply(Apply(Select(gen.mkAttributedRef(ArrayModule), nme.ofDim), args), List(manif))
+ }
+ currentApplication = tree
+ newResult
- def isRepeatedParamArg(tree: Tree) = currentApplication match {
- case Apply(fn, args) =>
- !args.isEmpty && (args.last eq tree) &&
- fn.tpe.paramTypes.length == args.length && isRepeatedParamType(fn.tpe.paramTypes.last)
- case _ =>
- false
+ case Apply(fn, args) =>
+ checkSensible(tree.pos, fn, args)
+ currentApplication = tree
+ tree
+ }
+ private def transformSelect(tree: Select): Tree = {
+ val Select(qual, name) = tree
+ val sym = tree.symbol
+ checkDeprecated(sym, tree.pos)
+
+ if (currentClass != sym.owner && (sym hasFlag LOCAL)) {
+ var o = currentClass
+ var hidden = false
+ while (!hidden && o != sym.owner && o != sym.owner.moduleClass && !o.isPackage) {
+ hidden = o.isTerm || o.isPrivateLocal
+ o = o.owner
+ }
+ if (!hidden) escapedPrivateLocals += sym
}
- def isCaseApply(sym : Symbol) = sym.isSourceMethod && sym.hasFlag(CASE) && sym.name == nme.apply
+ def checkSuper(mix: Name) =
+ // term should have been eliminated by super accessors
+ assert(!(qual.symbol.isTrait && sym.isTerm && mix == nme.EMPTY.toTypeName))
- def checkTypeRef(tp: Type, pos: Position) = tp match {
- case TypeRef(pre, sym, args) =>
- checkDeprecated(sym, pos)
- if (!tp.isHigherKinded)
- checkBoundsWithPos(pre, sym.owner, sym.typeParams, args, pos)
- case _ =>
- }
- def checkAnnotations(tpes: List[(Type, Position)]) {
- for ((tp, pos) <- tpes) checkTypeRef(tp, pos)
+ transformCaseApply(tree,
+ qual match {
+ case Super(_, mix) => checkSuper(mix)
+ case _ =>
+ }
+ )
+ }
+ private def transformIf(tree: If): Tree = {
+ val If(cond, thenpart, elsepart) = tree
+ def unitIfEmpty(t: Tree): Tree =
+ if (t == EmptyTree) Literal(()).setPos(tree.pos).setType(UnitClass.tpe) else t
+
+ cond.tpe match {
+ case ConstantType(value) =>
+ val res = if (value.booleanValue) thenpart else elsepart
+ unitIfEmpty(res)
+ case _ => tree
}
+ }
+ override def transform(tree: Tree): Tree = try {
val savedLocalTyper = localTyper
val savedCurrentApplication = currentApplication
val sym = tree.symbol
- var result = tree
-
- def doTypeTraversal(f: (Type) => Unit) =
- if (!inPattern) {
- for (tp <- tree.tpe) f(tp)
- }
// Apply RefChecks to annotations. Makes sure the annotations conform to
// type bounds (bug #935), issues deprecation warnings for symbols used
// inside annotations.
- tree match {
- case m: MemberDef =>
- checkAnnotations(m.symbol.annotations.map(a => (a.atp, tree.pos)))
- transformTrees(m.symbol.annotations.flatMap(_.args))
- case TypeTree() => doTypeTraversal {
- case AnnotatedType(annots, _, _) =>
- checkAnnotations(annots.map(a => (a.atp, tree.pos)))
- transformTrees(annots.flatMap(_.args))
- case _ =>
- }
- case _ =>
- }
+ applyRefchecksToAnnotations(tree)
- tree match {
- case DefDef(mods, name, tparams, vparams, tpt, EmptyTree) if tree.symbol.hasAnnotation(definitions.NativeAttr) =>
+ var result: Tree = tree match {
+ case DefDef(mods, name, tparams, vparams, tpt, EmptyTree) if tree.symbol.hasAnnotation(NativeAttr) =>
tree.symbol.resetFlag(DEFERRED)
- result = transform(treeCopy.DefDef(tree, mods, name, tparams, vparams, tpt,
- typed(Apply(gen.mkAttributedRef(definitions.Predef_error), List(Literal("native method stub"))))))
-
- case DefDef(_, _, _, _, _, _) =>
- checkDeprecatedOvers()
+ transform(treeCopy.DefDef(tree, mods, name, tparams, vparams, tpt,
+ typed(Apply(gen.mkAttributedRef(Predef_error), List(Literal("native method stub"))))))
- case ValDef(_, _, _, _) =>
- checkDeprecatedOvers()
+ case ValDef(_, _, _, _) | DefDef(_, _, _, _, _, _) =>
+ checkDeprecatedOvers(tree)
+ tree
case Template(parents, self, body) =>
localTyper = localTyper.atOwner(tree, currentOwner)
@@ -1006,12 +1088,13 @@ abstract class RefChecks extends InfoTransform {
checkDefaultsInOverloaded(currentOwner)
val bridges = addVarargBridges(currentOwner)
checkAllOverrides(currentOwner)
- if (bridges.nonEmpty)
- result = treeCopy.Template(tree, parents, self, body ::: bridges)
+
+ if (bridges.nonEmpty) treeCopy.Template(tree, parents, self, body ::: bridges)
+ else tree
case TypeTree() =>
val existentialParams = new ListBuffer[Symbol]
- doTypeTraversal { // check all bounds, except those that are
+ doTypeTraversal(tree) { // check all bounds, except those that are
// existential type parameters
case ExistentialType(tparams, tpe) =>
existentialParams ++= tparams
@@ -1021,88 +1104,39 @@ abstract class RefChecks extends InfoTransform {
checkTypeRef(t.subst(exparams, wildcards), tree.pos)
case _ =>
}
+ tree
case TypeApply(fn, args) =>
- checkBounds(NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe))
- if (isCaseApply(sym)) result = toConstructor(tree.pos, tree.tpe)
-
- case Apply(
- Select(qual, nme.filter),
- List(Function(
- List(ValDef(_, pname, tpt, _)),
- Match(_, CaseDef(pat1, _, _) :: _))))
- if ((pname startsWith nme.CHECK_IF_REFUTABLE_STRING) &&
- isIrrefutable(pat1, tpt.tpe) && (qual.tpe <:< tree.tpe)) =>
- result = qual
-
- case Apply(Select(New(tpt), name), args)
- if (tpt.tpe.typeSymbol == ArrayClass && args.length >= 2) =>
- unit.deprecationWarning(tree.pos,
- "new Array(...) with multiple dimensions has been deprecated; use Array.ofDim(...) instead")
- val manif = {
- var etpe = tpt.tpe
- for (_ <- args) { etpe = etpe.typeArgs.headOption.getOrElse(NoType) }
- if (etpe == NoType) {
- unit.error(tree.pos, "too many dimensions for array creation")
- Literal(Constant(null))
- } else {
- localTyper.getManifestTree(tree.pos, etpe, false)
- }
- }
- result = localTyper.typedPos(tree.pos) {
- Apply(Apply(Select(gen.mkAttributedRef(ArrayModule), nme.ofDim), args), List(manif))
- }
- currentApplication = tree
+ checkBounds(NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe), tree.pos)
+ transformCaseApply(tree, ())
- case Apply(fn, args) =>
- checkSensible(tree.pos, fn, args)
- currentApplication = tree
+ case x @ Apply(_, _) =>
+ transformApply(x)
- case If(cond, thenpart, elsepart) =>
- cond.tpe match {
- case ConstantType(value) =>
- result = if (value.booleanValue) thenpart else elsepart;
- if (result == EmptyTree) result = Literal(()).setPos(tree.pos).setType(UnitClass.tpe)
- case _ =>
- }
+ case x @ If(_, _, _) =>
+ transformIf(x)
case New(tpt) =>
enterReference(tree.pos, tpt.tpe.typeSymbol)
+ tree
- case Typed(expr, tpt @ Ident(name)) if (name == nme.WILDCARD_STAR.toTypeName) =>
- if (!isRepeatedParamArg(tree))
- unit.error(tree.pos, "no `: _*' annotation allowed here\n"+
- "(such annotations are only allowed in arguments to *-parameters)")
+ case Typed(expr, tpt @ Ident(name)) if name == nme.WILDCARD_STAR.toTypeName && !isRepeatedParamArg(tree) =>
+ unit.error(tree.pos, "no `: _*' annotation allowed here\n"+
+ "(such annotations are only allowed in arguments to *-parameters)")
+ tree
case Ident(name) =>
- if (isCaseApply(sym))
- result = toConstructor(tree.pos, tree.tpe)
- else if (name != nme.WILDCARD && name != nme.WILDCARD_STAR.toTypeName) {
- assert(sym != NoSymbol, tree)//debug
- enterReference(tree.pos, sym)
- }
-
- case Select(qual, name) =>
- checkDeprecated(sym, tree.pos)
- if (currentClass != sym.owner && (sym hasFlag LOCAL)) {
- var o = currentClass
- var hidden = false
- while (!hidden && o != sym.owner && o != sym.owner.moduleClass && !o.isPackage) {
- hidden = o.isTerm || o.isPrivateLocal
- o = o.owner
+ transformCaseApply(tree,
+ if (name != nme.WILDCARD && name != nme.WILDCARD_STAR.toTypeName) {
+ assert(sym != NoSymbol, tree) //debug
+ enterReference(tree.pos, sym)
}
- if (!hidden) escapedPrivateLocals += sym
- }
- if (isCaseApply(sym))
- result = toConstructor(tree.pos, tree.tpe)
- else qual match {
- case Super(qualifier, mix) =>
- val base = qual.symbol;
- //Console.println("super: " + tree + " in " + base);//DEBUG
- assert(!(base.isTrait && sym.isTerm && mix == nme.EMPTY.toTypeName)) // term should have been eliminated by super accessors
- case _ =>
- }
- case _ =>
+ )
+
+ case x @ Select(_, _) =>
+ transformSelect(x)
+
+ case _ => tree
}
result = result match {
case CaseDef(pat, guard, body) =>
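
Most of the RefChecks hunk is a mechanical extraction of transform's inner helpers into private methods to ease the burden on the optimizer, but transformIf also makes the constant-condition folding explicit: when the condition has a ConstantType, the If node is replaced by the taken branch (or a unit literal if that branch is empty). A small example of a condition that folds this way:

    object ConstantIfDemo {
      final val debug = false   // constant type: final, literal rhs, no annotation

      def main(args: Array[String]) {
        // After refchecks this If reduces to the else branch alone; the
        // then-branch never makes it into the generated code.
        if (debug) println("never emitted") else println("taken branch")
      }
    }
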
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 5001f8b9bf..fac10bfd72 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -27,7 +27,6 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
// inherits abstract value `global' and class `Phase' from Transform
import global._
- import typer.typed
/** the following two members override abstract members in Transform */
val phaseName: String = "superaccessors"
@@ -38,7 +37,6 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
class SuperAccTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
private var validCurrentOwner = true
private var accDefs: List[(Symbol, ListBuffer[Tree])] = List()
- private val typer = analyzer.newTyper(analyzer.rootContext(unit))
private def accDefBuf(clazz: Symbol) = accDefs find (_._1 == clazz) match {
case Some((_, buf)) => buf
@@ -54,17 +52,17 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
*/
private def transformArgs(args: List[Tree], formals: List[Type]) =
- List.map2(args, formals){ (arg, formal) =>
+ ((args, formals).zipped map { (arg, formal) =>
if (formal.typeSymbol == definitions.ByNameParamClass)
withInvalidOwner { checkPackedConforms(transform(arg), formal.typeArgs.head) }
else transform(arg)
- } :::
+ }) :::
(args drop formals.length map transform)
private def checkPackedConforms(tree: Tree, pt: Type): Tree = {
if (tree.tpe exists (_.typeSymbol.isExistentialSkolem)) {
- val packed = typer.packedType(tree, NoSymbol)
- if (!(packed <:< pt)) typer.infer.typeError(tree.pos, packed, pt)
+ val packed = localTyper.packedType(tree, NoSymbol)
+ if (!(packed <:< pt)) localTyper.infer.typeError(tree.pos, packed, pt)
}
tree
}
@@ -115,7 +113,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
superAcc.setInfo(superAccTpe.cloneInfo(superAcc))
//println("creating super acc "+superAcc+":"+superAcc.tpe)//DEBUG
clazz.info.decls enter superAcc;
- accDefBuf(clazz) += typed(DefDef(superAcc, EmptyTree))
+ accDefBuf(clazz) += typers(clazz).typed(DefDef(superAcc, EmptyTree))
}
atPos(sup.pos) {
Select(gen.mkAttributedThis(clazz), superAcc) setType tree.tpe;
@@ -128,112 +126,107 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
tree
}
- override def transform(tree: Tree): Tree = try { tree match {
- case ClassDef(_, _, _, _) =>
- checkCompanionNameClashes(tree.symbol)
- val decls = tree.symbol.info.decls
- for (sym <- decls.toList) {
- if (sym.privateWithin.isClass && !sym.privateWithin.isModuleClass &&
- !sym.hasFlag(EXPANDEDNAME) && !sym.isConstructor) {
- decls.unlink(sym)
- sym.expandName(sym.privateWithin)
- decls.enter(sym)
- }
- }
- super.transform(tree)
- case ModuleDef(_, _, _) =>
- checkCompanionNameClashes(tree.symbol)
- super.transform(tree)
- case Template(parents, self, body) =>
- val ownAccDefs = new ListBuffer[Tree];
- accDefs = (currentOwner, ownAccDefs) :: accDefs;
-
- // ugly hack... normally, the following line should not be
- // necessary, the 'super' method taking care of that. but because
- // that one is iterating through parents (and we dont want that here)
- // we need to inline it.
- curTree = tree
- val body1 = atOwner(currentOwner) { transformTrees(body) }
- accDefs = accDefs.tail;
- treeCopy.Template(tree, parents, self, ownAccDefs.toList ::: body1);
-
- case TypeApply(sel @ Select(This(_), name), args) =>
- val sym = tree.symbol
- if (needsProtectedAccessor(sym, tree.pos)) {
- if (settings.debug.value) log("Adding protected accessor for " + tree);
- transform(makeAccessor(sel.asInstanceOf[Select], args))
- } else
- tree
+ override def transform(tree: Tree): Tree = {
+ val sym = tree.symbol
- case Select(qual @ This(_), name) =>
- val sym = tree.symbol
- if ((sym hasFlag PARAMACCESSOR) && (sym.alias != NoSymbol)) {
- val result = typed {
- Select(
- Super(qual.symbol, nme.EMPTY.toTypeName/*qual.symbol.info.parents.head.symbol.name*/) setPos qual.pos,
- sym.alias) setPos tree.pos
- }
+ def mayNeedProtectedAccessor(sel: Select, args: List[Tree], goToSuper: Boolean) =
+ if (needsProtectedAccessor(sym, tree.pos)) {
if (settings.debug.value)
- Console.println("alias replacement: " + tree + " ==> " + result);//debug
- transformSuperSelect(result)
- } else {
- if (needsProtectedAccessor(sym, tree.pos)) {
- if (settings.debug.value) log("Adding protected accessor for " + tree);
- transform(makeAccessor(tree.asInstanceOf[Select], List(EmptyTree)))
- } else
- tree
- }
- case Select(sup @ Super(_, mix), name) =>
- val sym = tree.symbol
- if (sym.isValue && !sym.isMethod || sym.hasFlag(ACCESSOR)) {
- unit.error(tree.pos, "super may be not be used on "+
- (if (sym.hasFlag(ACCESSOR)) sym.accessed else sym))
- }
- transformSuperSelect(tree)
+ log("Adding protected accessor for " + tree)
- case TypeApply(sel @ Select(qual, name), args) =>
- val sym = tree.symbol
- if (needsProtectedAccessor(sym, tree.pos)) {
- if (settings.debug.value) log("Adding protected accessor for tree: " + tree);
- transform(makeAccessor(sel.asInstanceOf[Select], args))
- } else
+ transform(makeAccessor(sel, args))
+ }
+ else if (goToSuper) super.transform(tree)
+ else tree
+
+ try tree match {
+ case ClassDef(_, _, _, _) =>
+ checkCompanionNameClashes(sym)
+ val decls = sym.info.decls
+ for (s <- decls.toList) {
+ if (s.privateWithin.isClass && !s.privateWithin.isModuleClass &&
+ !s.hasFlag(EXPANDEDNAME) && !s.isConstructor) {
+ decls.unlink(s)
+ s.expandName(s.privateWithin)
+ decls.enter(s)
+ }
+ }
super.transform(tree)
-
- case Select(qual, name) =>
- val sym = tree.symbol
- if (needsProtectedAccessor(sym, tree.pos)) {
- if (settings.debug.value) log("Adding protected accessor for tree: " + tree);
- transform(makeAccessor(tree.asInstanceOf[Select], List(EmptyTree)))
- } else
+ case ModuleDef(_, _, _) =>
+ checkCompanionNameClashes(sym)
super.transform(tree)
+ case Template(parents, self, body) =>
+ val ownAccDefs = new ListBuffer[Tree];
+ accDefs = (currentOwner, ownAccDefs) :: accDefs;
+
+ // ugly hack... normally, the following line should not be
+ // necessary, the 'super' method taking care of that. but because
+ // that one is iterating through parents (and we dont want that here)
+ // we need to inline it.
+ curTree = tree
+ val body1 = atOwner(currentOwner) { transformTrees(body) }
+ accDefs = accDefs.tail;
+ treeCopy.Template(tree, parents, self, ownAccDefs.toList ::: body1);
+
+ case TypeApply(sel @ Select(This(_), name), args) =>
+ mayNeedProtectedAccessor(sel, args, false)
+
+ case sel @ Select(qual @ This(_), name) =>
+ if ((sym hasFlag PARAMACCESSOR) && (sym.alias != NoSymbol)) {
+ val result = localTyper.typed {
+ Select(
+ Super(qual.symbol, nme.EMPTY.toTypeName/*qual.symbol.info.parents.head.symbol.name*/) setPos qual.pos,
+ sym.alias) setPos tree.pos
+ }
+ if (settings.debug.value)
+ Console.println("alias replacement: " + tree + " ==> " + result);//debug
+ transformSuperSelect(result)
+ }
+ else mayNeedProtectedAccessor(sel, List(EmptyTree), false)
- case Assign(lhs @ Select(qual, name), rhs) =>
- if (lhs.symbol.isVariable &&
- lhs.symbol.hasFlag(JAVA) &&
- needsProtectedAccessor(lhs.symbol, tree.pos)) {
- if (settings.debug.value) log("Adding protected setter for " + tree)
- val setter = makeSetter(lhs);
- if (settings.debug.value)
- log("Replaced " + tree + " with " + setter);
- transform(typed(Apply(setter, List(qual, rhs))))
- } else
+ case Select(sup @ Super(_, mix), name) =>
+ if (sym.isValue && !sym.isMethod || sym.hasFlag(ACCESSOR)) {
+ unit.error(tree.pos, "super may be not be used on "+
+ (if (sym.hasFlag(ACCESSOR)) sym.accessed else sym))
+ }
+ transformSuperSelect(tree)
+
+ case TypeApply(sel @ Select(qual, name), args) =>
+ mayNeedProtectedAccessor(sel, args, true)
+
+ case sel @ Select(qual, name) =>
+ mayNeedProtectedAccessor(sel, List(EmptyTree), true)
+
+ case Assign(lhs @ Select(qual, name), rhs) =>
+ if (lhs.symbol.isVariable &&
+ lhs.symbol.hasFlag(JAVA) &&
+ needsProtectedAccessor(lhs.symbol, tree.pos)) {
+ if (settings.debug.value) log("Adding protected setter for " + tree)
+ val setter = makeSetter(lhs);
+ if (settings.debug.value)
+ log("Replaced " + tree + " with " + setter);
+ transform(localTyper.typed(Apply(setter, List(qual, rhs))))
+ } else
+ super.transform(tree)
+
+ case Apply(fn, args) =>
+ assert(fn.tpe != null, tree)
+ treeCopy.Apply(tree, transform(fn), transformArgs(args, fn.tpe.paramTypes))
+ case Function(vparams, body) =>
+ withInvalidOwner {
+ treeCopy.Function(tree, vparams, transform(body))
+ }
+ case _ =>
super.transform(tree)
+ }
+ catch {
+ case ex : AssertionError =>
+ if (sym != null && sym != NoSymbol)
+ Console.println("TRANSFORM: " + tree.symbol.sourceFile)
- case Apply(fn, args) =>
- assert(fn.tpe != null, tree)
- treeCopy.Apply(tree, transform(fn), transformArgs(args, fn.tpe.paramTypes))
- case Function(vparams, body) =>
- withInvalidOwner {
- treeCopy.Function(tree, vparams, transform(body))
- }
- case _ =>
- super.transform(tree)
- }} catch {
- case ex : AssertionError =>
- if (tree.symbol != null && tree.symbol != NoSymbol)
- Console.println("TRANSFORM: " + tree.symbol.sourceFile)
- Console.println("TREE: " + tree)
- throw ex
+ Console.println("TREE: " + tree)
+ throw ex
+ }
}
override def atOwner[A](owner: Symbol)(trans: => A): A = {
@@ -295,7 +288,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
val obj = protAcc.paramss.head.head // receiver
protAcc.paramss.tail.zip(allParamTypes(sym.tpe)).foldLeft(Select(Ident(obj), sym): Tree) (
(fun, pvparams) => {
- Apply(fun, (List.map2(pvparams._1, pvparams._2) { (v, origTpe) => makeArg(v, obj, origTpe) } ))
+ Apply(fun, (pvparams._1, pvparams._2).zipped map (makeArg(_, obj, _)))
})
})
@@ -311,7 +304,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
}
if (settings.debug.value)
log("Replaced " + tree + " with " + res)
- if (hasArgs) typer.typedOperator(res) else typer.typed(res)
+ if (hasArgs) localTyper.typedOperator(res) else localTyper.typed(res)
}
/** Adapt the given argument in call to protected member.
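Several hunks in this patch (here, and below in SyntheticMethods and Typers) replace the deprecated List.map2 / List.exists2 helpers with the (xs, ys).zipped view, which traverses two collections in lockstep without first building a list of pairs. A small standalone example of the idiom, not taken from the patch:

    val args    = List("a", "bb", "ccc")
    val formals = List(1, 2, 3)

    // old style, deprecated by this patch:  List.map2(args, formals)((a, n) => a * n)
    // new style used throughout the patch:
    val repeated = (args, formals).zipped map ((a, n) => a * n)
    // repeated == List("a", "bbbb", "ccccccccc")

    val anyEmpty = (args, formals).zipped exists ((a, n) => a.isEmpty && n == 0)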
diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
index a3f628ebb4..d6e6afdac0 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala
@@ -182,7 +182,7 @@ trait SyntheticMethods extends ast.TreeDSL {
}
// Creates list of parameters and a guard for each
- val (guards, params) = List.map2(clazz.caseFieldAccessors, constrParamTypes)(makeTrees) unzip
+ val (guards, params) = (clazz.caseFieldAccessors, constrParamTypes).zipped map makeTrees unzip
// Verify with canEqual method before returning true.
def canEqualCheck() = {
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 3e8e803c13..9eb89e26bb 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -42,7 +42,6 @@ abstract class TreeCheckers extends Analyzer {
override def newTyper(context: Context): Typer = new TreeChecker(context)
class TreeChecker(context0: Context) extends Typer(context0) {
-
import infer._
override def typed(tree: Tree, mode: Int, pt: Type): Tree = {
@@ -50,104 +49,80 @@ abstract class TreeCheckers extends Analyzer {
case EmptyTree | TypeTree() =>
;
case _ =>
- try {
- if (!tpeOfTree.contains(tree)) {
- tpeOfTree.update(tree, tree.tpe)
- tree.tpe = null
- }
- val newtree = super.typed(tree, mode, pt);
- if ((newtree ne tree) && !newtree.isInstanceOf[Literal])
- error(tree.pos, "trees differ\n old: " + tree + " [" + tree.getClass() +
- "]\n new: " + newtree + " [" + newtree.getClass() + "]")
- } catch {
- case ex: Throwable =>
- Console.println("exception while typing "+tree)
- throw ex
+ if (!tpeOfTree.contains(tree)) {
+ tpeOfTree.update(tree, tree.tpe)
+ tree.tpe = null
}
+ val newtree = super.typed(tree, mode, pt);
+ if ((newtree ne tree) && !newtree.isInstanceOf[Literal])
+ error(tree.pos, "trees differ\n old: " + tree + " [" + tree.getClass() +
+ "]\n new: " + newtree + " [" + newtree.getClass() + "]")
}
tree
}
object precheck extends Traverser {
override def traverse(tree: Tree) {
- try {
- tree match {
- case DefDef(_, _, _, _, _, _) =>
- if (tree.symbol.hasFlag(ACCESSOR) &&
- !tree.symbol.isDeferred &&
- !tree.symbol.tpe.resultType.isInstanceOf[ConstantType]) {
- assert(tree.symbol.accessed != NoSymbol, tree.symbol)
- assert(tree.symbol.accessed.getter(tree.symbol.owner) == tree.symbol ||
- tree.symbol.accessed.setter(tree.symbol.owner) == tree.symbol)
- }
- case ValDef(_, _, _, _) =>
- if (tree.symbol.hasGetter) {
- assert(tree.symbol.getter(tree.symbol.owner) != NoSymbol, tree.symbol)
- }
- case Apply(_, args) =>
- assert(args forall (EmptyTree !=))
- case Select(_, _) =>
- assert(tree.symbol != NoSymbol, tree)
- case This(_) =>
- if (!(tree.symbol.isStatic && (tree.symbol hasFlag MODULE))) {
- var o = currentOwner
- while (o != tree.symbol) {
- o = o.owner
- if (o == NoSymbol) {
- error(tree.pos, "tree symbol "+tree.symbol+" does not point to enclosing class; tree = "+tree)
- return
- }
- }
+ tree match {
+ case DefDef(_, _, _, _, _, _) =>
+ if (tree.symbol.hasFlag(ACCESSOR) &&
+ !tree.symbol.isDeferred &&
+ !tree.symbol.tpe.resultType.isInstanceOf[ConstantType]) {
+ assert(tree.symbol.accessed != NoSymbol, tree.symbol)
+ assert(tree.symbol.accessed.getter(tree.symbol.owner) == tree.symbol ||
+ tree.symbol.accessed.setter(tree.symbol.owner) == tree.symbol)
+ }
+ case ValDef(_, _, _, _) =>
+ if (tree.symbol.hasGetter) {
+ assert(tree.symbol.getter(tree.symbol.owner) != NoSymbol, tree.symbol)
+ }
+ case Apply(_, args) =>
+ assert(args forall (EmptyTree !=))
+ case Select(_, _) =>
+ assert(tree.symbol != NoSymbol, tree)
+ case This(_) =>
+ if (!(tree.symbol.isStatic && (tree.symbol hasFlag MODULE))) {
+ if (currentOwner.ownerChain takeWhile (_ != tree.symbol) exists (_ == NoSymbol)) {
+ error(tree.pos, "tree symbol "+tree.symbol+" does not point to enclosing class; tree = "+tree)
+ return
}
- case _ =>
- }
- if (tree.pos == NoPosition && tree != EmptyTree) {
- error(tree.pos, "tree without position: " + tree)
- } else if ((tree.tpe eq null) && phase.id >= currentRun.typerPhase.id) {
- error(tree.pos, "tree without type: " + tree)
- } else if (tree.isDef && tree.symbol.owner != currentOwner) {
- var owner = currentOwner
- while (owner.isTerm && !owner.isMethod && tree.symbol.owner != owner)
- owner = owner.owner;
- if (tree.symbol.owner != owner) {
- error(tree.pos, "" + tree.symbol + " has wrong owner: " + tree.symbol.owner +
- tree.symbol.owner.locationString + ", should be: " +
- currentOwner + currentOwner.locationString)
}
- } else {
- super.traverse(tree)
+ case _ =>
+ }
+ if (tree.pos == NoPosition && tree != EmptyTree) {
+ error(tree.pos, "tree without position: " + tree)
+ } else if ((tree.tpe eq null) && phase.id >= currentRun.typerPhase.id) {
+ error(tree.pos, "tree without type: " + tree)
+ } else if (tree.isDef && tree.symbol.owner != currentOwner) {
+ var owner = currentOwner
+ while (owner.isTerm && !owner.isMethod && tree.symbol.owner != owner)
+ owner = owner.owner;
+ if (tree.symbol.owner != owner) {
+ error(tree.pos, "" + tree.symbol + " has wrong owner: " + tree.symbol.owner +
+ tree.symbol.owner.locationString + ", should be: " +
+ currentOwner + currentOwner.locationString)
}
- } catch {
- case ex: Throwable =>
- if (settings.debug.value)
- Console.println("exception when traversing " + tree);
- throw(ex)
+ } else {
+ super.traverse(tree)
}
}
}
object postcheck extends Traverser {
override def traverse(tree: Tree) {
- try {
- tree match {
- case EmptyTree | TypeTree() =>
- ;
- case _ =>
- tpeOfTree.get(tree) match {
- case Some(oldtpe) =>
- if (!(oldtpe =:= tree.tpe))
- error(tree.pos, "types differ\n old: " + oldtpe +
- "\n new: " + tree.tpe + "\n tree: " + tree)
- tree.tpe = oldtpe
- super.traverse(tree)
- case None =>
- }
- }
- } catch {
- case ex: Throwable =>
- if (settings.debug.value)
- Console.println("exception when traversing " + tree);
- throw(ex)
+ tree match {
+ case EmptyTree | TypeTree() =>
+ ;
+ case _ =>
+ tpeOfTree.get(tree) match {
+ case Some(oldtpe) =>
+ if (!(oldtpe =:= tree.tpe))
+ error(tree.pos, "types differ\n old: " + oldtpe +
+ "\n new: " + tree.tpe + "\n tree: " + tree)
+ tree.tpe = oldtpe
+ super.traverse(tree)
+ case None =>
+ }
}
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index 6e6fdd3c35..c3ca5d7e30 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -1141,7 +1141,7 @@ trait Typers { self: Analyzer =>
if (!supertparams.isEmpty) error(supertpt.pos, "missing type arguments")
}
- List.map2(cstats1, treeInfo.preSuperFields(templ.body)) {
+ (cstats1, treeInfo.preSuperFields(templ.body)).zipped map {
(ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe
}
case _ =>
@@ -1314,16 +1314,34 @@ trait Typers { self: Analyzer =>
case ValDef(mods, name, tpt, rhs)
if (mods.flags & (PRIVATE | LOCAL)) != (PRIVATE | LOCAL).toLong && !stat.symbol.isModuleVar =>
+ /** The annotations amongst `annots` that should go on a member of class
+ * `memberClass` (field, getter, setter, beanGetter, beanSetter)
+ */
def memberAnnots(annots: List[AnnotationInfo], memberClass: Symbol) = {
+
+ def hasMatching(metaAnnots: List[AnnotationInfo], orElse: => Boolean) = {
+ // either one of the meta-annotations matches the `memberClass`
+ metaAnnots.exists(_.atp.typeSymbol == memberClass) ||
+ // else, if there is no `target` meta-annotation at all, use the default case
+ (metaAnnots.forall(ann => {
+ val annClass = ann.atp.typeSymbol
+ annClass != GetterClass && annClass != SetterClass &&
+ annClass != BeanGetterClass && annClass != BeanSetterClass
+ }) && orElse)
+ }
+
+ // there was no meta-annotation on `ann`. Look if the class annotations of
+ // `ann` has a `target` annotation, otherwise put `ann` only on fields.
+ def noMetaAnnot(ann: AnnotationInfo) = {
+ hasMatching(ann.atp.typeSymbol.annotations, memberClass == FieldClass)
+ }
+
annots.filter(ann => ann.atp match {
- case AnnotatedType(annots, _, _) =>
- annots.exists(_.atp.typeSymbol == memberClass) ||
- (memberClass == FieldClass && annots.forall(ann => {
- val annClass = ann.atp.typeSymbol
- annClass != GetterClass && annClass != SetterClass &&
- annClass != BeanGetterClass && annClass != BeanSetterClass
- }))
- case _ => memberClass == FieldClass
+ // the annotation type has meta-annotations, e.g. @(foo @getter)
+ case AnnotatedType(metaAnnots, _, _) =>
+ hasMatching(metaAnnots, noMetaAnnot(ann))
+ // there are no meta-annotations, e.g. @foo
+ case _ => noMetaAnnot(ann)
})
}
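The rewritten memberAnnots above decides where a field annotation is copied (field, getter, setter, bean accessors) by first looking for target meta-annotations on the annotated type, and otherwise falling back to a default target declared on the annotation class itself. Roughly, usage looks like this (a sketch; Id, pk, Entity and Row are made-up names, and both annotations are assumed to be plain StaticAnnotations):

    import scala.annotation.target._
    import scala.reflect.BeanProperty

    class Id extends scala.annotation.StaticAnnotation

    class Entity {
      // meta-annotation on the annotated type: @Id ends up on the bean getter getX
      @(Id @beanGetter) @BeanProperty val x = 0
    }

    // default target declared on the annotation class itself, so plain uses of
    // @pk land on the getter instead of the field
    @getter
    class pk extends scala.annotation.StaticAnnotation

    class Row {
      @pk val id = 42
    }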
@@ -1555,7 +1573,7 @@ trait Typers { self: Analyzer =>
if (!superClazz.hasFlag(JAVA)) {
val superParamAccessors = superClazz.constrParamAccessors
if (superParamAccessors.length == superArgs.length) {
- List.map2(superParamAccessors, superArgs) { (superAcc, superArg) =>
+ (superParamAccessors, superArgs).zipped map { (superAcc, superArg) =>
superArg match {
case Ident(name) =>
if (vparamss.exists(_.exists(_.symbol == superArg.symbol))) {
@@ -1715,7 +1733,7 @@ trait Typers { self: Analyzer =>
ddef.tpt.setType(tpt1.tpe)
val typedMods = removeAnnotations(ddef.mods)
var rhs1 =
- if (ddef.name == nme.CONSTRUCTOR) {
+ if (ddef.name == nme.CONSTRUCTOR && !ddef.symbol.hasFlag(STATIC)) { // need this to make it possible to generate static ctors
if (!meth.isPrimaryConstructor &&
(!meth.owner.isClass ||
meth.owner.isModuleClass ||
@@ -1906,7 +1924,7 @@ trait Typers { self: Analyzer =>
if (fun.vparams.length != argpts.length)
errorTree(fun, "wrong number of parameters; expected = " + argpts.length)
else {
- val vparamSyms = List.map2(fun.vparams, argpts) { (vparam, argpt) =>
+ val vparamSyms = (fun.vparams, argpts).zipped map { (vparam, argpt) =>
if (vparam.tpt.isEmpty) {
vparam.tpt.tpe =
if (isFullyDefined(argpt)) argpt
@@ -2083,13 +2101,13 @@ trait Typers { self: Analyzer =>
val losym = tparam.info.bounds.lo.typeSymbol
losym != NothingClass && losym != NullClass
}
- List.exists2(formals, args) {
+ (formals, args).zipped exists {
case (formal, Function(vparams, _)) =>
(vparams exists (_.tpt.isEmpty)) &&
vparams.length <= MaxFunctionArity &&
(formal baseType FunctionClass(vparams.length) match {
case TypeRef(_, _, formalargs) =>
- List.exists2(formalargs, vparams) ((formalarg, vparam) =>
+ (formalargs, vparams).zipped.exists ((formalarg, vparam) =>
vparam.tpt.isEmpty && (tparams exists (formalarg contains))) &&
(tparams forall isLowerBounded)
case _ =>
@@ -2130,6 +2148,10 @@ trait Typers { self: Analyzer =>
val pre = fun.symbol.tpe.prefix
var sym = fun.symbol filter { alt =>
+ // must use pt as expected type, not WildcardType (a tempting quick fix to #2665)
+ // now fixed by using isWeaklyCompatible in exprTypeArgs
+ // TODO: understand why exactly -- some types were not inferred anymore (`ant clean quick.bin` failed)
+ // (I had expected inferMethodAlternative to pick up the slack introduced by using WildcardType here)
isApplicableSafe(context.undetparams, followApply(pre.memberType(alt)), argtypes, pt)
}
if (sym hasFlag OVERLOADED) {
@@ -2164,7 +2186,8 @@ trait Typers { self: Analyzer =>
arg1
}
context.undetparams = undetparams
- inferMethodAlternative(fun, undetparams, argtpes.toList, pt)
+ inferMethodAlternative(fun, undetparams, argtpes.toList, pt,
+ varArgsOnly = args.nonEmpty && treeInfo.isWildcardStarArg(args.last))
doTypedApply(tree, adapt(fun, funMode(mode), WildcardType), args1, mode, pt)
case mt @ MethodType(params, _) =>
@@ -2318,7 +2341,7 @@ trait Typers { self: Analyzer =>
} else {
assert((mode & PATTERNmode) == 0); // this case cannot arise for patterns
val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt)
- val strictTargs = List.map2(lenientTargs, tparams)((targ, tparam) =>
+ val strictTargs = (lenientTargs, tparams).zipped map ((targ, tparam) =>
if (targ == WildcardType) tparam.tpe else targ) //@M TODO: should probably be .tpeHK
def typedArgToPoly(arg: Tree, formal: Type): Tree = {
val lenientPt = formal.instantiateTypeParams(tparams, lenientTargs)
@@ -2330,7 +2353,7 @@ trait Typers { self: Analyzer =>
}
arg1
}
- val args1 = List.map2(args, formals)(typedArgToPoly)
+ val args1 = (args, formals).zipped map typedArgToPoly
if (args1 exists (_.tpe.isError)) setError(tree)
else {
if (settings.debug.value) log("infer method inst "+fun+", tparams = "+tparams+", args = "+args1.map(_.tpe)+", pt = "+pt+", lobounds = "+tparams.map(_.tpe.bounds.lo)+", parambounds = "+tparams.map(_.info));//debug
@@ -2843,7 +2866,7 @@ trait Typers { self: Analyzer =>
* class NPE[T <: NPE[T] @peer]
*
* (Note: -Yself-in-annots must be on to see the problem)
- **/
+ * */
val sym =
context.owner.newLocalDummy(ann.pos)
.newValue(ann.pos, nme.self)
@@ -3591,7 +3614,7 @@ trait Typers { self: Analyzer =>
// @M! added the latter condition
appliedType(tpt1.tpe, argtypes)
else tpt1.tpe.instantiateTypeParams(tparams, argtypes)
- List.map2(args, tparams) { (arg, tparam) => arg match {
+ (args, tparams).zipped map { (arg, tparam) => arg match {
// note: can't use args1 in selector, because Bind's got replaced
case Bind(_, _) =>
if (arg.symbol.isAbstractType)
@@ -3675,7 +3698,7 @@ trait Typers { self: Analyzer =>
case UnApply(fun, args) =>
val fun1 = typed(fun)
val tpes = formalTypes(unapplyTypeList(fun.symbol, fun1.tpe), args.length)
- val args1 = List.map2(args, tpes)(typedPattern(_, _))
+ val args1 = (args, tpes).zipped map (typedPattern(_, _))
treeCopy.UnApply(tree, fun1, args1) setType pt
case ArrayValue(elemtpt, elems) =>
diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala
index aab74d8c02..cb48e432cd 100644
--- a/src/compiler/scala/tools/nsc/util/ClassPath.scala
+++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala
@@ -308,7 +308,7 @@ abstract class MergedClassPath[T] extends ClassPath[T] {
def sourcepaths: List[AbstractFile] = entries.flatMap(_.sourcepaths)
private def addPackage(to: ClassPath[T], pkg: ClassPath[T]) = to match {
- case cp: MergedClassPath[T] =>
+ case cp: MergedClassPath[_] =>
newMergedClassPath(cp.entries ::: List(pkg))
case _ =>
newMergedClassPath(List(to, pkg))
diff --git a/src/compiler/scala/tools/nsc/util/HashSet.scala b/src/compiler/scala/tools/nsc/util/HashSet.scala
index ebc517266b..32aef80d25 100644
--- a/src/compiler/scala/tools/nsc/util/HashSet.scala
+++ b/src/compiler/scala/tools/nsc/util/HashSet.scala
@@ -24,7 +24,7 @@ class HashSet[T >: Null <: AnyRef](val label: String, initialCapacity: Int) exte
table = new Array[AnyRef](capacity)
}
- private def index(x: Int): Int = Math.abs(x % capacity)
+ private def index(x: Int): Int = math.abs(x % capacity)
def findEntry(x: T): T = {
var h = index(x.hashCode())
diff --git a/src/library/scala/util/ScalaClassLoader.scala b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
index 3393f0413f..fa1227a859 100644
--- a/src/library/scala/util/ScalaClassLoader.scala
+++ b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala
@@ -3,7 +3,8 @@
* @author Paul Phillips
*/
-package scala.util
+package scala.tools.nsc
+package util
import java.lang.{ ClassLoader => JavaClassLoader }
import java.lang.reflect.{ Constructor, Modifier, Method }
@@ -55,15 +56,15 @@ trait ScalaClassLoader extends JavaClassLoader
}
}
-class URLClassLoader(urls: Seq[URL], parent: JavaClassLoader)
- extends java.net.URLClassLoader(urls.toArray, parent)
- with ScalaClassLoader
-{
- /** Override to widen to public */
- override def addURL(url: URL) = super.addURL(url)
-}
object ScalaClassLoader {
+ class URLClassLoader(urls: Seq[URL], parent: JavaClassLoader)
+ extends java.net.URLClassLoader(urls.toArray, parent)
+ with ScalaClassLoader {
+ /** Override to widen to public */
+ override def addURL(url: URL) = super.addURL(url)
+ }
+
def setContextLoader(cl: JavaClassLoader) = Thread.currentThread.setContextClassLoader(cl)
def getContextLoader() = Thread.currentThread.getContextClassLoader()
def getSystemLoader() = JavaClassLoader.getSystemClassLoader()
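This hunk moves ScalaClassLoader from the standard library into scala.tools.nsc.util and nests the widened URLClassLoader inside the companion object. Assuming that layout, a caller inside the compiler tree could use it roughly like this (a sketch with hypothetical paths, not code from the patch):

    import java.net.URL
    import scala.tools.nsc.util.ScalaClassLoader

    val urls: Seq[URL] = Seq(new URL("file:///tmp/extra-classes/"))   // hypothetical entry
    val loader = new ScalaClassLoader.URLClassLoader(urls, ScalaClassLoader.getSystemLoader())

    // addURL was widened to public, so entries can be appended after construction
    loader.addURL(new URL("file:///tmp/more-jars/extra.jar"))          // hypothetical entry
    ScalaClassLoader.setContextLoader(loader)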
diff --git a/src/compiler/scala/tools/nsc/util/TreeSet.scala b/src/compiler/scala/tools/nsc/util/TreeSet.scala
index 1862212467..059ffd26e4 100644
--- a/src/compiler/scala/tools/nsc/util/TreeSet.scala
+++ b/src/compiler/scala/tools/nsc/util/TreeSet.scala
@@ -43,14 +43,10 @@ class TreeSet[T >: Null <: AnyRef](less: (T, T) => Boolean) extends Set[T] {
def iterator = {
def elems(t: Tree): Iterator[T] = {
- var it = Iterator single t.elem
- if (t.l ne null) it = elems(t.l) append it
- if (t.r ne null) it = it append elems(t.r)
- // if (t.l ne null) it = elems(t.l) ++ it
- // if (t.r ne null) it = it ++ elems(t.r)
- it
+ if (t eq null) Iterator.empty
+ else elems(t.l) ++ (Iterator single t.elem) ++ elems(t.r)
}
- if (tree eq null) Iterator.empty else elems(tree)
+ elems(tree)
}
override def toString(): String = {
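The new iterator above walks the tree recursively, treating a null node as an empty iterator and concatenating the left subtree, the node's element, and the right subtree. The same in-order recursion written against a tiny stand-alone tree type (hypothetical, just to show the pattern):

    // In-order iteration over a simple binary tree, mirroring the
    // null-as-empty recursion used by TreeSet.iterator above.
    case class Node[T](elem: T, l: Node[T], r: Node[T])

    def elems[T](t: Node[T]): Iterator[T] =
      if (t eq null) Iterator.empty
      else elems(t.l) ++ Iterator.single(t.elem) ++ elems(t.r)

    val tree = Node(2, Node(1, null, null), Node(3, null, null))
    // elems(tree).toList == List(1, 2, 3)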
diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java b/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
index b8f29a6a2b..6ee18a59df 100644
--- a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
+++ b/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java
@@ -74,7 +74,7 @@ public class JExtendedCode extends JCode {
},
{
/* T_SHORT -> T_BOOLEAN */ forbidden,
- /* T_SHORT -> T_CHAR */ nothingToDo,
+ /* T_SHORT -> T_CHAR */ {JOpcode.I2C},
/* T_SHORT -> T_FLOAT */ {JOpcode.I2F},
/* T_SHORT -> T_DOUBLE */ {JOpcode.I2D},
/* T_SHORT -> T_BYTE */ {JOpcode.I2B},
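The fjbg conversion table previously treated T_SHORT -> T_CHAR as nothingToDo; emitting I2C truncates the value to an unsigned 16-bit char instead of leaving a sign-extended int on the stack (part of the #2087/#2400 fix listed in the merged revisions). The difference is visible at the Scala level; a small example, assuming the corrected conversion:

    val s: Short = -1          // bit pattern 0xFFFF as a signed short
    val c: Char  = s.toChar    // with I2C emitted this is '\uffff' (65535)

    // Without the conversion opcode the sign-extended int could leak through,
    // so comparisons against the properly truncated char value could go wrong.
    assert(c.toInt == 65535)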
diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala
index a323dccffc..acd3d17059 100644
--- a/src/library/scala/Array.scala
+++ b/src/library/scala/Array.scala
@@ -42,10 +42,6 @@ class FallbackArrayBuilding {
* @version 1.0
*/
object Array extends FallbackArrayBuilding {
-
- import runtime.BoxedArray;
- import scala.runtime.ScalaRunTime.boxArray;
-
implicit def canBuildFrom[T](implicit m: ClassManifest[T]): CanBuildFrom[Array[_], T, Array[T]] =
new CanBuildFrom[Array[_], T, Array[T]] {
def apply(from: Array[_]) = ArrayBuilder.make[T]()(m)
diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala
index 01f99d550a..86e0dbe708 100644
--- a/src/library/scala/Enumeration.scala
+++ b/src/library/scala/Enumeration.scala
@@ -176,7 +176,7 @@ abstract class Enumeration(initial: Int, names: String*) {
val value = m.invoke(this)
// invoke `id` method
val idMeth = classOf[Val].getMethod("id")
- val id: Int = idMeth.invoke(value).asInstanceOf[Integer].intValue()
+ val id: Int = idMeth.invoke(value).asInstanceOf[java.lang.Integer].intValue()
nmap += (id -> name)
}
nmap(i)
diff --git a/src/library/scala/Math.scala b/src/library/scala/Math.scala
index c3e619594b..5527642e91 100644
--- a/src/library/scala/Math.scala
+++ b/src/library/scala/Math.scala
@@ -8,66 +8,99 @@
// $Id$
-
package scala
/** The object <code>Math</code> contains methods for performing basic numeric
* operations such as the elementary exponential, logarithm, square root, and
* trigonometric functions.
*/
+@deprecated("use scala.math package instead")
object Math {
+ @deprecated("Use scala.Byte.MinValue instead")
+ val MIN_BYTE = java.lang.Byte.MIN_VALUE
+
+ @deprecated("Use scala.Byte.MaxValue instead")
+ val MAX_BYTE = java.lang.Byte.MAX_VALUE
+
+ @deprecated("Use scala.Short.MinValue instead")
+ val MIN_SHORT = java.lang.Short.MIN_VALUE
+
+ @deprecated("Use scala.Short.MaxValue instead")
+ val MAX_SHORT = java.lang.Short.MAX_VALUE
+
+ @deprecated("Use scala.Char.MinValue instead")
+ val MIN_CHAR = java.lang.Character.MIN_VALUE
- /** The smallest possible value for <a href="Byte.html" target="_self">scala.Byte</a>. */
- val MIN_BYTE = java.lang.Byte.MIN_VALUE
- /** The greatest possible value for <a href="Byte.html" target="_self">scala.Byte</a>. */
- val MAX_BYTE = java.lang.Byte.MAX_VALUE
+ @deprecated("Use scala.Char.MaxValue instead")
+ val MAX_CHAR = java.lang.Character.MAX_VALUE
- /** The smallest possible value for <a href="Short.html" target="_self">scala.Short</a>. */
- val MIN_SHORT = java.lang.Short.MIN_VALUE
- /** The greatest possible value for <a href="Short.html" target="_self">scala.Short</a>. */
- val MAX_SHORT = java.lang.Short.MAX_VALUE
+ @deprecated("Use scala.Int.MinValue instead")
+ val MIN_INT = java.lang.Integer.MIN_VALUE
- /** The smallest possible value for <a href="Char.html" target="_self">scala.Char</a>. */
- val MIN_CHAR = java.lang.Character.MIN_VALUE
- /** The greatest possible value for <a href="Char.html" target="_self">scala.Char</a>. */
- val MAX_CHAR = java.lang.Character.MAX_VALUE
+ @deprecated("Use scala.Int.MaxValue instead")
+ val MAX_INT = java.lang.Integer.MAX_VALUE
- /** The smallest possible value for <a href="Int.html" target="_self">scala.Int</a>. */
- val MIN_INT = java.lang.Integer.MIN_VALUE
- /** The greatest possible value for <a href="Int.html" target="_self">scala.Int</a>. */
- val MAX_INT = java.lang.Integer.MAX_VALUE
+ @deprecated("Use scala.Long.MinValue instead")
+ val MIN_LONG = java.lang.Long.MIN_VALUE
- /** The smallest possible value for <a href="Long.html" target="_self">scala.Long</a>. */
- val MIN_LONG = java.lang.Long.MIN_VALUE
- /** The greatest possible value for <a href="Long.html" target="_self">scala.Long</a>. */
- val MAX_LONG = java.lang.Long.MAX_VALUE
+ @deprecated("Use scala.Long.MaxValue instead")
+ val MAX_LONG = java.lang.Long.MAX_VALUE
/** The smallest possible value for <a href="Float.html" target="_self">scala.Float</a>. */
+ @deprecated("Use scala.Float.MinValue instead")
val MIN_FLOAT = -java.lang.Float.MAX_VALUE
+
/** The smallest difference between two values of <a href="Float.html" target="_self">scala.Float</a>. */
+ @deprecated("Use scala.Float.Epsilon instead")
val EPS_FLOAT = java.lang.Float.MIN_VALUE
+
/** The greatest possible value for <a href="Float.html" target="_self">scala.Float</a>. */
+ @deprecated("Use scala.Float.MaxValue instead")
val MAX_FLOAT = java.lang.Float.MAX_VALUE
+
/** A value of type <a href="Float.html" target="_self">scala.Float</a> that represents no number. */
+ @deprecated("Use scala.Float.NaN instead")
val NaN_FLOAT = java.lang.Float.NaN
+
/** Negative infinity of type <a href="Float.html" target="_self">scala.Float</a>. */
+ @deprecated("Use scala.Float.NegativeInfinity instead")
val NEG_INF_FLOAT = java.lang.Float.NEGATIVE_INFINITY
+
/** Positive infinity of type <a href="Float.html" target="_self">scala.Float</a>. */
+ @deprecated("Use scala.Float.PositiveInfinity instead")
val POS_INF_FLOAT = java.lang.Float.POSITIVE_INFINITY
/** The smallest possible value for <a href="Double.html" target="_self">scala.Double</a>. */
+ @deprecated("Use scala.Double.MinValue instead")
val MIN_DOUBLE = -java.lang.Double.MAX_VALUE
+
/** The smallest difference between two values of <a href="Double.html" target="_self">scala.Double</a>. */
+ @deprecated("Use scala.Double.Epsilon instead")
val EPS_DOUBLE = java.lang.Double.MIN_VALUE
+
/** The greatest possible value for <a href="Double.html" target="_self">scala.Double</a>. */
+ @deprecated("Use scala.Double.MaxValue instead")
val MAX_DOUBLE = java.lang.Double.MAX_VALUE
+
/** A value of type <a href="Double.html" target="_self">scala.Double</a> that represents no number. */
+ @deprecated("Use scala.Double.NaN instead")
val NaN_DOUBLE = java.lang.Double.NaN
+
/** Negative infinity of type <a href="Double.html" target="_self">scala.Double</a>. */
+ @deprecated("Use scala.Double.NegativeInfinity instead")
val NEG_INF_DOUBLE = java.lang.Double.NEGATIVE_INFINITY
+
/** Positive infinity of type <a href="Double.html" target="_self">scala.Double</a>. */
+ @deprecated("Use scala.Double.PositiveInfinity instead")
val POS_INF_DOUBLE = java.lang.Double.POSITIVE_INFINITY
+ /** The code from here down is cut/pasted from the math package object.
+ * It should properly be in a shared trait but as of this writing
+ * inherited members in package objects are not visible.
+ */
+
+ /*******************************************************************/
+
/** The <code>double</code> value that is closer than any other to
* <code>e</code>, the base of the natural logarithms.
*/
@@ -174,29 +207,25 @@ object Math {
def min(x: Float, y: Float): Float = java.lang.Math.min(x, y)
def min(x: Double, y: Double): Double = java.lang.Math.min(x, y)
- def signum(x: Double): Double = x match { case 0 => 0
- case y if y < 0 => -1.0
- case y if y > 0 => 1.0 }
- def signum(x: Float): Float = x match { case 0f => 0f
- case y if y < 0f => -1.0f
- case y if y > 0f => 1.0f }
- def signum(x: Long): Long = x match { case 0l => 0l
- case y if y < 0l => -1l
- case y if y > 0l => 1l }
- def signum(x: Int): Int = x match { case 0 => 0
- case y if y < 0 => -1
- case y if y > 0 => 1}
-
- // from Java 1.5
- // def log10(x: Double): Double = java.lang.Math.log10(x)
- // def cbrt(x: Double): Double = java.lang.Math.cbrt(x)
- //
- // def ulp(x: Double): Double = java.lang.Math.ulp(x)
- // def ulp(x: Float): Float = java.lang.Math.ulp(x)
- // def sinh(x: Double): Double = java.lang.Math.sinh(x)
- // def cosh(x: Double): Double = java.lang.Math.cosh(x)
- // def tanh(x: Double):Double = java.lang.Math.tanh(x)
- // def hypot(x: Double, y: Double): Double = java.lang.Math.hypot(x, y)
- // def expm1(x: Double): Double = java.lang.Math.expm1(x)
- // def log1p(x: Double): Double = java.lang.Math.log1p(x)
+ def signum(x: Double): Double =
+ if (x == 0d) 0d
+ else if (x < 0) -1.0
+ else if (x > 0) 1.0
+ else x // NaN
+
+ def signum(x: Float): Float =
+ if (x == 0f) 0f
+ else if (x < 0) -1.0f
+ else if (x > 0) 1.0f
+ else x // NaN
+
+ def signum(x: Long): Long =
+ if (x == 0l) 0l
+ else if (x < 0) -1l
+ else 1l
+
+ def signum(x: Int): Int =
+ if (x == 0) 0
+ else if (x < 0) -1
+ else 1
}
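The old pattern-matching signum had no case for NaN, so a NaN argument would hit a MatchError, while the rewritten versions return the NaN input unchanged for Double and Float; the numeric limit constants are likewise deprecated in favour of the value companions. A quick check of the new behaviour (sketch; the constant mapping is taken from the deprecation messages above):

    val negative = math.signum(-3.5)              // -1.0
    val zero     = math.signum(0.0)               //  0.0
    val nanStays = math.signum(Double.NaN).isNaN  //  true (the match-based version failed here)

    // the deprecated limit constants map onto the value companions, e.g.
    //   Math.MAX_INT -> Int.MaxValue,  Math.MIN_LONG -> Long.MinValue
    val biggest = Int.MaxValue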
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index 892e90d581..d163bca25b 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -25,17 +25,6 @@ object Predef extends LowPriorityImplicits {
*/
def classOf[T]: Class[T] = null
- // aliases ------------------------------------------------------------
- @deprecated("lower-case type aliases will be removed") type byte = scala.Byte
- @deprecated("lower-case type aliases will be removed") type short = scala.Short
- @deprecated("lower-case type aliases will be removed") type char = scala.Char
- @deprecated("lower-case type aliases will be removed") type int = scala.Int
- @deprecated("lower-case type aliases will be removed") type long = scala.Long
- @deprecated("lower-case type aliases will be removed") type float = scala.Float
- @deprecated("lower-case type aliases will be removed") type double = scala.Double
- @deprecated("lower-case type aliases will be removed") type boolean = scala.Boolean
- @deprecated("lower-case type aliases will be removed") type unit = scala.Unit
-
type String = java.lang.String
type Class[T] = java.lang.Class[T]
@@ -156,16 +145,6 @@ object Predef extends LowPriorityImplicits {
}
implicit def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x)
- def Tuple[A1](x1: A1) = Tuple1(x1)
- def Tuple[A1, A2](x1: A1, x2: A2) = Tuple2(x1, x2)
- def Tuple[A1, A2, A3](x1: A1, x2: A2, x3: A3) = Tuple3(x1, x2, x3)
- def Tuple[A1, A2, A3, A4](x1: A1, x2: A2, x3: A3, x4: A4) = Tuple4(x1, x2, x3, x4)
- def Tuple[A1, A2, A3, A4, A5](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5) = Tuple5(x1, x2, x3, x4, x5)
- def Tuple[A1, A2, A3, A4, A5, A6](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6) = Tuple6(x1, x2, x3, x4, x5, x6)
- def Tuple[A1, A2, A3, A4, A5, A6, A7](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7) = Tuple7(x1, x2, x3, x4, x5, x6, x7)
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8) = Tuple8(x1, x2, x3, x4, x5, x6, x7, x8)
- def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9) = Tuple9(x1, x2, x3, x4, x5, x6, x7, x8, x9)
-
// printing and reading -----------------------------------------------
def print(x: Any) = Console.print(x)
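The Predef hunks drop the deprecated lower-case primitive aliases (int, boolean, ...) and the overloaded Tuple(...) factories. Code that used them migrates to the capitalized value types and to tuple literals or the TupleN companions, roughly:

    // before (relied on the removed Predef members):
    //   val n: int = 1
    //   val pair = Tuple(1, "one")

    // after:
    val n: Int = 1
    val pair: (Int, String) = (1, "one")        // or Tuple2(1, "one")
    val triple = Tuple3(1, "one", 1.0)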
diff --git a/src/library/scala/Responder.scala b/src/library/scala/Responder.scala
index f628b08b9e..32665a2bcf 100644
--- a/src/library/scala/Responder.scala
+++ b/src/library/scala/Responder.scala
@@ -67,6 +67,7 @@ object Responder {
* @version 1.0
* @since 2.1
*/
+@serializable
abstract class Responder[+A] {
def respond(k: A => Unit): Unit
@@ -90,5 +91,7 @@ abstract class Responder[+A] {
Responder.this.respond(x => if (p(x)) k(x) else ())
}
}
+
+ override def toString = "Responder"
}
diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala
index 8a08552149..94ccc4c3c3 100644
--- a/src/library/scala/Tuple2.scala
+++ b/src/library/scala/Tuple2.scala
@@ -12,7 +12,7 @@
package scala
-import scala.collection.{TraversableLike, Traversable, IterableLike}
+import scala.collection.{ TraversableLike, IterableLike }
import scala.collection.generic.CanBuildFrom
diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala
index 42418ecdd5..8c669a4c28 100644
--- a/src/library/scala/Tuple3.scala
+++ b/src/library/scala/Tuple3.scala
@@ -12,7 +12,7 @@
package scala
-import scala.collection.{TraversableLike, Traversable, IterableLike}
+import scala.collection.{ TraversableLike, IterableLike }
import scala.collection.generic.CanBuildFrom
diff --git a/src/library/scala/annotation/experimental.scala b/src/library/scala/annotation/experimental.scala
deleted file mode 100644
index 43ce631e2d..0000000000
--- a/src/library/scala/annotation/experimental.scala
+++ /dev/null
@@ -1,19 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-package scala.annotation
-
-/** <p>
- * An annotation for experimental features.
- * </p>
- *
- * @since 2.8
- */
-@experimental // and an experiment which may soon be ending
-final class experimental(message: String) extends StaticAnnotation {
- def this() = this("")
-}
diff --git a/src/library/scala/annotation/target/beanGetter.scala b/src/library/scala/annotation/target/beanGetter.scala
index cc39a32874..8518049f2c 100644
--- a/src/library/scala/annotation/target/beanGetter.scala
+++ b/src/library/scala/annotation/target/beanGetter.scala
@@ -14,8 +14,8 @@ package scala.annotation.target
* used to control to which of the above members the annotations on
* the field are copied. By default, field annotations are only added
* to the actual field, but not to any of the accessors. By annotating
- * the annotation type with one or several of the meta-annotations this
- * behavior can be changed.
+ * the annotation type or the annotation class with one or several of
+ * the meta-annotations this behavior can be changed.
*
* In the following example, the annotation {{{@Id}}} will be added
* only to the bean getter {{{getX}}}. In order to annotate the field
@@ -39,5 +39,13 @@ package scala.annotation.target
* @Id @BeanProperty val x = 0
* }
* }}}
+ *
+ * For annotations defined in Scala, a default target can be specified
+ * in the annotation class itself, for example
+ *
+ * {{{
+ * @getter
+ * class myAnnotation extends Annotation
+ * }}}
*/
final class beanGetter extends StaticAnnotation
diff --git a/src/library/scala/annotation/target/beanSetter.scala b/src/library/scala/annotation/target/beanSetter.scala
index 5f1513fd51..a5df4c933f 100644
--- a/src/library/scala/annotation/target/beanSetter.scala
+++ b/src/library/scala/annotation/target/beanSetter.scala
@@ -14,8 +14,8 @@ package scala.annotation.target
* used to control to which of the above members the annotations on
* the field are copied. By default, field annotations are only added
* to the actual field, but not to any of the accessors. By annotating
- * the annotation type with one or several of the meta-annotations this
- * behavior can be changed.
+ * the annotation type or the annotation class with one or several of
+ * the meta-annotations this behavior can be changed.
*
* In the following example, the annotation {{{@Id}}} will be added
* only to the bean getter {{{getX}}}. In order to annotate the field
@@ -39,5 +39,13 @@ package scala.annotation.target
* @Id @BeanProperty val x = 0
* }
* }}}
+ *
+ * For annotations defined in Scala, a default target can be specified
+ * in the annotation class itself, for example
+ *
+ * {{{
+ * @getter
+ * class myAnnotation extends Annotation
+ * }}}
*/
final class beanSetter extends StaticAnnotation
diff --git a/src/library/scala/annotation/target/field.scala b/src/library/scala/annotation/target/field.scala
index 19533c6908..c202d276cc 100644
--- a/src/library/scala/annotation/target/field.scala
+++ b/src/library/scala/annotation/target/field.scala
@@ -14,8 +14,8 @@ package scala.annotation.target
* used to control to which of the above members the annotations on
* the field are copied. By default, field annotations are only added
* to the actual field, but not to any of the accessors. By annotating
- * the annotation type with one or several of the meta-annotations this
- * behavior can be changed.
+ * the annotation type or the annotation class with one or several of
+ * the meta-annotations this behavior can be changed.
*
* In the following example, the annotation {{{@Id}}} will be added
* only to the bean getter {{{getX}}}. In order to annotate the field
@@ -39,5 +39,13 @@ package scala.annotation.target
* @Id @BeanProperty val x = 0
* }
* }}}
+ *
+ * For annotations defined in Scala, a default target can be specified
+ * in the annotation class itself, for example
+ *
+ * {{{
+ * @getter
+ * class myAnnotation extends Annotation
+ * }}}
*/
final class field extends StaticAnnotation
diff --git a/src/library/scala/annotation/target/getter.scala b/src/library/scala/annotation/target/getter.scala
index 45e2a8ac4b..02c372fe2e 100644
--- a/src/library/scala/annotation/target/getter.scala
+++ b/src/library/scala/annotation/target/getter.scala
@@ -14,8 +14,8 @@ package scala.annotation.target
* used to control to which of the above members the annotations on
* the field are copied. By default, field annotations are only added
* to the actual field, but not to any of the accessors. By annotating
- * the annotation type with one or several of the meta-annotations this
- * behavior can be changed.
+ * the annotation type or the annotation class with one or several of
+ * the meta-annotations this behavior can be changed.
*
* In the following example, the annotation {{{@Id}}} will be added
* only to the bean getter {{{getX}}}. In order to annotate the field
@@ -39,5 +39,13 @@ package scala.annotation.target
* @Id @BeanProperty val x = 0
* }
* }}}
+ *
+ * For annotations defined in Scala, a default target can be specified
+ * in the annotation class itself, for example
+ *
+ * {{{
+ * @getter
+ * class myAnnotation extends Annotation
+ * }}}
*/
final class getter extends StaticAnnotation
diff --git a/src/library/scala/annotation/target/setter.scala b/src/library/scala/annotation/target/setter.scala
index 6f270a6116..b4d09263b4 100644
--- a/src/library/scala/annotation/target/setter.scala
+++ b/src/library/scala/annotation/target/setter.scala
@@ -14,8 +14,8 @@ package scala.annotation.target
* used to control to which of the above members the annotations on
* the field are copied. By default, field annotations are only added
* to the actual field, but not to any of the accessors. By annotating
- * the annotation type with one or several of the meta-annotations this
- * behavior can be changed.
+ * the annotation type or the annotation class with one or several of
+ * the meta-annotations this behavior can be changed.
*
* In the following example, the annotation {{{@Id}}} will be added
* only to the bean getter {{{getX}}}. In order to annotate the field
@@ -39,5 +39,13 @@ package scala.annotation.target
* @Id @BeanProperty val x = 0
* }
* }}}
+ *
+ * For annotations defined in Scala, a default target can be specified
+ * in the annotation class itself, for example
+ *
+ * {{{
+ * @getter
+ * class myAnnotation extends Annotation
+ * }}}
*/
final class setter extends StaticAnnotation
diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala
index ea186c7a7e..70462cb8d2 100644
--- a/src/library/scala/collection/Iterator.scala
+++ b/src/library/scala/collection/Iterator.scala
@@ -48,14 +48,6 @@ object Iterator {
*/
def apply[A](elems: A*): Iterator[A] = elems.iterator
- /** Concatenates the given argument iterators into a single iterator.
- *
- * @param its the argument iterators that are to be concatenated
- * @return the concatenation of all the argument iterators
- */
- @deprecated("use <code>++</code>")
- def concat[A](xss: Iterator[A]*): Iterator[A] = xss.iterator.flatten
-
/** An iterator that returns the results of some element computation a number of times.
* @param len The number of elements returned
* @param elem The element computation determinining each result
@@ -324,10 +316,17 @@ trait Iterator[+A] { self =>
/** Returns a new iterator that first yields the elements of this
* iterator followed by the elements provided by iterator <code>that</code>.
*/
- def ++[B >: A](that: => Iterator[B]) = new Iterator[B] {
+ def ++[B >: A](that: => Iterator[B]): Iterator[B] = new Iterator[B] {
// optimize a little bit to prevent n log n behavior.
- var cur : Iterator[B] = self
- def hasNext = cur.hasNext || (cur eq self) && { cur = that; hasNext }
+ private var cur : Iterator[B] = self
+ // this was unnecessarily looping forever on x ++ x
+ def hasNext = cur.hasNext || ((cur eq self) && {
+ val it = that
+ it.hasNext && {
+ cur = it
+ true
+ }
+ })
def next() = { hasNext; cur.next() }
}
@@ -356,10 +355,19 @@ trait Iterator[+A] { self =>
def filter(p: A => Boolean): Iterator[A] = {
val self = buffered
new Iterator[A] {
- private def skip() = while (self.hasNext && !p(self.head)) self.next()
- def hasNext = { skip(); self.hasNext }
- def next() = { skip(); self.next() }
- }
+ var computedHasNext = false
+ private def skip() = {
+ while (self.hasNext && !p(self.head)) self.next()
+ computedHasNext = self.hasNext
+ }
+ def hasNext = { if (!computedHasNext) skip(); computedHasNext }
+ def next() = {
+ if (!computedHasNext)
+ skip()
+ computedHasNext = false
+ self.next()
+ }
+ }
}
/** !!! Temporary, awaiting more general implementation.
@@ -402,8 +410,19 @@ trait Iterator[+A] { self =>
def takeWhile(p: A => Boolean): Iterator[A] = {
val self = buffered
new Iterator[A] {
- def hasNext = { self.hasNext && p(self.head) }
- def next() = (if (hasNext) self else empty).next()
+ var computedHasNext = false
+
+ def hasNext = {
+ val result = computedHasNext || (self.hasNext && p(self.head))
+ computedHasNext = result
+ result
+ }
+
+ def next() = {
+ val result = (if (computedHasNext || hasNext) self else empty).next()
+ computedHasNext = false
+ result
+ }
}
}
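Both filter and takeWhile now remember the result of the last hasNext in a computedHasNext flag, so the predicate is not re-evaluated (and the buffered head not re-tested) when hasNext is called repeatedly or when next() follows hasNext. For example, counting predicate calls (a sketch derived from the code above):

    var calls = 0
    val evens = Iterator(1, 2, 3, 4) filter { x => calls += 1; x % 2 == 0 }

    evens.hasNext            // skips past 1, tests 2: two predicate calls
    evens.hasNext            // cached flag, no further predicate call
    val first = evens.next() // 2, again without re-running the predicate

    // with the previous implementation each hasNext re-ran the predicate on the
    // buffered head, so `calls` grew with every extra hasNext invocation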
@@ -1081,10 +1100,7 @@ trait Iterator[+A] { self =>
* iterator followed by the elements provided by iterator <code>that</code>.
*/
@deprecated("use <code>++</code>")
- def append[B >: A](that: Iterator[B]) = new Iterator[B] {
- def hasNext = self.hasNext || that.hasNext
- def next() = (if (self.hasNext) self else that).next()
- }
+ def append[B >: A](that: Iterator[B]) = self ++ that
/** Returns index of the first element satisfying a predicate, or -1. */
@deprecated("use `indexWhere` instead")
diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala
index a9b33305fd..70ff3463c8 100644
--- a/src/library/scala/collection/LinearSeqLike.scala
+++ b/src/library/scala/collection/LinearSeqLike.scala
@@ -60,7 +60,10 @@ trait LinearSeqLike[+A, +Repr <: LinearSeqLike[A, Repr]] extends SeqLike[A, Repr
* @return the element at position <code>n</code> in this linear sequence.
* @throws Predef.NoSuchElementException if the linear sequence is too short.
*/
- def apply(n: Int): A = drop(n).head
+ def apply(n: Int): A = {
+ require(n >= 0)
+ drop(n).head
+ }
/** Returns the elements in the sequence as an iterator
*/
diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala
index 3b188acab6..3287a4524c 100644
--- a/src/library/scala/collection/MapLike.scala
+++ b/src/library/scala/collection/MapLike.scala
@@ -129,7 +129,7 @@ self =>
protected class DefaultKeySet extends Set[A] {
def contains(key : A) = self.contains(key)
- def iterator = self.iterator.map(_._1)
+ def iterator = keysIterator
def + (elem: A): Set[A] = (Set[A]() ++ this + elem).asInstanceOf[Set[A]] // !!! concrete overrides abstract problem
def - (elem: A): Set[A] = (Set[A]() ++ this - elem).asInstanceOf[Set[A]] // !!! concrete overrides abstract problem
override def size = self.size
@@ -158,7 +158,7 @@ self =>
def valuesIterable: Iterable[B] = new DefaultValuesIterable
protected class DefaultValuesIterable extends Iterable[B] {
- def iterator = self.iterator.map(_._2)
+ def iterator = valuesIterator
override def size = self.size
override def foreach[C](f: B => C) = for ((k, v) <- self) f(v)
}
diff --git a/src/library/scala/collection/SeqProxyLike.scala b/src/library/scala/collection/SeqProxyLike.scala
index 1ca3bfa155..06bae39aef 100644
--- a/src/library/scala/collection/SeqProxyLike.scala
+++ b/src/library/scala/collection/SeqProxyLike.scala
@@ -56,7 +56,6 @@ trait SeqProxyLike[+A, +This <: SeqLike[A, This] with Seq[A]] extends SeqLike[A,
override def indices: Range = self.indices
override def view = self.view
override def view(from: Int, until: Int) = self.view(from, until)
- override def findLastIndexOf(p: A => Boolean): Int = self.lastIndexWhere(p)
override def equalsWith[B](that: Seq[B])(f: (A,B) => Boolean): Boolean = (self zip that) forall { case (x,y) => f(x,y) }
override def containsSlice[B](that: Seq[B]): Boolean = self.indexOfSeq(that) != -1
}
diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala
index 0752f4185f..401c272516 100644
--- a/src/library/scala/collection/SetLike.scala
+++ b/src/library/scala/collection/SetLike.scala
@@ -176,7 +176,7 @@ self =>
* contain the same elements.
*/
override def equals(that: Any): Boolean = that match {
- case that: Set[A] =>
+ case that: Set[_] =>
(this eq that) ||
(that canEqual this) &&
(this.size == that.size) &&
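Matching on `case that: Set[A]` is unchecked because the element type is erased at runtime; the hunk switches to the wildcard `Set[_]`, which claims only what the JVM can actually verify. The same distinction outside the collections library (a small sketch):

    def describe(x: Any): String = x match {
      // fine: only the class is checked, no claim about the element type
      case s: Set[_] => "a set of " + s.size + " elements"
      case _         => "something else"
    }

    // A pattern like `case s: Set[String]` compiles only with an unchecked
    // warning and would happily "match" a Set[Int] at runtime, since the
    // String parameter is erased.
    describe(Set(1, 2, 3))   // "a set of 3 elements"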
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index a0e330831f..b06eecc104 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -72,6 +72,9 @@ self =>
protected[this] def thisCollection: Traversable[A] = this.asInstanceOf[Traversable[A]]
protected[this] def toCollection(repr: Repr): Traversable[A] = repr.asInstanceOf[Traversable[A]]
+ /** The type implementing this traversable */
+ protected type Self = Repr
+
/** Create a new builder for this collection type.
*/
protected[this] def newBuilder: Builder[A, Repr]
diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala
index 8177658026..0c214d956c 100644
--- a/src/library/scala/collection/TraversableViewLike.scala
+++ b/src/library/scala/collection/TraversableViewLike.scala
@@ -13,7 +13,6 @@ package scala.collection
import generic._
import mutable.Builder
-import Math.MAX_INT
import TraversableView.NoBuilder
/** <p>
@@ -167,7 +166,7 @@ self =>
override def filter(p: A => Boolean): This = newFiltered(p).asInstanceOf[This]
override def init: This = newSliced(0, size - 1).asInstanceOf[This]
- override def drop(n: Int): This = newSliced(n max 0, MAX_INT).asInstanceOf[This]
+ override def drop(n: Int): This = newSliced(n max 0, Int.MaxValue).asInstanceOf[This]
override def take(n: Int): This = newSliced(0, n).asInstanceOf[This]
override def slice(from: Int, until: Int): This = newSliced(from max 0, until).asInstanceOf[This]
override def dropWhile(p: A => Boolean): This = newDroppedWhile(p).asInstanceOf[This]
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala
index 64608d163f..a036a9abfb 100644
--- a/src/library/scala/collection/immutable/HashMap.scala
+++ b/src/library/scala/collection/immutable/HashMap.scala
@@ -122,7 +122,7 @@ class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] with mut
private def getValue(e: Entry) =
e.value.asInstanceOf[B]
- private def logLimit: Int = Math.sqrt(table.length).toInt
+ private def logLimit: Int = math.sqrt(table.length).toInt
private[this] def markUpdated(key: A, ov: Option[B], delta: Int) {
val lv = loadFactor
diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala
index 93b9678751..e55469d173 100644
--- a/src/library/scala/collection/immutable/HashSet.scala
+++ b/src/library/scala/collection/immutable/HashSet.scala
@@ -96,7 +96,7 @@ class HashSet[A] extends Set[A]
cached.iterator
}
- private def logLimit: Int = Math.sqrt(table.length).toInt
+ private def logLimit: Int = math.sqrt(table.length).toInt
private def markUpdated(elem: A, del: Boolean) {
val lv = loadFactor
diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala
index d22865cad4..0c43620465 100644
--- a/src/library/scala/collection/immutable/List.scala
+++ b/src/library/scala/collection/immutable/List.scala
@@ -299,8 +299,6 @@ sealed abstract class List[+A] extends LinearSeq[A]
if (isEmpty) Stream.Empty
else new Stream.Cons(head, tail.toStream)
- // !!! todo: work in patch
-
/** Computes the difference between this list and the given list
* <code>that</code>.
*
@@ -308,7 +306,7 @@ sealed abstract class List[+A] extends LinearSeq[A]
* @return this list without the elements of the given list
* <code>that</code>.
*/
- @deprecated("use `diff' instead")
+ @deprecated("use `list1 filterNot (list2 contains)` instead")
def -- [B >: A](that: List[B]): List[B] = {
val b = new ListBuffer[B]
var these = this
@@ -326,7 +324,7 @@ sealed abstract class List[+A] extends LinearSeq[A]
* @return this list without occurrences of the given object
* <code>x</code>.
*/
- @deprecated("use `diff' instead")
+ @deprecated("use `filterNot (_ == x)` instead")
def - [B >: A](x: B): List[B] = {
val b = new ListBuffer[B]
var these = this
@@ -362,13 +360,13 @@ sealed abstract class List[+A] extends LinearSeq[A]
var left2 = l2
while (!left1.isEmpty && !left2.isEmpty) {
- if(lt(left1.head, left2.head)) {
- res += left1.head
- left1 = left1.tail
- } else {
- res += left2.head
- left2 = left2.tail
- }
+ if(lt(left1.head, left2.head)) {
+ res += left1.head
+ left1 = left1.tail
+ } else {
+ res += left2.head
+ left2 = left2.tail
+ }
}
res ++= left1
@@ -384,12 +382,12 @@ sealed abstract class List[+A] extends LinearSeq[A]
var left = lst
while (!left.isEmpty) {
- res1 += left.head
- left = left.tail
- if (!left.isEmpty) {
- res2 += left.head
- left = left.tail
- }
+ res1 += left.head
+ left = left.tail
+ if (!left.isEmpty) {
+ res2 += left.head
+ left = left.tail
+ }
}
(res1.toList, res2.toList)
@@ -399,15 +397,15 @@ sealed abstract class List[+A] extends LinearSeq[A]
/** Merge-sort the specified list */
def ms(lst: List[A]): List[A] =
lst match {
- case Nil => lst
- case x :: Nil => lst
- case x :: y :: Nil =>
- if (lt(x,y))
- lst
- else
- y :: x :: Nil
-
- case lst =>
+ case Nil => lst
+ case x :: Nil => lst
+ case x :: y :: Nil =>
+ if (lt(x,y))
+ lst
+ else
+ y :: x :: Nil
+
+ case lst =>
val (l1, l2) = split(lst)
val l1s = ms(l1)
val l2s = ms(l2)
@@ -717,7 +715,7 @@ object List extends SeqFactory[List] {
* <code>[a0, ..., ak]</code>, <code>[b0, ..., bl]</code> and
* <code>n = min(k,l)</code>
*/
- @deprecated("use `(xs, ys).map(f)' instead")
+ @deprecated("use `(xs, ys).zipped.map(f)' instead")
def map2[A,B,C](xs: List[A], ys: List[B])(f: (A, B) => C): List[C] = {
val b = new ListBuffer[C]
var xc = xs
@@ -741,7 +739,7 @@ object List extends SeqFactory[List] {
* <code>[c<sub>0</sub>, ..., c<sub>m</sub>]</code> and
* <code>n = min(k,l,m)</code>
*/
- @deprecated("use `(xs, ys, zs).map(f)' instead")
+ @deprecated("use `(xs, ys, zs).zipped.map(f)' instead")
def map3[A,B,C,D](xs: List[A], ys: List[B], zs: List[C])(f: (A, B, C) => D): List[D] = {
val b = new ListBuffer[D]
var xc = xs
@@ -766,7 +764,7 @@ object List extends SeqFactory[List] {
* <code>[b<sub>0</sub>, ..., b<sub>l</sub>]</code>
* and <code>n = min(k,l)</code>
*/
- @deprecated("use `(xs, ys).forall(f)' instead")
+ @deprecated("use `(xs, ys).zipped.forall(f)' instead")
def forall2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
var xc = xs
var yc = ys
@@ -788,7 +786,7 @@ object List extends SeqFactory[List] {
* <code>[b<sub>0</sub>, ..., b<sub>l</sub>]</code> and
* <code>n = min(k,l)</code>
*/
- @deprecated("use `(xs, ys).exists(f)' instead")
+ @deprecated("use `(xs, ys).zipped.exists(f)' instead")
def exists2[A,B](xs: List[A], ys: List[B])(f: (A, B) => Boolean): Boolean = {
var xc = xs
var yc = ys
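
Note on the deprecation messages above: the suggested replacements lean on the Tuple2/Tuple3 `zipped` views and on `filterNot`. A minimal sketch of the rewrites, with xs and ys as illustrative lists:

    val xs = List(1, 2, 3)
    val ys = List(10, 20, 30)
    (xs, ys).zipped.map(_ + _)        // List(11, 22, 33), replaces List.map2(xs, ys)(_ + _)
    (xs, ys).zipped.forall(_ < _)     // true, replaces List.forall2(xs, ys)(_ < _)
    xs filterNot (ys contains)        // List(1, 2, 3), replaces xs -- ys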
diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala
index 9c70ba3ca6..6c5f2ddc09 100644
--- a/src/library/scala/collection/immutable/NumericRange.scala
+++ b/src/library/scala/collection/immutable/NumericRange.scala
@@ -25,7 +25,7 @@ import generic._
* the Int-based scala.Range should be more performant.
* </p><pre>
* <b>val</b> r1 = new Range(0, 100, 1)
- * <b>val</b> veryBig = Math.MAX_INT.toLong + 1
+ * <b>val</b> veryBig = Int.MaxValue.toLong + 1
* <b>val</b> r2 = Range.Long(veryBig, veryBig + 100, 1)
* assert(r1 sameElements r2.map(_ - veryBig))
* </pre>
@@ -33,6 +33,7 @@ import generic._
* @author Paul Phillips
* @version 2.8
*/
+@serializable
abstract class NumericRange[+T]
(val start: T, val end: T, val step: T, val isInclusive: Boolean)
(implicit num: Integral[T])
@@ -48,9 +49,9 @@ extends IndexedSeq[T]
// todo? - we could lift the length restriction by implementing a range as a sequence of
// subranges and limiting the subranges to MAX_INT. There's no other way around it because
// the generics we inherit assume integer-based indexing (as well they should.)
- // The second condition is making sure type T can meaningfully be compared to Math.MAX_INT.
- if (genericLength > fromInt(Math.MAX_INT) && (Math.MAX_INT == toInt(fromInt(Math.MAX_INT))))
- fail("Implementation restricts ranges to Math.MAX_INT elements.")
+ // The second condition is making sure type T can meaningfully be compared to Int.MaxValue.
+ if (genericLength > fromInt(Int.MaxValue) && (Int.MaxValue == toInt(fromInt(Int.MaxValue))))
+ fail("Implementation restricts ranges to Int.MaxValue elements.")
// inclusive/exclusiveness captured this way because we do not have any
// concept of a "unit", we can't just add an epsilon to an exclusive
@@ -75,12 +76,12 @@ extends IndexedSeq[T]
if (step > zero) {
while (i < end) {
f(i)
- i = i + step
+ i += step
}
} else {
while (i > end) {
f(i)
- i = i + step
+ i += step
}
}
if (limitTest(i)) f(i)
@@ -100,7 +101,7 @@ extends IndexedSeq[T]
}
def length: Int = toInt(genericLength)
- override def isEmpty =
+ override def isEmpty: Boolean =
if (step > zero)
if (isInclusive) end < start
else end <= start
diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala
index 08979ac347..fadf21f819 100644
--- a/src/library/scala/collection/immutable/PagedSeq.scala
+++ b/src/library/scala/collection/immutable/PagedSeq.scala
@@ -21,7 +21,7 @@ import scala.util.matching.Regex
* @since 2.7
*/
object PagedSeq {
- final val UndeterminedEnd = Math.MAX_INT
+ final val UndeterminedEnd = Int.MaxValue
/** Constructs a character sequence from a character iterator */
def fromIterator[T: ClassManifest](source: Iterator[T]): PagedSeq[T] =
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala
index d9a7725d1a..fca5de2640 100644
--- a/src/library/scala/collection/immutable/Range.scala
+++ b/src/library/scala/collection/immutable/Range.scala
@@ -25,6 +25,7 @@ package scala.collection.immutable
* @version 2.8
* @since 2.5
*/
+@serializable @SerialVersionUID(7618862778670199309L)
class Range(val start: Int, val end: Int, val step: Int) extends IndexedSeq[Int] {
require(step != 0)
@@ -64,14 +65,17 @@ class Range(val start: Int, val end: Int, val step: Int) extends IndexedSeq[Int]
start + idx * step
}
+ // take and drop have to be tolerant of large values without overflowing
+ private def locationAfterN(n: Int) = start + step * (0 max n min length)
+
final override def take(n: Int): Range = {
- val limit1 = start + step * (n max 0)
+ val limit1 = locationAfterN(n)
if (step > 0) Range(start, limit1 min limit, step)
else Range(start, limit1 max limit, step)
}
final override def drop(n: Int): Range =
- copy(start + step * (n max 0), end, step)
+ copy(locationAfterN(n), end, step)
final override def init: Range =
take(length - 1)
@@ -135,7 +139,7 @@ object Range {
class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) {
override def isInclusive = true
- override protected val limit = end + Math.signum(step)
+ override protected val limit = end + math.signum(step)
override protected def copy(start: Int, end: Int, step: Int): Range = new Inclusive(start, end, step)
}
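
The locationAfterN helper above clamps the element count to [0, length] before multiplying by step, so take and drop with very large arguments no longer overflow the start + step * n computation. A rough illustration (the values are made up):

    val r = 1 to 10 by 3          // 1, 4, 7, 10
    r take Int.MaxValue           // the whole range; previously start + step * n could wrap around
    r drop Int.MaxValue           // an empty range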
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index cc935afe93..15a6152e6c 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -21,10 +21,10 @@ import generic._
* <pre>
* <b>def</b> contains(elem: A): Boolean
* <b>def</b> iterator: Iterator[A]
- * <b>def</b> + (elem: A): This
- * <b>def</b> - (elem: A): This</pre>
+ * <b>def</b> + (elem: A): Self
+ * <b>def</b> - (elem: A): Self</pre>
* <p>
- * where <code>This</code> is the type of the set.
+ * where <code>Self</code> is the type of the set.
* </p>
*
* @author Matthias Zenger
@@ -57,7 +57,7 @@ object Set extends SetFactory[Set] {
}
/** An optimized representation for immutable sets of size 1 */
- @serializable
+ @serializable @SerialVersionUID(1233385750652442003L)
class Set1[A](elem1: A) extends Set[A] {
override def size: Int = 1
def contains(elem: A): Boolean =
@@ -76,7 +76,7 @@ object Set extends SetFactory[Set] {
}
/** An optimized representation for immutable sets of size 2 */
- @serializable
+ @serializable @SerialVersionUID(-6443011234944830092L)
class Set2[A](elem1: A, elem2: A) extends Set[A] {
override def size: Int = 2
def contains(elem: A): Boolean =
@@ -96,7 +96,7 @@ object Set extends SetFactory[Set] {
}
/** An optimized representation for immutable sets of size 3 */
- @serializable
+ @serializable @SerialVersionUID(-3590273538119220064L)
class Set3[A](elem1: A, elem2: A, elem3: A) extends Set[A] {
override def size: Int = 3
def contains(elem: A): Boolean =
@@ -117,7 +117,7 @@ object Set extends SetFactory[Set] {
}
/** An optimized representation for immutable sets of size 4 */
- @serializable
+ @serializable @SerialVersionUID(-3622399588156184395L)
class Set4[A](elem1: A, elem2: A, elem3: A, elem4: A) extends Set[A] {
override def size: Int = 4
def contains(elem: A): Boolean =
diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala
index 2813bc6656..288b1707cb 100644
--- a/src/library/scala/collection/immutable/Stack.scala
+++ b/src/library/scala/collection/immutable/Stack.scala
@@ -69,7 +69,6 @@ class Stack[+A] protected (protected val elems: List[A]) extends Seq[A] {
*
* @param elems the iterator object.
* @return the stack with the new elements on top.
- * @deprecated
*/
def pushAll[B >: A](elems: Iterator[B]): Stack[B] =
((this: Stack[B]) /: elems)(_ push _)
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala
index 6a57c3596d..9a6a4a3b4c 100644
--- a/src/library/scala/collection/immutable/Stream.scala
+++ b/src/library/scala/collection/immutable/Stream.scala
@@ -480,6 +480,7 @@ object Stream extends SeqFactory[Stream] {
}
/** A lazy cons cell, from which streams are built. */
+ @serializable @SerialVersionUID(-602202424901551803L)
final class Cons[+A](hd: A, tl: => Stream[A]) extends Stream[A] {
override def isEmpty = false
override def head = hd
diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala
index e0a73fe427..35e35db029 100644
--- a/src/library/scala/collection/immutable/Vector.scala
+++ b/src/library/scala/collection/immutable/Vector.scala
@@ -6,7 +6,7 @@
** |/ **
\* */
-// $Id: Vector.scala 19072 2009-10-13 12:19:59Z rompf $
+// $Id$
package scala.collection
package immutable
@@ -564,7 +564,7 @@ final class VectorIterator[+A](_startIndex: Int, _endIndex: Int) extends Iterato
private var lo: Int = _startIndex & 31
private var endIndex: Int = _endIndex
- private var endLo = Math.min(endIndex - blockIndex, 32)
+ private var endLo = math.min(endIndex - blockIndex, 32)
def hasNext = _hasNext
@@ -582,7 +582,7 @@ final class VectorIterator[+A](_startIndex: Int, _endIndex: Int) extends Iterato
gotoNextBlockStart(newBlockIndex, blockIndex ^ newBlockIndex)
blockIndex = newBlockIndex
- endLo = Math.min(endIndex - blockIndex, 32)
+ endLo = math.min(endIndex - blockIndex, 32)
lo = 0
} else {
_hasNext = false
@@ -1011,7 +1011,7 @@ private[immutable] trait VectorPointer[T] {
final def copyRange(array: Array[AnyRef], oldLeft: Int, newLeft: Int) = {
val elems = new Array[AnyRef](32)
- Platform.arraycopy(array, oldLeft, elems, newLeft, 32 - Math.max(newLeft,oldLeft))
+ Platform.arraycopy(array, oldLeft, elems, newLeft, 32 - math.max(newLeft,oldLeft))
elems
}
diff --git a/src/library/scala/collection/interfaces/SeqMethods.scala b/src/library/scala/collection/interfaces/SeqMethods.scala
index 8256c5304c..c7c138c2c3 100644
--- a/src/library/scala/collection/interfaces/SeqMethods.scala
+++ b/src/library/scala/collection/interfaces/SeqMethods.scala
@@ -48,7 +48,6 @@ trait SeqMethods[+A, +This <: SeqLike[A, This] with Seq[A]] extends IterableMeth
def reverse: This
def reverseIterator: Iterator[A]
def segmentLength(p: A => Boolean, from: Int): Int
- def slice(from: Int): Seq[A]
def startsWith[B](that: Seq[B]): Boolean
def startsWith[B](that: Seq[B], offset: Int): Boolean
def union[B >: A, That](that: Seq[B])(implicit bf: CanBuildFrom[This, B, That]): That
diff --git a/src/library/scala/collection/mutable/ArrayBuilder.scala b/src/library/scala/collection/mutable/ArrayBuilder.scala
index 05ad865862..cdb01c9ded 100644
--- a/src/library/scala/collection/mutable/ArrayBuilder.scala
+++ b/src/library/scala/collection/mutable/ArrayBuilder.scala
@@ -19,6 +19,7 @@ import scala.reflect.ClassManifest
*
* @since 2.8
*/
+@serializable
abstract class ArrayBuilder[T] extends Builder[T, Array[T]]
/**
@@ -84,6 +85,13 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofRef[_] => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofRef"
}
class ofByte extends ArrayBuilder[Byte] {
@@ -141,6 +149,13 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofByte => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofByte"
}
class ofShort extends ArrayBuilder[Short] {
@@ -198,6 +213,13 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofShort => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofShort"
}
class ofChar extends ArrayBuilder[Char] {
@@ -255,6 +277,13 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofChar => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofChar"
}
class ofInt extends ArrayBuilder[Int] {
@@ -312,6 +341,13 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofInt => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofInt"
}
class ofLong extends ArrayBuilder[Long] {
@@ -369,6 +405,13 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofLong => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofLong"
}
class ofFloat extends ArrayBuilder[Float] {
@@ -426,6 +469,13 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofFloat => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofFloat"
}
class ofDouble extends ArrayBuilder[Double] {
@@ -483,6 +533,13 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofDouble => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofDouble"
}
class ofBoolean extends ArrayBuilder[Boolean] {
@@ -540,6 +597,13 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofBoolean => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofBoolean"
}
class ofUnit extends ArrayBuilder[Unit] {
@@ -597,5 +661,12 @@ object ArrayBuilder {
if (capacity != 0 && capacity == size) elems
else mkArray(size)
}
+
+ override def equals(other: Any): Boolean = other match {
+ case x: ofUnit => (size == x.size) && (elems == x.elems)
+ case _ => false
+ }
+
+ override def toString = "ArrayBuilder.ofUnit"
}
}
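
With the equals and toString overrides added above, the specialized builders print a stable name and can be compared; a small usage sketch (ArrayBuilder.make selects the ofInt variant via the implicit ClassManifest):

    import scala.collection.mutable.ArrayBuilder
    val b = ArrayBuilder.make[Int]()
    b += 1; b += 2; b += 3
    b.result()                    // Array(1, 2, 3)
    b.toString                    // "ArrayBuilder.ofInt"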
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index 0059d5ff6b..4cfebaf76d 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -35,7 +35,7 @@ private object Utils {
* @author David MacIver
* @since 2.7
*/
-@cloneable
+@cloneable @serializable @SerialVersionUID(8565219180626620510L)
class ArrayStack[T] private(private var table : Array[AnyRef],
private var index : Int) extends scala.collection.Seq[T] with Cloneable[ArrayStack[T]] {
def this() = this(new Array[AnyRef](1), 0)
diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala
index 3a92f4f7da..e941a4e438 100644
--- a/src/library/scala/collection/mutable/BufferLike.scala
+++ b/src/library/scala/collection/mutable/BufferLike.scala
@@ -71,12 +71,11 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
def clear()
// Abstract methods new in this class
-
/** Prepend a single element to this buffer and return
* the identity of the buffer.
* @param elem the element to prepend.
*/
- def +=:(elem: A): This
+ def +=:(elem: A): this.type
@deprecated("use `+=:' instead")
final def +:(elem: A): This = +=:(elem)
@@ -126,7 +125,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
*
* @param iter the iterable object.
*/
- def ++=:(iter: Traversable[A]): This = { insertAll(0, iter); this }
+ def ++=:(iter: Traversable[A]): this.type = { insertAll(0, iter); this }
@deprecated("use ++=: instead")
final def ++:(iter: Traversable[A]): This = ++=:(iter)
@@ -139,9 +138,6 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]]
*/
def ++=:(iter: Iterator[A]): This = { insertAll(0, iter.toSeq); this }
- @deprecated("use ++=: instead")
- final def ++:(iter: Iterator[A]): This = ++=:(iter)
-
/** Appends elements to this buffer.
*
* @param elems the elements to append.
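
Since +=: and ++=: now return this.type, a prepend keeps the static type of the receiver; a minimal sketch with a ListBuffer:

    import scala.collection.mutable.ListBuffer
    val buf = ListBuffer(3, 4)
    2 +=: buf                     // buf is now ListBuffer(2, 3, 4)
    List(0, 1) ++=: buf           // buf is now ListBuffer(0, 1, 2, 3, 4)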
diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala
index a31beda57b..4dd2280e02 100644
--- a/src/library/scala/collection/mutable/BufferProxy.scala
+++ b/src/library/scala/collection/mutable/BufferProxy.scala
@@ -88,10 +88,10 @@ trait BufferProxy[A] extends Buffer[A] with Proxy {
*
* @param elem the element to append.
*/
- def +=:(elem: A): Buffer[A] = self.+=:(elem)
+ def +=:(elem: A): this.type = { self.+=:(elem); this }
- override def ++=:(iter: scala.collection.Traversable[A]): Buffer[A] = self.++=:(iter)
- override def ++=:(iter: scala.collection.Iterator[A]): Buffer[A] = self.++=:(iter)
+ override def ++=:(iter: scala.collection.Traversable[A]): this.type = { self.++=:(iter); this }
+ override def ++=:(iter: scala.collection.Iterator[A]): this.type = { self.++=:(iter); this }
/** Prepend an element to this list.
*
diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala
index 6a4476d743..6767b5c7c2 100644
--- a/src/library/scala/collection/mutable/DoubleLinkedList.scala
+++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala
@@ -4,7 +4,7 @@
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
-* */
+\* */
// $Id$
@@ -14,7 +14,7 @@ package mutable
import generic._
-/** This class implements single linked lists where both the head (<code>elem</code>)
+/** This class implements double linked lists where both the head (<code>elem</code>)
* and the tail (<code>next</code>) are mutable.
*
* @author Matthias Zenger
@@ -22,12 +22,20 @@ import generic._
* @version 2.8
* @since 1
*/
-@serializable @SerialVersionUID(419155950203746706L)
-class DoubleLinkedList[A](_elem: A, _next: DoubleLinkedList[A]) extends LinearSeq[A]
- with GenericTraversableTemplate[A, DoubleLinkedList]
- with DoubleLinkedListLike[A, DoubleLinkedList[A]] {
- elem = _elem
- next = _next
+@serializable @SerialVersionUID(-8144992287952814767L)
+class DoubleLinkedList[A]() extends LinearSeq[A]
+ with GenericTraversableTemplate[A, DoubleLinkedList]
+ with DoubleLinkedListLike[A, DoubleLinkedList[A]] {
+ next = this
+
+ def this(elem: A, next: DoubleLinkedList[A]) {
+ this()
+ if (next != null) {
+ this.elem = elem
+ this.next = next
+ }
+ }
+
override def companion: GenericCompanion[DoubleLinkedList] = DoubleLinkedList
}
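
A quick sketch of the reworked constructors above: the no-argument form builds the empty sentinel (next pointing to itself), and the two-argument auxiliary constructor only fills in elem and next when the given tail is non-null:

    import scala.collection.mutable.DoubleLinkedList
    val empty = new DoubleLinkedList[Int]()      // isEmpty == true
    val one   = new DoubleLinkedList(1, empty)   // one element, length == 1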
diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index b2f259e4e9..e4969a3af0 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -58,6 +58,32 @@ class HashMap[A, B] extends Map[A, B]
def -=(key: A): this.type = { removeEntry(key); this }
def iterator = entriesIterator map {e => (e.key, e.value)}
+
+ override def foreach[C](f: ((A, B)) => C): Unit = foreachEntry(e => f(e.key, e.value))
+
+ /* Override to avoid tuple allocation in foreach */
+ override def keySet: collection.Set[A] = new DefaultKeySet {
+ override def foreach[C](f: A => C) = foreachEntry(e => f(e.key))
+ }
+
+ /* Override to avoid tuple allocation in foreach */
+ override def valuesIterable: collection.Iterable[B] = new DefaultValuesIterable {
+ override def foreach[C](f: B => C) = foreachEntry(e => f(e.value))
+ }
+
+ /* Override to avoid tuple allocation */
+ override def keysIterator: Iterator[A] = new Iterator[A] {
+ val iter = entriesIterator
+ def hasNext = iter.hasNext
+ def next = iter.next.key
+ }
+
+ /* Override to avoid tuple allocation */
+ override def valuesIterator: Iterator[B] = new Iterator[B] {
+ val iter = entriesIterator
+ def hasNext = iter.hasNext
+ def next = iter.next.value
+ }
}
/** This class implements mutable maps using a hashtable.
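
The overrides above walk the hash table entries directly, so keySet, valuesIterable and the two iterators avoid building a (key, value) tuple per element; callers are unchanged. Illustrative use:

    import scala.collection.mutable.HashMap
    val m = HashMap(1 -> "a", 2 -> "b")
    m.keysIterator foreach println               // entry-based, no tuple per step
    m foreach { case (k, v) => println(k + " -> " + v) }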
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 9dd8a7aeb0..db4e100634 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -144,6 +144,19 @@ trait HashTable[A] {
}
}
+ protected final def foreachEntry[C](f: Entry => C) {
+ val t = table
+ var index = t.length - 1
+ while (index >= 0) {
+ var entry = t(index)
+ while (entry ne null) {
+ f(entry.asInstanceOf[Entry])
+ entry = entry.next
+ }
+ index -= 1
+ }
+ }
+
/** An iterator returning all entries */
@deprecated("use entriesIterator instead")
protected def entries: Iterator[Entry] = entriesIterator
diff --git a/src/library/scala/collection/mutable/History.scala b/src/library/scala/collection/mutable/History.scala
index dcc6539630..1e65814edb 100644
--- a/src/library/scala/collection/mutable/History.scala
+++ b/src/library/scala/collection/mutable/History.scala
@@ -1,6 +1,6 @@
/* __ *\
** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/tPFL **
** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | | **
** |/ **
@@ -24,16 +24,16 @@ package mutable
*/
@serializable
@SerialVersionUID(5219213543849892588L)
-class History[A, B] extends AnyRef with Subscriber[A, B] with Iterable[(B, A)]
+class History[Evt, Pub] extends Subscriber[Evt, Pub] with Iterable[(Pub, Evt)]
{
- protected val log: Queue[(B, A)] = new Queue[(B, A)]
+ protected val log: Queue[(Pub, Evt)] = new Queue
val maxHistory: Int = 1000
/**
* @param pub ...
* @param event ...
*/
- def notify(pub: B, event: A) {
+ def notify(pub: Pub, event: Evt) {
if (log.length >= maxHistory)
log.dequeue
@@ -41,8 +41,8 @@ class History[A, B] extends AnyRef with Subscriber[A, B] with Iterable[(B, A)]
}
override def size: Int = log.length
- def iterator: Iterator[(B, A)] = log.iterator
- def events: Iterator[A] = log.iterator.map { case (_, e) => e }
+ def iterator: Iterator[(Pub, Evt)] = log.iterator
+ def events: Iterator[Evt] = log.iterator map (_._2)
def clear() { log.clear }
diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala
index 77b8862421..ca952bfbaa 100644
--- a/src/library/scala/collection/mutable/IndexedSeqView.scala
+++ b/src/library/scala/collection/mutable/IndexedSeqView.scala
@@ -13,7 +13,6 @@ package scala.collection
package mutable
import generic._
-import Math.MAX_INT
import TraversableView.NoBuilder
@@ -79,7 +78,7 @@ self =>
override def filter(p: A => Boolean): IndexedSeqView[A, Coll] = newFiltered(p)
override def init: IndexedSeqView[A, Coll] = newSliced(0, size - 1).asInstanceOf[IndexedSeqView[A, Coll]]
- override def drop(n: Int): IndexedSeqView[A, Coll] = newSliced(n max 0, MAX_INT).asInstanceOf[IndexedSeqView[A, Coll]]
+ override def drop(n: Int): IndexedSeqView[A, Coll] = newSliced(n max 0, Int.MaxValue).asInstanceOf[IndexedSeqView[A, Coll]]
override def take(n: Int): IndexedSeqView[A, Coll] = newSliced(0, n).asInstanceOf[IndexedSeqView[A, Coll]]
override def slice(from: Int, until: Int): IndexedSeqView[A, Coll] = newSliced(from max 0, until).asInstanceOf[IndexedSeqView[A, Coll]]
override def dropWhile(p: A => Boolean): IndexedSeqView[A, Coll] = newDroppedWhile(p).asInstanceOf[IndexedSeqView[A, Coll]]
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index b0cad79ef4..308db1a4d4 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -15,8 +15,8 @@ package mutable
import generic._
/** This class implements mutable maps using a hashtable.
+ * The iterator and all traversal methods of this class visit elements in the order they were inserted.
*
- * @author Matthias Zenger
* @author Martin Odersky
* @version 2.8
* @since 2.7
diff --git a/src/library/scala/collection/mutable/LinkedList.scala b/src/library/scala/collection/mutable/LinkedList.scala
index abd691ef5c..ca59a90efb 100644
--- a/src/library/scala/collection/mutable/LinkedList.scala
+++ b/src/library/scala/collection/mutable/LinkedList.scala
@@ -22,7 +22,7 @@ import generic._
* @version 2.8
* @since 1
*/
-@serializable
+@serializable @SerialVersionUID(-7308240733518833071L)
class LinkedList[A]() extends LinearSeq[A]
with GenericTraversableTemplate[A, LinkedList]
with LinkedListLike[A, LinkedList[A]] {
diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala
index 42f37766b2..87dc095cd2 100644
--- a/src/library/scala/collection/mutable/LinkedListLike.scala
+++ b/src/library/scala/collection/mutable/LinkedListLike.scala
@@ -31,7 +31,7 @@ trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends Seq
override def isEmpty = next eq this
- override def length: Int = if (isEmpty) 0 else next.length
+ override def length: Int = if (isEmpty) 0 else next.length + 1
override def head: A = elem
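
The + 1 above fixes length, which previously left out the current cell and undercounted non-empty lists; a quick check:

    import scala.collection.mutable.LinkedList
    val xs = LinkedList(1, 2, 3)
    xs.length                     // 3 with this change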
diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala
index e1cd8ace4d..b90f1a805c 100644
--- a/src/library/scala/collection/mutable/ObservableBuffer.scala
+++ b/src/library/scala/collection/mutable/ObservableBuffer.scala
@@ -23,10 +23,9 @@ import script._
* @version 1.0, 08/07/2003
* @since 1
*/
-trait ObservableBuffer[A, This <: ObservableBuffer[A, This]]
- extends Buffer[A]
- with Publisher[Message[A] with Undoable, This]
-{ self: This =>
+trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoable]
+{
+ type Pub <: ObservableBuffer[A]
abstract override def +=(element: A): this.type = {
super.+=(element)
diff --git a/src/library/scala/collection/mutable/ObservableMap.scala b/src/library/scala/collection/mutable/ObservableMap.scala
index 4de49c34b2..64b61dfe95 100644
--- a/src/library/scala/collection/mutable/ObservableMap.scala
+++ b/src/library/scala/collection/mutable/ObservableMap.scala
@@ -25,10 +25,10 @@ import script._
* @version 2.0, 31/12/2006
* @since 1
*/
-trait ObservableMap[A, B, This <: ObservableMap[A, B, This]]
- extends Map[A, B]
- with Publisher[Message[(A, B)] with Undoable, This]
-{ self: This =>
+trait ObservableMap[A, B] extends Map[A, B] with Publisher[Message[(A, B)] with Undoable]
+{
+
+ type Pub <: ObservableMap[A, B]
abstract override def += (kv: (A, B)): this.type = {
val (key, value) = kv
diff --git a/src/library/scala/collection/mutable/ObservableSet.scala b/src/library/scala/collection/mutable/ObservableSet.scala
index bb61e6c363..899a9a16aa 100644
--- a/src/library/scala/collection/mutable/ObservableSet.scala
+++ b/src/library/scala/collection/mutable/ObservableSet.scala
@@ -23,10 +23,10 @@ import script._
* @version 1.0, 08/07/2003
* @since 1
*/
-trait ObservableSet[A, This <: ObservableSet[A, This]]
- extends Set[A]
- with Publisher[Message[A] with Undoable, This]
-{ self: This =>
+trait ObservableSet[A] extends Set[A] with Publisher[Message[A] with Undoable]
+{
+
+ type Pub <: ObservableSet[A]
abstract override def +=(elem: A): this.type = {
if (!contains(elem)) {
diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala
index 044ae59fd8..42625092e4 100644
--- a/src/library/scala/collection/mutable/OpenHashMap.scala
+++ b/src/library/scala/collection/mutable/OpenHashMap.scala
@@ -24,9 +24,10 @@ object OpenHashMap{
def empty[K, V] = new OpenHashMap[K, V];
- private[mutable] class Entry[Key, Value](val key : Key,
- val hash : Int,
- var value : Option[Value])
+ final private class OpenEntry[Key, Value](val key: Key,
+ val hash: Int,
+ var value: Option[Value])
+ extends HashEntry[Key, OpenEntry[Key, Value]]
private[mutable] def highestOneBit(j : Int) = { // This should really go somewhere central as we're now code sharing by cut and paste. :(
var i = j;
@@ -41,8 +42,6 @@ object OpenHashMap{
private[mutable] def nextPowerOfTwo(i : Int) = highestOneBit(i) << 1;
}
-import OpenHashMap.Entry;
-
/**
* A mutable hash map based on an open hashing scheme. The precise scheme is undefined,
* but it should make a reasonable effort to ensure that an insert with consecutive hash
@@ -52,15 +51,20 @@ import OpenHashMap.Entry;
* @author David MacIver
* @since 2.7
*/
-class OpenHashMap[Key, Value](initialSize : Int) extends scala.collection.mutable.Map[Key, Value]{
+class OpenHashMap[Key, Value](initialSize : Int) extends Map[Key, Value]
+ with MapLike[Key, Value, OpenHashMap[Key, Value]] {
+
+ import OpenHashMap.OpenEntry
+ type Entry = OpenEntry[Key, Value]
+
def this() = this(8);
- override def empty = OpenHashMap.empty
+ override def empty: OpenHashMap[Key, Value] = OpenHashMap.empty[Key, Value]
private[this] val actualInitialSize = OpenHashMap.nextPowerOfTwo(initialSize);
private var mask = actualInitialSize - 1;;
- private var table : Array[Entry[Key, Value]] = new Array[Entry[Key, Value]](actualInitialSize);
+ private var table : Array[Entry] = new Array[Entry](actualInitialSize);
private var _size = 0;
private var deleted = 0;
@@ -80,7 +84,7 @@ class OpenHashMap[Key, Value](initialSize : Int) extends scala.collection.mutabl
val oldSize = mask + 1;
val newSize = 4 * oldSize;
val oldTable = table;
- table = new Array[Entry[Key, Value]](newSize);
+ table = new Array[Entry](newSize);
mask = newSize - 1;
oldTable.foreach( entry =>
if (entry != null && entry.value != None) addEntry(entry));
@@ -104,7 +108,7 @@ class OpenHashMap[Key, Value](initialSize : Int) extends scala.collection.mutabl
index;
}
- private[this] def addEntry(entry : Entry[Key, Value]) =
+ private[this] def addEntry(entry : Entry) =
if (entry != null) table(findIndex(entry.key, entry.hash)) = entry;
override def update(key : Key, value : Value) {
@@ -122,7 +126,7 @@ class OpenHashMap[Key, Value](initialSize : Int) extends scala.collection.mutabl
val index = findIndex(key, hash);
val entry = table(index);
if (entry == null) {
- table(index) = new Entry(key, hash, Some(value));
+ table(index) = new OpenEntry(key, hash, Some(value));
modCount += 1;
size += 1;
None
@@ -189,10 +193,7 @@ class OpenHashMap[Key, Value](initialSize : Int) extends scala.collection.mutabl
}
}
- @deprecated("use `iterator' instead")
- override def elements: Iterator[(Key, Value)] = iterator
-
- override def clone : OpenHashMap[Key, Value] = {
+ override def clone = {
val it = new OpenHashMap[Key, Value]
foreachUndeletedEntry(entry => it.put(entry.key, entry.hash, entry.value.get));
it
@@ -219,7 +220,7 @@ class OpenHashMap[Key, Value](initialSize : Int) extends scala.collection.mutabl
);
}
- private[this] def foreachUndeletedEntry(f : Entry[Key, Value] => Unit){
+ private[this] def foreachUndeletedEntry(f : Entry => Unit){
table.foreach(entry => if (entry != null && entry.value != None) f(entry));
}
override def transform(f : (Key, Value) => Value) = {
diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala
index b3955acacf..5338d358f3 100644
--- a/src/library/scala/collection/mutable/PriorityQueue.scala
+++ b/src/library/scala/collection/mutable/PriorityQueue.scala
@@ -12,19 +12,13 @@
package scala.collection
package mutable
-import generic.{ Addable, Growable }
+import generic._
/** This class implements priority queues using a heap.
* To prioritize elements of type T there must be an implicit
* Ordering[T] available at creation.
*
- * Martin: This class is utterly broken. It uses a resizable array
- * as a heap, yet pretends to be a sequence via this same resizable array.
- * Needless to say, order of elements is different in the two.
- * So this class needs to be redesigned so that it uses the array only
- * in its implementation, but implements a sequence interface separately.
- *
* @author Matthias Zenger
* @version 1.0, 03/05/2004
* @since 1
@@ -32,32 +26,76 @@ import generic.{ Addable, Growable }
@serializable @cloneable
class PriorityQueue[A](implicit ord: Ordering[A])
- extends ResizableArray[A]
+ extends Seq[A]
+ with SeqLike[A, PriorityQueue[A]]
with Addable[A, PriorityQueue[A]]
with Growable[A]
with Cloneable[PriorityQueue[A]]
+ with Builder[A, PriorityQueue[A]]
{
import ord._
- size0 += 1 // we do not use array(0)
- override def length: Int = super.length - 1 // adjust length accordingly
- override def isEmpty: Boolean = size0 < 2
+ private class ResizableArrayAccess[A] extends ResizableArray[A] {
+ @inline def p_size0 = size0
+ @inline def p_size0_=(s: Int) = size0 = s
+ @inline def p_array = array
+ @inline def p_ensureSize(n: Int) = super.ensureSize(n)
+ @inline def p_swap(a: Int, b: Int) = super.swap(a, b)
+ }
+
+ protected[this] override def newBuilder = new PriorityQueue[A]
+
+ private val resarr = new ResizableArrayAccess[A]
+
+ resarr.p_size0 += 1 // we do not use array(0)
+ override def length: Int = resarr.length - 1 // adjust length accordingly
+ override def size: Int = length
+ override def isEmpty: Boolean = resarr.p_size0 < 2
override def repr = this
// hey foreach, our 0th element doesn't exist
override def foreach[U](f: A => U) {
var i = 1
- while (i < size) {
- f(toA(array(i)))
+ while (i < resarr.p_size0) {
+ f(toA(resarr.p_array(i)))
+ i += 1
+ }
+ }
+
+ def update(idx: Int, elem: A) {
+ if (idx < 0 || idx >= size) throw new IndexOutOfBoundsException("Indices must be nonnegative and lesser than the size.")
+
+ var i = 0
+ val iter = iterator
+ clear
+ while (iter.hasNext) {
+ val curr = iter.next
+ if (i == idx) this += elem
+ else this += curr
i += 1
}
}
+ def apply(idx: Int) = {
+ if (idx < 0 || idx >= size) throw new IndexOutOfBoundsException("Indices must be nonnegative and lesser than the size.")
+
+ var left = idx
+ val iter = iterator
+ var curr = iter.next
+ while (left > 0) {
+ curr = iter.next
+ left -= 1
+ }
+ curr
+ }
+
+ def result = clone
+
private def toA(x: AnyRef): A = x.asInstanceOf[A]
protected def fixUp(as: Array[AnyRef], m: Int): Unit = {
var k: Int = m
while (k > 1 && toA(as(k / 2)) < toA(as(k))) {
- swap(k, k / 2)
+ resarr.p_swap(k, k / 2)
k = k / 2
}
}
@@ -83,10 +121,10 @@ class PriorityQueue[A](implicit ord: Ordering[A])
* @param elem the element to insert
*/
def +=(elem: A): this.type = {
- ensureSize(size0 + 1)
- array(size0) = elem.asInstanceOf[AnyRef]
- fixUp(array, size0)
- size0 += 1
+ resarr.p_ensureSize(resarr.p_size0 + 1)
+ resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef]
+ fixUp(resarr.p_array, resarr.p_size0)
+ resarr.p_size0 += 1
this
}
@@ -104,7 +142,7 @@ class PriorityQueue[A](implicit ord: Ordering[A])
*
* @param iter an iterable object
*/
- override def ++(elems: scala.collection.Traversable[A]) = { this.clone() ++= elems } // ??? XXX why does this "override nothing" with override?
+ override def ++(elems: scala.collection.Traversable[A]) = { this.clone() ++= elems }
/** Adds all elements provided by an iterator into the priority queue.
*
@@ -125,11 +163,11 @@ class PriorityQueue[A](implicit ord: Ordering[A])
* @return the element with the highest priority.
*/
def dequeue(): A =
- if (size0 > 1) {
- size0 = size0 - 1
- swap(1, size0)
- fixDown(array, 1, size0 - 1)
- toA(array(size0))
+ if (resarr.p_size0 > 1) {
+ resarr.p_size0 = resarr.p_size0 - 1
+ resarr.p_swap(1, resarr.p_size0)
+ fixDown(resarr.p_array, 1, resarr.p_size0 - 1)
+ toA(resarr.p_array(resarr.p_size0))
} else
throw new NoSuchElementException("no element to remove from heap")
@@ -138,12 +176,12 @@ class PriorityQueue[A](implicit ord: Ordering[A])
*
* @return the element with the highest priority.
*/
- def max: A = if (size0 > 1) toA(array(1)) else throw new NoSuchElementException("queue is empty")
+ def max: A = if (resarr.p_size0 > 1) toA(resarr.p_array(1)) else throw new NoSuchElementException("queue is empty")
/** Removes all elements from the queue. After this operation is completed,
* the queue will be empty.
*/
- def clear(): Unit = { size0 = 1 }
+ def clear(): Unit = { resarr.p_size0 = 1 }
/** Returns an iterator which yields all the elements of the priority
* queue in descending priority order.
@@ -151,9 +189,9 @@ class PriorityQueue[A](implicit ord: Ordering[A])
* @return an iterator over all elements sorted in descending order.
*/
override def iterator: Iterator[A] = new Iterator[A] {
- val as: Array[AnyRef] = new Array[AnyRef](size0)
- Array.copy(array, 0, as, 0, size0)
- var i = size0 - 1
+ val as: Array[AnyRef] = new Array[AnyRef](resarr.p_size0)
+ Array.copy(resarr.p_array, 0, as, 0, resarr.p_size0)
+ var i = resarr.p_size0 - 1
def hasNext: Boolean = i > 0
def next(): A = {
val res = toA(as(1))
@@ -164,12 +202,36 @@ class PriorityQueue[A](implicit ord: Ordering[A])
}
}
- /** This is utterly broken: Two priority queues of different length can still be equal!
- * The method should be removed once PriotyQueue inserts correctly into the sequence class hierarchy.
+ /**
+ * Returns the reverse of this queue. The priority queue that gets
+ * returned will have an inversed ordering - if for some elements
+ * <code>x</code> and <code>y</code> the original queue's ordering
+ * had <code>compare</code> returning an integer w, the new one will return -w,
+ * assuming the original ordering abides its contract.
+ *
+ * Note that the order of the elements will be reversed unless the
+ * <code>compare</code> method returns 0. In this case, such elements
+ * will be subsequent, but their corresponding subinterval may be inappropriately
+ * reversed. However, due to the compare-equals contract, they will also be equal.
*/
- override def equals(obj: Any): Boolean = obj match {
- case that: PriorityQueue[_] => (this.iterator zip that.iterator) forall { case (x, y) => x == y }
- case _ => false
+ override def reverse = {
+ val revq = new PriorityQueue[A]()(new math.Ordering[A] {
+ def compare(x: A, y: A) = ord.compare(y, x)
+ })
+ for (i <- 1 until resarr.length) revq += resarr(i)
+ revq
+ }
+
+ override def reverseIterator = new Iterator[A] {
+ val arr = new Array[Any](size)
+ iterator.copyToArray(arr)
+ var i = arr.size - 1
+ def hasNext: Boolean = i >= 0
+ def next(): A = {
+ val curr = arr(i)
+ i -= 1
+ curr.asInstanceOf[A]
+ }
}
/** The hashCode method always yields an error, since it is not
@@ -196,4 +258,18 @@ class PriorityQueue[A](implicit ord: Ordering[A])
* @return a priority queue with the same elements.
*/
override def clone(): PriorityQueue[A] = new PriorityQueue[A] ++= this.iterator
+
+ // def printstate {
+ // println("-----------------------")
+ // println("Size: " + resarr.p_size0)
+ // println("Internal array: " + resarr.p_array.toList)
+ // println(toString)
+ // }
}
+
+
+
+
+
+
+
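
With the rewrite above, PriorityQueue is a Seq backed by a private ResizableArrayAccess instead of extending ResizableArray directly; dequeue still yields elements highest-priority first, and reverse/reverseIterator are new. Illustrative usage:

    import scala.collection.mutable.PriorityQueue
    val pq = new PriorityQueue[Int]()
    pq += 3; pq += 1; pq += 4
    pq.max                        // 4
    pq.dequeue()                  // 4, then 3, then 1 on later calls
    pq.reverse                    // a queue with the inverted ordering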
diff --git a/src/library/scala/collection/mutable/Publisher.scala b/src/library/scala/collection/mutable/Publisher.scala
index 6d1eae7b78..29e7e9b371 100644
--- a/src/library/scala/collection/mutable/Publisher.scala
+++ b/src/library/scala/collection/mutable/Publisher.scala
@@ -18,32 +18,38 @@ package mutable
* a filter which can be used to constrain the number of events sent to the
* subscriber. Subscribers may suspend their subscription, or reactivate a
* suspended subscription. Class <code>Publisher</code> is typically used
- * as a mixin. The type variable <code>This</code> models self types.
+ * as a mixin. The abstract type <code>Pub</code> models the type of the publisher itself.
*
* @author Matthias Zenger
- * @version 1.0, 08/07/2003
+ * @author Martin Odersky
+ * @version 2.8
* @since 1
*/
-trait Publisher[A, This <: Publisher[A, This]] {
- self: This =>
+trait Publisher[Evt] {
- type SubThis = Subscriber[A, This]
- type Filter = A => Boolean
+ type Pub <: Publisher[Evt]
+ type Sub = Subscriber[Evt, Pub]
+ type Filter = Evt => Boolean
- private val filters = new HashMap[SubThis, Set[Filter]] with MultiMap[SubThis, Filter]
- private val suspended = new HashSet[SubThis]
+ /** The publisher itself of type `Pub'. Implemented by a cast from `this' here.
+ * Needs to be overridden if the actual publisher is different from `this'.
+ */
+ protected val self: Pub = this.asInstanceOf[Pub]
+
+ private val filters = new HashMap[Sub, Set[Filter]] with MultiMap[Sub, Filter]
+ private val suspended = new HashSet[Sub]
- def subscribe(sub: SubThis) { subscribe(sub, event => true) }
- def subscribe(sub: SubThis, filter: Filter) { filters(sub) += filter }
- def suspendSubscription(sub: SubThis) { suspended += sub }
- def activateSubscription(sub: SubThis) { suspended -= sub }
- def removeSubscription(sub: SubThis) { filters -= sub }
+ def subscribe(sub: Sub) { subscribe(sub, event => true) }
+ def subscribe(sub: Sub, filter: Filter) { filters(sub) += filter }
+ def suspendSubscription(sub: Sub) { suspended += sub }
+ def activateSubscription(sub: Sub) { suspended -= sub }
+ def removeSubscription(sub: Sub) { filters -= sub }
def removeSubscriptions() { filters.clear }
- protected def publish(event: A) {
+ protected def publish(event: Evt) {
filters.keysIterator.foreach(sub =>
if (filters.entryExists(sub, p => p(event)))
- sub.notify(this, event)
+ sub.notify(self, event)
)
}
@@ -52,7 +58,7 @@ trait Publisher[A, This <: Publisher[A, This]] {
* @return true, iff both publishers contain the same sequence of elements.
*/
override def equals(obj: Any): Boolean = obj match {
- case that: Publisher[_, _] =>
+ case that: Publisher[_] =>
(this.filters equals that.filters) &&
(this.suspended equals that.suspended)
case _ =>
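
The Publisher/Subscriber change above drops the This self-type parameter in favour of an abstract type Pub that defaults to a cast of this. A minimal sketch of how a concrete publisher now looks (StringPublisher and Printer are made-up names):

    import scala.collection.mutable.{ Publisher, Subscriber }

    class StringPublisher extends Publisher[String] {
      type Pub = StringPublisher
      def emit(msg: String) { publish(msg) }
    }

    object Printer extends Subscriber[String, StringPublisher] {
      def notify(pub: StringPublisher, event: String) { println(event) }
    }

    val p = new StringPublisher
    p subscribe Printer
    p emit "hello"                // Printer prints "hello"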
diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala
index a8fd82f726..8653ec6adf 100644
--- a/src/library/scala/collection/mutable/ResizableArray.scala
+++ b/src/library/scala/collection/mutable/ResizableArray.scala
@@ -90,9 +90,12 @@ trait ResizableArray[A] extends IndexedSeq[A]
var newsize = array.length * 2
while (n > newsize)
newsize = newsize * 2
+ // println("Internal array before, size " + size0 + ": " + array.toList)
val newar: Array[AnyRef] = new Array(newsize)
Array.copy(array, 0, newar, 0, size0)
+ // println("Internal array after, size " + size0 + ": " + array.toList)
array = newar
+ // println("New array after, size " + size0 + ": " + newar.toList)
}
}
diff --git a/src/library/scala/collection/mutable/RevertibleHistory.scala b/src/library/scala/collection/mutable/RevertibleHistory.scala
index 08b6b56cac..8706cab585 100644
--- a/src/library/scala/collection/mutable/RevertibleHistory.scala
+++ b/src/library/scala/collection/mutable/RevertibleHistory.scala
@@ -23,7 +23,7 @@ package mutable
* @since 2.8
*/
@serializable
-class RevertibleHistory[A <: Undoable, B] extends History[A, B] with Undoable {
+class RevertibleHistory[Evt <: Undoable, Pub] extends History[Evt, Pub] with Undoable {
/** Rollback the full history.
*/
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index 6e7205c3fa..5725580139 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -62,7 +62,6 @@ class Stack[A] private (var elems: List[A]) extends scala.collection.Seq[A] with
*
* @param elems the iterator object.
* @return the stack with the new elements on top.
- * @deprecated
*/
def pushAll(elems: Iterator[A]): this.type = { for (elem <- elems) { push(elem); () }; this }
diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala
index 47bac8ad47..e83a30b180 100644
--- a/src/library/scala/collection/mutable/StringBuilder.scala
+++ b/src/library/scala/collection/mutable/StringBuilder.scala
@@ -115,7 +115,7 @@ final class StringBuilder(initCapacity: Int, private val initValue: String)
*/
def ensureCapacity(n: Int) {
if (n > array.length) {
- var newsize = array.length * 2
+ var newsize = (array.length * 2) max 1
while (n > newsize)
newsize = newsize * 2
val newar = new Array[Char](newsize)
@@ -727,7 +727,7 @@ final class StringBuilder(initCapacity: Int, private val initValue: String)
* specified substring, starting at the specified index. The integer
* returned is the smallest value <code>k</code> for which:
* </p><pre>
- * k >= Math.min(fromIndex, str.length()) &&
+ * k >= math.min(fromIndex, str.length()) &&
* this.toString().startsWith(str, k)</pre>
* <p>
* If no such value of <code>k</code> exists, then <code>-1</code>
@@ -768,7 +768,7 @@ final class StringBuilder(initCapacity: Int, private val initValue: String)
* specified substring. The integer returned is the largest value
* <code>k</code> such that:
* </p><pre>val
- * k <= Math.min(fromIndex, str.length()) &&
+ * k <= math.min(fromIndex, str.length()) &&
* this.toString().startsWith(str, k)</pre>
* <p>
* If no such value of <code>k</code> exists, then <code>-1</code>
@@ -852,7 +852,7 @@ object StringBuilder
// method <code>java.util.Arrays.copyOf</code> exists since 1.6
private def copyOf(src: Array[Char], newLength: Int): Array[Char] = {
val dest = new Array[Char](newLength)
- arraycopy(src, 0, dest, 0, Math.min(src.length, newLength))
+ arraycopy(src, 0, dest, 0, src.length min newLength)
dest
}
}
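
The `max 1` in ensureCapacity matters when the backing array has length zero: doubling zero stays zero, so the growth loop could never reach n. A tiny sketch of the sizing logic under that assumption (grownSize is a made-up helper):

    def grownSize(current: Int, needed: Int): Int = {
      var newsize = (current * 2) max 1
      while (needed > newsize) newsize *= 2
      newsize
    }
    grownSize(0, 5)               // 8; without `max 1` this call would loop forever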
diff --git a/src/library/scala/collection/mutable/Subscriber.scala b/src/library/scala/collection/mutable/Subscriber.scala
index abf92f0840..431c157449 100644
--- a/src/library/scala/collection/mutable/Subscriber.scala
+++ b/src/library/scala/collection/mutable/Subscriber.scala
@@ -19,9 +19,10 @@ package mutable
* target="contentFrame"><code>Publisher</code></a>.
*
* @author Matthias Zenger
- * @version 1.0, 08/07/2003
+ * @author Martin Odersky
+ * @version 2.8
* @since 1
*/
-trait Subscriber[-A, -B] {
- def notify(pub: B, event: A): Unit
+trait Subscriber[-Evt, -Pub] {
+ def notify(pub: Pub, event: Evt): Unit
}
diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
index 590757be61..269b359e28 100644
--- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala
+++ b/src/library/scala/collection/mutable/SynchronizedBuffer.scala
@@ -42,7 +42,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
*
* @param elem the element to append.
*/
- override def +(elem: A): Buffer[A] = synchronized {
+ override def +(elem: A): Self = synchronized {
super.+(elem)
}
@@ -60,7 +60,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
*
* @param iter the iterable object.
*/
- override def ++(iter: Traversable[A]): Buffer[A] = synchronized {
+ override def ++(iter: Traversable[A]): Self = synchronized {
super.++(iter)
}
@@ -95,7 +95,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
*
* @param elem the element to append.
*/
- abstract override def +=:(elem: A): Buffer[A] = synchronized {
+ abstract override def +=:(elem: A): this.type = synchronized[this.type] {
super.+=:(elem)
}
@@ -105,7 +105,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
*
* @param iter the iterable object.
*/
- override def ++=:(iter: Traversable[A]): Buffer[A] = synchronized {
+ override def ++=:(iter: Traversable[A]): this.type = synchronized[this.type] {
super.++=:(iter)
}
@@ -181,7 +181,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] {
*
* @return an <code>ArrayBuffer</code> with the same elements.
*/
- override def clone(): Buffer[A] = synchronized {
+ override def clone(): Self = synchronized {
super.clone()
}
diff --git a/src/library/scala/collection/mutable/SynchronizedMap.scala b/src/library/scala/collection/mutable/SynchronizedMap.scala
index 650c939936..a2ad697985 100644
--- a/src/library/scala/collection/mutable/SynchronizedMap.scala
+++ b/src/library/scala/collection/mutable/SynchronizedMap.scala
@@ -38,7 +38,7 @@ trait SynchronizedMap[A, B] extends Map[A, B] {
override def valuesIterable: scala.collection.Iterable[B] = synchronized { super.valuesIterable }
@deprecated("Use `valuesIterator' instead") override def values: Iterator[B] = synchronized { super.valuesIterator }
override def valuesIterator: Iterator[B] = synchronized { super.valuesIterator }
- override def clone() = synchronized { super.clone() }
+ override def clone(): Self = synchronized { super.clone() }
override def foreach[U](f: ((A, B)) => U) = synchronized { super.foreach(f) }
override def apply(key: A): B = synchronized { super.apply(key) }
override def keySet: scala.collection.Set[A] = synchronized { super.keySet }
@@ -50,7 +50,7 @@ trait SynchronizedMap[A, B] extends Map[A, B] {
@deprecated("See Map.+ for explanation") override def +(kv: (A, B)): this.type = synchronized[this.type] { super.+(kv) }
// can't override -, -- same type!
- // @deprecated override def -(key: A): This = synchronized { super.-(key) }
+ // @deprecated override def -(key: A): Self = synchronized { super.-(key) }
// !!! todo: also add all other methods
}
diff --git a/src/library/scala/concurrent/DelayedLazyVal.scala b/src/library/scala/concurrent/DelayedLazyVal.scala
index a5d8219a02..4fcf131f5f 100644
--- a/src/library/scala/concurrent/DelayedLazyVal.scala
+++ b/src/library/scala/concurrent/DelayedLazyVal.scala
@@ -10,8 +10,7 @@
package scala.concurrent
-import annotation.experimental
-import ops._
+import ops.future
/** A <code>DelayedLazyVal</code> is a wrapper for lengthy
* computations which have a valid partially computed result.
@@ -27,7 +26,6 @@ import ops._
* @author Paul Phillips
* @version 2.8
*/
-@experimental
class DelayedLazyVal[T](f: () => T, body: => Unit) {
@volatile private[this] var isDone = false
private[this] lazy val complete = f()
diff --git a/src/library/scala/deprecated.scala b/src/library/scala/deprecated.scala
index 1a176d1bc2..0b10b1ab74 100644
--- a/src/library/scala/deprecated.scala
+++ b/src/library/scala/deprecated.scala
@@ -11,12 +11,15 @@
package scala
+import annotation.target._
+
/**
* An annotation that designates the definition to which it is applied as deprecated.
* Access to the member then generates a deprecated warning.
*
* @since 2.3
*/
+@getter @setter @beanGetter @beanSetter
class deprecated(message: String) extends StaticAnnotation {
def this() = this("")
}
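
The target meta-annotations added above let a deprecation placed on a val or var reach the generated accessors as well, so that (as intended by this change) using the field through its getter or setter also produces the warning. A small illustration (Account is a made-up class):

    class Account {
      @deprecated("use balanceInCents instead") var balance: Int = 0
    }
    // reading or assigning balance now also trips the deprecation
    // warning via the annotated getter and setter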
diff --git a/src/library/scala/io/UTF8Codec.scala b/src/library/scala/io/UTF8Codec.scala
index 4fac5bca47..ec328700cc 100644
--- a/src/library/scala/io/UTF8Codec.scala
+++ b/src/library/scala/io/UTF8Codec.scala
@@ -17,7 +17,7 @@ package scala.io
object UTF8Codec
{
final val UNI_REPLACEMENT_CHAR: Int = 0x0000FFFD
- final val UNI_REPLACEMENT_BYTES = encode(UNI_REPLACEMENT_CHAR)
+ final val UNI_REPLACEMENT_BYTES = Array[Byte](-17, -65, -67)
// Note, from http://unicode.org/faq/utf_bom.html#utf8-5
//
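
The literal Array[Byte](-17, -65, -67) is the signed-byte spelling of 0xEF 0xBF 0xBD, i.e. the UTF-8 encoding of U+FFFD, so the constant no longer needs to be computed by encode during initialization. A quick REPL check:

    "\uFFFD".getBytes("UTF-8").toList    // List(-17, -65, -67)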
diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala
index 30e13d9f0e..3eeebc8aea 100644
--- a/src/library/scala/math/Ordering.scala
+++ b/src/library/scala/math/Ordering.scala
@@ -27,11 +27,11 @@ import java.util.Comparator
* <li>reflexive: <code>compare(x, x) == 0</code>, for any <code>x</code> of
* type <code>T</code>.</li>
* <li>symmetry: <code>compare(x, y) == z</code> and <code>compare(y, x) == w</code>
- * then <code>Math.signum(z) == -Math.signum(w)</code>, for any <code>x</code> and <code>y</code> of
+ * then <code>math.signum(z) == -math.signum(w)</code>, for any <code>x</code> and <code>y</code> of
* type <code>T</code> and <code>z</code> and <code>w</code> of type <code>Int</code>.</li>
* <li>transitive: if <code>compare(x, y) == z</code> and <code>compare(y, w) == v</code>
- * and <code>Math.signum(z) &gt;= 0</code> and <code>Math.signum(v) &gt;= 0</code> then
- * <code>compare(x, w) == u</code> and <code>Math.signum(z + v) == Math.signum(u)</code>,
+ * and <code>math.signum(z) &gt;= 0</code> and <code>math.signum(v) &gt;= 0</code> then
+ * <code>compare(x, w) == u</code> and <code>math.signum(z + v) == math.signum(u)</code>,
* for any <code>x</code>, <code>y</code>,
* and <code>w</code> of type <code>T</code> and <code>z</code>, <code>v</code>, and <code>u</code>
* of type <code>Int</code>.</li>
diff --git a/src/library/scala/math/package.scala b/src/library/scala/math/package.scala
new file mode 100644
index 0000000000..3d17f36f12
--- /dev/null
+++ b/src/library/scala/math/package.scala
@@ -0,0 +1,156 @@
+/* __ *\
+** ________ ___ / / ___ Scala API **
+** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
+** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
+** /____/\___/_/ |_/____/_/ | | **
+** |/ **
+\* */
+
+package scala
+
+/** The package object <code>scala.math</code> contains methods for performing basic numeric
+ * operations such as the elementary exponential, logarithm, square root, and
+ * trigonometric functions.
+ */
+
+package object math {
+ /** The <code>double</code> value that is closer than any other to
+ * <code>e</code>, the base of the natural logarithms.
+ */
+ val E = java.lang.Math.E
+
+ /** The <code>double</code> value that is closer than any other to
+ * <code>pi</code>, the ratio of the circumference of a circle to its
+ * diameter.
+ */
+ val Pi = java.lang.Math.PI
+
+ /** Returns a <code>double</code> value with a positive sign, greater than
+ * or equal to <code>0.0</code> and less than <code>1.0</code>.
+ */
+ def random: Double = java.lang.Math.random()
+
+ def sin(x: Double): Double = java.lang.Math.sin(x)
+ def cos(x: Double): Double = java.lang.Math.cos(x)
+ def tan(x: Double): Double = java.lang.Math.tan(x)
+ def asin(x: Double): Double = java.lang.Math.asin(x)
+ def acos(x: Double): Double = java.lang.Math.acos(x)
+ def atan(x: Double): Double = java.lang.Math.atan(x)
+
+ /** Converts an angle measured in degrees to an approximately equivalent
+ * angle measured in radians.
+ *
+ * @param x an angle, in degrees
+ * @return the measurement of the angle <code>x</code> in radians.
+ */
+ def toRadians(x: Double): Double = java.lang.Math.toRadians(x)
+
+ /** Converts an angle measured in radians to an approximately equivalent
+ * angle measured in degrees.
+ *
+ * @param x angle, in radians
+ * @return the measurement of the angle <code>x</code> in degrees.
+ */
+ def toDegrees(x: Double): Double = java.lang.Math.toDegrees(x)
+
+ /** Returns Euler's number <code>e</code> raised to the power of a
+ * <code>double</code> value.
+ *
+ * @param x the exponent to raise <code>e</code> to.
+ * @return the value <code>e<sup>a</sup></code>, where <code>e</code>
+ * is the base of the natural logarithms.
+ */
+ def exp(x: Double): Double = java.lang.Math.exp(x)
+ def log(x: Double): Double = java.lang.Math.log(x)
+ def sqrt(x: Double): Double = java.lang.Math.sqrt(x)
+ def IEEEremainder(x: Double, y: Double): Double = java.lang.Math.IEEEremainder(x, y)
+
+ def ceil(x: Double): Double = java.lang.Math.ceil(x)
+ def floor(x: Double): Double = java.lang.Math.floor(x)
+
+ /** Returns the <code>double</code> value that is closest in value to the
+ * argument and is equal to a mathematical integer.
+ *
+ * @param x a <code>double</code> value
+ * @return the closest floating-point value to a that is equal to a
+ * mathematical integer.
+ */
+ def rint(x: Double): Double = java.lang.Math.rint(x)
+
+ /** Converts rectangular coordinates <code>(x, y)</code> to polar
+ * <code>(r, theta)</code>.
+ *
+ * @param x the ordinate coordinate
+ * @param y the abscissa coordinate
+ * @return the <em>theta</em> component of the point <code>(r, theta)</code>
+ * in polar coordinates that corresponds to the point
+ * <code>(x, y)</code> in Cartesian coordinates.
+ */
+ def atan2(y: Double, x: Double): Double = java.lang.Math.atan2(y, x)
+
+ /** Returns the value of the first argument raised to the power of the
+ * second argument.
+ *
+ * @param x the base.
+ * @param y the exponent.
+ * @return the value <code>x<sup>y</sup></code>.
+ */
+ def pow(x: Double, y: Double): Double = java.lang.Math.pow(x, y)
+
+ /** Returns the closest <code>long</code> to the argument.
+ *
+ * @param x a floating-point value to be rounded to a <code>long</code>.
+ * @return the value of the argument rounded to the nearest
+ * <code>long</code> value.
+ */
+ def round(x: Float): Int = java.lang.Math.round(x)
+ def round(x: Double): Long = java.lang.Math.round(x)
+ def abs(x: Int): Int = java.lang.Math.abs(x)
+ def abs(x: Long): Long = java.lang.Math.abs(x)
+ def abs(x: Float): Float = java.lang.Math.abs(x)
+ def abs(x: Double): Double = java.lang.Math.abs(x)
+
+ def max(x: Int, y: Int): Int = java.lang.Math.max(x, y)
+ def max(x: Long, y: Long): Long = java.lang.Math.max(x, y)
+ def max(x: Float, y: Float): Float = java.lang.Math.max(x, y)
+ def max(x: Double, y: Double): Double = java.lang.Math.max(x, y)
+
+ def min(x: Int, y: Int): Int = java.lang.Math.min(x, y)
+ def min(x: Long, y: Long): Long = java.lang.Math.min(x, y)
+ def min(x: Float, y: Float): Float = java.lang.Math.min(x, y)
+ def min(x: Double, y: Double): Double = java.lang.Math.min(x, y)
+
+ def signum(x: Double): Double =
+ if (x == 0d) 0d
+ else if (x < 0) -1.0
+ else if (x > 0) 1.0
+ else x // NaN
+
+ def signum(x: Float): Float =
+ if (x == 0f) 0f
+ else if (x < 0) -1.0f
+ else if (x > 0) 1.0f
+ else x // NaN
+
+ def signum(x: Long): Long =
+ if (x == 0l) 0l
+ else if (x < 0) -1l
+ else 1l
+
+ def signum(x: Int): Int =
+ if (x == 0) 0
+ else if (x < 0) -1
+ else 1
+
+ def log10(x: Double): Double = java.lang.Math.log10(x)
+ def cbrt(x: Double): Double = java.lang.Math.cbrt(x)
+
+ def ulp(x: Double): Double = java.lang.Math.ulp(x)
+ def ulp(x: Float): Float = java.lang.Math.ulp(x)
+ def sinh(x: Double): Double = java.lang.Math.sinh(x)
+ def cosh(x: Double): Double = java.lang.Math.cosh(x)
+ def tanh(x: Double): Double = java.lang.Math.tanh(x)
+ def hypot(x: Double, y: Double): Double = java.lang.Math.hypot(x, y)
+ def expm1(x: Double): Double = java.lang.Math.expm1(x)
+ def log1p(x: Double): Double = java.lang.Math.log1p(x)
+}
\ No newline at end of file
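A minimal usage sketch of the wrappers added above (illustrative only, not part of the patch; it assumes the definitions live in the scala.math package object):

    import scala.math._
    toRadians(180.0)   // ~3.141592653589793
    signum(-7)         // -1
    round(2.5f)        // 3  (an Int, since the argument is a Float)
    round(2.5)         // 3L (a Long, since the argument is a Double)
    atan2(1.0, 1.0)    // ~0.7853981633974483, i.e. a quarter turn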
diff --git a/src/library/scala/net/Utility.scala b/src/library/scala/net/Utility.scala
deleted file mode 100644
index 59ce1c0ceb..0000000000
--- a/src/library/scala/net/Utility.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-package scala.net
-
-import java.net.{ URL, MalformedURLException }
-import scala.util.control.Exception._
-
-/**
- * Skeleton in anticipation of more convenience methods.
- *
- * @since 2.8
- */
-object Utility
-{
- def parseURL(s: String): Option[URL] =
- catching(classOf[MalformedURLException]) opt new URL(s)
-}
diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala
index 7c25757e57..b9a262030f 100644
--- a/src/library/scala/package.scala
+++ b/src/library/scala/package.scala
@@ -93,6 +93,51 @@ package object scala {
type PartialOrdering[T] = scala.math.PartialOrdering[T]
type PartiallyOrdered[T] = scala.math.PartiallyOrdered[T]
+ @deprecated("Use Tuple1(x) to create a 1-tuple.")
+ def Tuple[A1](x1: A1) = Tuple1(x1)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2](x1: A1, x2: A2) = Tuple2(x1, x2)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3](x1: A1, x2: A2, x3: A3) = Tuple3(x1, x2, x3)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4](x1: A1, x2: A2, x3: A3, x4: A4) = Tuple4(x1, x2, x3, x4)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5) = Tuple5(x1, x2, x3, x4, x5)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6) = Tuple6(x1, x2, x3, x4, x5, x6)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7) = Tuple7(x1, x2, x3, x4, x5, x6, x7)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8) = Tuple8(x1, x2, x3, x4, x5, x6, x7, x8)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9) = Tuple9(x1, x2, x3, x4, x5, x6, x7, x8, x9)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10) = Tuple10(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11) = Tuple11(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12) = Tuple12(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13) = Tuple13(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14) = Tuple14(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15) = Tuple15(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16) = Tuple16(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17) = Tuple17(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18) = Tuple18(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18, x19: A19) = Tuple19(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18, x19: A19, x20: A20) = Tuple20(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18, x19: A19, x20: A20, x21: A21) = Tuple21(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)
+ @deprecated("Use ((x1, x2, ...)) syntax to create Tuples")
+ def Tuple[A1, A2, A3, A4, A5, A6, A7, A8, A9, A10, A11, A12, A13, A14, A15, A16, A17, A18, A19, A20, A21, A22](x1: A1, x2: A2, x3: A3, x4: A4, x5: A5, x6: A6, x7: A7, x8: A8, x9: A9, x10: A10, x11: A11, x12: A12, x13: A13, x14: A14, x15: A15, x16: A16, x17: A17, x18: A18, x19: A19, x20: A20, x21: A21, x22: A22) = Tuple22(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)
+
@deprecated("use <code>java.lang.Integer</code> instead")
type Integer = java.lang.Integer
@deprecated("use <code>java.lang.Character</code> instead")
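As a quick illustration of the Tuple deprecations above (not part of the patch), both lines below construct the same Tuple2; the second is the preferred literal syntax the messages point to:

    val a = Tuple(1, "one")   // deprecated factory, emits a deprecation warning
    val b = (1, "one")        // equivalent tuple literal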
diff --git a/src/library/scala/reflect/Invocation.scala b/src/library/scala/reflect/Invocation.scala
deleted file mode 100644
index 795c74fef4..0000000000
--- a/src/library/scala/reflect/Invocation.scala
+++ /dev/null
@@ -1,134 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.reflect
-
-import scala.annotation.experimental
-import scala.util.control.Exception.catching
-import java.lang.{ Class => JClass }
-import java.lang.reflect.{ Method => JMethod }
-import scala.{ Symbol => ScalaSymbol }
-
-/** <p>
- * A more convenient syntax for reflective invocation.<br/>
- * Example usage:
- * </p><pre>
- * <b>class</b> Obj { <b>private def</b> foo(x: Int, y: String): Long = x + y.length }</pre>
- * <p>
- * You can call it reflectively one of two ways:
- * </p><pre>
- * <b>import</b> scala.reflect.Invocation._
- * (<b>new</b> Obj) o 'foo(5, "abc") // the 'o' method returns Any
- * <b>val</b> x: Long = (<b>new</b> Obj) oo 'foo(5, "abc") // the 'oo' method casts to expected type.</pre>
- * <p>
- * If you call the <code>oo</code> method and do not give the type inferencer
- * enough help, it will most likely infer <code>Nothing</code>, which will
- * result in a <code>ClassCastException</code>.
- * </p>
- *
- * @author Paul Phillips
- */
-@experimental
-object Invocation
-{
- /** <p>
- * In order to encapsulate anything to do with reflection, we must
- * overcome an issue with the boxing of primitives. If we declare a
- * method which takes arguments of type <code>Any</code>, by the time the
- * method parameters can be examined, the primitives have already been boxed.
- * The reflective call will then fail because <code>classOf[java.lang.Integer]</code>
- * is not the same thing as <code>classOf[scala.Int].</code>
- * </p>
- * <p>
- * Any useful workaround will require examining the arguments before
- * the method is called. The approach here is to define two implicits,
- * one for <code>AnyRef</code>'s and one for <code>AnyVal</code>'s, and
- * box them in a container which preserves their original class identity.
- * </p>
- */
- trait PrimitivePreserver[T] {
- val value: T
- val clazz: JClass[_]
- }
- case class PreservedAnyVal[T <: AnyVal](value: T) extends PrimitivePreserver[T] {
- val clazz = getAnyValClass(value)
- }
- case class PreservedAnyRef[T <: AnyRef](value: T) extends PrimitivePreserver[T] {
- val clazz = value.getClass
- }
- implicit def makePreservedAnyRef[T <: AnyRef](x: T) = PreservedAnyRef(x)
- implicit def makePreservedAnyVal[T <: AnyVal](x: T) = PreservedAnyVal(x)
-
- /** We also require an implicit on scala.Symbol so they appear to contain
- * an apply method, which packages the method arguments. The type parameter
- * is the method's expected result type.
- */
- class SymbolWithArguments(val sym: ScalaSymbol, val args: PrimitivePreserver[_]*) {
- def getArgs = args map (_.value.asInstanceOf[AnyRef])
- def getArgTypes = args.toList map (_.clazz)
- def argsMatch(m: JMethod) =
- List.map2(m.getParameterTypes.toList, getArgTypes)(_ isAssignableFrom _) forall (_ == true)
-
- // only called if getMethod() fails - searches private methods too.
- def getDeclaredMethodsOn(x: AnyRef) =
- (x.getClass.getDeclaredMethods filter (_.getName == sym.name) find argsMatch) match {
- case Some(m) => m setAccessible true ; m
- case None => throw new NoSuchMethodException(sym.name)
- }
-
- def getMethodOn(x: AnyRef) =
- catching(classOf[NoSuchMethodException]) .
- opt (x.getClass.getMethod(sym.name, getArgTypes: _*)) .
- getOrElse (getDeclaredMethodsOn(x))
-
- }
- class RichSymbol(sym: ScalaSymbol) {
- def apply(args: PrimitivePreserver[_]*): SymbolWithArguments =
- new SymbolWithArguments(sym, args: _*)
- }
- implicit def makeRichSymbol(sym: ScalaSymbol): RichSymbol = new RichSymbol(sym)
-
- /** An implicit on AnyRef provides it with the 'o' method, which is supposed
- * to look like a giant '.' and present the feel of method invocation.
- */
- class ReflectionOperators[T <: AnyRef](self: T) {
- val clazz = self.getClass.asInstanceOf[JClass[T]]
-
- /** Issue call without touching result - returns Any.
- */
- def o(sym: ScalaSymbol): Any = oo(new SymbolWithArguments(sym))
- def o(symApp: SymbolWithArguments): Any = oo(symApp)
-
- /** Issue call expecting return type R - casts result to R.
- */
- def oo[R](sym: ScalaSymbol): R = oo[R](new SymbolWithArguments(sym))
- def oo[R](symApp: SymbolWithArguments): R = {
- def method = symApp getMethodOn self
- method.invoke(self, symApp.getArgs: _*).asInstanceOf[R]
- }
- }
- implicit def makeReflectionOperators[T <: AnyRef](x: T): ReflectionOperators[T] =
- new ReflectionOperators(x)
-
- /** Obtain the class object for an <code>AnyVal</code>.
- */
- def getAnyValClass(x: AnyVal): JClass[_] = x match {
- case _: Byte => classOf[Byte]
- case _: Short => classOf[Short]
- case _: Int => classOf[Int]
- case _: Long => classOf[Long]
- case _: Float => classOf[Float]
- case _: Double => classOf[Double]
- case _: Char => classOf[Char]
- case _: Boolean => classOf[Boolean]
- case _: Unit => classOf[Unit]
- }
-}
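A small sketch of the boxing pitfall the removed PrimitivePreserver comment describes (illustrative only): once a primitive argument reaches a parameter of type Any it has already been boxed, so its runtime class is the Java wrapper, which never matches the primitive class used in reflective method lookups.

    def runtimeClassOf(x: Any) = x.asInstanceOf[AnyRef].getClass
    runtimeClassOf(5)    // class java.lang.Integer
    classOf[Int]         // int -- different classes, so naive getMethod lookups on Any arguments fail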
diff --git a/src/library/scala/reflect/Print.scala b/src/library/scala/reflect/Print.scala
index 6df7fb032a..5b773e69b4 100644
--- a/src/library/scala/reflect/Print.scala
+++ b/src/library/scala/reflect/Print.scala
@@ -103,9 +103,8 @@ object Print extends Function1[Any, String] {
case reflect.MethodType(formals, resultType) =>
formals.map(Print).mkString("(", ", ", ")") + " => " + Print(resultType)
case reflect.PolyType(typeParams, typeBounds, resultType) =>
- (List.map2(typeParams, typeBounds)
- ((tp, tb) => "[" + Print(tb._1) + " :> " + Print(tp) + " :> " + Print(tb._2) + "]")).
- mkString("[", ", ", "]") + " -> " + Print(resultType)
+ val z = (typeParams, typeBounds).zip map { case (tp, tb) => "[" + Print(tb._1) + " :> " + Print(tp) + " :> " + Print(tb._2) + "]" }
+ z.mkString("[", ", ", "]") + " -> " + Print(resultType)
case _ =>
"???"
}
diff --git a/src/library/scala/reflect/RichClass.scala b/src/library/scala/reflect/RichClass.scala
deleted file mode 100644
index 7d690f360d..0000000000
--- a/src/library/scala/reflect/RichClass.scala
+++ /dev/null
@@ -1,93 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-package scala.reflect
-
-import scala.annotation.experimental
-import scala.util.control.Exception._
-import scala.util.ScalaClassLoader._
-import java.lang.{ Class => JClass }
-import java.lang.reflect. { Constructor => JConstructor }
-
-object RichClass
-{
- // We can't put this in Predef at the moment because everything referenced
- // from Predef has to be buildable at the first bootstraping phase.
- implicit def classWrapper[T](x: JClass[T]): RichClass[T] = new RichClass(x)
-}
-
-@experimental
-final class RichClass[T](val self: JClass[T]) extends Proxy
-{
- // The getConstructors and getDeclaredConstructors methods on java.lang.Class[T]
- // return "raw type" Constructors rather than Constructor[T]s as one would want.
- // The "why" from http://java.sun.com/javase/6/docs/api/java/lang/Class.html is:
- //
- // Note that while this method returns an array of Constructor<T> objects (that is an array
- // of constructors from this class), the return type of this method is Constructor<?>[] and
- // not Constructor<T>[] as might be expected. This less informative return type is necessary
- // since after being returned from this method, the array could be modified to hold Constructor
- // objects for different classes, which would violate the type guarantees of Constructor<T>[]
- //
- // Since this reasoning is invalid in scala due to its abandonment of Array covariance,
- // these methods exist to correct the return types.
- //
- // In addition, at this writing because of ticket #1560 the compiler crashes on the
- // untyped constructors but not on these.
-
- def getConstructorsTyped(): Array[JConstructor[T]] =
- self.getConstructors() map (_.asInstanceOf[JConstructor[T]])
-
- def getDeclaredConstructorsTyped(): Array[JConstructor[T]] =
- self.getDeclaredConstructors() map (_.asInstanceOf[JConstructor[T]])
-
- private lazy val classLoader = self.getClassLoader match {
- case null => getSystemLoader
- case x => x
- }
- private val exceptions = List(
- classOf[ClassNotFoundException],
- classOf[NoSuchMethodException],
- classOf[SecurityException],
- classOf[NullPointerException],
- classOf[ClassCastException]
- )
-
- // Experimental!
- // scala> classOf[String].reflectiveCall[Array[String]]("ababab", "split")("b")
- // res0: Array[String] = Array(a, a, a)
-
- /** A class representing a reflective method call. It is a function object
- * and will make the call with whatever args are given via apply, or it will
- * throw an exception at that point if there was an error in creation.
- */
- class ReflectiveCall[+U](obj: T, name: String) {
- def methodForArgs(args: AnyRef*) = self.getMethod(name, args map (_.getClass) : _*)
- def isErroneous = false
- def apply(args: Any*): U = {
- val ps = args map (_.asInstanceOf[AnyRef])
- val m = methodForArgs(ps: _*)
- m.invoke(obj, ps: _*).asInstanceOf[U]
- }
- }
-
- class FailedReflectiveCall[+U](ex: Throwable) extends ReflectiveCall[U](null.asInstanceOf[T], null) {
- override def isErroneous = true
- override def apply(args: Any*) = throw ex
- }
-
- def reflectiveCall[U](obj: T, method: String): ReflectiveCall[U] = {
- (catching(exceptions: _*) either (new ReflectiveCall[U](obj, method))) match {
- case Left(x) => new FailedReflectiveCall[U](x)
- case Right(x) => x
- }
- }
-}
-
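The raw-type issue the removed RichClass comment refers to can be seen directly (illustrative sketch, not part of the patch): Java declares getConstructors as Constructor<?>[], so on the Scala side the element type is lost and the *Typed helpers simply cast it back.

    val cs: Array[java.lang.reflect.Constructor[_]] = classOf[String].getConstructors
    // each element must be cast back to Constructor[String] by the caller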
diff --git a/src/library/scala/runtime/BoxedAnyArray.scala b/src/library/scala/runtime/BoxedAnyArray.scala
deleted file mode 100644
index e0f6bd5be2..0000000000
--- a/src/library/scala/runtime/BoxedAnyArray.scala
+++ /dev/null
@@ -1,224 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-import compat.Platform
-
-/**
- * Arrays created by <code>new Array[T](length)</code> where <code>T</code>
- * is a type variable.
- *
- * @author Martin Odersky
- */
-@serializable
-final class BoxedAnyArray[A](val length: Int) extends BoxedArray[A] {
-
- def elemManifest: ClassManifest[A] = null
-
- private var boxed = new Array[AnyRef](length)
-// private val hash = boxed.hashCode()
- private var unboxed: AnyRef = null
- private var elemClass: Class[_] = null
-
- def apply(index: Int): A = synchronized {
- if (unboxed eq null)
- boxed(index)
- else if (elemClass eq classOf[Int])
- Int.box(unboxed.asInstanceOf[Array[Int]](index))
- else if (elemClass eq classOf[Double])
- Double.box(unboxed.asInstanceOf[Array[Double]](index))
- else if (elemClass eq classOf[Float])
- Float.box(unboxed.asInstanceOf[Array[Float]](index))
- else if (elemClass eq classOf[Long])
- Long.box(unboxed.asInstanceOf[Array[Long]](index))
- else if (elemClass eq classOf[Char])
- Char.box(unboxed.asInstanceOf[Array[Char]](index))
- else if (elemClass eq classOf[Byte])
- Byte.box(unboxed.asInstanceOf[Array[Byte]](index))
- else if (elemClass eq classOf[Short])
- Short.box(unboxed.asInstanceOf[Array[Short]](index))
- else if (elemClass eq classOf[Boolean])
- Boolean.box(unboxed.asInstanceOf[Array[Boolean]](index))
- else
- unboxed.asInstanceOf[Array[AnyRef]](index)
- }.asInstanceOf[A]
-
- def update(index: Int, _elem: A): Unit = synchronized {
- val elem = _elem.asInstanceOf[AnyRef]
- if (unboxed eq null)
- boxed(index) = elem
- else if (elemClass eq classOf[Int])
- unboxed.asInstanceOf[Array[Int]](index) = Int.unbox(elem)
- else if (elemClass eq classOf[Double])
- unboxed.asInstanceOf[Array[Double]](index) = Double.unbox(elem)
- else if (elemClass eq classOf[Float])
- unboxed.asInstanceOf[Array[Float]](index) = Float.unbox(elem)
- else if (elemClass eq classOf[Long])
- unboxed.asInstanceOf[Array[Long]](index) = Long.unbox(elem)
- else if (elemClass eq classOf[Char])
- unboxed.asInstanceOf[Array[Char]](index) = Char.unbox(elem)
- else if (elemClass eq classOf[Byte])
- unboxed.asInstanceOf[Array[Byte]](index) = Byte.unbox(elem)
- else if (elemClass eq classOf[Short])
- unboxed.asInstanceOf[Array[Short]](index) = Short.unbox(elem)
- else if (elemClass eq classOf[Boolean])
- unboxed.asInstanceOf[Array[Boolean]](index) = Boolean.unbox(elem)
- else
- unboxed.asInstanceOf[Array[AnyRef]](index) = elem
- }
-
- def unbox(elemClass: Class[_]): AnyRef = synchronized {
- if (unboxed eq null) {
- this.elemClass = elemClass;
- if (elemClass eq classOf[Int]) {
- val newvalue = new Array[Int](length)
- var i = 0
- while (i < length) {
- newvalue(i) = Int.unbox(boxed(i))
- i += 1
- }
- unboxed = newvalue
- } else if (elemClass eq classOf[Double]) {
- val newvalue = new Array[Double](length)
- var i = 0
- while (i < length) {
- newvalue(i) = Double.unbox(boxed(i))
- i += 1
- }
- unboxed = newvalue;
- } else if (elemClass eq classOf[Float]) {
- val newvalue = new Array[Float](length)
- var i = 0
- while (i < length) {
- newvalue(i) = Float.unbox(boxed(i))
- i += 1
- }
- unboxed = newvalue;
- } else if (elemClass eq classOf[Long]) {
- val newvalue = new Array[Long](length)
- var i = 0
- while (i < length) {
- newvalue(i) = Long.unbox(boxed(i))
- i += 1
- }
- unboxed = newvalue;
- } else if (elemClass eq classOf[Char]) {
- val newvalue = new Array[Char](length)
- var i = 0
- while (i < length) {
- newvalue(i) = Char.unbox(boxed(i))
- i += 1
- }
- unboxed = newvalue
- } else if (elemClass eq classOf[Byte]) {
- val newvalue = new Array[Byte](length)
- var i = 0
- while (i < length) {
- newvalue(i) = Byte.unbox(boxed(i))
- i += 1
- }
- unboxed = newvalue;
- } else if (elemClass eq classOf[Short]) {
- val newvalue = new Array[Short](length)
- var i = 0
- while (i < length) {
- newvalue(i) = Short.unbox(boxed(i))
- i += 1
- }
- unboxed = newvalue;
- } else if (elemClass eq classOf[Boolean]) {
- val newvalue = new Array[Boolean](length)
- var i = 0
- while (i < length) {
- newvalue(i) = Boolean.unbox(boxed(i))
- i += 1
- }
- unboxed = newvalue;
- } else if (elemClass == classOf[AnyRef]) {
- unboxed = boxed
- } else {
- unboxed = Platform.createArray(elemClass, length)
- if (elemClass.isArray) {
- var i = 0
- while (i < length) {
- boxed(i) match {
- case ba: BoxedArray[_] => boxed(i) = ba.unbox(elemClass.getComponentType())
- case _ =>
- }
- i += 1
- }
- }
- Platform.arraycopy(boxed, 0, unboxed, 0, length)
- }
- boxed = null
- }
- unboxed
- }
-
- def value: AnyRef = {
- if (unboxed eq null) throw new NotDefinedError("BoxedAnyArray.value")
- unboxed
- }
-
- private def adapt(other: AnyRef): AnyRef =
- if (this.unboxed eq null)
- other match {
- case that: BoxedAnyArray[_] =>
- if (that.unboxed eq null) {
- that.boxed
- } else {
- if (ScalaRunTime.isValueClass(that.elemClass)) unbox(that.elemClass);
- that.unboxed
- }
- case that: BoxedArray[_] =>
- adapt(that.value)
- case that: Array[Int] =>
- unbox(classOf[Int]); that
- case that: Array[Double] =>
- unbox(classOf[Double]); that
- case that: Array[Float] =>
- unbox(classOf[Float]); that
- case that: Array[Long] =>
- unbox(classOf[Long]); that
- case that: Array[Char] =>
- unbox(classOf[Char]); that
- case that: Array[Short] =>
- unbox(classOf[Short]); that
- case that: Array[Byte] =>
- unbox(classOf[Byte]); that
- case that: Array[Boolean] =>
- unbox(classOf[Boolean]); that
- case _ =>
- other
- }
- else
- other match {
- case that: BoxedAnyArray[_] =>
- if (that.unboxed ne null) that.unboxed
- else if (ScalaRunTime.isValueClass(this.elemClass)) that.unbox(this.elemClass)
- else that.boxed
- case that: BoxedArray[_] =>
- adapt(that.value)
- case _ =>
- other
- }
-
- override def copyFrom(src: AnyRef, from: Int, to: Int, len: Int) {
- val src1 = adapt(src)
- Array.copy(src1, from, if (unboxed ne null) unboxed else boxed, to, len)
- }
-
- override def copyTo(from: Int, dest: AnyRef, to: Int, len: Int) {
- var dest1 = adapt(dest)
- Array.copy(if (unboxed ne null) unboxed else boxed, from, dest1, to, len)
- }
-}
diff --git a/src/library/scala/runtime/BoxedArray.scala b/src/library/scala/runtime/BoxedArray.scala
deleted file mode 100644
index 0a8659ffe6..0000000000
--- a/src/library/scala/runtime/BoxedArray.scala
+++ /dev/null
@@ -1,165 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-
-import scala.reflect.ClassManifest
-import collection.mutable._
-import collection.Seq
-
-/**
- * <p>A class representing <code>Array[T]</code></p>
- *
- * @author Martin Odersky, Stephane Micheloud
- * @version 1.0
- */
-abstract class BoxedArray[A] extends IndexedSeq[A] with IndexedSeqLike[A, BoxedArray[A]] with Boxed { self =>
-
- val ex = new Error("trying to create a BoxedArray")
- ex.printStackTrace()
- throw ex
-
- /** The manifest of the element type */
- def elemManifest: ClassManifest[A]
-
- /** The length of the array */
- def length: Int
-
- /** The element at given index */
- def apply(index: Int): A
-
- /** Update element at given index */
- def update(index: Int, elem: A): Unit
-
- /** Creates new builder for this collection ==> move to subclasses
- */
- override protected[this] def newBuilder: Builder[A, BoxedArray[A]] =
- genericBuilder[A]
-
- // !!! todo: remove
- override def genericBuilder[B]: Builder[B, BoxedArray[B]] = new ArrayBuffer[B].mapResult {
- _.toArray(null).asInstanceOf[BoxedArray[B]]
- }
-
- /** Convert to Java array.
- * @param elemTag Either one of the tags ".N" where N is the name of a primitive type
- * (@see ScalaRunTime), or a full class name.
- */
- def unbox(elemClass: Class[_]): AnyRef
-
- /** The underlying array value
- */
- def value: AnyRef
-
- def copyFrom(src: AnyRef, from: Int, to: Int, len: Int): Unit =
- Array.copy(src, from, value, to, len)
-
- def copyTo(from: Int, dest: AnyRef, to: Int, len: Int): Unit = {
- Array.copy(value, from, dest, to, len)
- }
-
- override def toArray[B >: A](implicit m: ClassManifest[B]): Array[B] = {
- if ((elemManifest ne null) && (elemManifest.erasure eq m.erasure)) this.asInstanceOf[Array[B]]
- else super.toArray[B]
- }
-
-/*
- override def equals(other: Any) =
- (value eq other) ||
-
- other.isInstanceOf[BoxedArray[_]] && (value == other.asInstanceOf[BoxedArray[_]].value)
-
- override def hashCode(): Int = value.hashCode()
-*/
- /** Fills the given array <code>xs</code> with the elements of
- * this sequence starting at position <code>start</code>.
- *
- * @param xs the array to fill.
- * @param start starting index.
- * @pre the array must be large enough to hold all elements.
- */
- override def copyToArray[B](xs: Array[B], start: Int, len: Int): Unit =
- copyTo(0, xs, start, len)
-
- /** Creates a possible nested IndexedSeq which consists of all the elements
- * of this array. If the elements are arrays themselves, the `deep' transformation
- * is applied recursively to them. The stringPrefix of the IndexedSeq is
- * "Array", hence the IndexedSeq prints like an array with all its
- * elements shown, and the same recursively for any subarrays.
- *
- * Example: Array(Array(1, 2), Array(3, 4)).deep.toString
- * prints: Array(Array(1, 2), Array(3, 4))
- */
- def deep: collection.IndexedSeq[Any] = new collection.IndexedSeq[Any] {
- def length = self.length
- def apply(idx: Int): Any = self.apply(idx) match {
- case elem: AnyRef if ScalaRunTime.isArray(elem) => ScalaRunTime.boxArray(elem).deep
- case elem => elem
- }
- override def stringPrefix = "Array"
- }
-
- @deprecated("use deep.toString instead")
- final def deepToString() = deepMkString(stringPrefix + "(", ", ", ")")
-
- @deprecated("use deep.mkString instead")
- final def deepMkString(start: String, sep: String, end: String): String = {
- def _deepToString(x: Any) = x match {
- case a: AnyRef if ScalaRunTime.isArray(a) =>
- ScalaRunTime.boxArray(a).deepMkString(start, sep, end)
- case _ =>
- ScalaRunTime.stringOf(x)
- }
- val buf = new StringBuilder()
- buf.append(start)
- val iter = this.iterator
- if (iter.hasNext) buf.append(_deepToString(iter.next))
- while (iter.hasNext) {
- buf.append(sep); buf.append(_deepToString(iter.next))
- }
- buf.append(end)
- buf.toString
- }
-
- @deprecated("use deep.mkString instead")
- final def deepMkString(sep: String): String = this.deepMkString("", sep, "")
-
- @deprecated("use array1.deep.equals(array2.deep) instead")
- final def deepEquals(that: Any): Boolean = {
- def _deepEquals(x1: Any, x2: Any) = (x1, x2) match {
- case (a1: BoxedArray[_], a2: BoxedArray[_]) =>
- _sameElements(a1, a2)
- case (a1: AnyRef, a2: AnyRef)
- if ScalaRunTime.isArray(a1) && ScalaRunTime.isArray(a2) =>
- _sameElements(ScalaRunTime.boxArray(a1), ScalaRunTime.boxArray(a2))
- case _ =>
- x1.equals(x2)
- }
- def _sameElements(a1: BoxedArray[_], a2: BoxedArray[_]): Boolean = {
- val it1 = a1.iterator
- val it2 = a2.iterator
- var res = true
- while (res && it1.hasNext && it2.hasNext)
- res = _deepEquals(it1.next, it2.next)
- !it1.hasNext && !it2.hasNext && res
- }
- that match {
- case a: BoxedArray[_] =>
- _sameElements(this, a)
- case a: AnyRef if ScalaRunTime.isArray(a) =>
- _sameElements(this, ScalaRunTime.boxArray(a))
- case _ =>
- false
- }
- }
-
- override final def stringPrefix: String = "Array"
-}
diff --git a/src/library/scala/runtime/BoxedBooleanArray.scala b/src/library/scala/runtime/BoxedBooleanArray.scala
deleted file mode 100644
index ff781004d4..0000000000
--- a/src/library/scala/runtime/BoxedBooleanArray.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-
-@serializable
-final class BoxedBooleanArray(val value: Array[Boolean]) extends BoxedArray[Boolean] {
-
- def elemManifest = ClassManifest.Boolean
-
- def length: Int = value.length
-
- def apply(index: Int): Boolean = value(index)
-
- def update(index: Int, elem: Boolean) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
-}
diff --git a/src/library/scala/runtime/BoxedByteArray.scala b/src/library/scala/runtime/BoxedByteArray.scala
deleted file mode 100644
index 9f81dc2a86..0000000000
--- a/src/library/scala/runtime/BoxedByteArray.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-
-@serializable
-final class BoxedByteArray(val value: Array[Byte]) extends BoxedArray[Byte] {
-
- def elemManifest = ClassManifest.Byte
-
- def length: Int = value.length
-
- def apply(index: Int): Byte = value(index)
-
- def update(index: Int, elem: Byte) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
-}
diff --git a/src/library/scala/runtime/BoxedCharArray.scala b/src/library/scala/runtime/BoxedCharArray.scala
deleted file mode 100644
index 8924ed52f5..0000000000
--- a/src/library/scala/runtime/BoxedCharArray.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-
-@serializable
-final class BoxedCharArray(val value: Array[Char]) extends BoxedArray[Char] {
-
- def elemManifest = ClassManifest.Char
-
- def length: Int = value.length
-
- def apply(index: Int): Char = value(index)
-
- def update(index: Int, elem: Char) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
-}
diff --git a/src/library/scala/runtime/BoxedDoubleArray.scala b/src/library/scala/runtime/BoxedDoubleArray.scala
deleted file mode 100644
index 2a9a657915..0000000000
--- a/src/library/scala/runtime/BoxedDoubleArray.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-
-@serializable
-final class BoxedDoubleArray(val value: Array[Double]) extends BoxedArray[Double] {
-
- def elemManifest = ClassManifest.Double
-
- def length: Int = value.length
-
- def apply(index: Int): Double = value(index)
-
- def update(index: Int, elem: Double) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
-}
diff --git a/src/library/scala/runtime/BoxedFloatArray.scala b/src/library/scala/runtime/BoxedFloatArray.scala
deleted file mode 100644
index 038356150e..0000000000
--- a/src/library/scala/runtime/BoxedFloatArray.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-
-@serializable
-final class BoxedFloatArray(val value: Array[Float]) extends BoxedArray[Float] {
-
- def elemManifest = ClassManifest.Float
-
- def length: Int = value.length
-
- def apply(index: Int): Float = value(index)
-
- def update(index: Int, elem: Float) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
-}
diff --git a/src/library/scala/runtime/BoxedIntArray.scala b/src/library/scala/runtime/BoxedIntArray.scala
deleted file mode 100644
index fac3b24b77..0000000000
--- a/src/library/scala/runtime/BoxedIntArray.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-
-@serializable
-final class BoxedIntArray(val value: Array[Int]) extends BoxedArray[Int] {
-
- def elemManifest = ClassManifest.Int
-
- def length: Int = value.length
-
- def apply(index: Int): Int = value(index)
-
- def update(index: Int, elem: Int) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
-}
diff --git a/src/library/scala/runtime/BoxedLongArray.scala b/src/library/scala/runtime/BoxedLongArray.scala
deleted file mode 100644
index cbf9d68498..0000000000
--- a/src/library/scala/runtime/BoxedLongArray.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-
-@serializable
-final class BoxedLongArray(val value: Array[Long]) extends BoxedArray[Long] {
-
- def elemManifest = ClassManifest.Long
-
- def length: Int = value.length
-
- def apply(index: Int): Long = value(index)
-
- def update(index: Int, elem: Long) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
-}
diff --git a/src/library/scala/runtime/BoxedObjectArray.scala b/src/library/scala/runtime/BoxedObjectArray.scala
deleted file mode 100644
index 47ad9a3b48..0000000000
--- a/src/library/scala/runtime/BoxedObjectArray.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-
-@serializable
-final class BoxedObjectArray[A <: AnyRef](val value: Array[AnyRef], val elemManifest: ClassManifest[A]) extends BoxedArray[A] {
-
-// @deprecated("creating array w/o manifest")
- def this(value: Array[AnyRef]) = this(value, null) // !!! todo: remove
-
- def length: Int = value.length
-
- def apply(index: Int): A = value(index).asInstanceOf[A]
-
- def update(index: Int, elem: A) {
- value(index) = elem
- }
-
- def unbox(elemClass: Class[_]): AnyRef = value
-
-/*
- override def equals(other: Any): Boolean =
- (value eq other.asInstanceOf[AnyRef]) ||
- other.isInstanceOf[BoxedObjectArray[_]] && (value eq other.asInstanceOf[BoxedObjectArray[_]].value)
-
- override def hashCode(): Int = (value.asInstanceOf[AnyRef]).hashCode()
-*/
-
-}
-
diff --git a/src/library/scala/runtime/BoxedShortArray.scala b/src/library/scala/runtime/BoxedShortArray.scala
deleted file mode 100644
index 4da6b0c1c9..0000000000
--- a/src/library/scala/runtime/BoxedShortArray.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-
-@serializable
-final class BoxedShortArray(val value: Array[Short]) extends BoxedArray[Short] {
-
- def elemManifest = ClassManifest.Short
-
- def length: Int = value.length
-
- def apply(index: Int): Short = value(index)
-
- def update(index: Int, elem: Short) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
-}
diff --git a/src/library/scala/runtime/BoxedUnitArray.scala b/src/library/scala/runtime/BoxedUnitArray.scala
deleted file mode 100644
index 351237c4b2..0000000000
--- a/src/library/scala/runtime/BoxedUnitArray.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2002-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.runtime
-import scala.reflect.ClassManifest
-
-@serializable
-final class BoxedUnitArray(val value: Array[Unit]) extends BoxedArray[Unit] {
-
- def elemManifest = ClassManifest.Unit
-
- def length: Int = value.length
-
- def apply(index: Int): Unit = value(index)
-
- def update(index: Int, elem: Unit) {
- value(index) = elem
- }
- def unbox(elemClass: Class[_]): AnyRef = value
-}
diff --git a/src/library/scala/runtime/BoxesRunTime.java b/src/library/scala/runtime/BoxesRunTime.java
index 869eb375ac..bd38e37503 100644
--- a/src/library/scala/runtime/BoxesRunTime.java
+++ b/src/library/scala/runtime/BoxesRunTime.java
@@ -54,30 +54,7 @@ public class BoxesRunTime
}
public static Character boxToCharacter(char c) {
- // !!! Temporarily working around the "impossible" (?) fact that
- // c can have a negative value here. In any revision since r17461 try:
- // def foo = new (Short => Char) { def apply(x: Short) = x.toChar }
- // foo(-100)
- // and the -100 will get to Character, which will duly crash.
- // The bug was masked before because the Characters were created
- // with "new Character(c)", but now the static method uses the argument
- // as an index into a cache array, which can't be negative.
- //
- // It appears to be Short-specific; I can't get anything similar
- // out of Byte or Int.
- Character ret;
-
- // straightforward workarounds like bitmasking do not seem to
- // work here; is java optimizing out "impossible" tests/ops? I
- // don't know, but this is the safe way:
- try {
- ret = Character.valueOf(c);
- }
- catch (ArrayIndexOutOfBoundsException e) {
- ret = new Character(c);
- }
-
- return ret;
+ return Character.valueOf(c);
}
public static Byte boxToByte(byte b) {
@@ -140,59 +117,71 @@ public class BoxesRunTime
/* COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON */
+ private static int eqTypeCode(Number a) {
+ if ((a instanceof Integer) || (a instanceof Byte)) return INT;
+ if (a instanceof Long) return LONG;
+ if (a instanceof Double) return DOUBLE;
+ if (a instanceof Short) return INT;
+ if (a instanceof Float) return FLOAT;
+ return OTHER;
+ }
+
+ public static boolean equals(Object x, Object y) {
+ if (x == y) return true;
+ if (x == null) return false;
+ return equals2(x, y);
+ }
+
/** Since all applicable logic has to be present in the equals method of a ScalaNumber
* in any case, we dispatch to it as soon as we spot one on either side.
*/
- public static boolean equals(Object x, Object y) {
+ public static boolean equals2(Object x, Object y) {
if (x instanceof Number) {
- if (x instanceof ScalaNumber)
- return x.equals(y);
-
Number xn = (Number)x;
- if (y instanceof Number) {
- if (y instanceof ScalaNumber)
- return y.equals(x);
+ if (y instanceof Number) {
Number yn = (Number)y;
- if ((xn instanceof Double) || (yn instanceof Double))
- return xn.doubleValue() == yn.doubleValue();
- if ((xn instanceof Float) || (yn instanceof Float))
- return xn.floatValue() == yn.floatValue();
- if ((xn instanceof Long) || (yn instanceof Long))
- return xn.longValue() == yn.longValue();
- if (typeCode(x) <= INT && typeCode(y) <= INT)
+ int xcode = eqTypeCode(xn);
+ int ycode = eqTypeCode(yn);
+ switch (ycode > xcode ? ycode : xcode) {
+ case INT:
return xn.intValue() == yn.intValue();
-
- return x.equals(y);
- }
- if (y instanceof Character)
+ case LONG:
+ return xn.longValue() == yn.longValue();
+ case FLOAT:
+ return xn.floatValue() == yn.floatValue();
+ case DOUBLE:
+ return xn.doubleValue() == yn.doubleValue();
+ default:
+ if ((yn instanceof ScalaNumber) && !(xn instanceof ScalaNumber))
+ return y.equals(x);
+ }
+ } else if (y instanceof Character)
return equalsNumChar(xn, (Character)y);
} else if (x instanceof Character) {
Character xc = (Character)x;
if (y instanceof Character)
- return xc.equals(y);
+ return xc.charValue() == ((Character)y).charValue();
if (y instanceof Number)
return equalsNumChar((Number)y, xc);
- } else if (x == null) {
- return y == null;
}
return x.equals(y);
}
- private static boolean equalsNumChar(Number x, Character y) {
- char ch = y.charValue();
- if (x instanceof Double)
- return x.doubleValue() == ch;
- if (x instanceof Float)
- return x.floatValue() == ch;
- if (x instanceof Long)
- return x.longValue() == ch;
- if (x instanceof ScalaNumber)
- return x.equals(y);
- if (typeCode(x) <= INT)
- return x.intValue() == ch;
-
- return x.equals(y);
+ private static boolean equalsNumChar(Number xn, Character yc) {
+ char ch = yc.charValue();
+ switch (eqTypeCode(xn)) {
+ case INT:
+ return xn.intValue() == ch;
+ case LONG:
+ return xn.longValue() == ch;
+ case FLOAT:
+ return xn.floatValue() == ch;
+ case DOUBLE:
+ return xn.doubleValue() == ch;
+ default:
+ return xn.equals(yc);
+ }
}
/** Hashcode algorithm is driven by the requirements imposed
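The semantics the reworked equals/equals2 preserves can be summarised with a few Scala-level checks (illustrative only): the switch on eqTypeCode picks the wider of the two numeric types and compares in that type, Character-versus-Number goes through equalsNumChar, and ScalaNumber operands still get their own equals.

    1 == 1L          // true  -- compared as Long
    1 == 1.0         // true  -- compared as Double
    'a' == 97        // true  -- Character against Number
    BigInt(1) == 1   // true  -- ScalaNumber handles the comparison itself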
diff --git a/src/library/scala/runtime/RichDouble.scala b/src/library/scala/runtime/RichDouble.scala
index d30cd899a2..8d2203f9a5 100644
--- a/src/library/scala/runtime/RichDouble.scala
+++ b/src/library/scala/runtime/RichDouble.scala
@@ -19,13 +19,13 @@ final class RichDouble(x: Double) extends Proxy with Ordered[Double] {
def compare(y: Double): Int = java.lang.Double.compare(x, y)
- def min(y: Double): Double = Math.min(x, y)
- def max(y: Double): Double = Math.max(x, y)
- def abs: Double = Math.abs(x)
+ def min(y: Double): Double = math.min(x, y)
+ def max(y: Double): Double = math.max(x, y)
+ def abs: Double = math.abs(x)
- def round: Long = Math.round(x)
- def ceil: Double = Math.ceil(x)
- def floor: Double = Math.floor(x)
+ def round: Long = math.round(x)
+ def ceil: Double = math.ceil(x)
+ def floor: Double = math.floor(x)
/** See <code>BigDecimal.until</code>. */
def until(end: Double): Range.Partial[Double, NumericRange[Double]] =
@@ -49,7 +49,7 @@ final class RichDouble(x: Double) extends Proxy with Ordered[Double] {
* @param x an angle, in degrees
* @return the measurement of the angle <code>x</code> in radians.
*/
- def toRadians: Double = Math.toRadians(x)
+ def toRadians: Double = math.toRadians(x)
/** Converts an angle measured in radians to an approximately equivalent
* angle measured in degrees
@@ -57,7 +57,7 @@ final class RichDouble(x: Double) extends Proxy with Ordered[Double] {
* @param x angle, in radians
* @return the measurement of the angle <code>x</code> in degrees.
*/
- def toDegrees: Double = Math.toDegrees(x)
+ def toDegrees: Double = math.toDegrees(x)
// isNaN is provided by the implicit conversion to java.lang.Double
// def isNaN: Boolean = java.lang.Double.isNaN(x)
diff --git a/src/library/scala/runtime/RichException.scala b/src/library/scala/runtime/RichException.scala
index 27b3c719c6..6bb5d9e627 100644
--- a/src/library/scala/runtime/RichException.scala
+++ b/src/library/scala/runtime/RichException.scala
@@ -14,14 +14,5 @@ package scala.runtime
import compat.Platform.EOL
final class RichException(exc: Throwable) {
-
- def getStackTraceString: String = {
- val s = new StringBuilder()
- for (trElem <- exc.getStackTrace()) {
- s.append(trElem.toString())
- s.append(EOL)
- }
- s.toString()
- }
-
+ def getStackTraceString = exc.getStackTrace().mkString("", EOL, EOL)
}
diff --git a/src/library/scala/runtime/RichFloat.scala b/src/library/scala/runtime/RichFloat.scala
index ac47dcc934..7a547f083c 100644
--- a/src/library/scala/runtime/RichFloat.scala
+++ b/src/library/scala/runtime/RichFloat.scala
@@ -21,13 +21,13 @@ final class RichFloat(x: Float) extends Proxy with Ordered[Float] {
//def compare(y: Float): Int = if (x < y) -1 else if (x > y) 1 else 0
def compare(y: Float): Int = java.lang.Float.compare(x, y)
- def min(y: Float) = Math.min(x, y)
- def max(y: Float) = Math.max(x, y)
- def abs: Float = Math.abs(x)
+ def min(y: Float) = math.min(x, y)
+ def max(y: Float) = math.max(x, y)
+ def abs: Float = math.abs(x)
- def round: Int = Math.round(x)
- def ceil: Float = Math.ceil(x).toFloat
- def floor: Float = Math.floor(x).toFloat
+ def round: Int = math.round(x)
+ def ceil: Float = math.ceil(x).toFloat
+ def floor: Float = math.floor(x).toFloat
/** Converts an angle measured in degrees to an approximately equivalent
* angle measured in radians.
@@ -35,7 +35,7 @@ final class RichFloat(x: Float) extends Proxy with Ordered[Float] {
* @param x an angle, in degrees
* @return the measurement of the angle <code>x</code> in radians.
*/
- def toRadians: Float = Math.toRadians(x).toFloat
+ def toRadians: Float = math.toRadians(x).toFloat
/** Converts an angle measured in radians to an approximately equivalent
* angle measured in degrees.
@@ -43,7 +43,7 @@ final class RichFloat(x: Float) extends Proxy with Ordered[Float] {
* @param x angle, in radians
* @return the measurement of the angle <code>x</code> in degrees.
*/
- def toDegrees: Float = Math.toDegrees(x).toFloat
+ def toDegrees: Float = math.toDegrees(x).toFloat
// isNaN is provided by the implicit conversion to java.lang.Float
// def isNaN: Boolean = java.lang.Float.isNaN(x)
diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala
index a93ff61a41..79c97af3d7 100644
--- a/src/library/scala/runtime/ScalaRunTime.scala
+++ b/src/library/scala/runtime/ScalaRunTime.scala
@@ -19,10 +19,7 @@ import scala.collection.immutable.{List, Stream, Nil, ::}
/* The object <code>ScalaRunTime</code> provides ...
*/
object ScalaRunTime {
-
- def isArray(x: AnyRef): Boolean = // !!! remove once newarrays
- x != null && (x.getClass.isArray || x.isInstanceOf[BoxedArray[_]])
-
+ def isArray(x: AnyRef): Boolean = isArray(x, 1)
def isArray(x: Any, atLevel: Int): Boolean =
x != null && isArrayClass(x.asInstanceOf[AnyRef].getClass, atLevel)
@@ -31,10 +28,6 @@ object ScalaRunTime {
def isValueClass(clazz: Class[_]) = clazz.isPrimitive()
- // todo: remove?
- def forceBoxedArray[A <: Any](xs: Seq[A]): Array[A] =
- throw new Error(" not implemented: forceBoxedArray")
-
/** Retrieve generic array element */
def array_apply(xs: AnyRef, idx: Int): Any = java.lang.reflect.Array.get(xs, idx)
@@ -123,8 +116,9 @@ object ScalaRunTime {
/** Fast path equality method for inlining; used when -optimise is set.
*/
@inline def inlinedEquals(x: Object, y: Object): Boolean =
- if (x eq null) y eq null
- else if (x.isInstanceOf[Number] || x.isInstanceOf[Character]) BoxesRunTime.equals(x, y)
+ if (x eq y) true
+ else if (x eq null) false
+ else if (x.isInstanceOf[java.lang.Number] || x.isInstanceOf[java.lang.Character]) BoxesRunTime.equals2(x, y)
else x.equals(y)
def _equals(x: Product, y: Any): Boolean = y match {
@@ -156,36 +150,8 @@ object ScalaRunTime {
false
}
- //def checkDefined[T >: Null](x: T): T =
- // if (x == null) throw new UndefinedException else x
-
def Seq[a](xs: a*): Seq[a] = null // interpreted specially by new backend.
- def arrayValue[A](x: BoxedArray[A], elemClass: Class[_]): AnyRef =
- if (x eq null) null else x.unbox(elemClass)
-
- /** Temporary method to go to new array representation
- * !!! can be reomved once bootstrap is complete !!!
- */
- def unboxedArray[A](x: AnyRef): AnyRef = x match {
- case ba: BoxedArray[_] => ba.value
- case _ => x
- }
-
- def boxArray(value: AnyRef): BoxedArray[_] = value match {
- case x: Array[AnyRef] => new BoxedObjectArray(x, ClassManifest.classType(x.getClass.getComponentType))
- case x: Array[Int] => new BoxedIntArray(x)
- case x: Array[Double] => new BoxedDoubleArray(x)
- case x: Array[Long] => new BoxedLongArray(x)
- case x: Array[Float] => new BoxedFloatArray(x)
- case x: Array[Char] => new BoxedCharArray(x)
- case x: Array[Byte] => new BoxedByteArray(x)
- case x: Array[Short] => new BoxedShortArray(x)
- case x: Array[Boolean] => new BoxedBooleanArray(x)
- case x: BoxedArray[_] => x
- case null => null
- }
-
/** Given any Scala value, convert it to a String.
*
* The primary motivation for this method is to provide a means for
diff --git a/src/library/scala/util/Hashable.scala b/src/library/scala/util/Hashable.scala
deleted file mode 100644
index 117d749316..0000000000
--- a/src/library/scala/util/Hashable.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-/* __ *\
-** ________ ___ / / ___ Scala API **
-** / __/ __// _ | / / / _ | (c) 2003-2009, LAMP/EPFL **
-** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ **
-** /____/\___/_/ |_/____/_/ | | **
-** |/ **
-\* */
-
-// $Id$
-
-
-package scala.util
-
-/** <p>
- * A convenience trait for simplifying hashCode creation.
- * Mix this into a class and define <code>val hashValues = Seq(x1, x2, ...)</code>
- * and your <code>hashCode</code> will be derived from those values.
- * If you define <code>equals</code> in terms of <code>equalHashValues</code>
- * then your <code>hashCode</code> and <code>equals</code> methods will
- * never be out of sync. Something like:
- * </p><pre>
- * <b>override def</b> equals(other: Any) = other <b>match</b> {
- * <b>case</b> x: YourClass => <b>this</b> equalHashValues x
- * <b>case</b> _ => <b>false</b>
- * }</pre>
- *
- * @author Paul Phillips
- */
-abstract trait Hashable extends AnyRef
-{
- import Hashable._
- protected def hashValues: Seq[Any] // in an ideal universe this would be more like Seq[Hashable]
- protected def hashSeed: Int = 1
-
- override def hashCode: Int =
- (hashValues map calculateHashCode).foldLeft(hashSeed)((x, y) => x * 41 + y)
-
- protected def equalHashValues(other: Any) = other match {
- case x: Hashable => hashValues sameElements x.hashValues
- case _ => false
- }
-}
-abstract trait StrictHashable extends Hashable
-{
- protected def hashValues: Seq[Hashable]
-}
-
-object Hashable
-{
- /** This implicit is for StrictHashable's benefit, so your hashValues Seq
- * can contain both explicitly Hashable classes and value types.
- */
- implicit def anyVal2Hashable(x: AnyVal): Hashable =
- new Hashable { protected def hashValues = Seq(x) }
-
- private def calculateHashCode(x: Any) = x match {
- case null => 0
- case x: AnyRef => x.hashCode
- case x => x.asInstanceOf[AnyRef].hashCode
- }
-}
-
diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala
index d1c04e996f..4a61377eea 100644
--- a/src/library/scala/util/Random.scala
+++ b/src/library/scala/util/Random.scala
@@ -119,7 +119,7 @@ object Random extends Random
* @return the shuffled Traversable
*/
def shuffle[T, CC[X] <: Traversable[X]](coll: CC[T])(implicit bf: CanBuildFrom[CC[T], T, CC[T]]): CC[T] = {
- val buf = new ArrayBuffer[T] ++ coll
+ val buf = new ArrayBuffer[T] ++= coll
def swap(i1: Int, i2: Int) {
val tmp = buf(i1)
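Note on the one-character change above (an assumption about intent, not stated in the commit): ++ on a fresh ArrayBuffer builds a second collection from the buffer plus coll, whereas ++= appends coll to the buffer in place and returns the same buffer, so the extra copy is avoided and buf stays an ArrayBuffer for the index-based swaps that follow.

    val buf = new scala.collection.mutable.ArrayBuffer[Int] ++= List(1, 2, 3)
    buf(0) = 99   // in-place update, as shuffle's swap requires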
diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala
index a7c83a5f43..73228b53d5 100644
--- a/src/library/scala/util/Sorting.scala
+++ b/src/library/scala/util/Sorting.scala
@@ -174,9 +174,9 @@ object Sorting {
// Swap partition elements back to middle
val n = off + len
- var s = Math.min(a-off, b-a)
+ var s = math.min(a-off, b-a)
vecswap(off, b-s, s)
- s = Math.min(d-c, n-d-1)
+ s = math.min(d-c, n-d-1)
vecswap(b, n-s, s)
// Recursively sort non-partition-elements
@@ -275,9 +275,9 @@ object Sorting {
// Swap partition elements back to middle
val n = off + len
- var s = Math.min(a-off, b-a)
+ var s = math.min(a-off, b-a)
vecswap(off, b-s, s)
- s = Math.min(d-c, n-d-1)
+ s = math.min(d-c, n-d-1)
vecswap(b, n-s, s)
// Recursively sort non-partition-elements
@@ -383,9 +383,9 @@ object Sorting {
// Swap partition elements back to middle
val n = off + len
- var s = Math.min(a-off, b-a)
+ var s = math.min(a-off, b-a)
vecswap(off, b-s, s)
- s = Math.min(d-c, n-d-1)
+ s = math.min(d-c, n-d-1)
vecswap(b, n-s, s)
// Recursively sort non-partition-elements
@@ -491,9 +491,9 @@ object Sorting {
// Swap partition elements back to middle
val n = off + len
- var s = Math.min(a-off, b-a)
+ var s = math.min(a-off, b-a)
vecswap(off, b-s, s)
- s = Math.min(d-c, n-d-1)
+ s = math.min(d-c, n-d-1)
vecswap(b, n-s, s)
// Recursively sort non-partition-elements
diff --git a/src/library/scala/util/automata/DetWordAutom.scala b/src/library/scala/util/automata/DetWordAutom.scala
index 07ffdaf21b..cf78accbef 100644
--- a/src/library/scala/util/automata/DetWordAutom.scala
+++ b/src/library/scala/util/automata/DetWordAutom.scala
@@ -31,23 +31,9 @@ abstract class DetWordAutom[T <: AnyRef] {
val delta: Array[Map[T,Int]]
val default: Array[Int]
- /**
- * @param q ...
- * @return ...
- */
def isFinal(q: Int) = finals(q) != 0
-
- /**
- * @param q ...
- * @return ...
- */
def isSink(q: Int) = delta(q).isEmpty && default(q) == q
- /**
- * @param q ...
- * @param label ...
- * @return ...
- */
def next(q: Int, label: T) = {
delta(q).get(label) match {
case Some(p) => p
@@ -59,25 +45,15 @@ abstract class DetWordAutom[T <: AnyRef] {
val sb = new StringBuilder("[DetWordAutom nstates=")
sb.append(nstates)
sb.append(" finals=")
- var map = scala.collection.immutable.Map[Int,Int]()
- var j = 0; while( j < nstates ) {
- if (j < finals.length)
- map = map.updated(j, finals(j))
- j += 1
- }
+ val map = Map(finals.zipWithIndex map (_.swap): _*)
sb.append(map.toString())
sb.append(" delta=\n")
+
for (i <- 0 until nstates) {
- sb.append( i )
- sb.append("->")
- sb.append(delta(i).toString())
- sb.append('\n')
- if (i < default.length) {
- sb.append("_>")
- sb.append(default(i).toString())
- sb.append('\n')
- }
+ sb append "%d->%s\n".format(i, delta(i))
+ if (i < default.length)
+ sb append "_>%s\n".format(default(i))
}
- sb.toString()
+ sb.toString
}
}
diff --git a/src/library/scala/util/parsing/combinator/PackratParsers.scala b/src/library/scala/util/parsing/combinator/PackratParsers.scala
index fc8200a390..051fb056fb 100644
--- a/src/library/scala/util/parsing/combinator/PackratParsers.scala
+++ b/src/library/scala/util/parsing/combinator/PackratParsers.scala
@@ -289,7 +289,7 @@ to update each parser involved in the recursion.
//all setupLR does is change the heads of the recursions, so the seed will stay the same
recDetect match {case LR(seed, _, _) => seed.asInstanceOf[ParseResult[T]]}
}
- case MemoEntry(Right(res: ParseResult[T])) => res
+ case MemoEntry(Right(res: ParseResult[_])) => res.asInstanceOf[ParseResult[T]]
}
}
}
@@ -316,7 +316,7 @@ to update each parser involved in the recursion.
//we're done with growing, we can remove data from recursion head
rest.recursionHeads -= rest.pos
rest.getFromCache(p).get match {
- case MemoEntry(Right(x: ParseResult[T])) => x
+ case MemoEntry(Right(x: ParseResult[_])) => x.asInstanceOf[ParseResult[T]]
case _ => throw new Exception("impossible match")
}
}
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/library/scala/util/parsing/combinator/Parsers.scala
index b2c72153fe..9943ed68a9 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/library/scala/util/parsing/combinator/Parsers.scala
@@ -153,9 +153,7 @@ trait Parsers {
def get: Nothing = error("No result when parsing failed")
}
- /** An extractor so NoSuccess(msg, next) can be used in matches
- * Note: case class inheritance is currently sketchy and may be
- * deprecated, so an explicit extractor is better.
+ /** An extractor so NoSuccess(msg, next) can be used in matches.
*/
object NoSuccess {
def unapply[T](x: ParseResult[T]) = x match {
diff --git a/src/library/scala/util/parsing/json/Lexer.scala b/src/library/scala/util/parsing/json/Lexer.scala
index 9026f45f11..73e58f189e 100644
--- a/src/library/scala/util/parsing/json/Lexer.scala
+++ b/src/library/scala/util/parsing/json/Lexer.scala
@@ -83,7 +83,7 @@ class Lexer extends StdLexical with ImplicitConversions {
private def unicodeBlock = hexDigit ~ hexDigit ~ hexDigit ~ hexDigit ^^ {
case a ~ b ~ c ~ d =>
- new String(io.UTF8Codec.encode(Integer.parseInt(List(a, b, c, d) mkString "", 16)), "UTF-8")
+ new String(Array(Integer.parseInt(List(a, b, c, d) mkString "", 16)), 0, 1)
}
//private def lift[T](f: String => T)(xs: List[Any]): T = f(xs mkString "")
diff --git a/src/library/scala/xml/Attribute.scala b/src/library/scala/xml/Attribute.scala
index 222642fe47..2f3856ed89 100644
--- a/src/library/scala/xml/Attribute.scala
+++ b/src/library/scala/xml/Attribute.scala
@@ -13,7 +13,6 @@ package scala.xml
import collection.Seq
import collection.mutable.StringBuilder
-
/** Attribute defines the interface shared by both
* PrefixedAttribute and UnprefixedAttribute
*/
diff --git a/src/library/scala/xml/Document.scala b/src/library/scala/xml/Document.scala
index 4ddcc2bf66..f9d8b579ea 100644
--- a/src/library/scala/xml/Document.scala
+++ b/src/library/scala/xml/Document.scala
@@ -19,6 +19,7 @@ package scala.xml
* @author Burak Emir
* @version 1.0, 26/04/2005
*/
+@serializable @SerialVersionUID(-2289320563321795109L)
class Document extends NodeSeq with pull.XMLEvent {
/** An ordered list of child information items, in document
diff --git a/src/library/scala/xml/HasKeyValue.scala b/src/library/scala/xml/HasKeyValue.scala
index 0522924270..6f98d0ed3b 100644
--- a/src/library/scala/xml/HasKeyValue.scala
+++ b/src/library/scala/xml/HasKeyValue.scala
@@ -20,6 +20,7 @@ package scala.xml
*
* @author Burak Emir
*/
+@deprecated("Use UnprefixedAttribute's extractor")
class HasKeyValue(key: String) {
def unapplySeq(x: MetaData): Option[Seq[Node]] = x.get(key)
}
diff --git a/src/library/scala/xml/NamespaceBinding.scala b/src/library/scala/xml/NamespaceBinding.scala
index 93485a17fe..8adda4197a 100644
--- a/src/library/scala/xml/NamespaceBinding.scala
+++ b/src/library/scala/xml/NamespaceBinding.scala
@@ -25,7 +25,7 @@ import collection.mutable.StringBuilder
@SerialVersionUID(0 - 2518644165573446725L)
case class NamespaceBinding(prefix: String, uri: String, parent: NamespaceBinding) extends AnyRef
{
- if (prefix != null && prefix.isEmpty)
+ if (prefix == "")
throw new IllegalArgumentException("zero length prefix not allowed")
def getURI(_prefix: String): String =
diff --git a/src/library/scala/xml/Node.scala b/src/library/scala/xml/Node.scala
index 5636c7ddcc..c5b990fe88 100644
--- a/src/library/scala/xml/Node.scala
+++ b/src/library/scala/xml/Node.scala
@@ -142,6 +142,7 @@ abstract class Node extends NodeSeq {
this.prefix == that.prefix &&
this.label == that.label &&
this.attributes == that.attributes &&
+ this.scope == that.scope &&
equalChildren(that)
case _ => false
}
@@ -157,13 +158,11 @@ abstract class Node extends NodeSeq {
}
/** <p>
- * Returns a hashcode. The default implementation here calls only
- * super.hashcode (which is the same as for objects). A more useful
- * implementation can be invoked by calling
- * <code>Utility.hashCode(pre, label, attributes.hashCode(), child)</code>.
+ * Returns a hashcode.
* </p>
*/
- override def hashCode(): Int = super.hashCode
+ override def hashCode(): Int =
+ Utility.hashCode(prefix, label, attributes.hashCode(), scope.hashCode(), child)
// implementations of NodeSeq methods
diff --git a/src/library/scala/xml/NodeSeq.scala b/src/library/scala/xml/NodeSeq.scala
index de6e38d5b7..cae43cb621 100644
--- a/src/library/scala/xml/NodeSeq.scala
+++ b/src/library/scala/xml/NodeSeq.scala
@@ -56,7 +56,7 @@ abstract class NodeSeq extends immutable.Seq[Node] with SeqLike[Node, NodeSeq] {
def apply(i: Int): Node = theSeq(i)
def apply(f: Node => Boolean): NodeSeq = filter(f)
- /** structural equality */
+ /** structural equality (XXX - this shatters any hope of hashCode equality) */
override def equals(x: Any): Boolean = x match {
case z:Node => (length == 1) && z == apply(0)
case z:Seq[_] => sameElements(z)
diff --git a/src/library/scala/xml/PrettyPrinter.scala b/src/library/scala/xml/PrettyPrinter.scala
index 8b2193fba8..bc9086a91f 100644
--- a/src/library/scala/xml/PrettyPrinter.scala
+++ b/src/library/scala/xml/PrettyPrinter.scala
@@ -84,20 +84,13 @@ class PrettyPrinter(width: Int, step: Int) {
* @param s ...
* @return ...
*/
- protected def makeBox(ind: Int, s: String) = {
- // XXX um...
- if (cur < ind)
- cur == ind
+ protected def makeBox(ind: Int, s: String) =
if (cur + s.length > width) { // fits in this line
- items = Box(ind, s) :: items
+ items ::= Box(ind, s)
cur += s.length
- } else try {
- for (b <- cut(s, ind).iterator) // break it up
- items = b :: items
- } catch {
- case _:BrokenException => makePara(ind, s) // give up, para
}
- }
+ else try cut(s, ind) foreach (items ::= _) // break it up
+ catch { case _: BrokenException => makePara(ind, s) } // give up, para
// dont respect indent in para, but afterwards
protected def makePara(ind: Int, s: String) = {
diff --git a/src/library/scala/xml/TopScope.scala b/src/library/scala/xml/TopScope.scala
index c458248ae3..e0309757fb 100644
--- a/src/library/scala/xml/TopScope.scala
+++ b/src/library/scala/xml/TopScope.scala
@@ -12,12 +12,11 @@ package scala.xml
import collection.mutable.StringBuilder
-
/** top level namespace scope. only contains the predefined binding
* for the &quot;xml&quot; prefix which is bound to
* &quot;http://www.w3.org/XML/1998/namespace&quot;
*/
-case object TopScope extends NamespaceBinding(null, null, null)
+object TopScope extends NamespaceBinding(null, null, null)
{
import XML.{ xml, namespace }
diff --git a/src/library/scala/xml/dtd/ContentModel.scala b/src/library/scala/xml/dtd/ContentModel.scala
index d5d45bdaa0..eed8a24dbd 100644
--- a/src/library/scala/xml/dtd/ContentModel.scala
+++ b/src/library/scala/xml/dtd/ContentModel.scala
@@ -51,11 +51,9 @@ object ContentModel extends WordExp {
/* precond: rs.length >= 1 */
private def buildString(rs: Seq[RegExp], sb: StringBuilder, sep: Char) {
- val it = rs.iterator
- val fst = it.next
- buildString(fst, sb)
- for (z <- it) {
- sb.append(sep)
+ buildString(rs.head, sb)
+ for (z <- rs.tail) {
+ sb append sep
buildString(z, sb)
}
sb
@@ -121,7 +119,7 @@ case class MIXED(r: ContentModel.RegExp) extends DFAContentModel {
}
}
-case class ELEMENTS(r: ContentModel.RegExp) extends DFAContentModel {
+case class ELEMENTS(r: ContentModel.RegExp) extends DFAContentModel {
override def buildString(sb: StringBuilder): StringBuilder =
ContentModel.buildString(r, sb)
}
diff --git a/src/library/scala/xml/dtd/ElementValidator.scala b/src/library/scala/xml/dtd/ElementValidator.scala
index 8c375ca1c8..8b24104ce0 100644
--- a/src/library/scala/xml/dtd/ElementValidator.scala
+++ b/src/library/scala/xml/dtd/ElementValidator.scala
@@ -14,22 +14,26 @@ package dtd
import PartialFunction._
import ContentModel.ElemName
+import MakeValidationException._ // @todo other exceptions
+
import scala.util.automata._
+import scala.collection.mutable.BitSet
/** validate children and/or attributes of an element
* exceptions are created but not thrown.
*/
class ElementValidator() extends Function1[Node,Boolean] {
- var exc: List[ValidationException] = Nil
+ private var exc: List[ValidationException] = Nil
protected var contentModel: ContentModel = _
protected var dfa: DetWordAutom[ElemName] = _
protected var adecls: List[AttrDecl] = _
/** set content model, enabling element validation */
- def setContentModel(cm:ContentModel) = {
- contentModel = cm; cm match {
+ def setContentModel(cm: ContentModel) = {
+ contentModel = cm
+ cm match {
case ELEMENTS(r) =>
val nfa = ContentModel.Translator.automatonFrom(r, 1)
dfa = new SubsetConstruction(nfa).determinize
@@ -43,7 +47,7 @@ class ElementValidator() extends Function1[Node,Boolean] {
/** set meta data, enabling attribute validation */
def setMetaData(adecls: List[AttrDecl]) { this.adecls = adecls }
- def getIterator(nodes: Seq[Node], skipPCDATA: Boolean): Iterator[ElemName] = {
+ def getIterable(nodes: Seq[Node], skipPCDATA: Boolean): Iterable[ElemName] = {
def isAllWhitespace(a: Atom[_]) = cond(a.data) { case s: String if s.trim.isEmpty => true }
nodes.filter {
@@ -52,57 +56,41 @@ class ElementValidator() extends Function1[Node,Boolean] {
case _ => !skipPCDATA
}
case x => x.namespace eq null
- } . map (x => ElemName(x.label)) iterator
+ } . map (x => ElemName(x.label))
}
/** check attributes, return true if md corresponds to attribute declarations in adecls.
*/
def check(md: MetaData): Boolean = {
- //@todo other exceptions
- import MakeValidationException._;
- val len: Int = exc.length;
- var j = 0;
- var ok = new scala.collection.mutable.BitSet(adecls.length);
- def find(Key:String): AttrDecl = {
- var attr: AttrDecl = null;
- val jt = adecls.iterator; while(j < adecls.length) {
- jt.next match {
- case a @ AttrDecl(Key, _, _) => attr = a; ok += j; j = adecls.length;
- case _ => j = j + 1;
+ val len: Int = exc.length
+ var ok = new BitSet(adecls.length)
+
+ for (attr <- md) {
+ def attrStr = attr.value.toString
+ def find(Key: String): Option[AttrDecl] = {
+ adecls.zipWithIndex find {
+ case (a @ AttrDecl(Key, _, _), j) => ok += j ; return Some(a)
+ case _ => false
}
+ None
}
- attr
- }
- val it = md.iterator; while(it.hasNext) {
- val attr = it.next
- j = 0
- find(attr.key) match {
- case null =>
- //Console.println("exc");
- exc = fromUndefinedAttribute( attr.key ) :: exc;
-
- case AttrDecl(_, tpe, DEFAULT(true, fixedValue)) if attr.value.toString != fixedValue =>
- exc = fromFixedAttribute( attr.key, fixedValue, attr.value.toString) :: exc;
+ find(attr.key) match {
+ case None =>
+ exc ::= fromUndefinedAttribute(attr.key)
- case s =>
- //Console.println("s: "+s);
+ case Some(AttrDecl(_, tpe, DEFAULT(true, fixedValue))) if attrStr != fixedValue =>
+ exc ::= fromFixedAttribute(attr.key, fixedValue, attrStr)
+ case _ =>
}
}
- //val missing = ok.toSet(false); FIXME: it doesn't seem to be used anywhere
- j = 0
- var kt = adecls.iterator
- while (kt.hasNext) {
- kt.next match {
- case AttrDecl(key, tpe, REQUIRED) if !ok(j) =>
- exc = fromMissingAttribute( key, tpe ) :: exc;
- j = j + 1;
- case _ =>
- j = j + 1;
- }
+ adecls.zipWithIndex foreach {
+ case (AttrDecl(key, tpe, REQUIRED), j) if !ok(j) => exc ::= fromMissingAttribute(key, tpe)
+ case _ =>
}
+
exc.length == len //- true if no new exception
}
@@ -111,28 +99,24 @@ class ElementValidator() extends Function1[Node,Boolean] {
*/
def check(nodes: Seq[Node]): Boolean = contentModel match {
case ANY => true
- case EMPTY => !getIterator(nodes, false).hasNext
- case PCDATA => !getIterator(nodes, true).hasNext
+ case EMPTY => getIterable(nodes, false).isEmpty
+ case PCDATA => getIterable(nodes, true).isEmpty
case MIXED(ContentModel.Alt(branches @ _*)) => // @todo
val j = exc.length
def find(Key: String): Boolean =
branches exists { case ContentModel.Letter(ElemName(Key)) => true ; case _ => false }
- getIterator(nodes, true) map (_.name) filterNot find foreach {
+ getIterable(nodes, true) map (_.name) filterNot find foreach {
exc ::= MakeValidationException fromUndefinedElement _
}
(exc.length == j) // - true if no new exception
case _: ELEMENTS =>
- var q = 0
- getIterator(nodes, false) foreach { e =>
- (dfa delta q get e) match {
- case Some(p) => q = p
- case _ => throw ValidationException("element %s not allowed here" format e)
+ dfa isFinal {
+ getIterable(nodes, false).foldLeft(0) { (q, e) =>
+ (dfa delta q get e) getOrElse (throw ValidationException("element %s not allowed here" format e))
}
}
-
- dfa isFinal q // - true if arrived in final state
}
/** applies various validations - accumulates error messages in exc
diff --git a/src/library/scala/xml/include/sax/Main.scala b/src/library/scala/xml/include/sax/Main.scala
index 4df95d1046..405fc670e5 100644
--- a/src/library/scala/xml/include/sax/Main.scala
+++ b/src/library/scala/xml/include/sax/Main.scala
@@ -47,13 +47,15 @@ object Main {
if (args.isEmpty)
return
- val (resolver, args2): (Option[EntityResolver], Array[String]) =
- if (args.size < 2 || args(0) != "-r") (None, args)
+ def dashR = args.size >= 2 && args(0) == "-r"
+ val args2 = if (dashR) args drop 2 else args
+ val resolver: Option[EntityResolver] =
+ if (dashR) None
else catching(classOf[Exception]) opt {
- val r = Class.forName(args(1)).newInstance().asInstanceOf[EntityResolver]
- parser setEntityResolver r
- (r, args drop 2)
- } orElse (return error("Could not load requested EntityResolver"))
+ val r = Class.forName(args(1)).newInstance().asInstanceOf[EntityResolver]
+ parser setEntityResolver r
+ r
+ } orElse (return error("Could not load requested EntityResolver"))
for (arg <- args2) {
try {
diff --git a/src/library/scala/xml/parsing/ConstructingParser.scala b/src/library/scala/xml/parsing/ConstructingParser.scala
index 962c629663..2eee8f8f2e 100644
--- a/src/library/scala/xml/parsing/ConstructingParser.scala
+++ b/src/library/scala/xml/parsing/ConstructingParser.scala
@@ -18,8 +18,6 @@ import scala.io.{ Source, Codec }
object ConstructingParser {
def fromFile(inp: File, preserveWS: Boolean) =
- // XXX why does the default implicit not work here when building locker,
- // unless the empty parameter list is supplied?
new ConstructingParser(Source.fromFile(inp)(), preserveWS) initialize
def fromSource(inp: Source, preserveWS: Boolean) =
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/library/scala/xml/parsing/MarkupParser.scala
index 846895a21b..06fb313255 100644
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/library/scala/xml/parsing/MarkupParser.scala
@@ -364,7 +364,7 @@ trait MarkupParser extends AnyRef with TokenTests
* [40] STag ::= '&lt;' Name { S Attribute } [S]
* [44] EmptyElemTag ::= '&lt;' Name { S Attribute } [S]
*/
- protected def xTag(pscope:NamespaceBinding): Tuple3[String, MetaData, NamespaceBinding] = {
+ protected def xTag(pscope:NamespaceBinding): (String, MetaData, NamespaceBinding) = {
val qname = xName
xSpaceOpt
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala
index 9fbfbb533b..834b2b3a74 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/AssemblyBuilder.scala
@@ -72,7 +72,7 @@ class AssemblyBuilder(name: AssemblyName)
}
/** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[byte]) {
+ def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
addCustomAttribute(constr, value)
}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala
index ad3a82d040..cfb54844c9 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ConstructorBuilder.scala
@@ -21,7 +21,7 @@ import java.io.IOException
* @author Nikolay Mihaylov
* @version 1.0
*/
-class ConstructorBuilder(declType: Type, attrs: int, paramTypes: Array[Type])
+class ConstructorBuilder(declType: Type, attrs: Int, paramTypes: Array[Type])
extends ConstructorInfo(declType, attrs, paramTypes)
with ICustomAttributeSetter
with Visitable
@@ -31,7 +31,7 @@ class ConstructorBuilder(declType: Type, attrs: int, paramTypes: Array[Type])
// public interface
/** Defines a parameter of this constructor. */
- def DefineParameter(pos: int, attr: int, name: String): ParameterBuilder = {
+ def DefineParameter(pos: Int, attr: Int, name: String): ParameterBuilder = {
val param = new ParameterBuilder(name, params(pos).ParameterType, attr, pos)
params(pos) = param
return param
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala
index 1fc3e068b8..1306148975 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/FieldBuilder.scala
@@ -18,7 +18,7 @@ import java.io.IOException
* @author Nikolay Mihaylov
* @version 1.0
*/
-class FieldBuilder(name: String, declType: Type, attrs: int, fieldType: Type)
+class FieldBuilder(name: String, declType: Type, attrs: Int, fieldType: Type)
extends FieldInfo(name, declType, attrs, fieldType)
with ICustomAttributeSetter
with Visitable
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala
index c19366cc33..88587a143d 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ICustomAttributeSetter.scala
@@ -15,5 +15,5 @@ import ch.epfl.lamp.compiler.msil.ConstructorInfo
* @version 1.0
*/
trait ICustomAttributeSetter {
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[byte])
+ def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte])
}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
index 28d0f7787a..4644cade72 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ILPrinterVisitor.scala
@@ -52,7 +52,7 @@ abstract class ILPrinterVisitor extends Visitor {
private var newline = true
// print types without or with members?
- protected var nomembers: boolean = false
+ protected var nomembers: Boolean = false
// external assemblies
protected var as: Array[Assembly] = null
@@ -90,13 +90,13 @@ abstract class ILPrinterVisitor extends Visitor {
// methods to print code
protected def print(s: String) { align(); out.print(s)}
protected def print(o: Object) { align(); out.print(o) }
- protected def print(c: char) { align(); out.print(c) }
- protected def print(`val`: int) { align(); out.print(`val`)}
- protected def print(`val`: long){ align(); out.print(`val`)}
+ protected def print(c: Char) { align(); out.print(c) }
+ protected def print(`val`: Int) { align(); out.print(`val`)}
+ protected def print(`val`: Long){ align(); out.print(`val`)}
protected def println() { out.println(); newline = true; padding = 0 }
- protected def println(c: char) { print(c); println() }
- protected def println(i: int) { print(i); println() }
- protected def println(l: long) { print(l); println() }
+ protected def println(c: Char) { print(c); println() }
+ protected def println(i: Int) { print(i); println() }
+ protected def println(l: Long) { print(l); println() }
protected def println(s: String){ print(s); println() }
protected def println(o: Object){ print(o); println() }
protected def printName(name: String) {
@@ -489,7 +489,7 @@ abstract class ILPrinterVisitor extends Visitor {
//##########################################################################
- def printAssemblySignature(assem: Assembly, extern: boolean) {
+ def printAssemblySignature(assem: Assembly, extern: Boolean) {
print(".assembly ")
if (extern)
print("extern ")
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala
index ac62dd9ccd..113121a5c1 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/Label.scala
@@ -78,7 +78,7 @@ object Label {
// : "this.stacksize = " + stacksize + " that.stacksize = "
// + that.stacksize
// stacksize = that.stacksize
- val ss: Int = Math.max(stacksize, that.getStacksize())
+ val ss: Int = math.max(stacksize, that.getStacksize())
stacksize = ss
that.setStacksize(ss)
}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala
index e22c1fca13..eb86c96de5 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/MethodBuilder.scala
@@ -45,7 +45,7 @@ class MethodBuilder(name: String, declType: Type, attrs: Int, returnType: Type,
}
/** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[byte]) {
+ def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
addCustomAttribute(constr, value)
}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala
index 037b8660ee..5b42d6df76 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ModuleBuilder.scala
@@ -105,7 +105,7 @@ class ModuleBuilder(name: String, fullname: String, scopeName: String, assembly:
}
/** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[byte]) {
+ def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
addCustomAttribute(constr, value)
}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
index e7bff447cc..1bd8e48633 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCode.scala
@@ -23,23 +23,23 @@ class OpCode extends Visitable {
var CEE_string: String = _
/** The type of Microsoft intermediate language (MSIL) instruction. */
- var CEE_code: short = _
+ var CEE_code: Short = _
/** How the Microsoft intermediate language (MSIL) instruction pops the stack. */
- var CEE_pop: byte = _
+ var CEE_pop: Byte = _
/** How the Microsoft intermediate language (MSIL) instruction pushes operand onto the stack. */
- var CEE_push: byte = _
+ var CEE_push: Byte = _
/** Describes the type of flow control. */
- var CEE_flow: byte = _
+ var CEE_flow: Byte = _
/** ????? */
- var CEE_inline: byte = _
+ var CEE_inline: Byte = _
- var CEE_length: byte = _
+ var CEE_length: Byte = _
- var CEE_popush: byte = _
+ var CEE_popush: Byte = _
/**
* the apply method for a visitor
@@ -49,13 +49,13 @@ class OpCode extends Visitable {
v.caseOpCode(this)
}
- protected def length(): byte = {
+ protected def length(): Byte = {
val code = OpCode.length(CEE_code)
val inline = OpCode.INLINE_length(CEE_inline)
return if(inline < 0) { -1 } else { (code + inline).toByte }
}
- protected def popush(): byte = {
+ protected def popush(): Byte = {
val pop = OpCode.POP_size(CEE_pop)
val push = OpCode.PUSH_size(CEE_push)
return if(pop < 0 || push < 0) { OpCode.POPUSH_SPECIAL } else { (push - pop).toByte }
@@ -391,27 +391,27 @@ object OpCode {
//########################################################################
// Opcode's amount and type of poped data
- final val POP_NONE : byte = 0x00
- final val POP_1 : byte = 0x01
- final val POP_1_1 : byte = 0x02
- final val POP_I : byte = 0x03
- final val POP_I_1 : byte = 0x04
- final val POP_I_I : byte = 0x05
- final val POP_I_I8 : byte = 0x06
- final val POP_I_R4 : byte = 0x07
- final val POP_I_R8 : byte = 0x08
- final val POP_I_I_I : byte = 0x09
- final val POP_REF : byte = 0x0A
- final val POP_REF_1 : byte = 0x0B
- final val POP_REF_I : byte = 0x0C
- final val POP_REF_I_I : byte = 0x0D
- final val POP_REF_I_I8 : byte = 0x0E
- final val POP_REF_I_R4 : byte = 0x0F
- final val POP_REF_I_R8 : byte = 0x10
- final val POP_REF_I_REF : byte = 0x11
- final val POP_SPECIAL : byte = 0x12
+ final val POP_NONE : Byte = 0x00
+ final val POP_1 : Byte = 0x01
+ final val POP_1_1 : Byte = 0x02
+ final val POP_I : Byte = 0x03
+ final val POP_I_1 : Byte = 0x04
+ final val POP_I_I : Byte = 0x05
+ final val POP_I_I8 : Byte = 0x06
+ final val POP_I_R4 : Byte = 0x07
+ final val POP_I_R8 : Byte = 0x08
+ final val POP_I_I_I : Byte = 0x09
+ final val POP_REF : Byte = 0x0A
+ final val POP_REF_1 : Byte = 0x0B
+ final val POP_REF_I : Byte = 0x0C
+ final val POP_REF_I_I : Byte = 0x0D
+ final val POP_REF_I_I8 : Byte = 0x0E
+ final val POP_REF_I_R4 : Byte = 0x0F
+ final val POP_REF_I_R8 : Byte = 0x10
+ final val POP_REF_I_REF : Byte = 0x11
+ final val POP_SPECIAL : Byte = 0x12
final val POP_count : Int = 0x13
- final val POP_size : Array[byte] = new Array[byte](POP_count)
+ final val POP_size : Array[Byte] = new Array[Byte](POP_count)
POP_size(POP_NONE) = 0
POP_size(POP_1) = 1
@@ -436,17 +436,17 @@ object OpCode {
//########################################################################
// Opcode's amount and type of pushed data
- final val PUSH_NONE : byte = 0x00
- final val PUSH_1 : byte = 0x01
- final val PUSH_1_1 : byte = 0x02
- final val PUSH_I : byte = 0x03
- final val PUSH_I8 : byte = 0x04
- final val PUSH_R4 : byte = 0x05
- final val PUSH_R8 : byte = 0x06
- final val PUSH_REF : byte = 0x07
- final val PUSH_SPECIAL : byte = 0x08
+ final val PUSH_NONE : Byte = 0x00
+ final val PUSH_1 : Byte = 0x01
+ final val PUSH_1_1 : Byte = 0x02
+ final val PUSH_I : Byte = 0x03
+ final val PUSH_I8 : Byte = 0x04
+ final val PUSH_R4 : Byte = 0x05
+ final val PUSH_R8 : Byte = 0x06
+ final val PUSH_REF : Byte = 0x07
+ final val PUSH_SPECIAL : Byte = 0x08
final val PUSH_count : Int = 0x09
- final val PUSH_size : Array[byte] = new Array[byte](PUSH_count)
+ final val PUSH_size : Array[Byte] = new Array[Byte](PUSH_count)
PUSH_size(PUSH_NONE) = 0
PUSH_size(PUSH_1) = 1
@@ -461,30 +461,30 @@ object OpCode {
//########################################################################
// Opcode's amount of moved data
- final val POPUSH_SPECIAL : byte = -128
+ final val POPUSH_SPECIAL : Byte = -128
//########################################################################
// Opcode's inline argument types
- final val INLINE_NONE : byte = 0x00
- final val INLINE_VARIABLE_S : byte = 0x01
- final val INLINE_TARGET_S : byte = 0x02
- final val INLINE_I_S : byte = 0x03
- final val INLINE_VARIABLE : byte = 0x04
- final val INLINE_TARGET : byte = 0x05
- final val INLINE_I : byte = 0x06
- final val INLINE_I8 : byte = 0x07
- final val INLINE_R : byte = 0x08
- final val INLINE_R8 : byte = 0x09
- final val INLINE_STRING : byte = 0x0A
- final val INLINE_TYPE : byte = 0x0B
- final val INLINE_FIELD : byte = 0x0C
- final val INLINE_METHOD : byte = 0x0D
- final val INLINE_SIGNATURE : byte = 0x0E
- final val INLINE_TOKEN : byte = 0x0F
- final val INLINE_SWITCH : byte = 0x10
+ final val INLINE_NONE : Byte = 0x00
+ final val INLINE_VARIABLE_S : Byte = 0x01
+ final val INLINE_TARGET_S : Byte = 0x02
+ final val INLINE_I_S : Byte = 0x03
+ final val INLINE_VARIABLE : Byte = 0x04
+ final val INLINE_TARGET : Byte = 0x05
+ final val INLINE_I : Byte = 0x06
+ final val INLINE_I8 : Byte = 0x07
+ final val INLINE_R : Byte = 0x08
+ final val INLINE_R8 : Byte = 0x09
+ final val INLINE_STRING : Byte = 0x0A
+ final val INLINE_TYPE : Byte = 0x0B
+ final val INLINE_FIELD : Byte = 0x0C
+ final val INLINE_METHOD : Byte = 0x0D
+ final val INLINE_SIGNATURE : Byte = 0x0E
+ final val INLINE_TOKEN : Byte = 0x0F
+ final val INLINE_SWITCH : Byte = 0x10
final val INLINE_count : Int = 0x11
- final val INLINE_length : Array[byte] = new Array[byte](INLINE_count)
+ final val INLINE_length : Array[Byte] = new Array[Byte](INLINE_count)
INLINE_length(INLINE_NONE) = 0
INLINE_length(INLINE_VARIABLE_S) = 1
@@ -507,21 +507,21 @@ object OpCode {
//########################################################################
// Opcode's control flow implications
- final val FLOW_META : byte = 0x00
- final val FLOW_NEXT : byte = 0x01
- final val FLOW_BRANCH : byte = 0x02
- final val FLOW_COND_BRANCH : byte = 0x03
- final val FLOW_BREAK : byte = 0x04
- final val FLOW_CALL : byte = 0x05
- final val FLOW_RETURN : byte = 0x06
- final val FLOW_THROW : byte = 0x07
+ final val FLOW_META : Byte = 0x00
+ final val FLOW_NEXT : Byte = 0x01
+ final val FLOW_BRANCH : Byte = 0x02
+ final val FLOW_COND_BRANCH : Byte = 0x03
+ final val FLOW_BREAK : Byte = 0x04
+ final val FLOW_CALL : Byte = 0x05
+ final val FLOW_RETURN : Byte = 0x06
+ final val FLOW_THROW : Byte = 0x07
final val FLOW_count : Int = 0x08
//########################################################################
// Init methods for Opcode
- def opcode(that: OpCode, opcode: int, string: String, code: Int,
- pop: byte, push: byte, inline: byte, flow: byte) {
+ def opcode(that: OpCode, opcode: Int, string: String, code: Int,
+ pop: Byte, push: Byte, inline: Byte, flow: Byte) {
that.CEE_opcode = opcode
that.CEE_string = string
that.CEE_code = code.toShort
@@ -533,7 +533,7 @@ object OpCode {
that.CEE_popush = that.popush()
}
- def length(code: Int): byte = {
+ def length(code: Int): Byte = {
if ((code & 0xFFFFFF00) == 0xFFFFFF00) return 1
if ((code & 0xFFFFFF00) == 0xFFFFFE00) return 2
return 0
@@ -1862,7 +1862,7 @@ object OpCode {
/**
* Allocates a certain number of bytes from the local dynamic memory pool and pushes the
- * address (a transient pointer, type *) of the first allocated byte onto the evaluation stack.
+ * address (a transient pointer, type *) of the first allocated byte onto the evaluation stack.
*/
final val Localloc = new OpCode()
opcode(Localloc, CEE_LOCALLOC, "localloc" , 0xFFFFFE0F, POP_I, PUSH_I, INLINE_NONE, FLOW_NEXT)
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
index 57ef69ba7f..f8e0f140a8 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/OpCodes.scala
@@ -1135,7 +1135,7 @@ object OpCodes {
/**
* Allocates a certain number of bytes from the local dynamic memory pool and pushes the
- * address (a transient pointer, type *) of the first allocated byte onto the evaluation stack.
+ * address (a transient pointer, type *) of the first allocated byte onto the evaluation stack.
*/
final val Localloc = OpCode.Localloc
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala
index dabba58f0c..5a68f8f0ae 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/ParameterBuilder.scala
@@ -29,7 +29,7 @@ class ParameterBuilder(name: String, tpe: Type, attr: Int, pos: Int)
//##########################################################################
/** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[byte]) {
+ def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
addCustomAttribute(constr, value)
}
diff --git a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala b/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
index 81bf28bc04..84fd2a4023 100644
--- a/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
+++ b/src/msil/ch/epfl/lamp/compiler/msil/emit/TypeBuilder.scala
@@ -18,7 +18,7 @@ import java.io.IOException
* @author Nikolay Mihaylov
* @version 1.0
*/
-class TypeBuilder (module: Module, attributes: int, fullName: String, baseType: Type, interfaces: Array[Type], declType: Type)
+class TypeBuilder (module: Module, attributes: Int, fullName: String, baseType: Type, interfaces: Array[Type], declType: Type)
extends Type(module, attributes, fullName, baseType, interfaces, declType, 0)
with ICustomAttributeSetter
with Visitable
@@ -45,7 +45,7 @@ class TypeBuilder (module: Module, attributes: int, fullName: String, baseType:
* Adds a new field to the class, with the given name,
* attributes and field type.
*/
- def DefineField(name: String, `type`: Type, attrs: short): FieldBuilder = {
+ def DefineField(name: String, `type`: Type, attrs: Short): FieldBuilder = {
val field: FieldBuilder = new FieldBuilder(name, this, attrs, `type`)
fieldBuilders.add(field)
return field
@@ -55,7 +55,7 @@ class TypeBuilder (module: Module, attributes: int, fullName: String, baseType:
* Adds a new method to the class, with the given name and
* method signature.
*/
- def DefineMethod(name: String, attrs: short, returnType: Type, paramTypes: Array[Type]): MethodBuilder = {
+ def DefineMethod(name: String, attrs: Short, returnType: Type, paramTypes: Array[Type]): MethodBuilder = {
val method = new MethodBuilder(name, this, attrs, returnType, paramTypes)
val methods = methodBuilders.iterator()
while(methods.hasNext()) {
@@ -72,7 +72,7 @@ class TypeBuilder (module: Module, attributes: int, fullName: String, baseType:
* Adds a new constructor to the class, with the given attributes
* and signature.
*/
- def DefineConstructor(attrs: short, callingConvention: short, paramTypes: Array[Type]): ConstructorBuilder = {
+ def DefineConstructor(attrs: Short, callingConvention: Short, paramTypes: Array[Type]): ConstructorBuilder = {
val constr = new ConstructorBuilder(this, attrs, paramTypes)
constructorBuilders.add(constr)
return constr
@@ -81,7 +81,7 @@ class TypeBuilder (module: Module, attributes: int, fullName: String, baseType:
/**
* Defines a nested type given its name.
*/
- def DefineNestedType(name: String, attributes: int, baseType: Type, interfaces: Array[Type]): TypeBuilder = {
+ def DefineNestedType(name: String, attributes: Int, baseType: Type, interfaces: Array[Type]): TypeBuilder = {
val nested = nestedTypeBuilders.iterator()
while(nested.hasNext()) {
val nt = nested.next().asInstanceOf[TypeBuilder]
@@ -152,11 +152,11 @@ class TypeBuilder (module: Module, attributes: int, fullName: String, baseType:
}
/** Sets a custom attribute. */
- def SetCustomAttribute(constr: ConstructorInfo, value: Array[byte]) {
+ def SetCustomAttribute(constr: ConstructorInfo, value: Array[Byte]) {
addCustomAttribute(constr, value)
}
- def setPosition(sourceLine: int, sourceFilename: String) {
+ def setPosition(sourceLine: Int, sourceFilename: String) {
this.sourceLine = sourceLine
this.sourceFilename = sourceFilename
}
@@ -213,7 +213,7 @@ object TypeBuilder {
return s.toString()
}
- def methodsEqual(m1: MethodInfo, m2: MethodInfo): boolean = {
+ def methodsEqual(m1: MethodInfo, m2: MethodInfo): Boolean = {
if (!m1.Name.equals(m2.Name))
return false
if (m1.ReturnType != m2.ReturnType)
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index 28dd00b408..64d48156db 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -234,8 +234,9 @@ class PartestTask extends Task {
}
}
- val (allSuccesses, allFailures): (Int, Int) =
- (testFileSets map runSet).foldLeft((0, 0))((sums, x) => (sums._1 + x._1, sums._2 + x._2))
+ val _results = testFileSets map runSet
+ val allSuccesses = _results map (_._1) sum
+ val allFailures = _results map (_._2) sum
def f = if (errorOnFailed && allFailures > 0) error(_) else log(_: String)
def s = if (allFailures > 1) "s" else ""
diff --git a/src/partest/scala/tools/partest/nest/Worker.scala b/src/partest/scala/tools/partest/nest/Worker.scala
index 641950f7b9..ec02009728 100644
--- a/src/partest/scala/tools/partest/nest/Worker.scala
+++ b/src/partest/scala/tools/partest/nest/Worker.scala
@@ -82,7 +82,7 @@ class Worker(val fileManager: FileManager) extends Actor {
file.getAbsolutePath.substring(filesPathLen)
}
}
- NestUI.normal("[...]"+name+List.toString(List.fill(totalWidth-name.length)(' ')), printer)
+ NestUI.normal("[...]"+name+(List.fill(totalWidth-name.length)(' ')).mkString, printer)
}
def printInfoEnd(success: Boolean, printer: PrintWriter) {
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
index 182054c01b..bc47092f71 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala
@@ -217,7 +217,7 @@ case class ConstantPool(len : Int) {
}
def add(f : ConstantPool => Any) = {
- buffer + f
+ buffer += f
this
}
}
diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
index abff45fca5..733df3adcd 100644
--- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
+++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala
@@ -308,10 +308,10 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
})
case TypeRefType(prefix, symbol, typeArgs) => sep + (symbol.path match {
case "scala.<repeated>" => flags match {
- case TypeFlags(true) => toString(typeArgs.first) + "*"
+ case TypeFlags(true) => toString(typeArgs.head) + "*"
case _ => "scala.Seq" + typeArgString(typeArgs)
}
- case "scala.<byname>" => "=> " + toString(typeArgs.first)
+ case "scala.<byname>" => "=> " + toString(typeArgs.head)
case _ => {
val path = StringUtil.cutSubstring(symbol.path)(".package") //remove package object reference
StringUtil.trimStart(processName(path) + typeArgString(typeArgs), "<empty>.")
@@ -357,7 +357,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) {
"\\$times" -> "*", "\\$div" -> "/", "\\$bslash" -> "\\\\",
"\\$greater" -> ">", "\\$qmark" -> "?", "\\$percent" -> "%",
"\\$amp" -> "&", "\\$colon" -> ":", "\\$u2192" -> "→")
- val pattern = Pattern.compile(_syms.keys.foldLeft("")((x, y) => if (x == "") y else x + "|" + y))
+ val pattern = Pattern.compile(_syms.keysIterator.foldLeft("")((x, y) => if (x == "") y else x + "|" + y))
val placeholderPattern = "_\\$(\\d)+"
def processName(name: String) = {
diff --git a/src/swing/scala/swing/Table.scala b/src/swing/scala/swing/Table.scala
index 47d0b43c60..ec1c0e85fb 100644
--- a/src/swing/scala/swing/Table.scala
+++ b/src/swing/scala/swing/Table.scala
@@ -299,7 +299,7 @@ class Table extends Component with Scrollable.Wrapper {
def tableChanged(e: TableModelEvent) = publish(
e.getType match {
case TableModelEvent.UPDATE =>
- if (e.getFirstRow == 0 && e.getLastRow == Math.MAX_INT && e.getColumn == TableModelEvent.ALL_COLUMNS)
+ if (e.getFirstRow == 0 && e.getLastRow == Int.MaxValue && e.getColumn == TableModelEvent.ALL_COLUMNS)
TableChanged(Table.this)
else if (e.getFirstRow == TableModelEvent.HEADER_ROW)
TableStructureChanged(Table.this)
diff --git a/test/files/bench/equality/eq.scala b/test/files/bench/equality/eq.scala
new file mode 100755
index 0000000000..8ac5b5ef5c
--- /dev/null
+++ b/test/files/bench/equality/eq.scala
@@ -0,0 +1,34 @@
+object eq extends testing.Benchmark {
+
+ def eqtest[T](creator: Int => T, n: Int): Int = {
+ val elems = Array.tabulate[AnyRef](n)(i => creator(i % 2).asInstanceOf[AnyRef])
+
+ var sum = 0
+ var i = 0
+ while (i < n) {
+ var j = 0
+ while (j < n) {
+ if (elems(i) eq elems(j)) sum += 1
+ j += 1
+ }
+ i += 1
+ }
+ sum
+ }
+
+ val obj1 = new Object
+ val obj2 = new Object
+
+ def run() {
+ var sum = 0
+ sum += eqtest(x => if (x == 0) obj1 else obj2, 2000)
+ sum += eqtest(x => x, 1000)
+ sum += eqtest(x => x.toChar, 550)
+ sum += eqtest(x => x.toByte, 550)
+ sum += eqtest(x => x.toLong, 550)
+ sum += eqtest(x => x.toShort, 100)
+ sum += eqtest(x => x.toFloat, 100)
+ sum += eqtest(x => x.toDouble, 100)
+ assert(sum == 2958950)
+ }
+}
diff --git a/test/files/bench/equality/eqeq.log b/test/files/bench/equality/eqeq.log
new file mode 100755
index 0000000000..d1e27aceed
--- /dev/null
+++ b/test/files/bench/equality/eqeq.log
@@ -0,0 +1,42 @@
+Benchmark results for testing equality operations:
+eq.scala: Base case, use eq equality only
+eqeq.scala: Test case, use == instead of eq.
+All tests run on Thinkpad T400, 1.6.0_12 client VM.
+Test command: java eq 5 5
+ java eqeq 5 5
+eq.scala, no -optimise
+eq$ 109 78 79 63 63
+eq$ 94 63 63 78 78
+eq$ 94 62 62 62 78
+eq$ 94 78 78 78 78
+eq$ 94 78 78 78 78
+eq.scala, with -optimise
+eq$ 421 63 62 47 63
+eq$ 406 62 62 63 62
+eq$ 407 62 62 78 63
+eq$ 406 63 63 62 62
+eq$ 407 62 62 63 47
+eqeq.scala with version of BoxesRuntime as of Nov 13th, no -optimise
+eqeq$ 562 516 516 516 515
+eqeq$ 547 515 515 531 532
+eqeq$ 532 516 516 515 516
+eqeq$ 547 531 531 516 531
+eqeq$ 547 515 515 516 516
+eqeq.scala with version of BoxesRuntime as of Nov 13th, with -optimise
+eqeq$ 1031 390 391 391 391
+eqeq$ 1031 391 391 391 390
+eqeq$ 1031 390 390 391 391
+eqeq$ 1031 406 407 391 390
+eqeq$ 1031 390 390 391 391
+eqeq.scala with 1st optimized of Nov 14th, no -optimise
+eqeq$ 484 421 438 438 437
+eqeq$ 484 438 437 437 438
+eqeq$ 469 437 453 454 438
+eqeq$ 468 437 438 468 438
+eqeq$ 485 437 437 422 438
+eqeq.scala with 1st optimized of Nov 14th, with -optimise
+eqeq$ 1016 375 391 375 375
+eqeq$ 1016 375 391 390 375
+eqeq$ 1016 390 391 375 375
+eqeq$ 1015 375 391 390 375
+eqeq$ 1016 390 375 375 375
diff --git a/test/files/bench/equality/eqeq.scala b/test/files/bench/equality/eqeq.scala
new file mode 100755
index 0000000000..afccece88a
--- /dev/null
+++ b/test/files/bench/equality/eqeq.scala
@@ -0,0 +1,46 @@
+/** benchmark for testing equality.
+ * Mix: == between non-numbers with Object.equals as equality: 66%
+ * 50% of these are tests where eq is true.
+ * == between boxed integers: 17%
+ * == between boxed characters: 5%
+ * == between boxed bytes: 5%
+ * == between boxed longs: 5%
+ * == between boxed shorts: < 1%
+ * == between boxed floats: < 1%
+ * == between boxed doubles: < 1%
+ * In all cases 50% of the tests return true.
+ */
+object eqeq extends testing.Benchmark {
+
+ def eqeqtest[T](creator: Int => T, n: Int): Int = {
+ val elems = Array.tabulate[AnyRef](n)(i => creator(i % 2).asInstanceOf[AnyRef])
+
+ var sum = 0
+ var i = 0
+ while (i < n) {
+ var j = 0
+ while (j < n) {
+ if (elems(i) == elems(j)) sum += 1
+ j += 1
+ }
+ i += 1
+ }
+ sum
+ }
+
+ val obj1 = new Object
+ val obj2 = new Object
+
+ def run() {
+ var sum = 0
+ sum += eqeqtest(x => if (x == 0) obj1 else obj2, 2000)
+ sum += eqeqtest(x => x, 1000)
+ sum += eqeqtest(x => x.toChar, 550)
+ sum += eqeqtest(x => x.toByte, 550)
+ sum += eqeqtest(x => x.toLong, 550)
+ sum += eqeqtest(x => x.toShort, 100)
+ sum += eqeqtest(x => x.toFloat, 100)
+ sum += eqeqtest(x => x.toDouble, 100)
+ assert(sum == 2968750)
+ }
+}
diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check
index da11d7c7f0..f1b5b10ec6 100644
--- a/test/files/jvm/serialization.check
+++ b/test/files/jvm/serialization.check
@@ -1,126 +1,209 @@
-x0 = List(1, 2, 3)
-y0 = List(1, 2, 3)
-x0 eq y0: false - y0 eq x0: false
-x0 equals y0: true - y0 equals x0: true
+a1 = Array[1,2,3]
+_a1 = Array[1,2,3]
+arrayEquals(a1, _a1): true
-x1 = List()
-y1 = List()
-x1 eq y1: true - y1 eq x1: true
+c1 = Cell(a)
+_c1 = Cell(a)
+c1 eq _c1: false, _c1 eq c1: false
+c1 equals _c1: true, _c1 equals c1: true
-x2 = None
-y2 = None
-x2 eq y2: true - y2 eq x2: true
-
-x3 = Array[1,2,3]
-y3 = Array[1,2,3]
-arrayEquals(x3, y3): true
-
-x4 = <na>
-y4 = <na>
-x4(2): 4 - y4(2): 4
-
-x5 = 'hello
-y5 = 'hello
-x5 eq y5: true - y5 eq x5: true
-x5 equals y5: true - y5 equals x5: true
-
-x6 = (BannerLimit,12345)
-y6 = (BannerLimit,12345)
-x6 eq y6: false - y6 eq x6: false
-x6 equals y6: true - y6 equals x6: true
+e1 = Left(1)
+_e1 = Left(1)
+e1 eq _e1: false, _e1 eq e1: false
+e1 equals _e1: true, _e1 equals e1: true
x7 = RoundingMode
y7 = RoundingMode
-x7 eq y7: true - y7 eq x7: true
-x7 equals y7: true - y7 equals x7: true
+x7 eq y7: true, y7 eq x7: true
+x7 equals y7: true, y7 equals x7: true
x8 = WeekDay
y8 = WeekDay
-x8 eq y8: true - y8 eq x8: true
-x8 equals y8: true - y8 equals x8: true
+x8 eq y8: true, y8 eq x8: true
+x8 equals y8: true, y8 equals x8: true
x9 = UP
y9 = UP
-x9 eq y9: true - y9 eq x9: true
-x9 equals y9: true - y9 equals x9: true
+x9 eq y9: true, y9 eq x9: true
+x9 equals y9: true, y9 equals x9: true
x10 = Monday
y10 = Monday
-x10 eq y10: true - y10 eq x10: true
-x10 equals y10: true - y10 equals x10: true
+x10 eq y10: true, y10 eq x10: true
+x10 equals y10: true, y10 equals x10: true
+
+x9 eq x10: false, x10 eq x9: false
+x9 equals x10: true, x10 equals x9: true
+x9 eq y10: false, y10 eq x9: false
+x9 equals y10: true, y10 equals x9: true
+
+f1 = <na>
+_f1 = <na>
+f1(2): 4, _f1(2): 4
+
+xs0 = List(1, 2, 3)
+_xs0 = List(1, 2, 3)
+xs0 eq _xs0: false, _xs0 eq xs0: false
+xs0 equals _xs0: true, _xs0 equals xs0: true
+
+xs1 = List()
+_xs1 = List()
+xs1 eq _xs1: true, _xs1 eq xs1: true
+
+o1 = None
+_o1 = None
+o1 eq _o1: true, _o1 eq o1: true
+
+o2 = Some(1)
+_o2 = Some(1)
+o2 eq _o2: false, _o2 eq o2: false
+o2 equals _o2: true, _o2 equals o2: true
+
+s1 = 'hello
+_s1 = 'hello
+s1 eq _s1: true, _s1 eq s1: true
+s1 equals _s1: true, _s1 equals s1: true
+
+t1 = (BannerLimit,12345)
+_t1 = (BannerLimit,12345)
+t1 eq _t1: false, _t1 eq t1: false
+t1 equals _t1: true, _t1 equals t1: true
+
+x = BitSet(1, 2)
+y = BitSet(1, 2)
+x equals y: true, y equals x: true
+
+x = BitSet(2, 3)
+y = BitSet(2, 3)
+x equals y: true, y equals x: true
+
+x = Map(2 -> B, 1 -> A, 3 -> C)
+y = Map(2 -> B, 1 -> A, 3 -> C)
+x equals y: true, y equals x: true
-x9 eq x10: false - x10 eq x9: false
-x9 equals x10: true - x10 equals x9: true
-x9 eq y10: false - y10 eq x9: false
-x9 equals y10: true - y10 equals x9: true
+x = Set(1, 2)
+y = Set(1, 2)
+x equals y: true, y equals x: true
x = List((buffers,20), (layers,2), (title,3))
y = List((buffers,20), (layers,2), (title,3))
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = Map(buffers -> 20, layers -> 2, title -> 3)
y = Map(buffers -> 20, layers -> 2, title -> 3)
-x equals y: true - y equals x: true
-
-x = BitSet(2, 3)
-y = BitSet(2, 3)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = Set(5, 3)
y = Set(5, 3)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = Queue(a, b, c)
y = Queue(a, b, c)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
+
+x = Range(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+y = Range(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+x equals y: true, y equals x: true
+
+x = NumericRange(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+y = NumericRange(0, 1, 2, 3, 4, 5, 6, 7, 8, 9)
+x equals y: true, y equals x: true
+
+x = Map(1 -> A, 2 -> B, 3 -> C)
+y = Map(1 -> A, 2 -> B, 3 -> C)
+x equals y: true, y equals x: true
+
+x = TreeSet(1, 2, 3)
+y = TreeSet(1, 2, 3)
+x equals y: true, y equals x: true
x = Stack(c, b, a)
y = Stack(c, b, a)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
+
+x = Stream(0, ?)
+y = Stream(0, ?)
+x equals y: true, y equals x: true
x = Map(42 -> FortyTwo)
y = Map(42 -> FortyTwo)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = TreeSet(0, 2)
y = TreeSet(0, 2)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
+
+x = Vector('a, 'b, 'c)
+y = Vector('a, 'b, 'c)
+x equals y: true, y equals x: true
x = ArrayBuffer(one, two)
y = ArrayBuffer(one, two)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
+
+x = ArrayBuilder.ofLong
+y = ArrayBuilder.ofLong
+x equals y: true, y equals x: true
+
+x = ArrayBuilder.ofFloat
+y = ArrayBuilder.ofFloat
+x equals y: true, y equals x: true
-x = Map(title -> 3, buffers -> 20, layers -> 2)
-y = Map(title -> 3, buffers -> 20, layers -> 2)
-x equals y: true - y equals x: true
+x = ArrayStack(3, 2, 20)
+y = ArrayStack(3, 2, 20)
+x equals y: true, y equals x: true
x = BitSet(0, 8, 9)
y = BitSet(0, 8, 9)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
+
+x = Map(A -> 1, C -> 3, B -> 2)
+y = Map(A -> 1, C -> 3, B -> 2)
+x equals y: true, y equals x: true
x = Set(layers, buffers, title)
y = Set(layers, buffers, title)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
+
+x = History()
+y = History()
+x equals y: true, y equals x: true
+
+x = ListBuffer(white, black)
+y = ListBuffer(white, black)
+x equals y: true, y equals x: true
x = Queue(20, 2, 3)
y = Queue(20, 2, 3)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = Stack(3, 2, 20)
y = Stack(3, 2, 20)
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
-x = ListBuffer(white, black)
-y = ListBuffer(white, black)
-x equals y: true - y equals x: true
+x = abc
+y = abc
+x equals y: true, y equals x: true
+
+x = WrappedArray(1, 2, 3)
+y = WrappedArray(1, 2, 3)
+x equals y: true, y equals x: true
+
+x = xml:src="hello"
+y = xml:src="hello"
+x equals y: true, y equals x: true
+
+x = <title></title>
+y = <title></title>
+x equals y: true, y equals x: true
x = <html><title>title</title><body></body></html>
y = <html><title>title</title><body></body></html>
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = <html>
<body>
- <table cellpadding="2" cellspacing="0">
+ <table cellpadding="2" cellspacing="0">
<tr>
<th>Last Name</th>
<th>First Name</th>
@@ -137,10 +220,10 @@ x = <html>
</tr>
</table>
</body>
- </html>
+ </html>
y = <html>
<body>
- <table cellpadding="2" cellspacing="0">
+ <table cellpadding="2" cellspacing="0">
<tr>
<th>Last Name</th>
<th>First Name</th>
@@ -157,26 +240,26 @@ y = <html>
</tr>
</table>
</body>
- </html>
-x equals y: true - y equals x: true
+ </html>
+x equals y: true, y equals x: true
x = Tim
y = Tim
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = Bob
y = Bob
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = John
y = John
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = Bill
y = Bill
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
x = Paul
y = Paul
-x equals y: true - y equals x: true
+x equals y: true, y equals x: true
diff --git a/test/files/jvm/serialization.scala b/test/files/jvm/serialization.scala
index 81d21e6dc5..06086f4038 100644
--- a/test/files/jvm/serialization.scala
+++ b/test/files/jvm/serialization.scala
@@ -2,17 +2,6 @@
// Serialization
//############################################################################
-import java.lang.System
-
-object EqualityTest {
- def check[A, B](x: A, y: B) {
- println("x = " + x)
- println("y = " + y)
- println("x equals y: " + (x equals y) + " - y equals x: " + (y equals x))
- println()
- }
-}
-
object Serialize {
@throws(classOf[java.io.IOException])
def write[A](o: A): Array[Byte] = {
@@ -29,7 +18,14 @@ object Serialize {
new java.io.ObjectInputStream(new java.io.ByteArrayInputStream(buffer))
in.readObject().asInstanceOf[A]
}
+ def check[A, B](x: A, y: B) {
+ println("x = " + x)
+ println("y = " + y)
+ println("x equals y: " + (x equals y) + ", y equals x: " + (y equals x))
+ println()
+ }
}
+import Serialize._
//############################################################################
// Test classes in package "scala"
@@ -50,92 +46,140 @@ object Test1_scala {
}
import WeekDay._, BigDecimal._, RoundingMode._
- val x0 = List(1, 2, 3)
- val x1 = Nil
- val x2 = None
- val x3 = Array(1, 2, 3)
- val x4 = { x: Int => 2 * x }
- val x5 = 'hello
- val x6 = ("BannerLimit", 12345)
- val x7 = BigDecimal.RoundingMode
- val x8 = WeekDay
- val x9 = UP
- val x10 = Monday
-
+ // in alphabetic order
try {
- val y0: List[Int] = Serialize.read(Serialize.write(x0))
- val y1: List[Nothing] = Serialize.read(Serialize.write(x1))
- val y2: Option[Nothing] = Serialize.read(Serialize.write(x2))
- val y3: Array[Int] = Serialize.read(Serialize.write(x3))
- val y4: Function[Int, Int] = Serialize.read(Serialize.write(x4))
- val y5: Symbol = Serialize.read(Serialize.write(x5))
- val y6: (String, Int) = Serialize.read(Serialize.write(x6))
- val y7: RoundingMode.type = Serialize.read(Serialize.write(x7))
- val y8: WeekDay.type = Serialize.read(Serialize.write(x8))
- val y9: RoundingMode = Serialize.read(Serialize.write(x9))
- val y10: WeekDay = Serialize.read(Serialize.write(x10))
-
- println("x0 = " + x0)
- println("y0 = " + y0)
- println("x0 eq y0: " + (x0 eq y0) + " - y0 eq x0: " + (y0 eq x0))
- println("x0 equals y0: " + (x0 equals y0) + " - y0 equals x0: " + (y0 equals x0))
- println()
- println("x1 = " + x1)
- println("y1 = " + y1)
- println("x1 eq y1: " + (x1 eq y1) + " - y1 eq x1: " + (y1 eq x1))
- println()
- println("x2 = " + x2)
- println("y2 = " + y2)
- println("x2 eq y2: " + (x2 eq y2) + " - y2 eq x2: " + (y2 eq x2))
- println()
- println("x3 = " + arrayToString(x3))
- println("y3 = " + arrayToString(y3))
- println("arrayEquals(x3, y3): " + arrayEquals(x3, y3))
+ // Array
+ val a1 = Array(1, 2, 3)
+ val _a1: Array[Int] = read(write(a1))
+ println("a1 = " + arrayToString(a1))
+ println("_a1 = " + arrayToString(_a1))
+ println("arrayEquals(a1, _a1): " + arrayEquals(a1, _a1))
println()
- println("x4 = <na>")
- println("y4 = <na>")
- println("x4(2): " + x4(2) + " - y4(2): " + y4(2))
- println()
- println("x5 = " + x5)
- println("y5 = " + y5)
- println("x5 eq y5: " + (x5 eq y5) + " - y5 eq x5: " + (y5 eq x5))
- println("x5 equals y5: " + (x5 equals y5) + " - y5 equals x5: " + (y5 equals x5))
+
+ // Cell
+ val c1 = new Cell('a')
+ val _c1: Cell[Char] = read(write(c1))
+ println("c1 = " + c1)
+ println("_c1 = " + _c1)
+ println("c1 eq _c1: " + (c1 eq _c1) + ", _c1 eq c1: " + (_c1 eq c1))
+ println("c1 equals _c1: " + (c1 equals _c1) + ", _c1 equals c1: " + (_c1 equals c1))
println()
- println("x6 = " + x6)
- println("y6 = " + y6)
- println("x6 eq y6: " + (x6 eq y6) + " - y6 eq x6: " + (y6 eq x6))
- println("x6 equals y6: " + (x6 equals y6) + " - y6 equals x6: " + (y6 equals x6))
+
+ // Either
+ val e1 = Left(1)
+ val _e1: Either[Int, String] = read(write(e1))
+ println("e1 = " + e1)
+ println("_e1 = " + _e1)
+ println("e1 eq _e1: " + (e1 eq _e1) + ", _e1 eq e1: " + (_e1 eq e1))
+ println("e1 equals _e1: " + (e1 equals _e1) + ", _e1 equals e1: " + (_e1 equals e1))
println()
+
+ // Enumeration
+ val x7 = BigDecimal.RoundingMode
+ val y7: RoundingMode.type = read(write(x7))
println("x7 = " + x7)
println("y7 = " + y7)
- println("x7 eq y7: " + (x7 eq y7) + " - y7 eq x7: " + (y7 eq x7))
- println("x7 equals y7: " + (x7 equals y7) + " - y7 equals x7: " + (y7 equals x7))
+ println("x7 eq y7: " + (x7 eq y7) + ", y7 eq x7: " + (y7 eq x7))
+ println("x7 equals y7: " + (x7 equals y7) + ", y7 equals x7: " + (y7 equals x7))
println()
+
+ val x8 = WeekDay
+ val y8: WeekDay.type = read(write(x8))
println("x8 = " + x8)
println("y8 = " + y8)
- println("x8 eq y8: " + (x8 eq y8) + " - y8 eq x8: " + (y8 eq x8))
- println("x8 equals y8: " + (x8 equals y8) + " - y8 equals x8: " + (y8 equals x8))
+ println("x8 eq y8: " + (x8 eq y8) + ", y8 eq x8: " + (y8 eq x8))
+ println("x8 equals y8: " + (x8 equals y8) + ", y8 equals x8: " + (y8 equals x8))
println()
+
+ val x9 = UP
+ val y9: RoundingMode = read(write(x9))
println("x9 = " + x9)
println("y9 = " + y9)
- println("x9 eq y9: " + (x9 eq y9) + " - y9 eq x9: " + (y9 eq x9))
- println("x9 equals y9: " + (x9 equals y9) + " - y9 equals x9: " + (y9 equals x9))
+ println("x9 eq y9: " + (x9 eq y9) + ", y9 eq x9: " + (y9 eq x9))
+ println("x9 equals y9: " + (x9 equals y9) + ", y9 equals x9: " + (y9 equals x9))
println()
+
+ val x10 = Monday
+ val y10: WeekDay = read(write(x10))
println("x10 = " + x10)
println("y10 = " + y10)
- println("x10 eq y10: " + (x10 eq y10) + " - y10 eq x10: " + (y10 eq x10))
- println("x10 equals y10: " + (x10 equals y10) + " - y10 equals x10: " + (y10 equals x10))
+ println("x10 eq y10: " + (x10 eq y10) + ", y10 eq x10: " + (y10 eq x10))
+ println("x10 equals y10: " + (x10 equals y10) + ", y10 equals x10: " + (y10 equals x10))
println()
- println("x9 eq x10: " + (x9 eq x10) + " - x10 eq x9: " + (x10 eq x9))
- println("x9 equals x10: " + (x9 equals x10) + " - x10 equals x9: " + (x10 equals x9))
- println("x9 eq y10: " + (x9 eq y10) + " - y10 eq x9: " + (y10 eq x9))
- println("x9 equals y10: " + (x9 equals y10) + " - y10 equals x9: " + (y10 equals x9))
+
+ println("x9 eq x10: " + (x9 eq x10) + ", x10 eq x9: " + (x10 eq x9))
+ println("x9 equals x10: " + (x9 equals x10) + ", x10 equals x9: " + (x10 equals x9))
+ println("x9 eq y10: " + (x9 eq y10) + ", y10 eq x9: " + (y10 eq x9))
+ println("x9 equals y10: " + (x9 equals y10) + ", y10 equals x9: " + (y10 equals x9))
+ println()
+
+ // Function
+ val f1 = { x: Int => 2 * x }
+ val _f1: Function[Int, Int] = read(write(f1))
+ println("f1 = <na>")
+ println("_f1 = <na>")
+ println("f1(2): " + f1(2) + ", _f1(2): " + _f1(2))
+ println()
+
+ // List
+ val xs0 = List(1, 2, 3)
+ val _xs0: List[Int] = read(write(xs0))
+ println("xs0 = " + xs0)
+ println("_xs0 = " + _xs0)
+ println("xs0 eq _xs0: " + (xs0 eq _xs0) + ", _xs0 eq xs0: " + (_xs0 eq xs0))
+ println("xs0 equals _xs0: " + (xs0 equals _xs0) + ", _xs0 equals xs0: " + (_xs0 equals xs0))
+ println()
+
+ val xs1 = Nil
+ val _xs1: List[Nothing] = read(write(xs1))
+ println("xs1 = " + xs1)
+ println("_xs1 = " + _xs1)
+ println("xs1 eq _xs1: " + (xs1 eq _xs1) + ", _xs1 eq xs1: " + (_xs1 eq xs1))
+ println()
+
+ // Option
+ val o1 = None
+ val _o1: Option[Nothing] = read(write(o1))
+ println("o1 = " + o1)
+ println("_o1 = " + _o1)
+ println("o1 eq _o1: " + (o1 eq _o1) + ", _o1 eq o1: " + (_o1 eq o1))
+ println()
+
+ val o2 = Some(1)
+ val _o2: Option[Int] = read(write(o2))
+ println("o2 = " + o2)
+ println("_o2 = " + _o2)
+ println("o2 eq _o2: " + (o2 eq _o2) + ", _o2 eq o2: " + (_o2 eq o2))
+ println("o2 equals _o2: " + (o2 equals _o2) + ", _o2 equals o2: " + (_o2 equals o2))
+ println()
+/*
+ // Responder
+ val r1 = Responder.constant("xyz")
+ val _r1: Responder[String] = read(write(r1))
+ check(r1, _r1)
+*/
+ // Symbol
+ val s1 = 'hello
+ val _s1: Symbol = read(write(s1))
+ println("s1 = " + s1)
+ println("_s1 = " + _s1)
+ println("s1 eq _s1: " + (s1 eq _s1) + ", _s1 eq s1: " + (_s1 eq s1))
+ println("s1 equals _s1: " + (s1 equals _s1) + ", _s1 equals s1: " + (_s1 equals s1))
+ println()
+
+ // Tuple
+ val t1 = ("BannerLimit", 12345)
+ val _t1: (String, Int) = read(write(t1))
+ println("t1 = " + t1)
+ println("_t1 = " + _t1)
+ println("t1 eq _t1: " + (t1 eq _t1) + ", _t1 eq t1: " + (_t1 eq t1))
+ println("t1 equals _t1: " + (t1 equals _t1) + ", _t1 equals t1: " + (_t1 equals t1))
println()
}
catch {
case e: Exception =>
- e.printStackTrace()
println("Error in Test1_scala: " + e)
+ throw e
}
}
@@ -145,50 +189,97 @@ object Test1_scala {
@serializable
object Test2_immutable {
import scala.collection.immutable.{
- BitSet, ListMap, ListSet, Queue, Stack, TreeSet, TreeMap}
-
- val x1 = List(
- Pair("buffers", 20),
- Pair("layers", 2),
- Pair("title", 3)
- )
-
- val x2 = new ListMap[String, Int] + ("buffers" -> 20, "layers" -> 2, "title" -> 3)
-
- val x3 = {
- val bs = new collection.mutable.BitSet()
- bs += 2; bs += 3
- bs.toImmutable
- }
-
- val x4 = new ListSet[Int]() + 3 + 5
-
- val x5 = Queue("a", "b", "c")
-
- val x6 = new Stack().push("a", "b", "c")
-
- val x7 = new TreeMap[Int, String] + (42 -> "FortyTwo")
-
- val x8 = new TreeSet[Int]() + 2 + 0
+ BitSet, HashMap, HashSet, ListMap, ListSet, Queue, Range, SortedMap,
+ SortedSet, Stack, Stream, TreeMap, TreeSet, Vector}
+ // in alphabetic order
try {
- val y1: List[Pair[String, Int]] = Serialize.read(Serialize.write(x1))
- val y2: ListMap[String, Int] = Serialize.read(Serialize.write(x2))
- val y3: BitSet = Serialize.read(Serialize.write(x3))
- val y4: ListSet[Int] = Serialize.read(Serialize.write(x4))
- val y5: Queue[String] = Serialize.read(Serialize.write(x5))
- val y6: Stack[String] = Serialize.read(Serialize.write(x6))
- val y7: TreeMap[Int, String] = Serialize.read(Serialize.write(x7))
- val y8: TreeSet[Int] = Serialize.read(Serialize.write(x8))
-
- EqualityTest.check(x1, y1)
- EqualityTest.check(x2, y2)
- EqualityTest.check(x3, y3)
- EqualityTest.check(x4, y4)
- EqualityTest.check(x5, y5)
- EqualityTest.check(x6, y6)
- EqualityTest.check(x7, y7)
- EqualityTest.check(x8, y8)
+ // BitSet
+ val bs1 = BitSet.empty + 1 + 2
+ val _bs1: BitSet = read(write(bs1))
+ check(bs1, _bs1)
+
+ val bs2 = {
+ val bs = new collection.mutable.BitSet()
+ bs += 2; bs += 3
+ bs.toImmutable
+ }
+ val _bs2: BitSet = read(write(bs2))
+ check(bs2, _bs2)
+
+ // HashMap
+ val hm1 = new HashMap[Int, String] + (1 -> "A", 2 -> "B", 3 -> "C")
+ val _hm1: HashMap[Int, String] = read(write(hm1))
+ check(hm1, _hm1)
+
+ // HashSet
+ val hs1 = new HashSet[Int] + 1 + 2
+ val _hs1: HashSet[Int] = read(write(hs1))
+ check(hs1, _hs1)
+
+ // List
+ val xs1 = List(("buffers", 20), ("layers", 2), ("title", 3))
+ val _xs1: List[(String, Int)] = read(write(xs1))
+ check(xs1, _xs1)
+
+ // ListMap
+ val lm1 = new ListMap[String, Int] + ("buffers" -> 20, "layers" -> 2, "title" -> 3)
+ val _lm1: ListMap[String, Int] = read(write(lm1))
+ check(lm1, _lm1)
+
+ // ListSet
+ val ls1 = new ListSet[Int] + 3 + 5
+ val _ls1: ListSet[Int] = read(write(ls1))
+ check(ls1, _ls1)
+
+ // Queue
+ val q1 = Queue("a", "b", "c")
+ val _q1: Queue[String] = read(write(q1))
+ check(q1, _q1)
+
+ // Range
+ val r1 = 0 until 10
+ val _r1: Range = read(write(r1))
+ check(r1, _r1)
+
+ val r2 = Range.Long(0L, 10L, 1)
+ val _r2: r2.type = read(write(r2))
+ check(r2, _r2)
+
+ // SortedMap
+ val sm1 = SortedMap.empty[Int, String] + (2 -> "B", 3 -> "C", 1 -> "A")
+ val _sm1: SortedMap[Int, String] = read(write(sm1))
+ check(sm1, _sm1)
+
+ // SortedSet
+ val ss1 = SortedSet.empty[Int] + 2 + 3 + 1
+ val _ss1: SortedSet[Int] = read(write(ss1))
+ check(ss1, _ss1)
+
+ // Stack
+ val s1 = new Stack().push("a", "b", "c")
+ val _s1: Stack[String] = read(write(s1))
+ check(s1, _s1)
+
+ // Stream
+ val st1 = Stream.range(0, 10)
+ val _st1: Stream[Int] = read(write(st1))
+ check(st1, _st1)
+
+ // TreeMap
+ val tm1 = new TreeMap[Int, String] + (42 -> "FortyTwo")
+ val _tm1: TreeMap[Int, String] = read(write(tm1))
+ check(tm1, _tm1)
+
+ // TreeSet
+ val ts1 = new TreeSet[Int]() + 2 + 0
+ val _ts1: TreeSet[Int] = read(write(ts1))
+ check(ts1, _ts1)
+
+ // Vector
+ val v1 = Vector('a, 'b, 'c)
+ val _v1: Vector[Symbol] = read(write(v1))
+ check(v1, _v1)
}
catch {
case e: Exception =>
@@ -201,65 +292,110 @@ object Test2_immutable {
// Test classes in package "scala.collection.mutable"
object Test3_mutable {
+ import scala.reflect.ClassManifest
import scala.collection.mutable.{
- ArrayBuffer, BitSet, HashMap, HashSet, History, LinkedList, ListBuffer,
- Publisher, Queue, Stack}
-
- val x0 = new ArrayBuffer[String]
- x0 ++= List("one", "two")
-
- val x2 = new BitSet()
- x2 += 0
- x2 += 8
- x2 += 9
-
- val x1 = new HashMap[String, Int]
- x1 ++= Test2_immutable.x1
-
- val x3 = new HashSet[String]
- x3 ++= Test2_immutable.x1.map(p => p._1)
-
- @serializable
- class Feed extends Publisher[String, Feed]
-
- val x8 = new History[String, Feed]
-
- val x4 = new LinkedList[Int](2, null)
- x4.append(new LinkedList(3, null))
-
- val x7 = new ListBuffer[String]
- x7 ++= List("white", "black")
-
- val x5 = new Queue[Int]
- x5 ++= Test2_immutable.x1.map(p => p._2)
-
- val x6 = new Stack[Int]
- x6 ++= x5
+ ArrayBuffer, ArrayBuilder, ArrayStack, BitSet, DoubleLinkedList,
+ HashMap, HashSet, History, LinkedList, ListBuffer, Publisher, Queue,
+ Stack, StringBuilder, WrappedArray}
+ // in alphabetic order
try {
- val y0: ArrayBuffer[String] = Serialize.read(Serialize.write(x0))
- val y1: HashMap[String, Int] = Serialize.read(Serialize.write(x1))
- val y2: BitSet = Serialize.read(Serialize.write(x2))
- val y3: HashSet[String] = Serialize.read(Serialize.write(x3))
-// val y4: LinkedList[Int] = Serialize.read(Serialize.write(x4))
- val y5: Queue[Int] = Serialize.read(Serialize.write(x5))
- val y6: Stack[Int] = Serialize.read(Serialize.write(x6))
- val y7: ListBuffer[String] = Serialize.read(Serialize.write(x7))
- val y8: History[String, Feed] = Serialize.read(Serialize.write(x8))
-
- EqualityTest.check(x0, y0)
- EqualityTest.check(x1, y1)
- EqualityTest.check(x2, y2)
- EqualityTest.check(x3, y3)
- //EqualityTest.check(x4, y4) //todo
- EqualityTest.check(x5, y5)
- EqualityTest.check(x6, y6)
- EqualityTest.check(x7, y7)
- //EqualityTest.check(x8, y8) //todo
+ // ArrayBuffer
+ val ab1 = new ArrayBuffer[String]
+ ab1 ++= List("one", "two")
+ val _ab1: ArrayBuffer[String] = read(write(ab1))
+ check(ab1, _ab1)
+
+ // ArrayBuilder
+ val abu1 = ArrayBuilder.make[Long]
+ val _abu1: ArrayBuilder[ClassManifest[Long]] = read(write(abu1))
+ check(abu1, _abu1)
+
+ val abu2 = ArrayBuilder.make[Float]
+ val _abu2: ArrayBuilder[ClassManifest[Float]] = read(write(abu2))
+ check(abu2, _abu2)
+
+ // ArrayStack
+ val as1 = new ArrayStack[Int]
+ as1 ++= List(20, 2, 3).iterator
+ val _as1: ArrayStack[Int] = read(write(as1))
+ check(as1, _as1)
+
+ // BitSet
+ val bs1 = new BitSet()
+ bs1 += 0
+ bs1 += 8
+ bs1 += 9
+ val _bs1: BitSet = read(write(bs1))
+ check(bs1, _bs1)
+/*
+ // DoubleLinkedList
+ val dl1 = new DoubleLinkedList[Int](2, null)
+ dl1.append(new DoubleLinkedList(3, null))
+ val _dl1: DoubleLinkedList[Int] = read(write(dl1))
+ check(dl1, _dl1)
+*/
+ // HashMap
+ val hm1 = new HashMap[String, Int]
+ hm1 ++= List(("A", 1), ("B", 2), ("C", 3)).iterator
+ val _hm1: HashMap[String, Int] = read(write(hm1))
+ check(hm1, _hm1)
+
+ // HashSet
+ val hs1 = new HashSet[String]
+ hs1 ++= List("layers", "buffers", "title").iterator
+ val _hs1: HashSet[String] = read(write(hs1))
+ check(hs1, _hs1)
+
+ // History
+ @serializable
+ class Feed extends Publisher[String]
+
+ val h1 = new History[String, Int]
+ val _h1: History[String, Int] = read(write(h1))
+ check(h1, _h1)
+/*
+ // LinkedList
+ val ll1 = new LinkedList[Int](2, null)
+ ll1.append(new LinkedList(3, null))
+ val _ll1: LinkedList[Int] = read(write(ll1))
+ check(ll1, _ll1)
+*/
+ // ListBuffer
+ val lb1 = new ListBuffer[String]
+ lb1 ++= List("white", "black")
+ val _lb1: ListBuffer[String] = read(write(lb1))
+ check(lb1, _lb1)
+
+ // Publisher
+
+ // Queue
+ val q1 = new Queue[Int]
+ q1 ++= List(20, 2, 3).iterator
+ val _q1: Queue[Int] = read(write(q1))
+ check(q1, _q1)
+
+ // Stack
+ val s1 = new Stack[Int]
+ s1 pushAll q1
+ val _s1: Stack[Int] = read(write(s1))
+ check(s1, _s1)
+
+ // StringBuilder
+ val sb1 = new StringBuilder
+ sb1 append "abc"
+ val _sb1: StringBuilder = read(write(sb1))
+ check(sb1, _sb1)
+
+ // WrappedArray
+ val wa1 = WrappedArray.make(Array(1, 2, 3))
+ val _wa1: WrappedArray[Int] = read(write(wa1))
+ check(wa1, _wa1)
}
catch {
case e: Exception =>
println("Error in Test3_mutable: " + e)
+ throw e
}
}
@@ -267,15 +403,31 @@ object Test3_mutable {
// Test classes in package "scala.xml"
object Test4_xml {
- import scala.xml.Elem
-
- val x1 = <html><title>title</title><body></body></html>;
+ import scala.xml.{Attribute, Document, Elem, Null, PrefixedAttribute, Text}
case class Person(name: String, age: Int)
- class AddressBook(a: Person*) {
- private val people: List[Person] = a.toList
- def toXHTML =
+ try {
+ // Attribute
+ val a1 = new PrefixedAttribute("xml", "src", Text("hello"), Null)
+ val _a1: Attribute = read(write(a1))
+ check(a1, _a1)
+
+ // Document
+ val d1 = new Document
+ d1.docElem = <title></title>
+ d1.encoding = Some("UTF-8")
+ val _d1: Document = read(write(d1))
+ check(d1, _d1)
+
+ // Elem
+ val e1 = <html><title>title</title><body></body></html>;
+ val _e1: Elem = read(write(e1))
+ check(e1, _e1)
+
+ class AddressBook(a: Person*) {
+ private val people: List[Person] = a.toList
+ def toXHTML =
<table cellpadding="2" cellspacing="0">
<tr>
<th>Last Name</th>
@@ -287,30 +439,26 @@ object Test4_xml {
<td> { p.age.toString() } </td>
</tr> }
</table>;
- }
+ }
- val people = new AddressBook(
- Person("Tom", 20),
- Person("Bob", 22),
- Person("James", 19))
+ val people = new AddressBook(
+ Person("Tom", 20),
+ Person("Bob", 22),
+ Person("James", 19))
- val x2 =
- <html>
+ val e2 =
+ <html>
<body>
- { people.toXHTML }
+ { people.toXHTML }
</body>
- </html>;
-
- try {
- val y1: scala.xml.Elem = Serialize.read(Serialize.write(x1))
- val y2: scala.xml.Elem = Serialize.read(Serialize.write(x2))
-
- EqualityTest.check(x1, y1)
- EqualityTest.check(x2, y2)
+ </html>;
+ val _e2: Elem = read(write(e2))
+ check(e2, _e2)
}
catch {
case e: Exception =>
println("Error in Test4_xml: " + e)
+ throw e
}
}
@@ -339,11 +487,11 @@ object Test5 {
val x2 = bob
try {
- val y1: Person = Serialize.read(Serialize.write(x1))
- val y2: Employee = Serialize.read(Serialize.write(x2))
+ val y1: Person = read(write(x1))
+ val y2: Employee = read(write(x2))
- EqualityTest.check(x1, y1)
- EqualityTest.check(x2, y2)
+ check(x1, y1)
+ check(x2, y2)
}
catch {
case e: Exception =>
@@ -369,13 +517,13 @@ object Test6 {
val x3 = paul
try {
- val y1: Person = Serialize.read(Serialize.write(x1))
- val y2: Employee = Serialize.read(Serialize.write(x2))
- val y3: Person = Serialize.read(Serialize.write(x3))
+ val y1: Person = read(write(x1))
+ val y2: Employee = read(write(x2))
+ val y3: Person = read(write(x3))
- EqualityTest.check(x1, y1)
- EqualityTest.check(x2, y2)
- EqualityTest.check(x3, y3)
+ check(x1, y1)
+ check(x2, y2)
+ check(x3, y3)
}
catch {
case e: Exception =>
diff --git a/test/files/jvm/stringbuilder.scala b/test/files/jvm/stringbuilder.scala
index bacd13c715..c86a8a7713 100644
--- a/test/files/jvm/stringbuilder.scala
+++ b/test/files/jvm/stringbuilder.scala
@@ -53,7 +53,7 @@ object Test2 extends TestCase("append") with Assert {
val j1 = new java.lang.StringBuilder // Java 1.5+
val s1 = new StringBuilder
j1 append "###" append Array('0', '1', '2') append "xyz".subSequence(0, 3)
- s1 append "###" append Array('0', '1', '2') append List('x', 'y', 'z')
+ s1 append "###" appendAll Array('0', '1', '2') appendAll List('x', 'y', 'z')
assertEquals("s1.toString equals j1.toString", true, s1.toString equals j1.toString)
}
}
@@ -72,7 +72,7 @@ object Test3 extends TestCase("insert") with Assert {
val j1 = new java.lang.StringBuilder // Java 1.5+
val s1 = new StringBuilder
j1 insert (0, "###") insert (0, Array('0', '1', '2')) insert (0, "xyz".subSequence(0, 3))
- s1 insert (0, "###") insert (0, Array('0', '1', '2')) insert (0, List('x', 'y', 'z'))
+ s1 insert (0, "###") insertAll (0, Array('0', '1', '2')) insertAll (0, List('x', 'y', 'z'))
//println("j1="+j1+", s1="+s1)//debug
assertEquals("s1.toString equals j1.toString", true, s1.toString equals j1.toString)
diff --git a/test/files/jvm/t2585.check b/test/files/jvm/t2585.check
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/test/files/jvm/t2585.check
diff --git a/test/files/jvm/t2585/Test.java b/test/files/jvm/t2585/Test.java
new file mode 100644
index 0000000000..51fe20d81e
--- /dev/null
+++ b/test/files/jvm/t2585/Test.java
@@ -0,0 +1,16 @@
+class J { S s ; }
+
+public class Test {
+ public static void main(String[] args) {
+ final X x = new X();
+ final OuterImpl o = new OuterImpl(x);
+
+ final OuterImpl.Inner i1 = o.newInner();
+ i1.getT().getI().getT().getI(); // <--- Error: "The method getI() is undefined for the type Object"
+
+ final Outer<X>.Inner i2 = o.newInner();
+ i2.getT().getI().getT().getI(); // <--- Error: "The method getI() is undefined for the type Object"
+
+ HashMap<String, String> map = new HashMap<String, String>();
+ }
+} \ No newline at end of file
diff --git a/test/files/jvm/t2585/genericouter.scala b/test/files/jvm/t2585/genericouter.scala
new file mode 100644
index 0000000000..e06aa8101e
--- /dev/null
+++ b/test/files/jvm/t2585/genericouter.scala
@@ -0,0 +1,25 @@
+case class S(n:Int)
+
+trait TraversableLike[+A, +Repr] {
+ class WithFilter(p: A => Boolean)
+ def withFilter(p: A => Boolean): WithFilter = new WithFilter(p)
+}
+
+class HashMap[K, +V] extends TraversableLike[(K, V), HashMap[K, V]]
+
+class Outer[T](val t: T) {
+ class Inner {
+ def getT : T = t
+ }
+}
+
+class OuterImpl(x: X) extends Outer[X](x) {
+ def newInner = new Inner
+}
+
+class X {
+ def getI : Outer[X]#Inner = {
+ val oImpl = new OuterImpl(this)
+ new oImpl.Inner
+ }
+} \ No newline at end of file
diff --git a/test/files/neg/bug563.scala b/test/files/neg/bug563.scala
index d8e026e656..624b83b1fa 100644
--- a/test/files/neg/bug563.scala
+++ b/test/files/neg/bug563.scala
@@ -1,7 +1,7 @@
object Test {
def map[A,R](a : List[A], f : A => R) : List[R] = a.map(f);
- def split(sn : Iterable[List[Cell[int]]]) : unit =
+ def split(sn : Iterable[List[Cell[Int]]]) : Unit =
for (n <- sn)
map(n,ptr => new Cell(ptr.elem));
}
diff --git a/test/files/neg/bug700.check b/test/files/neg/bug700.check
index 33a67e5094..5c2854069c 100644
--- a/test/files/neg/bug700.check
+++ b/test/files/neg/bug700.check
@@ -1,4 +1,4 @@
bug700.scala:6: error: method foobar in trait Foo is accessed from super. It may not be abstract unless it is overridden by a member declared `abstract' and `override'
- def foobar: unit = super.foobar
+ def foobar: Unit = super.foobar
^
one error found
diff --git a/test/files/neg/bug700.scala b/test/files/neg/bug700.scala
index 7477bb54f6..b08c8b5529 100644
--- a/test/files/neg/bug700.scala
+++ b/test/files/neg/bug700.scala
@@ -3,7 +3,7 @@ trait Foo {
}
trait Bar extends Foo {
- def foobar: unit = super.foobar
+ def foobar: Unit = super.foobar
}
// the following definition breaks the compiler
diff --git a/test/files/neg/bug875.check b/test/files/neg/bug875.check
index d547c8d69c..16a982241e 100644
--- a/test/files/neg/bug875.check
+++ b/test/files/neg/bug875.check
@@ -4,8 +4,8 @@ bug875.scala:3: error: no `: _*' annotation allowed here
^
bug875.scala:6: error: no `: _*' annotation allowed here
(such annotations are only allowed in arguments to *-parameters)
- mkList(xs: _*)
- ^
+ mkList1(xs: _*)
+ ^
bug875.scala:15: error: no `: _*' annotation allowed here
(such annotations are only allowed in arguments to *-parameters)
f(true, 1, xs: _*)
diff --git a/test/files/neg/bug875.scala b/test/files/neg/bug875.scala
index 9c579b0166..38affd5a43 100644
--- a/test/files/neg/bug875.scala
+++ b/test/files/neg/bug875.scala
@@ -1,9 +1,9 @@
object Test extends Application {
val xs = List(4, 5, 6)
val ys = List(1, 2, 3, xs: _*)
- def mkList(x: Int) = List(x)
- def mkList(x: Boolean) = List(x)
- mkList(xs: _*)
+ def mkList1(x: Int) = List(x)
+ def mkList2(x: Boolean) = List(x)
+ mkList1(xs: _*)
def f(x: Int*) = List(x: _*)
diff --git a/test/files/neg/bug910.check b/test/files/neg/bug910.check
index fe4ad4fca4..2bc2d986fa 100644
--- a/test/files/neg/bug910.check
+++ b/test/files/neg/bug910.check
@@ -1,6 +1,6 @@
bug910.scala:4: error: type mismatch;
found : Seq[Char]
- required: scala.Seq[int]
- val y: Seq[int] = rest
+ required: scala.Seq[Int]
+ val y: Seq[Int] = rest
^
one error found
diff --git a/test/files/neg/bug910.scala b/test/files/neg/bug910.scala
index 2f28ea408f..540ee7001d 100644
--- a/test/files/neg/bug910.scala
+++ b/test/files/neg/bug910.scala
@@ -1,7 +1,7 @@
object RegExpTest1 extends Application {
def co(x: Seq[Char]) = x match {
case Seq('s','c','a','l','a', rest @ _*) =>
- val y: Seq[int] = rest
+ val y: Seq[Int] = rest
y
}
}
diff --git a/test/files/neg/constrs.check b/test/files/neg/constrs.check
index 3524709785..4f4a12bc13 100644
--- a/test/files/neg/constrs.check
+++ b/test/files/neg/constrs.check
@@ -8,7 +8,7 @@ constrs.scala:10: error: called constructor's definition must precede calling co
def this() = this("abc")
^
constrs.scala:12: error: called constructor's definition must precede calling constructor's definition
- def this(x: boolean) = this(x)
+ def this(x: Boolean) = this(x)
^
constrs.scala:16: error: type mismatch;
found : Int(1)
diff --git a/test/files/neg/constrs.scala b/test/files/neg/constrs.scala
index 969f593a2d..016df098f0 100644
--- a/test/files/neg/constrs.scala
+++ b/test/files/neg/constrs.scala
@@ -9,7 +9,7 @@ object test {
class Foo(x: Int) {
def this() = this("abc")
def this(x: String) = this(1)
- def this(x: boolean) = this(x)
+ def this(x: Boolean) = this(x)
}
class Bar[a](x: a) {
diff --git a/test/files/neg/gadts1.scala b/test/files/neg/gadts1.scala
index 67aef4f2d9..07200ff7aa 100644
--- a/test/files/neg/gadts1.scala
+++ b/test/files/neg/gadts1.scala
@@ -1,8 +1,8 @@
object Test{
abstract class Number
-case class Int(n: int) extends Number
-case class Double(d: double) extends Number
+case class Int(n: scala.Int) extends Number
+case class Double(d: scala.Double) extends Number
trait Term[+a]
case class Cell[a](var x: a) extends Term[a]
@@ -10,7 +10,7 @@ case class NumTerm(val n: Number) extends Term[Number]
class IntTerm(n: Int) extends NumTerm(n) with Term[Int]
-def f[a](t:Term[a], c:Cell[a]): unit =
+def f[a](t:Term[a], c:Cell[a]): Unit =
t match {
case NumTerm(n) => c.x = Double(1.0)
}
@@ -18,7 +18,7 @@ def f[a](t:Term[a], c:Cell[a]): unit =
val x:Term[Number] = NumTerm(Int(5))
-def main(args: Array[String]): unit = {
+def main(args: Array[String]): Unit = {
val cell = Cell[Int](Int(6))
Console.println(cell)
f[Int](new IntTerm(Int(5)), cell)
diff --git a/test/files/neg/implicits.check b/test/files/neg/implicits.check
index d94e1f27f2..337560f423 100644
--- a/test/files/neg/implicits.check
+++ b/test/files/neg/implicits.check
@@ -3,7 +3,7 @@ implicits.scala:21: error: type mismatch;
required: ?{val +: ?}
Note that implicit conversions are not applicable because they are ambiguous:
both method any2plus in object Sub of type (x: Any)Sub.Plus
- and method pos2int in object Super of type (p: Pos)int
+ and method pos2int in object Super of type (p: Pos)Int
are possible conversion functions from Pos to ?{val +: ?}
f(p+1)
^
diff --git a/test/files/neg/implicits.scala b/test/files/neg/implicits.scala
index be85029660..846591e22d 100644
--- a/test/files/neg/implicits.scala
+++ b/test/files/neg/implicits.scala
@@ -3,7 +3,7 @@ class Pos
class Super
object Super {
- implicit def pos2int(p: Pos): int = 0
+ implicit def pos2int(p: Pos): Int = 0
}
object Sub extends Super {
@@ -17,7 +17,7 @@ object Test {
import Super._
import Sub._
val p = new Pos
- def f(x: int): int = x
+ def f(x: Int): Int = x
f(p+1)
}
diff --git a/test/files/neg/overload.check b/test/files/neg/overload.check
index 0faa97adb1..abfabaf3f2 100644
--- a/test/files/neg/overload.check
+++ b/test/files/neg/overload.check
@@ -1,6 +1,6 @@
overload.scala:10: error: ambiguous reference to overloaded definition,
both method f in class D of type (x: Any)Unit
-and method f in class C of type (x: int)Unit
+and method f in class C of type (x: Int)Unit
match argument types (Int)
(new D).f(1)
^
diff --git a/test/files/neg/overload.scala b/test/files/neg/overload.scala
index 311ea3874b..6ad911e90e 100644
--- a/test/files/neg/overload.scala
+++ b/test/files/neg/overload.scala
@@ -1,5 +1,5 @@
class C {
- def f(x: int) {}
+ def f(x: Int) {}
}
class D extends C {
diff --git a/test/files/neg/t0218.scala b/test/files/neg/t0218.scala
index 282e85e814..319be82a7a 100644
--- a/test/files/neg/t0218.scala
+++ b/test/files/neg/t0218.scala
@@ -6,7 +6,7 @@ trait APQ {
type PP = P
- def pq(numQueens: int, numRows: int) : List[Placement] = {
+ def pq(numQueens: Int, numRows: Int) : List[Placement] = {
List(new PP)
}
}
diff --git a/test/files/neg/t1422.check b/test/files/neg/t1422.check
new file mode 100644
index 0000000000..5931fcb049
--- /dev/null
+++ b/test/files/neg/t1422.check
@@ -0,0 +1,4 @@
+t1422.scala:1: error: private[this] not allowed for case class parameters
+case class A(private[this] val foo:String)
+ ^
+one error found
diff --git a/test/files/neg/t1422.scala b/test/files/neg/t1422.scala
new file mode 100644
index 0000000000..751f05a764
--- /dev/null
+++ b/test/files/neg/t1422.scala
@@ -0,0 +1 @@
+case class A(private[this] val foo:String)
diff --git a/test/files/neg/t1477.check b/test/files/neg/t1477.check
new file mode 100644
index 0000000000..e497637857
--- /dev/null
+++ b/test/files/neg/t1477.check
@@ -0,0 +1,5 @@
+t1477.scala:13: error: overriding type V in trait C with bounds >: Nothing <: Middle.this.D;
+ type V is a volatile type; cannot override a type with non-volatile upper bound
+ type V <: (D with U)
+ ^
+one error found
diff --git a/test/files/neg/t1477.scala b/test/files/neg/t1477.scala
new file mode 100644
index 0000000000..0cc0cd5f7a
--- /dev/null
+++ b/test/files/neg/t1477.scala
@@ -0,0 +1,25 @@
+object Test extends Application {
+ trait A
+ trait B extends A
+
+ trait C {
+ type U
+ trait D { type T >: B <: A }
+ type V <: D
+ val y: V#T = new B { }
+ }
+
+ trait Middle extends C {
+ type V <: (D with U)
+ }
+
+ class D extends Middle {
+ trait E
+ trait F { type T = E }
+ type U = F
+ def frob(arg : E) : E = arg
+ frob(y)
+ }
+
+ new D
+}
diff --git a/test/files/neg/t2179.check b/test/files/neg/t2179.check
new file mode 100644
index 0000000000..e454e117b5
--- /dev/null
+++ b/test/files/neg/t2179.check
@@ -0,0 +1,9 @@
+t2179.scala:2: error: inferred type arguments [scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]{def sameElements[B >: Any](that: Iterable[B]): Boolean}]; def reverse: scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; protected def thisCollection: Seq[Double]{def companion: scala.collection.generic.GenericCompanion[Seq[Any]]}; def dropRight(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def takeRight(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def slice(start: Int,end: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def take(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def drop(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}}] do not conform to method reduceLeft's type parameter bounds [B >: List[Double]]
+ (Nil:List[List[Double]]).reduceLeft((_: Any, _: Any) => Nil.indices.map(_ => 0d))
+ ^
+t2179.scala:2: error: type mismatch;
+ found : (Any, Any) => scala.collection.immutable.IndexedSeq[Double]
+ required: (scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]{def sameElements[B >: Any](that: Iterable[B]): Boolean}]; def reverse: scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; protected def thisCollection: Seq[Double]{def companion: scala.collection.generic.GenericCompanion[Seq[Any]]}; def dropRight(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def takeRight(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def slice(start: Int,end: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def take(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def drop(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}}, List[Double]) => scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]{def sameElements[B >: Any](that: Iterable[B]): Boolean}]; def reverse: scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; protected def thisCollection: Seq[Double]{def companion: scala.collection.generic.GenericCompanion[Seq[Any]]}; def dropRight(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def takeRight(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def slice(start: Int,end: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def take(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}; def drop(n: Int): scala.collection.immutable.Seq[Double]{def companion: scala.collection.generic.GenericCompanion[scala.collection.immutable.Seq[Any]]}}
+ (Nil:List[List[Double]]).reduceLeft((_: Any, _: Any) => Nil.indices.map(_ => 0d))
+ ^
+two errors found
diff --git a/test/files/neg/t2179.scala b/test/files/neg/t2179.scala
new file mode 100755
index 0000000000..89e22b6e2a
--- /dev/null
+++ b/test/files/neg/t2179.scala
@@ -0,0 +1,3 @@
+object Test {
+ (Nil:List[List[Double]]).reduceLeft((_: Any, _: Any) => Nil.indices.map(_ => 0d))
+}
diff --git a/test/files/neg/t2641.check b/test/files/neg/t2641.check
new file mode 100644
index 0000000000..70123bfc58
--- /dev/null
+++ b/test/files/neg/t2641.check
@@ -0,0 +1,39 @@
+t2641.scala:19: error: illegal cyclic reference involving trait ManagedSeq
+ with TraversableViewLike[A, ManagedSeqStrict[A], ManagedSeq[A]]
+ ^
+t2641.scala:17: error: illegal inheritance;
+ self-type ManagedSeq does not conform to ManagedSeqStrict[A]'s selftype ManagedSeqStrict[A]
+ extends ManagedSeqStrict[A]
+ ^
+t2641.scala:18: error: illegal inheritance;
+ self-type ManagedSeq does not conform to scala.collection.TraversableView[A,ManagedSeqStrict[A]]'s selftype scala.collection.TraversableView[A,ManagedSeqStrict[A]]
+ with TraversableView[A, ManagedSeqStrict[A]]
+ ^
+t2641.scala:19: error: illegal inheritance;
+ self-type ManagedSeq does not conform to scala.collection.TraversableViewLike[A,ManagedSeqStrict[A],<error>]'s selftype scala.collection.TraversableViewLike[A,Coll,This]
+ with TraversableViewLike[A, ManagedSeqStrict[A], ManagedSeq[A]]
+ ^
+t2641.scala:17: error: illegal inheritance;
+ self-type ManagedSeq does not conform to ScalaObject's selftype ScalaObject
+ extends ManagedSeqStrict[A]
+ ^
+t2641.scala:25: error: something is wrong (wrong class file?): trait ManagedSeq with type parameters [A,Coll] gets applied to arguments [], phase = typer
+ trait Transformed[+B] extends ManagedSeq[B, Coll] with super.Transformed[B]
+ ^
+t2641.scala:27: error: something is wrong (wrong class file?): trait ManagedSeq with type parameters [A,Coll] gets applied to arguments [], phase = namer
+ trait Sliced extends Transformed[A] with super.Sliced {
+ ^
+t2641.scala:27: error: illegal inheritance; superclass Any
+ is not a subclass of the superclass ManagedSeqStrict
+ of the mixin trait Transformed
+ trait Sliced extends Transformed[A] with super.Sliced {
+ ^
+t2641.scala:27: error: illegal inheritance; superclass Any
+ is not a subclass of the superclass Object
+ of the mixin trait Sliced
+ trait Sliced extends Transformed[A] with super.Sliced {
+ ^
+t2641.scala:28: error: value managedIterator is not a member of ManagedSeq
+ override def managedIterator = self.managedIterator slice (from, until)
+ ^
+10 errors found
diff --git a/test/files/neg/t2641.scala b/test/files/neg/t2641.scala
new file mode 100644
index 0000000000..5529035f79
--- /dev/null
+++ b/test/files/neg/t2641.scala
@@ -0,0 +1,31 @@
+import scala.collection._
+import scala.collection.generic._
+import scala.collection.mutable.Builder
+
+
+abstract class ManagedSeqStrict[+A]
+ extends Traversable[A]
+ with GenericTraversableTemplate[A, ManagedSeqStrict]
+{
+ override def companion: GenericCompanion[ManagedSeqStrict] = null
+
+ override def foreach[U](f: A => U): Unit =
+ null
+}
+
+trait ManagedSeq[+A, +Coll]
+ extends ManagedSeqStrict[A]
+ with TraversableView[A, ManagedSeqStrict[A]]
+ with TraversableViewLike[A, ManagedSeqStrict[A], ManagedSeq[A]]
+{ self =>
+
+ override def underlying = throw new Exception("no underlying")
+
+ //trait Transformed[+B] extends ManagedSeq[B] with super.Transformed[B]
+ trait Transformed[+B] extends ManagedSeq[B, Coll] with super.Transformed[B]
+
+ trait Sliced extends Transformed[A] with super.Sliced {
+ override def managedIterator = self.managedIterator slice (from, until)
+ }
+
+}
diff --git a/test/files/neg/t771.check b/test/files/neg/t771.check
new file mode 100644
index 0000000000..c0d1e002f8
--- /dev/null
+++ b/test/files/neg/t771.check
@@ -0,0 +1,4 @@
+t771.scala:4: error: trait Iterator is abstract; cannot be instantiated
+ def c[A](it:java.util.Iterator[A]) = new scala.Iterator[A]
+ ^
+one error found
diff --git a/test/files/neg/t771.scala b/test/files/neg/t771.scala
new file mode 100755
index 0000000000..26bf441648
--- /dev/null
+++ b/test/files/neg/t771.scala
@@ -0,0 +1,5 @@
+class Foo {
+ def a = c(b)
+ def b[List[AnyRef]] = new java.util.Iterator[List[Object]] { }
+ def c[A](it:java.util.Iterator[A]) = new scala.Iterator[A]
+}
diff --git a/test/files/neg/viewtest.scala b/test/files/neg/viewtest.scala
index 778e672d91..5e7d624d23 100644
--- a/test/files/neg/viewtest.scala
+++ b/test/files/neg/viewtest.scala
@@ -12,13 +12,13 @@ trait Ordered[+a] {
*/
def compareTo [b >: a <% Ordered[b]](that: b): Int
- def < [b >: a <% Ordered[b]](that: b): boolean = (this compareTo that) < 0
+ def < [b >: a <% Ordered[b]](that: b): Boolean = (this compareTo that) < 0
- def > [b >: a <% Ordered[b]](that: b): boolean = (this compareTo that) > 0
+ def > [b >: a <% Ordered[b]](that: b): Boolean = (this compareTo that) > 0
- def <= [b >: a <% Ordered[b]](that: b): boolean = (this compareTo that) <= 0
+ def <= [b >: a <% Ordered[b]](that: b): Boolean = (this compareTo that) <= 0
- def >= [b >: a <% Ordered[b]](that: b): boolean = (this compareTo that) >= 0
+ def >= [b >: a <% Ordered[b]](that: b): Boolean = (this compareTo that) >= 0
}
@@ -30,9 +30,9 @@ object O {
case _ => -(y compareTo x)
}
}
- implicit def view2(x: char): Ordered[char] = new Ordered[char] {
- def compareTo [b >: char <% Ordered[b]](y: b): Int = y match {
- case y1: char => x - y1
+ implicit def view2(x: Char): Ordered[Char] = new Ordered[Char] {
+ def compareTo [b >: Char <% Ordered[b]](y: b): Int = y match {
+ case y1: Char => x - y1
case _ => -(y compareTo x)
}
}
@@ -106,7 +106,7 @@ object Test {
Console.println(t.elements)
}
{
- var t: Tree[List[char]] = Empty
+ var t: Tree[List[Char]] = Empty
for (s <- args) {
t = t insert toCharList(s)
}
diff --git a/test/files/pos/bug0091.scala b/test/files/pos/bug0091.scala
index 54c821b41c..d491b7cfb9 100644
--- a/test/files/pos/bug0091.scala
+++ b/test/files/pos/bug0091.scala
@@ -1,6 +1,6 @@
class Bug {
def main(args: Array[String]) = {
var msg: String = null; // no bug if "null" instead of "_"
- val f: PartialFunction[Any, unit] = { case 42 => msg = "coucou" };
+ val f: PartialFunction[Any, Unit] = { case 42 => msg = "coucou" };
}
}
diff --git a/test/files/pos/bug1075.scala b/test/files/pos/bug1075.scala
index 936ef72272..0f518b24db 100644
--- a/test/files/pos/bug1075.scala
+++ b/test/files/pos/bug1075.scala
@@ -5,7 +5,7 @@ class Directory(var dir_ : String)
}
dir_ = dir_.replaceAll("/{2,}", "/")
- def this(serialized : Array[byte]) = {
+ def this(serialized : Array[Byte]) = {
this(new String(serialized, "UTF-8"))
}
diff --git a/test/files/pos/bug287.scala b/test/files/pos/bug287.scala
index 81a01951b2..8e5e8831c1 100644
--- a/test/files/pos/bug287.scala
+++ b/test/files/pos/bug287.scala
@@ -1,7 +1,7 @@
object testBuf {
class mystream extends java.io.BufferedOutputStream(new java.io.FileOutputStream("/dev/null")) {
def w( x:String ):Unit = {
- val foo = new Array[byte](2);
+ val foo = new Array[Byte](2);
// write( byte[] ) is defined in FilterOutputStream, the superclass of BufferedOutputStream
super.write( foo ); // error
diff --git a/test/files/pos/collections.scala b/test/files/pos/collections.scala
index 61a25528c7..23b23d016e 100644
--- a/test/files/pos/collections.scala
+++ b/test/files/pos/collections.scala
@@ -2,7 +2,7 @@ package mixins;
import scala.collection.mutable._;
-class Collections extends HashSet[Int] with ObservableSet[Int,Collections] {
+class Collections extends HashSet[Int] with ObservableSet[Int] {
override def +=(elem: Int): this.type = super.+=(elem);
override def -=(elem: Int): this.type = super.-=(elem);
override def clear: Unit = super.clear;
diff --git a/test/files/pos/depexists.scala b/test/files/pos/depexists.scala
new file mode 100644
index 0000000000..d539c844c1
--- /dev/null
+++ b/test/files/pos/depexists.scala
@@ -0,0 +1,5 @@
+object depexists {
+
+ val c: Cell[(a, b)] forSome { type a <: Number; type b <: (a, a) } = null
+ val d = c
+}
diff --git a/test/files/pos/implicits.scala b/test/files/pos/implicits.scala
index aeb6591507..4979835e21 100644
--- a/test/files/pos/implicits.scala
+++ b/test/files/pos/implicits.scala
@@ -1,3 +1,17 @@
+// #1435
+object t1435 {
+ implicit def a(s:String):String = error("")
+ implicit def a(i:Int):String = error("")
+ implicit def b(i:Int):String = error("")
+}
+
+class C1435 {
+ val v:String = {
+ import t1435.a
+ 2
+ }
+}
+
// #1579
object Test1579 {
class Column
@@ -36,3 +50,8 @@ object Test2188 {
val x: java.util.List[String] = List("foo")
}
+
+object TestNumericWidening {
+ val y = 1
+ val x: java.lang.Long = y
+}
diff --git a/test/files/pos/nested2.scala b/test/files/pos/nested2.scala
index 302688a0ef..421ea6facf 100644
--- a/test/files/pos/nested2.scala
+++ b/test/files/pos/nested2.scala
@@ -5,5 +5,5 @@ class C[A] {
object Test {
val x = new C[String]
- val y: C[String]#D[int] = new x.D[int]
+ val y: C[String]#D[Int] = new x.D[Int]
}
diff --git a/test/files/pos/switchUnbox.scala b/test/files/pos/switchUnbox.scala
index a97bff5521..4f5467de29 100644
--- a/test/files/pos/switchUnbox.scala
+++ b/test/files/pos/switchUnbox.scala
@@ -2,7 +2,7 @@
// that contains -Xsqueeze:on
//
object Foo {
- var xyz: (int, String) = (1, "abc")
+ var xyz: (Int, String) = (1, "abc")
xyz._1 match {
case 1 => Console.println("OK")
case 2 => Console.println("OK")
diff --git a/test/files/pos/t1164.scala b/test/files/pos/t1164.scala
index 3acda88ba9..b238bf54d9 100644
--- a/test/files/pos/t1164.scala
+++ b/test/files/pos/t1164.scala
@@ -15,7 +15,7 @@ object test {
// Try the same thing as above but use a function as argument to Bar
// constructor
- type FunIntToA [a] = (int) => a
+ type FunIntToA [a] = (Int) => a
class Bar[a] (var f: FunIntToA[a])
object Bar {
diff --git a/test/files/pos/t1226.scala b/test/files/pos/t1226.scala
new file mode 100644
index 0000000000..0af21cbb61
--- /dev/null
+++ b/test/files/pos/t1226.scala
@@ -0,0 +1,8 @@
+package graphs;
+
+abstract class Graph (private[graphs] val mappings : Any){
+}
+
+class Nodes (mappings : Any) extends Graph(mappings) {
+ mappings.toString;
+}
diff --git a/test/files/pos/t1236.scala b/test/files/pos/t1236.scala
new file mode 100644
index 0000000000..5e221ce411
--- /dev/null
+++ b/test/files/pos/t1236.scala
@@ -0,0 +1,14 @@
+trait Empty[E[_]] {
+ def e[A]: E[A]
+}
+
+object T {
+ val ListEmpty = new Empty[List] {
+ def e[A] = Nil
+ }
+
+ def foo[F[_]](q:(String,String)) = "hello"
+ def foo[F[_]](e: Empty[F]) = "world"
+
+ val x = foo[List](ListEmpty)
+} \ No newline at end of file
diff --git a/test/files/pos/t1422.scala b/test/files/pos/t1422.scala
new file mode 100644
index 0000000000..658f5c730d
--- /dev/null
+++ b/test/files/pos/t1422.scala
@@ -0,0 +1,2 @@
+case class A(private val foo:String)
+case class B(protected[this] val foo:String)
diff --git a/test/files/pos/t1459/AbstractBase.java b/test/files/pos/t1459/AbstractBase.java
new file mode 100755
index 0000000000..492419416c
--- /dev/null
+++ b/test/files/pos/t1459/AbstractBase.java
@@ -0,0 +1,5 @@
+package base;
+
+public abstract class AbstractBase {
+ public abstract void doStuff(String... params); // !!! was Object..
+} \ No newline at end of file
diff --git a/test/files/pos/t1459/App.scala b/test/files/pos/t1459/App.scala
new file mode 100755
index 0000000000..651b285b17
--- /dev/null
+++ b/test/files/pos/t1459/App.scala
@@ -0,0 +1,18 @@
+package foo
+import base._
+
+object App extends Application {
+ class Concrete extends AbstractBase {
+ override def doStuff(params:java.lang.String*): Unit = println("doStuff invoked")
+ }
+
+ val impl = new Concrete
+
+ //succeeds
+ impl.doStuff(null)
+
+ val caller = new Caller
+
+ // fails with AbstractMethodError
+ caller.callDoStuff(impl)
+}
diff --git a/test/files/pos/t1459/Caller.java b/test/files/pos/t1459/Caller.java
new file mode 100755
index 0000000000..4ae51d8c57
--- /dev/null
+++ b/test/files/pos/t1459/Caller.java
@@ -0,0 +1,7 @@
+package base;
+
+public class Caller {
+ public void callDoStuff(AbstractBase impl) {
+ impl.doStuff("abc"); // was new Object());
+ }
+} \ No newline at end of file
diff --git a/test/pending/neg/t1545.scala b/test/files/pos/t1545.scala
index d7c0245725..d7c0245725 100755
--- a/test/pending/neg/t1545.scala
+++ b/test/files/pos/t1545.scala
diff --git a/test/files/pos/t2484.scala b/test/files/pos/t2484.scala
new file mode 100755
index 0000000000..6990c46099
--- /dev/null
+++ b/test/files/pos/t2484.scala
@@ -0,0 +1,17 @@
+class Admin extends javax.swing.JApplet {
+ val jScrollPane = new javax.swing.JScrollPane (null, 0, 0)
+ def bug2484: Unit = {
+ scala.concurrent.ops.spawn {jScrollPane.synchronized {
+ def someFunction () = {}
+ //scala.concurrent.ops.spawn {someFunction ()}
+ jScrollPane.addComponentListener (new java.awt.event.ComponentAdapter {override def componentShown (e: java.awt.event.ComponentEvent) = {
+ someFunction (); jScrollPane.removeComponentListener (this)}})
+ }}
+ }
+}
+// t2630.scala
+object Test {
+ def meh(xs: List[Any]) {
+ xs map { x => (new AnyRef {}) }
+ }
+}
diff --git a/test/files/pos/t2635.scala b/test/files/pos/t2635.scala
new file mode 100755
index 0000000000..7cd5531356
--- /dev/null
+++ b/test/files/pos/t2635.scala
@@ -0,0 +1,16 @@
+abstract class Base
+
+object Test
+{
+ def run(c: Class[_ <: Base]): Unit = {
+ }
+
+ def main(args: Array[String]): Unit =
+ {
+ val sc: Option[Class[_ <: Base]] = Some(classOf[Base])
+ sc match {
+ case Some(c) => run(c)
+ case None =>
+ }
+ }
+}
diff --git a/test/files/pos/t2664.scala b/test/files/pos/t2664.scala
new file mode 100644
index 0000000000..7b667d0106
--- /dev/null
+++ b/test/files/pos/t2664.scala
@@ -0,0 +1,9 @@
+package pkg1 {
+ class C {
+ private[pkg1] def foo: Int = 1
+ }
+
+ trait T extends C {
+ private[pkg1] abstract override def foo = super.foo + 1
+ }
+}
diff --git a/test/files/pos/t2665.scala b/test/files/pos/t2665.scala
new file mode 100644
index 0000000000..3163e31326
--- /dev/null
+++ b/test/files/pos/t2665.scala
@@ -0,0 +1,3 @@
+object Test {
+ val x: Unit = Array("")
+} \ No newline at end of file
diff --git a/test/files/pos/t2667.scala b/test/files/pos/t2667.scala
new file mode 100644
index 0000000000..b214cc7f37
--- /dev/null
+++ b/test/files/pos/t2667.scala
@@ -0,0 +1,6 @@
+object A {
+ def foo(x: Int, y: Int*): Int = 45
+ def foo[T](x: T*): Int = 55
+
+ val x: Unit = foo(23, 23f)
+} \ No newline at end of file
diff --git a/test/files/pos/t2669.scala b/test/files/pos/t2669.scala
new file mode 100644
index 0000000000..72e931178c
--- /dev/null
+++ b/test/files/pos/t2669.scala
@@ -0,0 +1,28 @@
+// #2629, #2639, #2669
+object Test2669 {
+
+ def test[T](l: java.util.ArrayList[_ <: T]) = 1
+ test(new java.util.ArrayList[String]())
+
+}
+
+import java.util.ArrayList
+
+object Test2629 {
+ def main(args: Array[String]): Unit = {
+ val l = new ArrayList[String](1)
+ val m = new ArrayList(l)
+
+ println(l.size)
+ println(m.size)
+ }
+}
+
+
+import java.util.Vector
+
+// scalac cannot detect lack of type params, but then throws AssertionError later:
+class TVector2639 {
+ val b = new Vector // this line passed without any error being detected
+ val a = new Vector(1) // this line caused scalac to throw an AssertionError
+}
diff --git a/test/files/pos/t2698.scala b/test/files/pos/t2698.scala
new file mode 100644
index 0000000000..0e2662de61
--- /dev/null
+++ b/test/files/pos/t2698.scala
@@ -0,0 +1,10 @@
+import scala.collection._
+import scala.util.regexp._
+
+abstract class S2 {
+ val lang: WordExp
+ type __labelT = lang._labelT
+
+ var deltaq: Array[__labelT] = _
+ def delta1 = immutable.Map(deltaq.zipWithIndex: _*)
+}
diff --git a/test/files/run/Course-2002-09.scala b/test/files/run/Course-2002-09.scala
index fac39e0841..384a91efd8 100644
--- a/test/files/run/Course-2002-09.scala
+++ b/test/files/run/Course-2002-09.scala
@@ -81,7 +81,7 @@ class Constant(q: Quantity, v: Double) extends Constraint {
class Probe(name: String, q: Quantity) extends Constraint {
def newValue: Unit = printProbe(q.getValue);
def dropValue: Unit = printProbe(None);
- private def printProbe(v: Option[double]) {
+ private def printProbe(v: Option[Double]) {
val vstr = v match {
case Some(x) => x.toString()
case None => "?"
@@ -103,7 +103,7 @@ class Quantity() {
if (v != v1) error("Error! contradiction: " + v + " and " + v1);
case None =>
informant = setter; value = Some(v);
- for (val c <- constraints; !(c == informant)) {
+ for (c <- constraints; if !(c == informant)) {
c.newValue;
}
}
@@ -112,7 +112,7 @@ class Quantity() {
def forgetValue(retractor: Constraint): Unit = {
if (retractor == informant) {
value = None;
- for (val c <- constraints; !(c == informant)) c.dropValue;
+ for (c <- constraints; if !(c == informant)) c.dropValue;
}
}
def forgetValue: Unit = forgetValue(NoConstraint);
@@ -258,7 +258,7 @@ object M2 {
};
}
- def show(x: Option[int], y: Option[Int], z: Option[int]) = {
+ def show(x: Option[Int], y: Option[Int], z: Option[Int]) = {
Console.print("a = " +set(a,x)+ ", b = " +set(b,y)+ ", c = " +set(c,z));
Console.println(" => " + a.str + " * " + b.str + " = " + c.str);
a.forgetValue; b.forgetValue; c.forgetValue;
diff --git a/test/files/run/SymbolsTest.scala b/test/files/run/SymbolsTest.scala
new file mode 100644
index 0000000000..53caa5e62f
--- /dev/null
+++ b/test/files/run/SymbolsTest.scala
@@ -0,0 +1,283 @@
+
+
+
+
+class Slazz {
+ val s1 = 'myFirstSymbol
+ val s2 = 'mySecondSymbol
+ def s3 = 'myThirdSymbol
+ var s4: Symbol = null
+
+ s4 = 'myFourthSymbol
+}
+
+class Base {
+ val basesymbol = 'symbase
+}
+
+class Sub extends Base {
+ val subsymbol = 'symsub
+}
+
+trait Signs {
+ val ind = 'indication
+ val trace = 'trace
+}
+
+trait Lazy1 {
+ lazy val v1 = "lazy v1"
+ lazy val s1 = 'lazySymbol1
+}
+
+trait Lazy2 {
+ lazy val v2 = "lazy v2"
+ lazy val s2 = 'lazySymbol2
+}
+
+trait Lazy3 {
+ lazy val v3 = "lazy v3"
+ lazy val s3 = 'lazySymbol3
+}
+
+object SingletonOfLazyness {
+ lazy val lazysym = 'lazySymbol
+ lazy val another = 'another
+ lazy val lastone = 'lastone
+}
+
+/*
+ * Tests symbols to see if they work correct.
+ */
+object Test {
+ class Inner {
+ val simba = 'smba
+ var mfs: Symbol = null
+ mfs = Symbol("mfsa")
+ }
+
+ object InnerObject {
+ val o1 = 'aaa
+ val o2 = 'ddd
+ }
+
+ def aSymbol = 'myFirstSymbol
+ val anotherSymbol = 'mySecondSymbol
+
+ def main(args: Array[String]) {
+ testLiterals
+ testForLoop
+ testInnerClasses
+ testInnerObjects
+ testWithHashMaps
+ testLists
+ testAnonymous
+ testNestedObject
+ testInheritance
+ testTraits
+ testLazyTraits
+ testLazyObjects
+ }
+
+ def testLiterals {
+ val scl = new Slazz
+ assert(scl.s1 == aSymbol)
+ assert(scl.s2 == anotherSymbol)
+ assert(scl.s3 == 'myThirdSymbol)
+ assert(scl.s4 == Symbol.apply("myFourthSymbol"))
+ assert(scl.s1 == Symbol("myFirstSymbol"))
+ }
+
+ def testForLoop {
+ for (i <- 0 until 100) List("Val" + i)
+ }
+
+ def testInnerClasses {
+ val innerPower = new Inner
+ assert(innerPower.simba == 'smba)
+ assert(innerPower.mfs == 'mfsa)
+ }
+
+ def testInnerObjects {
+ assert(InnerObject.o1 == 'aaa)
+ assert(InnerObject.o2 == 'ddd)
+ }
+
+ def testWithHashMaps {
+ val map = new collection.mutable.HashMap[Symbol, Symbol]
+ map.put(InnerObject.o1, 'smba)
+ map.put(InnerObject.o2, 'mfsa)
+ map.put(Symbol("WeirdKey" + 1), Symbol("Weird" + "Val" + 1))
+ assert(map('aaa) == 'smba)
+ assert(map('ddd) == 'mfsa)
+ assert(map('WeirdKey1) == Symbol("WeirdVal1"))
+
+ map.clear
+ for (i <- 0 until 100) map.put(Symbol("symKey" + i), Symbol("symVal" + i))
+ assert(map(Symbol("symKey15")) == Symbol("symVal15"))
+ assert(map('symKey22) == 'symVal22)
+ assert(map('symKey73) == 'symVal73)
+ assert(map('symKey56) == 'symVal56)
+ assert(map('symKey91) == 'symVal91)
+ }
+
+ def testLists {
+ var lst: List[Symbol] = Nil
+ for (i <- 0 until 100) lst ::= Symbol("lsym" + (99 - i))
+ assert(lst(0) == 'lsym0)
+ assert(lst(10) == 'lsym10)
+ assert(lst(30) == 'lsym30)
+ assert(lst(40) == 'lsym40)
+ assert(lst(65) == 'lsym65)
+ assert(lst(90) == 'lsym90)
+ }
+
+ def testAnonymous { // TODO: complains that the classdef can't be found for some reason; runs fine in my case
+ // val anon = () => {
+ // val simba = 'smba
+ // simba
+ // }
+ // val an2 = () => {
+ // object nested {
+ // val m = 'mfsa
+ // }
+ // nested.m
+ // }
+ // val an3 = () => {
+ // object nested {
+ // val f = () => {
+ // 'layered
+ // }
+ // def gets = f()
+ // }
+ // nested.gets
+ // }
+ // val inner = new Inner
+ // assert(anon() == inner.simba)
+ // assert(anon().toString == "'smba")
+ // assert(an2() == 'mfsa)
+ // assert(an3() == Symbol("layered" + ""))
+ }
+
+ def testNestedObject {
+ object nested {
+ def sign = 'sign
+ def insignia = 'insignia
+ }
+ assert(nested.sign == 'sign)
+ assert(nested.insignia == 'insignia)
+ assert(('insignia).toString == "'insignia")
+ }
+
+ def testInheritance {
+ val base = new Base
+ val sub = new Sub
+ assert(base.basesymbol == 'symbase)
+ assert(sub.subsymbol == 'symsub)
+ assert(sub.basesymbol == 'symbase)
+
+ val anon = new Sub {
+ def subsubsymbol = 'symsubsub
+ }
+ assert(anon.subsubsymbol == 'symsubsub)
+ assert(anon.subsymbol == 'symsub)
+ assert(anon.basesymbol == 'symbase)
+
+ object nested extends Sub {
+ def objsymbol = 'symobj
+ }
+ assert(nested.objsymbol == 'symobj)
+ assert(nested.subsymbol == 'symsub)
+ assert(nested.basesymbol == 'symbase)
+ assert(('symbase).toString == "'symbase")
+ }
+
+ def testTraits {
+ val fromTrait = new AnyRef with Signs {
+ def traitsymbol = 'traitSymbol
+ }
+
+ assert(fromTrait.traitsymbol == 'traitSymbol)
+ assert(fromTrait.ind == 'indication)
+ assert(fromTrait.trace == 'trace)
+ assert(('trace).toString == "'trace")
+
+ trait Compl {
+ val s1 = 's1
+ def s2 = 's2
+ object inner {
+ val s3 = 's3
+ val s4 = 's4
+ }
+ }
+
+ val compl = new Sub with Signs with Compl
+ assert(compl.s1 == 's1)
+ assert(compl.s2 == 's2)
+ assert(compl.inner.s3 == 's3)
+ assert(compl.inner.s4 == 's4)
+ assert(compl.ind == 'indication)
+ assert(compl.trace == 'trace)
+ assert(compl.subsymbol == 'symsub)
+ assert(compl.basesymbol == 'symbase)
+
+ object Local extends Signs with Compl {
+ val s5 = 's5
+ def s6 = 's6
+ object inner2 {
+ val s7 = 's7
+ def s8 = 's8
+ }
+ }
+ assert(Local.s5 == 's5)
+ assert(Local.s6 == 's6)
+ assert(Local.inner2.s7 == 's7)
+ assert(Local.inner2.s8 == 's8)
+ assert(Local.inner.s3 == 's3)
+ assert(Local.inner.s4 == 's4)
+ assert(Local.s1 == 's1)
+ assert(Local.s2 == 's2)
+ assert(Local.trace == 'trace)
+ assert(Local.ind == 'indication)
+ assert(('s8).toString == "'s8")
+ }
+
+ def testLazyTraits {
+ val l1 = new AnyRef with Lazy1
+ val l2 = new AnyRef with Lazy2
+ val l3 = new AnyRef with Lazy3
+
+ l1.v1
+ l2.v2
+ l3.v3
+ assert((l1.s1).toString == "'lazySymbol1")
+ assert(l2.s2 == Symbol("lazySymbol" + 2))
+ assert(l3.s3 == 'lazySymbol3)
+ }
+
+ def testLazyObjects {
+ assert(SingletonOfLazyness.lazysym == 'lazySymbol)
+ assert(SingletonOfLazyness.another == Symbol("ano" + "ther"))
+ assert((SingletonOfLazyness.lastone).toString == "'lastone")
+
+ object nested {
+ lazy val sym1 = 'snested1
+ lazy val sym2 = 'snested2
+ }
+
+ assert(nested.sym1 == 'snested1)
+ assert(nested.sym2 == Symbol("snested" + "2"))
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/files/run/bug2552.check b/test/files/run/bug2552.check
new file mode 100644
index 0000000000..774e360d13
--- /dev/null
+++ b/test/files/run/bug2552.check
@@ -0,0 +1,49 @@
+p(0)
+0
+p(1)
+1
+p(2)
+2
+p(3)
+3
+p(4)
+4
+p(5)
+5
+p(6)
+6
+p(7)
+7
+p(8)
+8
+p(9)
+9
+p(10)
+p(0)
+true
+true
+0
+p(1)
+true
+1
+p(2)
+false
+p(2)
+false
+p(0)
+true
+true
+0
+p(1)
+p(2)
+2
+p(3)
+p(4)
+4
+p(5)
+p(6)
+6
+p(7)
+p(8)
+8
+p(9)
diff --git a/test/files/run/bug2552.scala b/test/files/run/bug2552.scala
new file mode 100644
index 0000000000..911d98decc
--- /dev/null
+++ b/test/files/run/bug2552.scala
@@ -0,0 +1,34 @@
+object Test extends Application {
+ def testTakeWhile = {
+ val numbers = Iterator.range(0, 50)
+ val zeroTo9 = numbers.takeWhile(x => { println("p(" + x + ")"); x < 10 } )
+
+ zeroTo9.foreach(println _)
+
+ val zeroTo1 = Iterator.range(0, 20).takeWhile(x => { println("p(" + x + ")"); x < 2 } )
+
+ println(zeroTo1.hasNext)
+ println(zeroTo1.hasNext)
+ println(zeroTo1.next)
+ println(zeroTo1.hasNext)
+ println(zeroTo1.next)
+ println(zeroTo1.hasNext)
+ println(zeroTo1.hasNext)
+ }
+
+ def testFilter = {
+ val predicate = (x: Int) => { println("p(" + x + ")"); x % 2 == 0 }
+
+ val evens = Iterator.range(0, 10).filter(predicate)
+
+ println(evens.hasNext)
+ println(evens.hasNext)
+ println(evens.next)
+
+ evens.foreach(println _)
+ }
+
+ testTakeWhile
+ testFilter
+}
+
diff --git a/test/files/run/bug2636.scala b/test/files/run/bug2636.scala
new file mode 100644
index 0000000000..8c49a733fd
--- /dev/null
+++ b/test/files/run/bug2636.scala
@@ -0,0 +1,35 @@
+object Test
+{
+ type Foo = { def update(x: Int, value: String): Unit }
+ type Foo2 = { def update(x: Int, value: String): Int }
+ type Foo3 = { def update(x: Int, value: String): Array[Int] }
+
+ def alen() = {
+ type L1 = { def length: Int }
+ def len(p: L1) = p.length
+ val x: L1 = Array(1,2,3)
+ len(x)
+ }
+
+ type A1 = { def apply(x: Int): String }
+ def arrApply(a: A1, x: Int) = a(x)
+
+ def main(args: Array[String]): Unit = {
+ val arr = new Array[String](3)
+ val p1: Foo = arr
+ def a1 = p1(0) = "b"
+
+ val p2: Foo2 = new { def update(x: Int, value: String) = { p1(1) = "o" ; 1 } }
+ def a2 = p2(0) = "c"
+
+ val p3: Foo3 = new { def update(x: Int, value: String) = { p1(2) = "b" ; Array(1) } }
+ def a3 = p3(10) = "hi mom"
+
+ a1 ; a2 ; a3 ;
+
+ assert(arr.mkString == "bob")
+ assert(alen() == 3)
+ assert(arrApply(arr, 1) == "o")
+ assert(arrApply(new { def apply(x: Int) = "tom" }, -100) == "tom")
+ }
+} \ No newline at end of file
diff --git a/test/files/run/bug627.scala b/test/files/run/bug627.scala
index 6415694ffe..ecaf150741 100644
--- a/test/files/run/bug627.scala
+++ b/test/files/run/bug627.scala
@@ -1,6 +1,6 @@
object Test {
def main(args: Array[String]) {
- val s: Seq[int] = Array(1, 2, 3, 4)
+ val s: Seq[Int] = Array(1, 2, 3, 4)
println(s)
}
}
diff --git a/test/files/run/bugs2087-and-2400.scala b/test/files/run/bugs2087-and-2400.scala
new file mode 100644
index 0000000000..19a5df26e3
--- /dev/null
+++ b/test/files/run/bugs2087-and-2400.scala
@@ -0,0 +1,20 @@
+object Test
+{
+ def negativeCharMaker = new (Short => Char) { def apply(x: Short) = x.toChar }
+ def main(args: Array[String]): Unit = {
+ // throws exception if -100 gets to Character.valueOf
+ val x = negativeCharMaker(-100)
+
+ // chars are unsigned; they should never be equal to negative values
+ assert((-100).toShort != (-100).toChar)
+ assert((-100).toChar != (-100).toShort)
+ assert((-100).toChar != (-100).toByte)
+ assert((-100).toByte != (-100).toChar)
+
+ // BoxesRunTime must agree as well
+ assert(((-100).toShort: Any) != (-100).toChar)
+ assert(((-100).toChar: Any) != (-100).toShort)
+ assert(((-100).toChar: Any) != (-100).toByte)
+ assert(((-100).toByte: Any) != (-100).toChar)
+ }
+}
diff --git a/test/files/run/priorityQueue.scala b/test/files/run/priorityQueue.scala
index 9f453788fc..20f7a3cb44 100644
--- a/test/files/run/priorityQueue.scala
+++ b/test/files/run/priorityQueue.scala
@@ -1,24 +1,346 @@
+
+
+import scala.collection.mutable.PriorityQueue
+
+
+
+
+
+
// populate a priority queue a few different ways and make sure they all seem equal
-object Test extends Application {
- import scala.collection.mutable.PriorityQueue
- import scala.util.Random.nextInt
- val pq1 = new PriorityQueue[String]
- val pq2 = new PriorityQueue[String]
- val pq3 = new PriorityQueue[String]
- val pq4 = new PriorityQueue[String]
+object Test {
+
+ def main(args: Array[String]) {
+ testInsertionsAndEqualities
+ testIntensiveEnqueueDequeue
+ testIndexing
+ testTails
+ testInits
+ testFilters
+ testDrops
+ testUpdates
+ testEquality
+ testMisc
+ testReverse
+ }
+
+ def testInsertionsAndEqualities {
+ import scala.util.Random.nextInt
+ val pq1 = new PriorityQueue[String]
+ val pq2 = new PriorityQueue[String]
+ val pq3 = new PriorityQueue[String]
+ val pq4 = new PriorityQueue[String]
+
+ val strings = (1 to 20).toList map (i => List.fill((Math.abs(nextInt % 20)) + 1)("x").mkString)
+
+ pq1 ++= strings
+ pq2 ++= strings.reverse
+ for (s <- strings) pq3 += s
+ for (s <- strings.reverse) pq4 += s
+
+ val pqs = List(pq1, pq2, pq3, pq4, pq1.clone, pq2.clone)
+
+ for (queue1 <- pqs ; queue2 <- pqs) {
+ assert(queue1 == queue2)
+ assert(queue1.max == queue2.max)
+ }
+
+ assertPriority(pq1)
+ }
+
+ def testIndexing {
+ val pq = new PriorityQueue[Char]
+ "The quick brown fox jumps over the lazy dog".foreach(pq += _)
+
+ // val iter = pq.iterator
+ // while (iter.hasNext) println("`" + iter.next + "`")
+ assert(pq(0) == 'z')
+ assert(pq(1) == 'y')
+ assert(pq(2) == 'x')
+ assert(pq(3) == 'w')
+ assert(pq(4) == 'v')
+ assert(pq(5) == 'u')
+ assert(pq(7) == 't')
+ assert(pq(8) == 's')
+ assert(pq(9) == 'r')
+ assert(pq(10) == 'r')
+
+ pq.clear
+ "abcdefghijklmnopqrstuvwxyz".foreach(pq += _)
+ for (i <- 0 until 26) assert(pq(i) == ('z' - i))
+
+ val intpq = new PriorityQueue[Int]
+ val intlst = new collection.mutable.ArrayBuffer ++ (0 until 100)
+ val random = new util.Random(101)
+ while (intlst.nonEmpty) {
+ val idx = random.nextInt(intlst.size)
+ intpq += intlst(idx)
+ intlst.remove(idx)
+ }
+ for (i <- 0 until 100) assert(intpq(i) == (99 - i))
+ }
+
+ def testTails {
+ val pq = new PriorityQueue[Int]
+ for (i <- 0 until 10) pq += i * 4321 % 200
+
+ assert(pq.size == 10)
+ assert(pq.nonEmpty)
+
+ val tailpq = pq.tail
+ // pq.printstate
+ // tailpq.printstate
+ assert(tailpq.size == 9)
+ assert(tailpq.nonEmpty)
+ assertPriorityDestructive(tailpq)
+ }
+
+ def assertPriorityDestructive[A](pq: PriorityQueue[A])(implicit ord: Ordering[A]) {
+ import ord._
+ var prev: A = null.asInstanceOf[A]
+ while (pq.nonEmpty) {
+ val curr = pq.dequeue
+ if (prev != null) assert(curr <= prev)
+ prev = curr
+ }
+ }
+
+ def assertPriority[A](pq: PriorityQueue[A])(implicit ord: Ordering[A]) {
+ import ord._
+ var prev: A = null.asInstanceOf[A]
+ val iter = pq.iterator
+ while (iter.hasNext) {
+ val curr = iter.next
+ if (prev != null) assert(curr <= prev)
+ prev = curr
+ }
+ }
+
+ def testInits {
+ val pq = new PriorityQueue[Long]
+ for (i <- 0 until 20) pq += (i + 313) * 111 % 300
+
+ assert(pq.size == 20)
+
+ val initpq = pq.init
+ assert(initpq.size == 19)
+ assertPriorityDestructive(initpq)
+ }
+
+ def testFilters {
+ val pq = new PriorityQueue[String]
+ for (i <- 0 until 100) pq += "Some " + (i * 312 % 200)
+
+ val filpq = pq.filter(_.indexOf('0') != -1)
+ assertPriorityDestructive(filpq)
+ }
+
+ def testIntensiveEnqueueDequeue {
+ val pq = new PriorityQueue[Int]
+
+ testIntensive(1000, pq)
+ pq.clear
+ testIntensive(200, pq)
+ }
+
+ def testIntensive(sz: Int, pq: PriorityQueue[Int]) {
+ val lst = new collection.mutable.ArrayBuffer[Int] ++ (0 until sz)
+ val rand = new util.Random(7)
+ while (lst.nonEmpty) {
+ val idx = rand.nextInt(lst.size)
+ pq.enqueue(lst(idx))
+ lst.remove(idx)
+ if (rand.nextDouble < 0.25 && pq.nonEmpty) pq.dequeue
+ assertPriority(pq)
+ }
+ }
+
+ def testDrops {
+ val pq = new PriorityQueue[Int]
+ pq ++= (0 until 100)
+ val droppq = pq.drop(50)
+ assertPriority(droppq)
+
+ pq.clear
+ pq ++= droppq
+ assertPriorityDestructive(droppq)
+ assertPriority(pq)
+ assertPriorityDestructive(pq)
+ }
+
+ def testUpdates {
+ val pq = new PriorityQueue[Int]
+ pq ++= (0 until 36)
+ assertPriority(pq)
+
+ pq(0) = 100
+ assert(pq(0) == 100)
+ assert(pq.dequeue == 100)
+ assertPriority(pq)
+
+ pq.clear
+
+ pq ++= (1 to 100)
+ pq(5) = 200
+ assert(pq(0) == 200)
+ assert(pq(1) == 100)
+ assert(pq(2) == 99)
+ assert(pq(3) == 98)
+ assert(pq(4) == 97)
+ assert(pq(5) == 96)
+ assert(pq(6) == 94)
+ assert(pq(7) == 93)
+ assert(pq(98) == 2)
+ assert(pq(99) == 1)
+ assertPriority(pq)
+
+ pq(99) = 450
+ assert(pq(0) == 450)
+ assert(pq(1) == 200)
+ assert(pq(99) == 2)
+ assertPriority(pq)
+
+ pq(1) = 0
+ assert(pq(1) == 100)
+ assert(pq(99) == 0)
+ assertPriority(pq)
+ assertPriorityDestructive(pq)
+ }
+
+ def testEquality {
+ val pq1 = new PriorityQueue[Int]
+ val pq2 = new PriorityQueue[Int]
+
+ pq1 ++= (0 until 50)
+ var i = 49
+ while (i >= 0) {
+ pq2 += i
+ i -= 1
+ }
+ assert(pq1 == pq2)
+ assertPriority(pq2)
+
+ pq1 += 100
+ assert(pq1 != pq2)
+ pq2 += 100
+ assert(pq1 == pq2)
+ pq2 += 200
+ assert(pq1 != pq2)
+ pq1 += 200
+ assert(pq1 == pq2)
+ assertPriorityDestructive(pq1)
+ assertPriorityDestructive(pq2)
+ }
+
+ def testMisc {
+ val pq = new PriorityQueue[Int]
+ pq ++= (0 until 100)
+ assert(pq.size == 100)
+
+ val (p1, p2) = pq.partition(_ < 50)
+ assertPriorityDestructive(p1)
+ assertPriorityDestructive(p2)
+
+ val spq = pq.slice(25, 75)
+ assertPriorityDestructive(spq)
+
+ pq.clear
+ pq ++= (0 until 10)
+ pq += 5
+ assert(pq.size == 11)
+
+ val ind = pq.lastIndexWhere(_ == 5)
+ assert(ind == 5)
+ assertPriorityDestructive(pq)
+
+ pq.clear
+ pq ++= (0 until 10)
+ assert(pq.lastIndexWhere(_ == 9) == 0)
+ assert(pq.lastIndexOf(8) == 1)
+ assert(pq.lastIndexOf(7) == 2)
+
+ pq += 5
+ pq += 9
+ assert(pq.lastIndexOf(9) == 1)
+ assert(pq.lastIndexWhere(_ % 2 == 1) == 10)
+ assert(pq.lastIndexOf(5) == 6)
+
+ val lst = pq.reverseIterator.toList
+ for (i <- 0 until 5) assert(lst(i) == i)
+ assert(lst(5) == 5)
+ assert(lst(6) == 5)
+ assert(lst(7) == 6)
+ assert(lst(8) == 7)
+ assert(lst(9) == 8)
+ assert(lst(10) == 9)
+ assert(lst(11) == 9)
+
+ pq.clear
+ assert(pq.reverseIterator.toList.isEmpty)
+
+ pq ++= (50 to 75)
+ assert(pq.lastIndexOf(70) == 5)
- val strings = (1 to 20).toList map (i => List.fill((Math.abs(nextInt % 20)) + 1)("x").mkString)
+ pq += 55
+ pq += 70
+ assert(pq.lastIndexOf(70) == 6)
+ assert(pq.lastIndexOf(55) == 22)
+ assert(pq.lastIndexOf(55, 21) == 21)
+ assert(pq.lastIndexWhere(_ > 54) == 22)
+ assert(pq.lastIndexWhere(_ > 54, 21) == 21)
+ assert(pq.lastIndexWhere(_ > 69, 5) == 5)
+ }
+
+ def testReverse {
+ val pq = new PriorityQueue[(Int, Int)]
+ pq ++= (for (i <- 0 until 10) yield (i, i * i % 10))
+
+ assert(pq.reverse.size == pq.reverseIterator.toList.size)
+ assert((pq.reverse zip pq.reverseIterator.toList).forall(p => p._1 == p._2))
+ assert(pq.reverse.sameElements(pq.reverseIterator.toSeq))
+ assert(pq.reverse(0)._1 == pq(9)._1)
+ assert(pq.reverse(1)._1 == pq(8)._1)
+ assert(pq.reverse(4)._1 == pq(5)._1)
+ assert(pq.reverse(9)._1 == pq(0)._1)
- pq1 ++= strings
- pq2 ++= strings.reverse
- for (s <- strings) pq3 += s
- for (s <- strings.reverse) pq4 += s
+ pq += ((7, 7))
+ pq += ((7, 9))
+ pq += ((7, 8))
+ assert(pq.reverse.reverse == pq)
+ assert(pq.reverse.lastIndexWhere(_._2 == 6) == 6)
+ assertPriorityDestructive(pq.reverse.reverse)
- val pqs = List(pq1, pq2, pq3, pq4, pq1.clone, pq2.clone)
+ val iq = new PriorityQueue[Int]
+ iq ++= (0 until 50)
+ assert(iq.reverse == iq.reverseIterator.toSeq)
+ assert(iq.reverse.reverse == iq)
- for (queue1 <- pqs ; queue2 <- pqs) {
- assert(queue1 == queue2)
- assert(queue1.max == queue2.max)
+ iq += 25
+ iq += 40
+ iq += 10
+ assert(iq.reverse == iq.reverseIterator.toList)
+ assert(iq.reverse.reverse == iq)
+ assert(iq.reverse.lastIndexWhere(_ == 10) == 11)
+ assertPriorityDestructive(iq.reverse.reverse)
}
+
}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
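Two helpers do the heavy lifting in the test above: `assertPriority` walks the iterator and checks it is already non-increasing, while `assertPriorityDestructive` drains the queue and checks that successive `dequeue` results never increase. A small standalone sketch of the destructive property (object name is illustrative):

    import scala.collection.mutable.{ListBuffer, PriorityQueue}

    object DequeueOrderSketch {
      def main(args: Array[String]): Unit = {
        val pq = new PriorityQueue[Int]
        pq ++= Seq(3, 1, 4, 1, 5, 9, 2, 6)

        // Draining with dequeue yields a non-increasing sequence.
        val drained = ListBuffer[Int]()
        while (pq.nonEmpty) drained += pq.dequeue()

        assert(drained.toList == drained.toList.sorted(Ordering[Int].reverse))
        println(drained.mkString(", "))  // 9, 6, 5, 4, 3, 2, 1, 1
      }
    }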
diff --git a/test/files/run/randomAccessSeq-apply.scala b/test/files/run/randomAccessSeq-apply.scala
index 863a4d42a2..1f74050bb7 100644
--- a/test/files/run/randomAccessSeq-apply.scala
+++ b/test/files/run/randomAccessSeq-apply.scala
@@ -6,7 +6,7 @@ object Test extends Application {
assert(List(1) == single.toList)
val two = RandomAccessSeq("a", "b")
- assert("a" == two.first)
+ assert("a" == two.head)
assert("b" == two.apply(1))
println("OK")
diff --git a/test/files/run/t1524.scala b/test/files/run/t1524.scala
index ecd90adec7..4f6c65d052 100644
--- a/test/files/run/t1524.scala
+++ b/test/files/run/t1524.scala
@@ -3,5 +3,5 @@ object Test extends Application {
val buf = new scala.collection.mutable.ArrayBuffer[String] { override val initialSize = 0 }
buf += "initial"
buf += "second"
- println(buf.first)
+ println(buf.head)
}
diff --git a/test/files/run/t153.check b/test/files/run/t153.check
index 504fd7fc7f..648a6de7c3 100644
--- a/test/files/run/t153.check
+++ b/test/files/run/t153.check
@@ -1 +1 @@
-Stream(524288, 262144, 131072, 65536, 32768, 16384, 8192, 4096, 2048, 1024, 512, 256, 128, 64, 32, 16, 8, 4, 2, 1)
+Stream(262144, 131072, 65536, 32768, 16384, 8192, 4096, 2048, 1024, 512, 256, 128, 64, 32, 16, 8, 4, 2, 1)
diff --git a/test/files/run/t153.scala b/test/files/run/t153.scala
index c7b3c1c762..359e40407b 100644
--- a/test/files/run/t153.scala
+++ b/test/files/run/t153.scala
@@ -1,5 +1,5 @@
object Test extends Application {
def powers(x: Int) = if ((x&(x-1))==0) Some(x) else None
- val res = (Stream.range(1, 1000000) flatMap powers).reverse
+ val res = (Stream.range(1, 500000) flatMap powers).reverse
println(res take 42 force)
} \ No newline at end of file
diff --git a/test/files/run/t2526.scala b/test/files/run/t2526.scala
new file mode 100644
index 0000000000..5f6d60546a
--- /dev/null
+++ b/test/files/run/t2526.scala
@@ -0,0 +1,54 @@
+/**
+ * Checks that various foreach methods overridden in mutable.HashMap as part of ticket #2526
+ * still work correctly.
+ */
+object Test {
+ import collection._
+
+ def main(args: Array[String]) {
+ val m = new mutable.HashMap[String, String]
+
+ /* Use a non-hash-based structure for verification */
+ val keys = List("a", "b", "c", "d", "e")
+ val valueSuffix = "value"
+ val values = keys.map(_ + valueSuffix)
+ val entries = keys.zip(values)
+
+ for (k <- keys) m(k) = k + valueSuffix
+
+ assertForeach(keys, m.keySet.iterator)
+ assertForeach(keys, m.keysIterator)
+ assertForeach(keys, m.keySet)
+
+ assertForeach(values, m.valuesIterable.iterator)
+ assertForeach(values, m.valuesIterator)
+ assertForeach(values, m.valuesIterable)
+
+ assertForeach(entries, m)
+ }
+
+ /* Checks that foreach of `actual` goes over all the elements in `expected` */
 (placeholder - removed)
+ private def assertForeach[E](expected: Traversable[E], actual: Iterator[E]): Unit = {
+ val notYetFound = new mutable.ArrayBuffer[E]() ++= expected
+ actual.foreach { e =>
+ assert(notYetFound.contains(e))
+ notYetFound -= e
+ }
+ assert(notYetFound.size == 0, "mutable.HashMap.foreach should have iterated over: " + notYetFound)
+ }
+
+ /*
+ * Checks that foreach of `actual` goes over all the elements in `expected`.
+ * We duplicate the method above because there is no common interface between Traversable and
+ * Iterator, and we want to avoid converting between collections to ensure that we test what
+ * we mean to test.
+ */
+ private def assertForeach[E](expected: Traversable[E], actual: Traversable[E]): Unit = {
+ val notYetFound = new mutable.ArrayBuffer[E]() ++= expected
+ actual.foreach { e =>
+ assert(notYetFound.contains(e))
+ notYetFound -= e
+ }
+ assert(notYetFound.size == 0, "mutable.HashMap.foreach should have iterated over: " + notYetFound)
+ }
+}
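The duplicated helper above exists because, as its comment notes, Traversable and Iterator shared no common interface at the time of this commit. On later standard libraries (2.13's IterableOnce, for instance) a single helper covers both; a hedged sketch under that assumption, with illustrative names:

    import scala.collection.mutable

    object ForeachCheckSketch {
      // One helper for both collections and iterators via IterableOnce (Scala 2.13).
      def assertForeach[E](expected: Iterable[E], actual: IterableOnce[E]): Unit = {
        val notYetFound = mutable.ArrayBuffer[E]() ++= expected
        actual.iterator.foreach { e =>
          assert(notYetFound.contains(e))
          notYetFound -= e
        }
        assert(notYetFound.isEmpty, "foreach should also have visited: " + notYetFound)
      }

      def main(args: Array[String]): Unit = {
        val m = mutable.HashMap("a" -> 1, "b" -> 2, "c" -> 3)
        assertForeach(m.keySet, m.keysIterator)
        assertForeach(m.values, m.valuesIterator)
      }
    }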
diff --git a/test/files/run/unapply.scala b/test/files/run/unapply.scala
index 72a4b0ac64..acbce58d35 100644
--- a/test/files/run/unapply.scala
+++ b/test/files/run/unapply.scala
@@ -111,7 +111,7 @@ object StreamFoo extends TestCase("unapply for Streams") with Assert {
case Stream.cons(hd, tl) => hd + sum(tl)
}
override def runTest {
- val str: Stream[int] = Stream.fromIterator(List(1,2,3).iterator)
+ val str: Stream[Int] = Stream.fromIterator(List(1,2,3).iterator)
assertEquals(sum(str), 6)
}
}
diff --git a/test/files/run/unapplyArray.scala b/test/files/run/unapplyArray.scala
index bf6582dadf..bf7c9e2300 100644
--- a/test/files/run/unapplyArray.scala
+++ b/test/files/run/unapplyArray.scala
@@ -1,7 +1,7 @@
object Test {
def main(args:Array[String]): Unit = {
val z = Array(1,2,3,4)
- val zs: Seq[int] = z
+ val zs: Seq[Int] = z
val za: Any = z
/*
diff --git a/test/files/pos/bug1357.scala b/test/pending/pos/bug1357.scala
index fcdecb3ad3..fcdecb3ad3 100644
--- a/test/files/pos/bug1357.scala
+++ b/test/pending/pos/bug1357.scala
diff --git a/test/pending/pos/t2610.scala b/test/pending/pos/t2610.scala
new file mode 100644
index 0000000000..8dd4cde66e
--- /dev/null
+++ b/test/pending/pos/t2610.scala
@@ -0,0 +1,17 @@
+package mada; package defects; package tests
+
+package object bbb {
+ def bar = ()
+ aaa.foo // value foo is not a member of package mada.defects.tests.aaa
+}
+
+package object aaa {
+ def foo = ()
+}
+
+/* compiles successfully if placed here..
+package object bbb {
+ def bar = ()
+ aaa.foo // value foo is not a member of package mada.defects.tests.aaa
+}
+*/ \ No newline at end of file
diff --git a/test/pending/pos/t2619.scala b/test/pending/pos/t2619.scala
new file mode 100644
index 0000000000..565bc9572b
--- /dev/null
+++ b/test/pending/pos/t2619.scala
@@ -0,0 +1,80 @@
+abstract class F {
+ final def apply(x: Int): AnyRef = null
+}
+abstract class AbstractModule {
+ def as: List[AnyRef]
+ def ms: List[AbstractModule]
+ def fs: List[F] = Nil
+ def rs(x: Int): List[AnyRef] = fs.map(_(x))
+}
+abstract class ModuleType1 extends AbstractModule {}
+abstract class ModuleType2 extends AbstractModule {}
+
+object ModuleAE extends ModuleType1 {
+ def as = Nil
+ def ms = Nil
+}
+object ModuleAF extends ModuleType2 {
+ def as = Nil
+ def ms = List(ModuleAE)
+}
+object ModuleAG extends ModuleType1 {
+ def as = List("")
+ def ms = Nil
+}
+object ModuleAI extends ModuleType1 {
+ def as = Nil
+ def ms = List(ModuleAE)
+}
+object ModuleAK extends ModuleType2 {
+ def as = Nil
+ def ms = List(ModuleAF)
+}
+object ModuleAL extends ModuleType1 {
+ def as = Nil
+ def ms = List(
+ ModuleAG,
+ ModuleAI
+ )
+}
+object ModuleAM extends ModuleType1 {
+ def as = Nil
+ def ms = List(
+ ModuleAL,
+ ModuleAE
+ ) ::: List(ModuleAK)
+}
+object ModuleBE extends ModuleType1 {
+ def as = Nil
+ def ms = Nil
+}
+object ModuleBF extends ModuleType2 {
+ def as = Nil
+ def ms = List(ModuleBE)
+}
+object ModuleBG extends ModuleType1 {
+ def as = List("")
+ def ms = Nil
+}
+object ModuleBI extends ModuleType1 {
+ def as = Nil
+ def ms = List(ModuleBE)
+}
+object ModuleBK extends ModuleType2 {
+ def as = Nil
+ def ms = List(ModuleBF)
+}
+object ModuleBL extends ModuleType1 {
+ def as = Nil
+ def ms = List(
+ ModuleBG,
+ ModuleBI
+ )
+}
+object ModuleBM extends ModuleType1 {
+ def as = Nil
+ def ms = List(
+ ModuleBL,
+ ModuleBE
+ ) ::: List(ModuleBK)
+} \ No newline at end of file
diff --git a/test/pending/pos/t2625.scala b/test/pending/pos/t2625.scala
new file mode 100644
index 0000000000..94240cb6c6
--- /dev/null
+++ b/test/pending/pos/t2625.scala
@@ -0,0 +1,9 @@
+package t
+
+object T {
+ case class A(x: Int)(x: Int)
+
+ def A(x: Boolean): Int = 34
+
+ A(23)
+} \ No newline at end of file
diff --git a/test/pending/pos/t2635.scala b/test/pending/pos/t2635.scala
new file mode 100644
index 0000000000..378631b23d
--- /dev/null
+++ b/test/pending/pos/t2635.scala
@@ -0,0 +1,16 @@
+abstract class Base
+
+object Test
+{
+ def run(c: Class[_ <: Base]): Unit = {
+ }
+
+ def main(args: Array[String]): Unit =
+ {
+ val sc: Option[Class[_ <: Base]] = Some(classOf[Base])
+ sc match {
+ case Some((c: Class[_ <: Base])) => run(c)
+ case None =>
+ }
+ }
+} \ No newline at end of file
diff --git a/test/pending/pos/t2641.scala b/test/pending/pos/t2641.scala
new file mode 100644
index 0000000000..fec825c4f9
--- /dev/null
+++ b/test/pending/pos/t2641.scala
@@ -0,0 +1,16 @@
+import scala.collection._
+import scala.collection.generic._
+
+abstract class ManagedSeqStrict[+A]
+ extends Traversable[A]
+ with GenericTraversableTemplate[A, ManagedSeqStrict]
+
+trait ManagedSeq[+A, +Coll]
+ extends ManagedSeqStrict[A]
+ with TraversableView[A, ManagedSeqStrict[A]]
+ with TraversableViewLike[A, ManagedSeqStrict[A], ManagedSeq[A/*ERROR: too few type args*/]]
+{ self =>
+ trait Transformed[+B] extends ManagedSeq[B, Coll] with super.Transformed[B]
+
+ trait Sliced extends Transformed[A] with super.Sliced
+} \ No newline at end of file
diff --git a/test/pending/pos/t2660.scala b/test/pending/pos/t2660.scala
new file mode 100644
index 0000000000..b1908b201b
--- /dev/null
+++ b/test/pending/pos/t2660.scala
@@ -0,0 +1,25 @@
+package hoho
+
+class G
+
+class H extends G
+
+class A[T](x: T) {
+
+ def this(y: G, z: T) = {
+ this(z)
+ print(1)
+ }
+
+ def this(z: H, h: T) = {
+ this(h)
+ print(2)
+ }
+}
+
+object T {
+ def main(args: Array[String]) {
+ implicit def g2h(g: G): H = new H
+ new A(new H, 23)
+ }
+} \ No newline at end of file
diff --git a/test/pending/pos/t2691.scala b/test/pending/pos/t2691.scala
new file mode 100644
index 0000000000..ba2e52f1fe
--- /dev/null
+++ b/test/pending/pos/t2691.scala
@@ -0,0 +1,9 @@
+object Breakdown {
+ def unapplySeq(x: Int): Some[List[String]] = Some(List("", "there"))
+}
+object Test {
+ 42 match {
+ case Breakdown("") => // needed to trigger bug
+ case Breakdown("", who) => println ("hello " + who)
+ }
+} \ No newline at end of file
diff --git a/test/postreview.py b/test/postreview.py
new file mode 100644
index 0000000000..2e2518f7ee
--- /dev/null
+++ b/test/postreview.py
@@ -0,0 +1,2540 @@
+#!/usr/bin/env python
+import cookielib
+import difflib
+import getpass
+import marshal
+import mimetools
+import ntpath
+import os
+import re
+import socket
+import stat
+import subprocess
+import sys
+import tempfile
+import urllib
+import urllib2
+from optparse import OptionParser
+from tempfile import mkstemp
+from urlparse import urljoin, urlparse
+
+try:
+ from hashlib import md5
+except ImportError:
+ # Support Python versions before 2.5.
+ from md5 import md5
+
+try:
+ import json
+except ImportError:
+ import simplejson as json
+
+# This specific import is necessary to handle the paths for
+# cygwin enabled machines.
+if (sys.platform.startswith('win')
+ or sys.platform.startswith('cygwin')):
+ import ntpath as cpath
+else:
+ import posixpath as cpath
+
+###
+# Default configuration -- user-settable variables follow.
+###
+
+# The following settings usually aren't needed, but if your Review
+# Board crew has specific preferences and doesn't want to express
+# them with command line switches, set them here and you're done.
+# In particular, setting the REVIEWBOARD_URL variable will allow
+# you to make it easy for people to submit reviews regardless of
+# their SCM setup.
+#
+# Note that in order for this script to work with a reviewboard site
+# that uses local paths to access a repository, the 'Mirror path'
+# in the repository setup page must be set to the remote URL of the
+# repository.
+
+#
+# Reviewboard URL.
+#
+# Set this if you wish to hard-code a default server to always use.
+# It's generally recommended to set this using your SCM repository
+# (for those that support it -- currently only SVN, Git, and Perforce).
+#
+# For example, on SVN:
+# $ svn propset reviewboard:url http://reviewboard.example.com .
+#
+# Or with Git:
+# $ git config reviewboard.url http://reviewboard.example.com
+#
+# On Perforce servers version 2008.1 and above:
+# $ p4 counter reviewboard.url http://reviewboard.example.com
+#
+# Older Perforce servers only allow numerical counters, so embedding
+# the url in the counter name is also supported:
+# $ p4 counter reviewboard.url.http:\|\|reviewboard.example.com 1
+#
+# Note that slashes are not allowed in Perforce counter names, so replace them
+# with pipe characters (they are a safe substitute as they are not used
+# unencoded in URLs). You may need to escape them when issuing the p4 counter
+# command as above.
+#
+# If this is not possible or desired, setting the value here will let
+# you get started quickly.
+#
+# For all other repositories, a .reviewboardrc file present at the top of
+# the checkout will also work. For example:
+#
+# $ cat .reviewboardrc
+# REVIEWBOARD_URL = "http://reviewboard.example.com"
+#
+REVIEWBOARD_URL = None
+
+# Default submission arguments. These are all optional; run this
+# script with --help for descriptions of each argument.
+TARGET_GROUPS = None
+TARGET_PEOPLE = None
+SUBMIT_AS = None
+PUBLISH = False
+OPEN_BROWSER = False
+
+# Debugging. For development...
+DEBUG = False
+
+###
+# End user-settable variables.
+###
+
+
+VERSION = "0.8"
+
+user_config = None
+tempfiles = []
+options = None
+
+
+class APIError(Exception):
+ pass
+
+
+class RepositoryInfo:
+ """
+ A representation of a source code repository.
+ """
+ def __init__(self, path=None, base_path=None, supports_changesets=False,
+ supports_parent_diffs=False):
+ self.path = path
+ self.base_path = base_path
+ self.supports_changesets = supports_changesets
+ self.supports_parent_diffs = supports_parent_diffs
+ debug("repository info: %s" % self)
+
+ def __str__(self):
+ return "Path: %s, Base path: %s, Supports changesets: %s" % \
+ (self.path, self.base_path, self.supports_changesets)
+
+ def set_base_path(self, base_path):
+ if not base_path.startswith('/'):
+ base_path = '/' + base_path
+ debug("changing repository info base_path from %s to %s" % \
+ (self.base_path, base_path))
+ self.base_path = base_path
+
+ def find_server_repository_info(self, server):
+ """
+ Try to find the repository from the list of repositories on the server.
+ For Subversion, this could be a repository with a different URL. For
+ all other clients, this is a noop.
+ """
+ return self
+
+
+class SvnRepositoryInfo(RepositoryInfo):
+ """
+ A representation of a SVN source code repository. This version knows how to
+ find a matching repository on the server even if the URLs differ.
+ """
+ def __init__(self, path, base_path, uuid, supports_parent_diffs=False):
+ RepositoryInfo.__init__(self, path, base_path,
+ supports_parent_diffs=supports_parent_diffs)
+ self.uuid = uuid
+
+ def find_server_repository_info(self, server):
+ """
+ The point of this function is to find a repository on the server that
+ matches self, even if the paths aren't the same. (For example, if self
+ uses an 'http' path, but the server uses a 'file' path for the same
+ repository.) It does this by comparing repository UUIDs. If the
+ repositories use the same path, you'll get back self, otherwise you'll
+ get a different SvnRepositoryInfo object (with a different path).
+ """
+ repositories = server.get_repositories()
+
+ for repository in repositories:
+ if repository['tool'] != 'Subversion':
+ continue
+
+ info = self._get_repository_info(server, repository)
+
+ if not info or self.uuid != info['uuid']:
+ continue
+
+ repos_base_path = info['url'][len(info['root_url']):]
+ relpath = self._get_relative_path(self.base_path, repos_base_path)
+ if relpath:
+ return SvnRepositoryInfo(info['url'], relpath, self.uuid)
+
+ # We didn't find a matching repository on the server. We'll just return
+ # self and hope for the best.
+ return self
+
+ def _get_repository_info(self, server, repository):
+ try:
+ return server.get_repository_info(repository['id'])
+ except APIError, e:
+ # If the server couldn't fetch the repository info, it will return
+ # code 210. Ignore those.
+ # Other more serious errors should still be raised, though.
+ rsp = e.args[0]
+ if rsp['err']['code'] == 210:
+ return None
+
+ raise e
+
+ def _get_relative_path(self, path, root):
+ pathdirs = self._split_on_slash(path)
+ rootdirs = self._split_on_slash(root)
+
+ # root is empty, so anything relative to that is itself
+ if len(rootdirs) == 0:
+ return path
+
+ # If one of the directories doesn't match, then path is not relative
+ # to root.
+ if rootdirs != pathdirs:
+ return None
+
+ # All the directories matched, so the relative path is whatever
+ # directories are left over. The base_path can't be empty, though, so
+ # if the paths are the same, return '/'
+ if len(pathdirs) == len(rootdirs):
+ return '/'
+ else:
+ return '/'.join(pathdirs[len(rootdirs):])
+
+ def _split_on_slash(self, path):
+ # Split on slashes, but ignore multiple slashes and throw away any
+ # trailing slashes.
+ split = re.split('/*', path)
+ if split[-1] == '':
+ split = split[0:-1]
+ return split
+
+
+class ReviewBoardHTTPPasswordMgr(urllib2.HTTPPasswordMgr):
+ """
+ Adds HTTP authentication support for URLs.
+
+ Python 2.4's password manager has a bug in http authentication when the
+ target server uses a non-standard port. This works around that bug on
+ Python 2.4 installs. This also allows post-review to prompt for passwords
+ in a consistent way.
+
+ See: http://bugs.python.org/issue974757
+ """
+ def __init__(self, reviewboard_url):
+ self.passwd = {}
+ self.rb_url = reviewboard_url
+ self.rb_user = None
+ self.rb_pass = None
+
+ def find_user_password(self, realm, uri):
+ if uri.startswith(self.rb_url):
+ if self.rb_user is None or self.rb_pass is None:
+ print "==> HTTP Authentication Required"
+ print 'Enter username and password for "%s" at %s' % \
+ (realm, urlparse(uri)[1])
+ self.rb_user = raw_input('Username: ')
+ self.rb_pass = getpass.getpass('Password: ')
+
+ return self.rb_user, self.rb_pass
+ else:
+ # If this is an auth request for some other domain (since HTTP
+ # handlers are global), fall back to standard password management.
+ return urllib2.HTTPPasswordMgr.find_user_password(self, realm, uri)
+
+
+class ReviewBoardServer(object):
+ """
+ An instance of a Review Board server.
+ """
+ def __init__(self, url, info, cookie_file):
+ self.url = url
+ if self.url[-1] != '/':
+ self.url += '/'
+ self._info = info
+ self._server_info = None
+ self.cookie_file = cookie_file
+ self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
+
+ # Set up the HTTP libraries to support all of the features we need.
+ cookie_handler = urllib2.HTTPCookieProcessor(self.cookie_jar)
+ password_mgr = ReviewBoardHTTPPasswordMgr(self.url)
+ auth_handler = urllib2.HTTPBasicAuthHandler(password_mgr)
+
+ opener = urllib2.build_opener(cookie_handler, auth_handler)
+ opener.addheaders = [('User-agent', 'post-review/' + VERSION)]
+ urllib2.install_opener(opener)
+
+ def login(self, force=False):
+ """
+ Logs in to a Review Board server, prompting the user for login
+ information if needed.
+ """
+ if not force and self.has_valid_cookie():
+ return
+
+ print "==> Review Board Login Required"
+ print "Enter username and password for Review Board at %s" % self.url
+ if options.username:
+ username = options.username
+ elif options.submit_as:
+ username = options.submit_as
+ else:
+ username = raw_input('Username: ')
+
+ if not options.password:
+ password = getpass.getpass('Password: ')
+ else:
+ password = options.password
+
+ debug('Logging in with username "%s"' % username)
+ try:
+ self.api_post('api/json/accounts/login/', {
+ 'username': username,
+ 'password': password,
+ })
+ except APIError, e:
+ rsp, = e.args
+
+ die("Unable to log in: %s (%s)" % (rsp["err"]["msg"],
+ rsp["err"]["code"]))
+
+ debug("Logged in.")
+
+ def has_valid_cookie(self):
+ """
+ Load the user's cookie file and see if they have a valid
+ 'rbsessionid' cookie for the current Review Board server. Returns
+ true if so and false otherwise.
+ """
+ try:
+ parsed_url = urlparse(self.url)
+ host = parsed_url[1]
+ path = parsed_url[2] or '/'
+
+ # Cookie files don't store port numbers, unfortunately, so
+ # get rid of the port number if it's present.
+ host = host.split(":")[0]
+
+ debug("Looking for '%s %s' cookie in %s" % \
+ (host, path, self.cookie_file))
+ self.cookie_jar.load(self.cookie_file, ignore_expires=True)
+
+ try:
+ cookie = self.cookie_jar._cookies[host][path]['rbsessionid']
+
+ if not cookie.is_expired():
+ debug("Loaded valid cookie -- no login required")
+ return True
+
+ debug("Cookie file loaded, but cookie has expired")
+ except KeyError:
+ debug("Cookie file loaded, but no cookie for this server")
+ except IOError, error:
+ debug("Couldn't load cookie file: %s" % error)
+
+ return False
+
+ def new_review_request(self, changenum, submit_as=None):
+ """
+ Creates a review request on a Review Board server, updating an
+ existing one if the changeset number already exists.
+
+ If submit_as is provided, the specified user name will be recorded as
+ the submitter of the review request (given that the logged in user has
+ the appropriate permissions).
+ """
+ try:
+ debug("Attempting to create review request for %s" % changenum)
+ data = { 'repository_path': self.info.path }
+
+ if changenum:
+ data['changenum'] = changenum
+
+ if submit_as:
+ debug("Submitting the review request as %s" % submit_as)
+ data['submit_as'] = submit_as
+
+ rsp = self.api_post('api/json/reviewrequests/new/', data)
+ except APIError, e:
+ rsp, = e.args
+
+ if not options.diff_only:
+ if rsp['err']['code'] == 204: # Change number in use
+ debug("Review request already exists. Updating it...")
+ rsp = self.api_post(
+ 'api/json/reviewrequests/%s/update_from_changenum/' %
+ rsp['review_request']['id'])
+ else:
+ raise e
+
+ debug("Review request created")
+ return rsp['review_request']
+
+ def set_review_request_field(self, review_request, field, value):
+ """
+ Sets a field in a review request to the specified value.
+ """
+ rid = review_request['id']
+
+ debug("Attempting to set field '%s' to '%s' for review request '%s'" %
+ (field, value, rid))
+
+ self.api_post('api/json/reviewrequests/%s/draft/set/' % rid, {
+ field: value,
+ })
+
+ def get_review_request(self, rid):
+ """
+ Returns the review request with the specified ID.
+ """
+ rsp = self.api_get('api/json/reviewrequests/%s/' % rid)
+ return rsp['review_request']
+
+ def get_repositories(self):
+ """
+ Returns the list of repositories on this server.
+ """
+ rsp = self.api_get('/api/json/repositories/')
+ return rsp['repositories']
+
+ def get_repository_info(self, rid):
+ """
+ Returns detailed information about a specific repository.
+ """
+ rsp = self.api_get('/api/json/repositories/%s/info/' % rid)
+ return rsp['info']
+
+ def save_draft(self, review_request):
+ """
+ Saves a draft of a review request.
+ """
+ self.api_post("api/json/reviewrequests/%s/draft/save/" %
+ review_request['id'])
+ debug("Review request draft saved")
+
+ def upload_diff(self, review_request, diff_content, parent_diff_content):
+ """
+ Uploads a diff to a Review Board server.
+ """
+ debug("Uploading diff, size: %d" % len(diff_content))
+
+ if parent_diff_content:
+ debug("Uploading parent diff, size: %d" % len(parent_diff_content))
+
+ fields = {}
+ files = {}
+
+ if self.info.base_path:
+ fields['basedir'] = self.info.base_path
+
+ files['path'] = {
+ 'filename': 'diff',
+ 'content': diff_content
+ }
+
+ if parent_diff_content:
+ files['parent_diff_path'] = {
+ 'filename': 'parent_diff',
+ 'content': parent_diff_content
+ }
+
+ self.api_post('api/json/reviewrequests/%s/diff/new/' %
+ review_request['id'], fields, files)
+
+ def publish(self, review_request):
+ """
+ Publishes a review request.
+ """
+ debug("Publishing")
+ self.api_post('api/json/reviewrequests/%s/publish/' %
+ review_request['id'])
+
+ def _get_server_info(self):
+ if not self._server_info:
+ self._server_info = self._info.find_server_repository_info(self)
+
+ return self._server_info
+
+ info = property(_get_server_info)
+
+ def process_json(self, data):
+ """
+ Loads in a JSON file and returns the data if successful. On failure,
+ APIError is raised.
+ """
+ rsp = json.loads(data)
+
+ if rsp['stat'] == 'fail':
+ raise APIError, rsp
+
+ return rsp
+
+ def http_get(self, path):
+ """
+ Performs an HTTP GET on the specified path, storing any cookies that
+ were set.
+ """
+ debug('HTTP GETting %s' % path)
+
+ url = self._make_url(path)
+
+ try:
+ rsp = urllib2.urlopen(url).read()
+ self.cookie_jar.save(self.cookie_file)
+ return rsp
+ except urllib2.HTTPError, e:
+ print "Unable to access %s (%s). The host path may be invalid" % \
+ (url, e.code)
+ try:
+ debug(e.read())
+ except AttributeError:
+ pass
+ die()
+
+ def _make_url(self, path):
+ """Given a path on the server returns a full http:// style url"""
+ app = urlparse(self.url)[2]
+ if path[0] == '/':
+ url = urljoin(self.url, app[:-1] + path)
+ else:
+ url = urljoin(self.url, app + path)
+
+ if not url.startswith('http'):
+ url = 'http://%s' % url
+ return url
+
+ def api_get(self, path):
+ """
+ Performs an API call using HTTP GET at the specified path.
+ """
+ return self.process_json(self.http_get(path))
+
+ def http_post(self, path, fields, files=None):
+ """
+ Performs an HTTP POST on the specified path, storing any cookies that
+ were set.
+ """
+ if fields:
+ debug_fields = fields.copy()
+ else:
+ debug_fields = {}
+
+ if 'password' in debug_fields:
+ debug_fields["password"] = "**************"
+ url = self._make_url(path)
+ debug('HTTP POSTing to %s: %s' % (url, debug_fields))
+
+ content_type, body = self._encode_multipart_formdata(fields, files)
+ headers = {
+ 'Content-Type': content_type,
+ 'Content-Length': str(len(body))
+ }
+
+ try:
+ r = urllib2.Request(url, body, headers)
+ data = urllib2.urlopen(r).read()
+ self.cookie_jar.save(self.cookie_file)
+ return data
+ except urllib2.URLError, e:
+ try:
+ debug(e.read())
+ except AttributeError:
+ pass
+
+ die("Unable to access %s. The host path may be invalid\n%s" % \
+ (url, e))
+ except urllib2.HTTPError, e:
+ die("Unable to access %s (%s). The host path may be invalid\n%s" % \
+ (url, e.code, e.read()))
+
+ def api_post(self, path, fields=None, files=None):
+ """
+ Performs an API call using HTTP POST at the specified path.
+ """
+ return self.process_json(self.http_post(path, fields, files))
+
+ def _encode_multipart_formdata(self, fields, files):
+ """
+ Encodes data for use in an HTTP POST.
+ """
+ BOUNDARY = mimetools.choose_boundary()
+ content = ""
+
+ fields = fields or {}
+ files = files or {}
+
+ for key in fields:
+ content += "--" + BOUNDARY + "\r\n"
+ content += "Content-Disposition: form-data; name=\"%s\"\r\n" % key
+ content += "\r\n"
+ content += fields[key] + "\r\n"
+
+ for key in files:
+ filename = files[key]['filename']
+ value = files[key]['content']
+ content += "--" + BOUNDARY + "\r\n"
+ content += "Content-Disposition: form-data; name=\"%s\"; " % key
+ content += "filename=\"%s\"\r\n" % filename
+ content += "\r\n"
+ content += value + "\r\n"
+
+ content += "--" + BOUNDARY + "--\r\n"
+ content += "\r\n"
+
+ content_type = "multipart/form-data; boundary=%s" % BOUNDARY
+
+ return content_type, content
+
+
+class SCMClient(object):
+ """
+ A base representation of an SCM tool for fetching repository information
+ and generating diffs.
+ """
+ def get_repository_info(self):
+ return None
+
+ def scan_for_server(self, repository_info):
+ """
+ Scans the current directory on up to find a .reviewboard file
+ containing the server path.
+ """
+ server_url = self._get_server_from_config(user_config, repository_info)
+ if server_url:
+ return server_url
+
+ for path in walk_parents(os.getcwd()):
+ filename = os.path.join(path, ".reviewboardrc")
+ if os.path.exists(filename):
+ config = load_config_file(filename)
+ server_url = self._get_server_from_config(config,
+ repository_info)
+ if server_url:
+ return server_url
+
+ return None
+
+ def diff(self, args):
+ """
+ Returns the generated diff and optional parent diff for this
+ repository.
+
+ The returned tuple is (diff_string, parent_diff_string)
+ """
+ return (None, None)
+
+ def diff_between_revisions(self, revision_range, args, repository_info):
+ """
+ Returns the generated diff between revisions in the repository.
+ """
+ return None
+
+ def _get_server_from_config(self, config, repository_info):
+ if 'REVIEWBOARD_URL' in config:
+ return config['REVIEWBOARD_URL']
+ elif 'TREES' in config:
+ trees = config['TREES']
+ if not isinstance(trees, dict):
+ die("Warning: 'TREES' in config file is not a dict!")
+
+ if repository_info.path in trees and \
+ 'REVIEWBOARD_URL' in trees[repository_info.path]:
+ return trees[repository_info.path]['REVIEWBOARD_URL']
+
+ return None
+
+
+class CVSClient(SCMClient):
+ """
+ A wrapper around the cvs tool that fetches repository
+ information and generates compatible diffs.
+ """
+ def get_repository_info(self):
+ if not check_install("cvs"):
+ return None
+
+ cvsroot_path = os.path.join("CVS", "Root")
+
+ if not os.path.exists(cvsroot_path):
+ return None
+
+ fp = open(cvsroot_path, "r")
+ repository_path = fp.read().strip()
+ fp.close()
+
+ i = repository_path.find("@")
+ if i != -1:
+ repository_path = repository_path[i + 1:]
+
+ i = repository_path.find(":")
+ if i != -1:
+ host = repository_path[:i]
+ try:
+ canon = socket.getfqdn(host)
+ repository_path = repository_path.replace('%s:' % host,
+ '%s:' % canon)
+ except socket.error, msg:
+ debug("failed to get fqdn for %s, msg=%s" % (host, msg))
+
+ return RepositoryInfo(path=repository_path)
+
+ def diff(self, files):
+ """
+ Performs a diff across all modified files in a CVS repository.
+
+ CVS repositories do not support branches of branches in a way that
+ makes parent diffs possible, so we never return a parent diff
+ (the second value in the tuple).
+ """
+ return (self.do_diff(files), None)
+
+ def diff_between_revisions(self, revision_range, args, repository_info):
+ """
+ Performs a diff between 2 revisions of a CVS repository.
+ """
+ revs = []
+
+ for rev in revision_range.split(":"):
+ revs += ["-r", rev]
+
+ return self.do_diff(revs)
+
+ def do_diff(self, params):
+ """
+ Performs the actual diff operation through cvs diff, handling
+ fake errors generated by CVS.
+ """
+ # Diff returns "1" if differences were found.
+ return execute(["cvs", "diff", "-uN"] + params,
+ extra_ignore_errors=(1,))
+
+
+class ClearCaseClient(SCMClient):
+ """
+ A wrapper around the clearcase tool that fetches repository
+ information and generates compatible diffs.
+ This client assumes that cygwin is installed on windows.
+ """
+ ccroot_path = "/view/reviewboard.diffview/vobs/"
+ viewinfo = ""
+ viewtype = "snapshot"
+
+ def get_filename_hash(self, fname):
+ # Hash the filename string so it's easy to find the file later on.
+ return md5(fname).hexdigest()
+
+ def get_repository_info(self):
+ if not check_install('cleartool help'):
+ return None
+
+ # We must be running this from inside a view.
+ # Otherwise it doesn't make sense.
+ self.viewinfo = execute(["cleartool", "pwv", "-short"])
+ if self.viewinfo.startswith('\*\* NONE'):
+ return None
+
+ # Returning the hardcoded clearcase root path to match the server
+ # repository path.
+ # There is no reason to have a dynamic path unless you have
+ # multiple clearcase repositories. This should be implemented.
+ return RepositoryInfo(path=self.ccroot_path,
+ base_path=self.ccroot_path,
+ supports_parent_diffs=False)
+
+ def get_previous_version(self, files):
+ file = []
+ curdir = os.getcwd()
+
+ # Cygwin case must transform a linux-like path to windows like path
+ # including drive letter.
+ if 'cygdrive' in curdir:
+ where = curdir.index('cygdrive') + 9
+ drive_letter = curdir[where:where+1]
+ curdir = drive_letter + ":\\" + curdir[where+2:len(curdir)]
+
+ for key in files:
+ # Sometimes there is a quote in the filename. It must be removed.
+ key = key.replace('\'', '')
+ elem_path = cpath.normpath(os.path.join(curdir, key))
+
+ # Removing anything before the last /vobs
+ # because it may be repeated.
+ elem_path_idx = elem_path.rfind("/vobs")
+ if elem_path_idx != -1:
+ elem_path = elem_path[elem_path_idx:len(elem_path)].strip("\"")
+
+ # Call cleartool to get this version and the previous version
+ # of the element.
+ curr_version, pre_version = execute(
+ ["cleartool", "desc", "-pre", elem_path])
+ curr_version = cpath.normpath(curr_version)
+ pre_version = pre_version.split(':')[1].strip()
+
+ # If a specific version was given, remove it from the path
+ # to avoid version duplication
+ if "@@" in elem_path:
+ elem_path = elem_path[:elem_path.rfind("@@")]
+ file.append(elem_path + "@@" + pre_version)
+ file.append(curr_version)
+
+ # Determine if the view type is snapshot or dynamic.
+ if os.path.exists(file[0]):
+ self.viewtype = "dynamic"
+
+ return file
+
+ def get_extended_namespace(self, files):
+ """
+ Parses the file path to get the extended namespace
+ """
+ versions = self.get_previous_version(files)
+
+ evfiles = []
+ hlist = []
+
+ for vkey in versions:
+ # Verify if it is a checkedout file.
+ if "CHECKEDOUT" in vkey:
+ # For checkedout files just add it to the file list
+ # since it cannot be accessed outside the view.
+ splversions = vkey[:vkey.rfind("@@")]
+ evfiles.append(splversions)
+ else:
+ # For checkedin files.
+ ext_path = []
+ ver = []
+ fname = "" # fname holds the file name without the version.
+ (bpath, fpath) = cpath.splitdrive(vkey)
+ if bpath :
+ # Windows.
+ # The version (if specified like file.c@@/main/1)
+ # should be kept as a single string
+ # so split the path and concat the file name
+ # and version in the last position of the list.
+ ver = fpath.split("@@")
+ splversions = fpath[:vkey.rfind("@@")].split("\\")
+ fname = splversions.pop()
+ splversions.append(fname + ver[1])
+ else :
+ # Linux.
+ bpath = vkey[:vkey.rfind("vobs")+4]
+ fpath = vkey[vkey.rfind("vobs")+5:]
+ ver = fpath.split("@@")
+ splversions = ver[0][:vkey.rfind("@@")].split("/")
+ fname = splversions.pop()
+ splversions.append(fname + ver[1])
+
+ filename = splversions.pop()
+ bpath = cpath.normpath(bpath + "/")
+ elem_path = bpath
+
+ for key in splversions:
+ # For each element (directory) in the path,
+ # get its version from clearcase.
+ elem_path = cpath.join(elem_path, key)
+
+ # This is the version to be appended to the extended
+ # path list.
+ this_version = execute(
+ ["cleartool", "desc", "-fmt", "%Vn",
+ cpath.normpath(elem_path)])
+ if this_version:
+ ext_path.append(key + "/@@" + this_version + "/")
+ else:
+ ext_path.append(key + "/")
+
+ # This must be done in case we haven't specified
+ # the version on the command line.
+ ext_path.append(cpath.normpath(fname + "/@@" +
+ vkey[vkey.rfind("@@")+2:len(vkey)]))
+ epstr = cpath.join(bpath, cpath.normpath(''.join(ext_path)))
+ evfiles.append(epstr)
+
+ """
+ In Windows, there is a problem with long names (> 254).
+ In this case, we hash the string and copy the unextended
+ filename to a temp file whose name is the hash.
+ This way we can get the file later on for diff.
+ The same problem applies to snapshot views where the
+ extended name isn't available.
+ The previous file must be copied from the CC server
+ to a local dir.
+ """
+ if cpath.exists(epstr) :
+ pass
+ else:
+ if len(epstr) > 254 or self.viewtype == "snapshot":
+ name = self.get_filename_hash(epstr)
+ # Check if this hash is already in the list
+ try:
+ i = hlist.index(name)
+ die("ERROR: duplicate value %s : %s" %
+ (name, epstr))
+ except ValueError:
+ hlist.append(name)
+
+ normkey = cpath.normpath(vkey)
+ td = tempfile.gettempdir()
+ # Cygwin case must transform a linux-like path to
+ # windows like path including drive letter
+ if 'cygdrive' in td:
+ where = td.index('cygdrive') + 9
+ drive_letter = td[where:where+1] + ":"
+ td = cpath.join(drive_letter, td[where+1:])
+ tf = cpath.normpath(cpath.join(td, name))
+ if cpath.exists(tf):
+ debug("WARNING: FILE EXISTS")
+ os.unlink(tf)
+ execute(["cleartool", "get", "-to", tf, normkey])
+ else:
+ die("ERROR: FILE NOT FOUND : %s" % epstr)
+
+ return evfiles
+
+ def get_files_from_label(self, label):
+ voblist=[]
+ # Get the list of vobs for the current view
+ allvoblist = execute(["cleartool", "lsvob", "-short"]).split()
+ # For each vob, find if the label is present
+ for vob in allvoblist:
+ try:
+ execute(["cleartool", "describe", "-local",
+ "lbtype:%s@%s" % (label, vob)]).split()
+ voblist.append(vob)
+ except:
+ pass
+
+ filelist=[]
+ # For each vob containing the label, get the file list
+ for vob in voblist:
+ try:
+ res = execute(["cleartool", "find", vob, "-all", "-version",
+ "lbtype(%s)" % label, "-print"])
+ filelist.extend(res.split())
+ except :
+ pass
+
+ # Return only the unique items
+ return set(filelist)
+
+ def diff(self, files):
+ """
+ Performs a diff of the specified file and its previous version.
+ """
+ # We must be running this from inside a view.
+ # Otherwise it doesn't make sense.
+ return self.do_diff(self.get_extended_namespace(files))
+
+ def diff_label(self, label):
+ """
+ Get the files that are attached to a label and diff them
+ TODO
+ """
+ return self.diff(self.get_files_from_label(label))
+
+ def diff_between_revisions(self, revision_range, args, repository_info):
+ """
+ Performs a diff between 2 revisions of a CC repository.
+ """
+ rev_str = ''
+
+ for rev in revision_range.split(":"):
+ rev_str += "-r %s " % rev
+
+ return self.do_diff(rev_str)
+
+ def do_diff(self, params):
+ # Diff returns "1" if differences were found.
+ # Add the view name and view type to the description
+ if options.description:
+ options.description = ("VIEW: " + self.viewinfo +
+ "VIEWTYPE: " + self.viewtype + "\n" + options.description)
+ else:
+ options.description = (self.viewinfo +
+ "VIEWTYPE: " + self.viewtype + "\n")
+
+ o = []
+ Feol = False
+ while len(params) > 0:
+ # Read both original and modified files.
+ onam = params.pop(0)
+ mnam = params.pop(0)
+ file_data = []
+ do_rem = False
+ # If the filename length is greater than 254 chars on Windows,
+ # we copied the file to a temp file
+ # because open will not work for paths longer than 254 chars.
+ # This applies to both the original and
+ # modified files if the name size is > 254.
+ for filenam in (onam, mnam) :
+ if cpath.exists(filenam) and self.viewtype == "dynamic":
+ do_rem = False
+ fn = filenam
+ elif len(filenam) > 254 or self.viewtype == "snapshot":
+ fn = self.get_filename_hash(filenam)
+ fn = cpath.join(tempfile.gettempdir(), fn)
+ do_rem = True
+ fd = open(cpath.normpath(fn))
+ fdata = fd.readlines()
+ fd.close()
+ file_data.append(fdata)
+ # If the file was temp, it should be removed.
+ if do_rem:
+ os.remove(filenam)
+
+ modi = file_data.pop()
+ orig = file_data.pop()
+
+ # For snapshot views, the local directories must be removed because
+ # they will break the diff on the server. Just replacing
+ # everything before the view name (including the view name)
+ # with vobs does the job.
+ if (self.viewtype == "snapshot"
+ and (sys.platform.startswith('win')
+ or sys.platform.startswith('cygwin'))):
+ vinfo = self.viewinfo.rstrip("\r\n")
+ mnam = "c:\\\\vobs" + mnam[mnam.rfind(vinfo) + len(vinfo):]
+ onam = "c:\\\\vobs" + onam[onam.rfind(vinfo) + len(vinfo):]
+ # Call the diff lib to generate a diff.
+ # The dates are bogus, since they don't matter anyway.
+ # The only thing is that two spaces are needed so the server
+ # can identify the headers correctly.
+ diff = difflib.unified_diff(orig, modi, onam, mnam,
+ ' 2002-02-21 23:30:39.942229878 -0800',
+ ' 2002-02-21 23:30:50.442260588 -0800', lineterm=' \n')
+ # Transform the generator output into a string output
+ # Use a comprehension instead of a generator,
+ # so 2.3.x doesn't fail to interpret.
+ diffstr = ''.join([str(l) for l in diff])
+ # Workaround for the difflib no new line at end of file
+ # problem.
+ if not diffstr.endswith('\n'):
+ diffstr = diffstr + ("\n\\ No newline at end of file\n")
+ o.append(diffstr)
+
+ ostr = ''.join(o)
+ return (ostr, None) # diff, parent_diff (not supported)
+
+
+class SVNClient(SCMClient):
+ """
+ A wrapper around the svn Subversion tool that fetches repository
+ information and generates compatible diffs.
+ """
+ def get_repository_info(self):
+ if not check_install('svn help'):
+ return None
+
+ # Get the SVN repository path (either via a working copy or
+ # a supplied URI)
+ svn_info_params = ["svn", "info"]
+ if options.repository_url:
+ svn_info_params.append(options.repository_url)
+ data = execute(svn_info_params,
+ ignore_errors=True)
+ m = re.search(r'^Repository Root: (.+)$', data, re.M)
+ if not m:
+ return None
+
+ path = m.group(1)
+
+ m = re.search(r'^URL: (.+)$', data, re.M)
+ if not m:
+ return None
+
+ base_path = m.group(1)[len(path):] or "/"
+
+ m = re.search(r'^Repository UUID: (.+)$', data, re.M)
+ if not m:
+ return None
+
+ return SvnRepositoryInfo(path, base_path, m.group(1))
+
+ def scan_for_server(self, repository_info):
+ # Scan first for dot files, since it's faster and will cover the
+ # user's $HOME/.reviewboardrc
+ server_url = super(SVNClient, self).scan_for_server(repository_info)
+ if server_url:
+ return server_url
+
+ return self.scan_for_server_property(repository_info)
+
+ def scan_for_server_property(self, repository_info):
+ def get_url_prop(path):
+ url = execute(["svn", "propget", "reviewboard:url", path]).strip()
+ return url or None
+
+ for path in walk_parents(os.getcwd()):
+ if not os.path.exists(os.path.join(path, ".svn")):
+ break
+
+ prop = get_url_prop(path)
+ if prop:
+ return prop
+
+ return get_url_prop(repository_info.path)
+
+ def diff(self, files):
+ """
+ Performs a diff across all modified files in a Subversion repository.
+
+ SVN repositories do not support branches of branches in a way that
+ makes parent diffs possible, so we never return a parent diff
+ (the second value in the tuple).
+ """
+ return (self.do_diff(["svn", "diff", "--diff-cmd=diff"] + files),
+ None)
+
+ def diff_between_revisions(self, revision_range, args, repository_info):
+ """
+ Performs a diff between 2 revisions of a Subversion repository.
+ """
+ if options.repository_url:
+ revisions = revision_range.split(':')
+ if len(revisions) < 1:
+ return None
+ elif len(revisions) == 1:
+ revisions.append('HEAD')
+
+ # if a new path was supplied at the command line, set it
+ if len(args):
+ repository_info.set_base_path(args[0])
+
+ url = repository_info.path + repository_info.base_path
+
+ old_url = url + '@' + revisions[0]
+ new_url = url + '@' + revisions[1]
+
+ return self.do_diff(["svn", "diff", "--diff-cmd=diff", old_url,
+ new_url],
+ repository_info)
+ # Otherwise, perform the revision range diff using a working copy
+ else:
+ return self.do_diff(["svn", "diff", "--diff-cmd=diff", "-r",
+ revision_range],
+ repository_info)
+
+ def do_diff(self, cmd, repository_info=None):
+ """
+ Performs the actual diff operation, handling renames and converting
+ paths to absolute.
+ """
+ diff = execute(cmd, split_lines=True)
+ diff = self.handle_renames(diff)
+ diff = self.convert_to_absolute_paths(diff, repository_info)
+
+ return ''.join(diff)
+
+ def handle_renames(self, diff_content):
+ """
+ The output of svn diff is incorrect when the file in question came
+ into being via svn mv/cp. Although the patch for these files are
+ relative to its parent, the diff header doesn't reflect this.
+ This function fixes the relevant section headers of the patch to
+ portray this relationship.
+ """
+
+ # svn diff against a repository URL on two revisions appears to
+ # handle moved files properly, so only adjust the diff file names
+ # if they were created using a working copy.
+ if options.repository_url:
+ return diff_content
+
+ result = []
+
+ from_line = ""
+ for line in diff_content:
+ if line.startswith('--- '):
+ from_line = line
+ continue
+
+ # This is where we decide how to mangle the previous '--- '
+ if line.startswith('+++ '):
+ to_file, _ = self.parse_filename_header(line[4:])
+ info = self.svn_info(to_file)
+ if info.has_key("Copied From URL"):
+ url = info["Copied From URL"]
+ root = info["Repository Root"]
+ from_file = urllib.unquote(url[len(root):])
+ result.append(from_line.replace(to_file, from_file))
+ else:
+ result.append(from_line) #as is, no copy performed
+
+ # We only mangle '---' lines. All others get added straight to
+ # the output.
+ result.append(line)
+
+ return result
+
+
+ def convert_to_absolute_paths(self, diff_content, repository_info):
+ """
+ Converts relative paths in a diff output to absolute paths.
+ This handles paths that have been svn switched to other parts of the
+ repository.
+ """
+
+ result = []
+
+ for line in diff_content:
+ front = None
+ if line.startswith('+++ ') or line.startswith('--- ') or line.startswith('Index: '):
+ front, line = line.split(" ", 1)
+
+ if front:
+ if line.startswith('/'): #already absolute
+ line = front + " " + line
+ else:
+ # filename and rest of line (usually the revision
+ # component)
+ file, rest = self.parse_filename_header(line)
+
+ # If working with a diff generated outside of a working
+ # copy, then file paths are already absolute, so just
+ # add initial slash.
+ if options.repository_url:
+ path = urllib.unquote(
+ "%s/%s" % (repository_info.base_path, file))
+ else:
+ info = self.svn_info(file)
+ url = info["URL"]
+ root = info["Repository Root"]
+ path = urllib.unquote(url[len(root):])
+
+ line = front + " " + path + rest
+
+ result.append(line)
+
+ return result
+
+ def svn_info(self, path):
+ """Return a dict which is the result of 'svn info' at a given path."""
+ svninfo = {}
+ for info in execute(["svn", "info", path],
+ split_lines=True):
+ parts = info.strip().split(": ", 1)
+ if len(parts) == 2:
+ key, value = parts
+ svninfo[key] = value
+
+ return svninfo
+
+ # Adapted from server code parser.py
+ def parse_filename_header(self, s):
+ parts = None
+ if "\t" in s:
+ # There's a \t separating the filename and info. This is the
+ # best case scenario, since it allows for filenames with spaces
+ # without much work.
+ parts = s.split("\t")
+
+ # There are spaces being used to separate the filename and info.
+ # This is technically wrong, so all we can do is assume that
+ # 1) the filename won't have multiple consecutive spaces, and
+ # 2) there's at least 2 spaces separating the filename and info.
+ if " " in s:
+ parts = re.split(r" +", s)
+
+ if parts:
+ parts[1] = '\t' + parts[1]
+ return parts
+
+ # strip off ending newline, and return it as the second component
+ return [s.split('\n')[0], '\n']
+
+
+class PerforceClient(SCMClient):
+ """
+ A wrapper around the p4 Perforce tool that fetches repository information
+ and generates compatible diffs.
+ """
+ def get_repository_info(self):
+ if not check_install('p4 help'):
+ return None
+
+ data = execute(["p4", "info"], ignore_errors=True)
+
+ m = re.search(r'^Server address: (.+)$', data, re.M)
+ if not m:
+ return None
+
+ repository_path = m.group(1).strip()
+
+ try:
+ hostname, port = repository_path.split(":")
+ info = socket.gethostbyaddr(hostname)
+ repository_path = "%s:%s" % (info[0], port)
+ except (socket.gaierror, socket.herror):
+ pass
+
+ return RepositoryInfo(path=repository_path, supports_changesets=True)
+
+ def scan_for_server(self, repository_info):
+ # Scan first for dot files, since it's faster and will cover the
+ # user's $HOME/.reviewboardrc
+ server_url = \
+ super(PerforceClient, self).scan_for_server(repository_info)
+
+ if server_url:
+ return server_url
+
+ return self.scan_for_server_counter(repository_info)
+
+ def scan_for_server_counter(self, repository_info):
+ """
+ Checks the Perforce counters to see if the Review Board server's url
+ is specified. Since Perforce only started supporting non-numeric
+ counter values in server version 2008.1, we support both a normal
+ counter 'reviewboard.url' with a string value and embedding the url in
+ a counter name like 'reviewboard.url.http:||reviewboard.example.com'.
+ Note that forward slashes aren't allowed in counter names, so
+ pipe ('|') characters should be used. These should be safe because they
+ should not be used unencoded in urls.
+ """
+
+ counters_text = execute(["p4", "counters"])
+
+ # Try for a "reviewboard.url" counter first.
+ m = re.search(r'^reviewboard.url = (\S+)', counters_text, re.M)
+
+ if m:
+ return m.group(1)
+
+ # Next try for a counter of the form:
+ # reviewboard_url.http:||reviewboard.example.com
+ m2 = re.search(r'^reviewboard.url\.(\S+)', counters_text, re.M)
+
+ if m2:
+ return m2.group(1).replace('|', '/')
+
+ return None
+
+ def get_changenum(self, args):
+ if len(args) == 1:
+ try:
+ return str(int(args[0]))
+ except ValueError:
+ pass
+ return None
+
+ def diff(self, args):
+ """
+ Goes through the hard work of generating a diff on Perforce in order
+ to take into account adds/deletes and to provide the necessary
+ revision information.
+ """
+ # Set the P4 environment:
+ if options.p4_client:
+ os.environ['P4CLIENT'] = options.p4_client
+
+ if options.p4_port:
+ os.environ['P4PORT'] = options.p4_port
+
+ changenum = self.get_changenum(args)
+ if changenum is None:
+ return self._path_diff(args)
+ else:
+ return self._changenum_diff(changenum)
+
+
+ def _path_diff(self, args):
+ """
+ Process a path-style diff. See _changenum_diff for the alternate
+ version that handles specific change numbers.
+
+ Multiple paths may be specified in `args`. The path styles supported
+ are:
+
+ //path/to/file
+ Upload file as a "new" file.
+
+ //path/to/dir/...
+ Upload all files as "new" files.
+
+ //path/to/file[@#]rev
+ Upload file from that rev as a "new" file.
+
+ //path/to/file[@#]rev,[@#]rev
+ Upload a diff between revs.
+
+ //path/to/dir/...[@#]rev,[@#]rev
+ Upload a diff of all files between revs in that directory.
+ """
+ r_revision_range = re.compile(r'^(?P<path>//[^@#]+)' +
+ r'(?P<revision1>[#@][^,]+)?' +
+ r'(?P<revision2>,[#@][^,]+)?$')
+
+ empty_filename = make_tempfile()
+ tmp_diff_from_filename = make_tempfile()
+ tmp_diff_to_filename = make_tempfile()
+
+ diff_lines = []
+
+ for path in args:
+ m = r_revision_range.match(path)
+
+ if not m:
+ die('Path %r does not match a valid Perforce path.' % (path,))
+ revision1 = m.group('revision1')
+ revision2 = m.group('revision2')
+ first_rev_path = m.group('path')
+
+ if revision1:
+ first_rev_path += revision1
+ records = self._run_p4(['files', first_rev_path])
+
+ # Make a map for convenience.
+ files = {}
+
+ # Records are:
+ # 'rev': '1'
+ # 'func': '...'
+ # 'time': '1214418871'
+ # 'action': 'edit'
+ # 'type': 'ktext'
+ # 'depotFile': '...'
+ # 'change': '123456'
+ for record in records:
+ if record['action'] != 'delete':
+ if revision2:
+ files[record['depotFile']] = [record, None]
+ else:
+ files[record['depotFile']] = [None, record]
+
+ if revision2:
+ # [1:] to skip the comma.
+ second_rev_path = m.group('path') + revision2[1:]
+ records = self._run_p4(['files', second_rev_path])
+ for record in records:
+ if record['action'] != 'delete':
+ try:
+ m = files[record['depotFile']]
+ m[1] = record
+ except KeyError:
+ files[record['depotFile']] = [None, record]
+
+ old_file = new_file = empty_filename
+ changetype_short = None
+
+ for depot_path, (first_record, second_record) in files.items():
+ old_file = new_file = empty_filename
+ if first_record is None:
+ self._write_file(depot_path + '#' + second_record['rev'],
+ tmp_diff_to_filename)
+ new_file = tmp_diff_to_filename
+ changetype_short = 'A'
+ base_revision = 0
+ elif second_record is None:
+ self._write_file(depot_path + '#' + first_record['rev'],
+ tmp_diff_from_filename)
+ old_file = tmp_diff_from_filename
+ changetype_short = 'D'
+ base_revision = int(first_record['rev'])
+ else:
+ self._write_file(depot_path + '#' + first_record['rev'],
+ tmp_diff_from_filename)
+ self._write_file(depot_path + '#' + second_record['rev'],
+ tmp_diff_to_filename)
+ new_file = tmp_diff_to_filename
+ old_file = tmp_diff_from_filename
+ changetype_short = 'M'
+ base_revision = int(first_record['rev'])
+
+ dl = self._do_diff(old_file, new_file, depot_path,
+ base_revision, changetype_short,
+ ignore_unmodified=True)
+ diff_lines += dl
+
+ os.unlink(empty_filename)
+ os.unlink(tmp_diff_from_filename)
+ os.unlink(tmp_diff_to_filename)
+ return (''.join(diff_lines), None)
+
+ def _run_p4(self, command):
+ """Execute a perforce command using the python marshal API.
+
+ - command: A list of strings of the command to execute.
+
+ The return type depends on the command being run.
+ """
+ command = ['p4', '-G'] + command
+ p = subprocess.Popen(command, stdout=subprocess.PIPE)
+ result = []
+ has_error = False
+
+ while 1:
+ try:
+ data = marshal.load(p.stdout)
+ except EOFError:
+ break
+ else:
+ result.append(data)
+ if data.get('code', None) == 'error':
+ has_error = True
+
+ rc = p.wait()
+
+ if rc or has_error:
+ for record in result:
+ if 'data' in record:
+ print record['data']
+ die('Failed to execute command: %s\n' % (command,))
+
+ return result
+
+ def _changenum_diff(self, changenum):
+ """
+ Process a diff for a particular change number. This handles both
+ pending and submitted changelists.
+
+ See _path_diff for the alternate version that does diffs of depot
+ paths.
+ """
+ # TODO: It might be a good idea to enhance PerforceDiffParser to
+ # understand that newFile could include a revision tag for post-submit
+ # reviewing.
+ cl_is_pending = False
+
+ debug("Generating diff for changenum %s" % changenum)
+
+ description = execute(["p4", "describe", "-s", changenum],
+ split_lines=True)
+
+ if '*pending*' in description[0]:
+ cl_is_pending = True
+
+ # Get the file list
+ for line_num, line in enumerate(description):
+ if 'Affected files ...' in line:
+ break
+ else:
+ # Got to the end of all the description lines and didn't find
+ # what we were looking for.
+ die("Couldn't find any affected files for this change.")
+
+ description = description[line_num+2:]
+
+ diff_lines = []
+
+ empty_filename = make_tempfile()
+ tmp_diff_from_filename = make_tempfile()
+ tmp_diff_to_filename = make_tempfile()
+
+ for line in description:
+ line = line.strip()
+ if not line:
+ continue
+
+ m = re.search(r'\.\.\. ([^#]+)#(\d+) (add|edit|delete|integrate|branch)', line)
+ if not m:
+ die("Unsupported line from p4 opened: %s" % line)
+
+ depot_path = m.group(1)
+ base_revision = int(m.group(2))
+ if not cl_is_pending:
+ # If the changelist is pending, our base revision is the one that's
+ # currently in the depot. If it's not pending, the base revision is
+ # the revision prior to this one.
+ base_revision -= 1
+
+ changetype = m.group(3)
+
+ debug('Processing %s of %s' % (changetype, depot_path))
+
+ old_file = new_file = empty_filename
+ old_depot_path = new_depot_path = None
+ changetype_short = None
+
+ if changetype == 'edit' or changetype == 'integrate':
+ # A big assumption
+ new_revision = base_revision + 1
+
+ # We have an old file, get p4 to take this old version from the
+ # depot and put it into a plain old temp file for us
+ old_depot_path = "%s#%s" % (depot_path, base_revision)
+ self._write_file(old_depot_path, tmp_diff_from_filename)
+ old_file = tmp_diff_from_filename
+
+ # Also print out the new file into a tmpfile
+ if cl_is_pending:
+ new_file = self._depot_to_local(depot_path)
+ else:
+ new_depot_path = "%s#%s" %(depot_path, new_revision)
+ self._write_file(new_depot_path, tmp_diff_to_filename)
+ new_file = tmp_diff_to_filename
+
+ changetype_short = "M"
+
+ elif changetype == 'add' or changetype == 'branch':
+ # We have a new file, get p4 to put this new file into a pretty
+ # temp file for us. No old file to worry about here.
+ if cl_is_pending:
+ new_file = self._depot_to_local(depot_path)
+ else:
+ self._write_file(depot_path, tmp_diff_to_filename)
+ new_file = tmp_diff_to_filename
+ changetype_short = "A"
+
+ elif changetype == 'delete':
+ # We've deleted a file, get p4 to put the deleted file into a temp
+ # file for us. The new file remains the empty file.
+ old_depot_path = "%s#%s" % (depot_path, base_revision)
+ self._write_file(old_depot_path, tmp_diff_from_filename)
+ old_file = tmp_diff_from_filename
+ changetype_short = "D"
+ else:
+ die("Unknown change type '%s' for %s" % (changetype, depot_path))
+
+ dl = self._do_diff(old_file, new_file, depot_path, base_revision, changetype_short)
+ diff_lines += dl
+
+ os.unlink(empty_filename)
+ os.unlink(tmp_diff_from_filename)
+ os.unlink(tmp_diff_to_filename)
+ return (''.join(diff_lines), None)
+
+ def _do_diff(self, old_file, new_file, depot_path, base_revision,
+ changetype_short, ignore_unmodified=False):
+ """
+ Do the work of producing a diff for Perforce.
+
+ old_file - The absolute path to the "old" file.
+ new_file - The absolute path to the "new" file.
+ depot_path - The depot path in Perforce for this file.
+ base_revision - The base perforce revision number of the old file as
+ an integer.
+ changetype_short - The change type as a single character string.
+ ignore_unmodified - If True, will return an empty list if the file
+ is not changed.
+
+ Returns a list of strings of diff lines.
+ """
+ if hasattr(os, 'uname') and os.uname()[0] == 'SunOS':
+ diff_cmd = ["gdiff", "-urNp", old_file, new_file]
+ else:
+ diff_cmd = ["diff", "-urNp", old_file, new_file]
+ # Diff returns "1" if differences were found.
+ dl = execute(diff_cmd, extra_ignore_errors=(1,2),
+ translate_newlines=False)
+
+ # If the input file has ^M characters at the end of lines, let's ignore them.
+ dl = dl.replace('\r\r\n', '\r\n')
+ dl = dl.splitlines(True)
+
+ cwd = os.getcwd()
+ if depot_path.startswith(cwd):
+ local_path = depot_path[len(cwd) + 1:]
+ else:
+ local_path = depot_path
+
+ # Special handling for the output of the diff tool on binary files:
+ # diff outputs "Files a and b differ"
+ # and the code below expects the output to start with
+ # "Binary files "
+ if len(dl) == 1 and \
+ dl[0] == ('Files %s and %s differ'% (old_file, new_file)):
+ dl = ['Binary files %s and %s differ'% (old_file, new_file)]
+
+ if dl == [] or dl[0].startswith("Binary files "):
+ if dl == []:
+ if ignore_unmodified:
+ return []
+ else:
+ print "Warning: %s in your changeset is unmodified" % \
+ local_path
+
+ dl.insert(0, "==== %s#%s ==%s== %s ====\n" % \
+ (depot_path, base_revision, changetype_short, local_path))
+ dl.append('\n')
+ else:
+ m = re.search(r'(\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d)', dl[1])
+ if m:
+ timestamp = m.group(1)
+ else:
+ # Thu Sep 3 11:24:48 2007
+ m = re.search(r'(\w+)\s+(\w+)\s+(\d+)\s+(\d\d:\d\d:\d\d)\s+(\d\d\d\d)', dl[1])
+ if not m:
+ die("Unable to parse diff header: %s" % dl[1])
+
+ month_map = {
+ "Jan": "01",
+ "Feb": "02",
+ "Mar": "03",
+ "Apr": "04",
+ "May": "05",
+ "Jun": "06",
+ "Jul": "07",
+ "Aug": "08",
+ "Sep": "09",
+ "Oct": "10",
+ "Nov": "11",
+ "Dec": "12",
+ }
+ month = month_map[m.group(2)]
+ day = m.group(3)
+ timestamp = m.group(4)
+ year = m.group(5)
+
+ timestamp = "%s-%s-%s %s" % (year, month, day, timestamp)
+
+ dl[0] = "--- %s\t%s#%s\n" % (local_path, depot_path, base_revision)
+ dl[1] = "+++ %s\t%s\n" % (local_path, timestamp)
+
+ return dl
+
+ def _write_file(self, depot_path, tmpfile):
+ """
+ Grabs a file from Perforce and writes it to a temp file. p4 print sets
+ the file read-only, which causes a later call to unlink to fail, so we
+ make the file read/write.
+ """
+ debug('Writing "%s" to "%s"' % (depot_path, tmpfile))
+ execute(["p4", "print", "-o", tmpfile, "-q", depot_path])
+ os.chmod(tmpfile, stat.S_IREAD | stat.S_IWRITE)
+
+ def _depot_to_local(self, depot_path):
+ """
+ Given a path in the depot return the path on the local filesystem to
+ the same file. If there are multiple results, take only the last
+ result from the where command.
+ """
+ where_output = self._run_p4(['where', depot_path])
+ return where_output[-1]['path']
+
+
+class MercurialClient(SCMClient):
+ """
+ A wrapper around the hg Mercurial tool that fetches repository
+ information and generates compatible diffs.
+ """
+ def get_repository_info(self):
+ if not check_install('hg --help'):
+ return None
+
+ data = execute(["hg", "root"], ignore_errors=True)
+ if data.startswith('abort:'):
+ # hg aborted => no mercurial repository here.
+ return None
+
+ # Otherwise, the output of 'hg root' gives us the repository path.
+
+ # We save the data here to use it as a fallback. See below.
+ local_data = data.strip()
+
+ svn = execute(["hg", "svn", "info", ], ignore_errors=True)
+
+ if (not svn.startswith('abort:') and
+ not svn.startswith("hg: unknown command")):
+ self.type = 'svn'
+ m = re.search(r'^Repository Root: (.+)$', svn, re.M)
+
+ if not m:
+ return None
+
+ path = m.group(1)
+ m2 = re.match(r'^(svn\+ssh|http|https)://([-a-zA-Z0-9.]*@)(.*)$',
+ path)
+ if m2:
+ path = '%s://%s' % (m2.group(1), m2.group(3))
+
+ m = re.search(r'^URL: (.+)$', svn, re.M)
+
+ if not m:
+ return None
+
+ base_path = m.group(1)[len(path):] or "/"
+ return RepositoryInfo(path=path,
+ base_path=base_path,
+ supports_parent_diffs=True)
+
+ self.type = 'hg'
+
+ # We are going to search .hg/hgrc for the default path.
+ file_name = os.path.join(local_data,'.hg', 'hgrc')
+
+ if not os.path.exists(file_name):
+ return RepositoryInfo(path=local_data, base_path='/',
+ supports_parent_diffs=True)
+
+ f = open(file_name)
+ data = f.read()
+ f.close()
+
+ m = re.search(r'^default\s+=\s+(.+)$', data, re.M)
+
+ if not m:
+ # Return the local path, if no default value is found.
+ return RepositoryInfo(path=local_data, base_path='/',
+ supports_parent_diffs=True)
+
+ path = m.group(1).strip()
+
+ return RepositoryInfo(path=path, base_path='',
+ supports_parent_diffs=True)
+
+ def diff(self, files):
+ """
+ Performs a diff across all modified files in a Mercurial repository.
+ """
+ # We don't support parent diffs with Mercurial yet, so we always
+ # return None for the parent diff.
+ if self.type == 'svn':
+ parent = execute(['hg', 'parent', '--svn', '--template',
+ '{node}\n']).strip()
+
+ if options.parent_branch:
+ parent = options.parent_branch
+
+ if options.guess_summary and not options.summary:
+ options.summary = execute(['hg', 'log', '-r.', '--template',
+ r'{desc|firstline}\n'])
+
+ if options.guess_description and not options.description:
+ numrevs = len(execute(['hg', 'log', '-r.:%s' % parent,
+ '--follow', '--template',
+ r'{rev}\n']).strip().split('\n'))
+ options.description = execute(['hg', 'log', '-r.:%s' % parent,
+ '--follow', '--template',
+ r'{desc}\n\n', '--limit',
+ str(numrevs-1)]).strip()
+
+ return (execute(["hg", "diff", "--svn", '-r%s:.' % parent]), None)
+
+ return (execute(["hg", "diff"] + files), None)
+
+ def diff_between_revisions(self, revision_range, args, repository_info):
+ """
+ Performs a diff between 2 revisions of a Mercurial repository.
+ """
+ if self.type != 'hg':
+ raise NotImplementedError
+
+ r1, r2 = revision_range.split(':')
+ return execute(["hg", "diff", "-r", r1, "-r", r2])
+
+
+class GitClient(SCMClient):
+ """
+ A wrapper around git that fetches repository information and generates
+ compatible diffs. This will attempt to generate a diff suitable for the
+ remote repository, whether git, SVN or Perforce.
+ """
+ def get_repository_info(self):
+ if not check_install('git --help'):
+ return None
+
+ git_dir = execute(["git", "rev-parse", "--git-dir"],
+ ignore_errors=True).strip()
+
+ if git_dir.startswith("fatal:") or not os.path.isdir(git_dir):
+ return None
+
+ # Running post-review in directories other than the top level
+ # of a work-tree would result in broken diffs on the server.
+ os.chdir(os.path.dirname(os.path.abspath(git_dir)))
+
+ # We know we have something we can work with. Let's find out
+ # what it is. We'll try SVN first.
+ data = execute(["git", "svn", "info"], ignore_errors=True)
+
+ m = re.search(r'^Repository Root: (.+)$', data, re.M)
+ if m:
+ path = m.group(1)
+ m = re.search(r'^URL: (.+)$', data, re.M)
+
+ if m:
+ base_path = m.group(1)[len(path):] or "/"
+ m = re.search(r'^Repository UUID: (.+)$', data, re.M)
+
+ if m:
+ uuid = m.group(1)
+ self.type = "svn"
+
+ return SvnRepositoryInfo(path=path, base_path=base_path,
+ uuid=uuid,
+ supports_parent_diffs=True)
+ else:
+ # Versions of git-svn before 1.5.4 don't (appear to) support
+ # 'git svn info'. If we fail here because of an older git install,
+ # figure out what version of git is installed and give the user a
+ # hint about what to do next.
+ version = execute(["git", "svn", "--version"], ignore_errors=True)
+ version_parts = re.search('version (\d+)\.(\d+)\.(\d+)',
+ version)
+ svn_remote = execute(["git", "config", "--get",
+ "svn-remote.svn.url"], ignore_errors=True)
+
+ if (version_parts and
+ not self.is_valid_version((int(version_parts.group(1)),
+ int(version_parts.group(2)),
+ int(version_parts.group(3))),
+ (1, 5, 4)) and
+ svn_remote):
+ die("Your installation of git-svn must be upgraded to " + \
+ "version 1.5.4 or later")
+
+ # Okay, maybe Perforce.
+ # TODO
+
+ # Nope, it's git then.
+ origin = execute(["git", "remote", "show", "origin"])
+ m = re.search(r'URL: (.+)', origin)
+ if m:
+ url = m.group(1).rstrip('/')
+ if url:
+ self.type = "git"
+ return RepositoryInfo(path=url, base_path='',
+ supports_parent_diffs=True)
+
+ return None
+
+ def is_valid_version(self, actual, expected):
+ """
+ Takes two tuples, both in the form:
+ (major_version, minor_version, micro_version)
+ Returns true if the actual version is greater than or equal to
+ the expected version, and false otherwise.
+ """
+ return (actual[0] > expected[0]) or \
+ (actual[0] == expected[0] and actual[1] > expected[1]) or \
+ (actual[0] == expected[0] and actual[1] == expected[1] and \
+ actual[2] >= expected[2])
+
+ def scan_for_server(self, repository_info):
+ # Scan first for dot files, since it's faster and will cover the
+ # user's $HOME/.reviewboardrc
+ server_url = super(GitClient, self).scan_for_server(repository_info)
+
+ if server_url:
+ return server_url
+
+ # TODO: Maybe support a server per remote later? Is that useful?
+ url = execute(["git", "config", "--get", "reviewboard.url"],
+ ignore_errors=True).strip()
+ if url:
+ return url
+
+ if self.type == "svn":
+ # Try using the reviewboard:url property on the SVN repo, if it
+ # exists.
+ prop = SVNClient().scan_for_server_property(repository_info)
+
+ if prop:
+ return prop
+
+ return None
+
+ def diff(self, args):
+ """
+ Performs a diff across all modified files in the branch, taking into
+ account a parent branch.
+ """
+ parent_branch = options.parent_branch or "master"
+
+ diff_lines = self.make_diff(parent_branch)
+
+ if parent_branch != "master":
+ parent_diff_lines = self.make_diff("master", parent_branch)
+ else:
+ parent_diff_lines = None
+
+ if options.guess_summary and not options.summary:
+ options.summary = execute(["git", "log", "--pretty=format:%s",
+ "HEAD^.."], ignore_errors=True).strip()
+
+ if options.guess_description and not options.description:
+ options.description = execute(
+ ["git", "log", "--pretty=format:%s%n%n%b", parent_branch + ".."],
+ ignore_errors=True).strip()
+
+ return (diff_lines, parent_diff_lines)
+
+ def make_diff(self, parent_branch, source_branch=""):
+ """
+ Performs a diff on a particular branch range.
+ """
+ if self.type == "svn":
+ diff_lines = execute(["git", "diff", "--no-color", "--no-prefix",
+ "-r", "-u", "%s..%s" % (parent_branch,
+ source_branch)],
+ split_lines=True)
+ return self.make_svn_diff(parent_branch, diff_lines)
+ elif self.type == "git":
+ return execute(["git", "diff", "--no-color", "--full-index",
+ parent_branch])
+
+ return None
+
+ def make_svn_diff(self, parent_branch, diff_lines):
+ """
+ Formats the output of git diff such that it's in a form that
+ svn diff would generate. This is needed so the SVNTool in Review
+ Board can properly parse this diff.
+ """
+ rev = execute(["git", "svn", "find-rev", "master"]).strip()
+
+ if not rev:
+ return None
+
+ diff_data = ""
+ filename = ""
+ revision = ""
+ newfile = False
+
+ for line in diff_lines:
+ if line.startswith("diff "):
+ # Grab the filename and then filter this line out. Since the diff
+ # was generated with --no-prefix, it will be in the format of:
+ #
+ # diff --git path/to/file path/to/file
+ info = line.split(" ")
+ diff_data += "Index: %s\n" % info[2]
+ diff_data += "=" * 67
+ diff_data += "\n"
+ elif line.startswith("index "):
+ # Filter this out.
+ pass
+ elif line.strip() == "--- /dev/null":
+ # New file
+ newfile = True
+ elif line.startswith("--- "):
+ newfile = False
+ diff_data += "--- %s\t(revision %s)\n" % \
+ (line[4:].strip(), rev)
+ elif line.startswith("+++ "):
+ filename = line[4:].strip()
+ if newfile:
+ diff_data += "--- %s\t(revision 0)\n" % filename
+ diff_data += "+++ %s\t(revision 0)\n" % filename
+ else:
+ # We already printed the "--- " line.
+ diff_data += "+++ %s\t(working copy)\n" % filename
+ else:
+ diff_data += line
+
+ return diff_data
+
+ def diff_between_revisions(self, revision_range, args, repository_info):
+ pass
+
+
+SCMCLIENTS = (
+ SVNClient(),
+ CVSClient(),
+ GitClient(),
+ MercurialClient(),
+ PerforceClient(),
+ ClearCaseClient(),
+)
+
+def debug(s):
+ """
+ Prints debugging information if post-review was run with --debug
+ """
+ if DEBUG or options and options.debug:
+ print ">>> %s" % s
+
+
+def make_tempfile():
+ """
+ Creates a temporary file and returns the path. The path is stored
+ in an array for later cleanup.
+ """
+ fd, tmpfile = mkstemp()
+ os.close(fd)
+ tempfiles.append(tmpfile)
+ return tmpfile
+
+
+def check_install(command):
+ """
+ Try executing an external command and return a boolean indicating whether
+ that command is installed or not. The 'command' argument should be
+ something that executes quickly, without hitting the network (for
+ instance, 'svn help' or 'git --version').
+ """
+ try:
+ p = subprocess.Popen(command.split(' '),
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ return True
+ except OSError:
+ return False
+
+
+def execute(command, env=None, split_lines=False, ignore_errors=False,
+ extra_ignore_errors=(), translate_newlines=True):
+ """
+ Utility function to execute a command and return the output.
+ """
+ if isinstance(command, list):
+ debug(subprocess.list2cmdline(command))
+ else:
+ debug(command)
+
+ if env:
+ env.update(os.environ)
+ else:
+ env = os.environ.copy()
+
+ env['LC_ALL'] = 'en_US.UTF-8'
+ env['LANGUAGE'] = 'en_US.UTF-8'
+
+ if sys.platform.startswith('win'):
+ p = subprocess.Popen(command,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ shell=False,
+ universal_newlines=translate_newlines,
+ env=env)
+ else:
+ p = subprocess.Popen(command,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ shell=False,
+ close_fds=True,
+ universal_newlines=translate_newlines,
+ env=env)
+ if split_lines:
+ data = p.stdout.readlines()
+ else:
+ data = p.stdout.read()
+ rc = p.wait()
+ if rc and not ignore_errors and rc not in extra_ignore_errors:
+ die('Failed to execute command: %s\n%s' % (command, data))
+
+ return data
+
+
+def die(msg=None):
+ """
+ Cleanly exits the program with an error message. Erases all remaining
+ temporary files.
+ """
+ for tmpfile in tempfiles:
+ try:
+ os.unlink(tmpfile)
+ except:
+ pass
+
+ if msg:
+ print msg
+
+ sys.exit(1)
+
+
+def walk_parents(path):
+ """
+ Walks up the tree to the root directory.
+ """
+ while os.path.splitdrive(path)[1] != os.sep:
+ yield path
+ path = os.path.dirname(path)
+
+
+def load_config_file(filename):
+ """
+ Loads data from a config file.
+ """
+ config = {
+ 'TREES': {},
+ }
+
+ if os.path.exists(filename):
+ try:
+ execfile(filename, config)
+ except:
+ pass
+
+ return config
+
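+# A .reviewboardrc file is executed as plain Python. Hypothetically, it
+# might map repository paths to per-tree settings, for example:
+# TREES = {
+#     'https://svn.example.com/repo': {'REVIEWBOARD_URL': 'https://rb.example.com'},
+# }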
+
+def tempt_fate(server, tool, changenum, diff_content=None,
+ parent_diff_content=None, submit_as=None, retries=3):
+ """
+ Attempts to create a review request on a Review Board server and upload
+ a diff. On success, the review request path is displayed.
+ """
+ try:
+ save_draft = False
+
+ if options.rid:
+ review_request = server.get_review_request(options.rid)
+ else:
+ review_request = server.new_review_request(changenum, submit_as)
+
+ if options.target_groups:
+ server.set_review_request_field(review_request, 'target_groups',
+ options.target_groups)
+ save_draft = True
+
+ if options.target_people:
+ server.set_review_request_field(review_request, 'target_people',
+ options.target_people)
+ save_draft = True
+
+ if options.summary:
+ server.set_review_request_field(review_request, 'summary',
+ options.summary)
+ save_draft = True
+
+ if options.branch:
+ server.set_review_request_field(review_request, 'branch',
+ options.branch)
+ save_draft = True
+
+ if options.bugs_closed:
+ server.set_review_request_field(review_request, 'bugs_closed',
+ options.bugs_closed)
+ save_draft = True
+
+ if options.description:
+ server.set_review_request_field(review_request, 'description',
+ options.description)
+ save_draft = True
+
+ if options.testing_done:
+ server.set_review_request_field(review_request, 'testing_done',
+ options.testing_done)
+ save_draft = True
+
+ if save_draft:
+ server.save_draft(review_request)
+ except APIError, e:
+ rsp, = e.args
+ if rsp['err']['code'] == 103: # Not logged in
+ retries = retries - 1
+
+ # We had an odd issue where the server ended up a couple of
+ # years in the future. Login succeeds but the cookie date was
+ # "odd" so use of the cookie appeared to fail and eventually
+ # ended up at max recursion depth :-(. Check for a maximum
+ # number of retries.
+ if retries >= 0:
+ server.login(force=True)
+ tempt_fate(server, tool, changenum, diff_content,
+ parent_diff_content, submit_as, retries=retries)
+ return
+
+ if options.rid:
+ die("Error getting review request %s: %s (code %s)" % \
+ (options.rid, rsp['err']['msg'], rsp['err']['code']))
+ else:
+ die("Error creating review request: %s (code %s)" % \
+ (rsp['err']['msg'], rsp['err']['code']))
+
+
+ if not server.info.supports_changesets or not options.change_only:
+ try:
+ server.upload_diff(review_request, diff_content,
+ parent_diff_content)
+ except APIError, e:
+ rsp, = e.args
+ print "Error uploading diff: %s (%s)" % (rsp['err']['msg'],
+ rsp['err']['code'])
+ debug(rsp)
+ die("Your review request still exists, but the diff is not " +
+ "attached.")
+
+ if options.publish:
+ server.publish(review_request)
+
+ request_url = 'r/' + str(review_request['id'])
+ review_url = urljoin(server.url, request_url)
+
+ if not review_url.startswith('http'):
+ review_url = 'http://%s' % review_url
+
+ print "Review request #%s posted." % (review_request['id'],)
+ print
+ print review_url
+
+ return review_url
+
+
+def parse_options(args):
+ parser = OptionParser(usage="%prog [-pond] [-r review_id] [changenum]",
+ version="%prog " + VERSION)
+
+ parser.add_option("-p", "--publish",
+ dest="publish", action="store_true", default=PUBLISH,
+ help="publish the review request immediately after "
+ "submitting")
+ parser.add_option("-r", "--review-request-id",
+ dest="rid", metavar="ID", default=None,
+ help="existing review request ID to update")
+ parser.add_option("-o", "--open",
+ dest="open_browser", action="store_true",
+ default=OPEN_BROWSER,
+ help="open a web browser to the review request page")
+ parser.add_option("-n", "--output-diff",
+ dest="output_diff_only", action="store_true",
+ default=False,
+ help="outputs a diff to the console and exits. "
+ "Does not post")
+ parser.add_option("--server",
+ dest="server", default=REVIEWBOARD_URL,
+ metavar="SERVER",
+ help="specify a different Review Board server "
+ "to use")
+ parser.add_option("--diff-only",
+ dest="diff_only", action="store_true", default=False,
+ help="uploads a new diff, but does not update "
+ "info from changelist")
+ parser.add_option("--target-groups",
+ dest="target_groups", default=TARGET_GROUPS,
+ help="names of the groups who will perform "
+ "the review")
+ parser.add_option("--target-people",
+ dest="target_people", default=TARGET_PEOPLE,
+ help="names of the people who will perform "
+ "the review")
+ parser.add_option("--summary",
+ dest="summary", default=None,
+ help="summary of the review ")
+ parser.add_option("--description",
+ dest="description", default=None,
+ help="description of the review ")
+ parser.add_option("--description-file",
+ dest="description_file", default=None,
+ help="text file containing a description of the review")
+ parser.add_option("--guess-summary",
+ dest="guess_summary", action="store_true",
+ default=False,
+ help="guess summary from the latest commit (git/"
+ "hgsubversion only)")
+ parser.add_option("--guess-description",
+ dest="guess_description", action="store_true",
+ default=False,
+ help="guess description based on commits on this branch "
+ "(git/hgsubversion only)")
+ parser.add_option("--testing-done",
+ dest="testing_done", default=None,
+ help="details of testing done ")
+ parser.add_option("--testing-done-file",
+ dest="testing_file", default=None,
+ help="text file containing details of testing done ")
+ parser.add_option("--branch",
+ dest="branch", default=None,
+ help="affected branch ")
+ parser.add_option("--bugs-closed",
+ dest="bugs_closed", default=None,
+ help="list of bugs closed ")
+ parser.add_option("--revision-range",
+ dest="revision_range", default=None,
+ help="generate the diff for review based on given "
+ "revision range")
+ parser.add_option("--label",
+ dest="label", default=None,
+ help="label (ClearCase Only) ")
+ parser.add_option("--submit-as",
+ dest="submit_as", default=SUBMIT_AS, metavar="USERNAME",
+ help="user name to be recorded as the author of the "
+ "review request, instead of the logged in user")
+ parser.add_option("--username",
+ dest="username", default=None, metavar="USERNAME",
+ help="user name to be supplied to the reviewboard server")
+ parser.add_option("--password",
+ dest="password", default=None, metavar="PASSWORD",
+ help="password to be supplied to the reviewboard server")
+ parser.add_option("--change-only",
+ dest="change_only", action="store_true",
+ default=False,
+ help="updates info from changelist, but does "
+ "not upload a new diff (only available if your "
+ "repository supports changesets)")
+ parser.add_option("--parent",
+ dest="parent_branch", default=None,
+ metavar="PARENT_BRANCH",
+ help="the parent branch this diff should be against "
+ "(only available if your repository supports "
+ "parent diffs)")
+ parser.add_option("--p4-client",
+ dest="p4_client", default=None,
+ help="the Perforce client name that the review is in")
+ parser.add_option("--p4-port",
+ dest="p4_port", default=None,
+ help="the Perforce servers IP address that the review is on")
+ parser.add_option("--repository-url",
+ dest="repository_url", default=None,
+ help="the url for a repository for creating a diff "
+ "outside of a working copy (currently only supported "
+ "by Subversion). Requires --revision-range")
+ parser.add_option("-d", "--debug",
+ action="store_true", dest="debug", default=DEBUG,
+ help="display debug output")
+
+ (globals()["options"], args) = parser.parse_args(args)
+
+ if options.description and options.description_file:
+ sys.stderr.write("The --description and --description-file options "
+ "are mutually exclusive.\n")
+ sys.exit(1)
+
+ if options.description_file:
+ if os.path.exists(options.description_file):
+ fp = open(options.description_file, "r")
+ options.description = fp.read()
+ fp.close()
+ else:
+ sys.stderr.write("The description file %s does not exist.\n" %
+ options.description_file)
+ sys.exit(1)
+
+ if options.testing_done and options.testing_file:
+ sys.stderr.write("The --testing-done and --testing-done-file options "
+ "are mutually exclusive.\n")
+ sys.exit(1)
+
+ if options.testing_file:
+ if os.path.exists(options.testing_file):
+ fp = open(options.testing_file, "r")
+ options.testing_done = fp.read()
+ fp.close()
+ else:
+ sys.stderr.write("The testing file %s does not exist.\n" %
+ options.testing_file)
+ sys.exit(1)
+
+ if options.repository_url and not options.revision_range:
+ sys.stderr.write("The --repository-url option requires the "
+ "--revision-range option.\n")
+ sys.exit(1)
+
+ return args
+
+def determine_client():
+
+ repository_info = None
+ tool = None
+
+ # Try to find the SCM Client we're going to be working with.
+ for tool in SCMCLIENTS:
+ repository_info = tool.get_repository_info()
+
+ if repository_info:
+ break
+
+ if not repository_info:
+ if options.repository_url:
+ print "No supported repository could be access at the supplied url."
+ else:
+ print "The current directory does not contain a checkout from a"
+ print "supported source code repository."
+ sys.exit(1)
+
+ # Verify that options specific to an SCM Client have not been mis-used.
+ if options.change_only and not repository_info.supports_changesets:
+ sys.stderr.write("The --change-only option is not valid for the "
+ "current SCM client.\n")
+ sys.exit(1)
+
+ if options.parent_branch and not repository_info.supports_parent_diffs:
+ sys.stderr.write("The --parent option is not valid for the "
+ "current SCM client.\n")
+ sys.exit(1)
+
+ if ((options.p4_client or options.p4_port) and \
+ not isinstance(tool, PerforceClient)):
+ sys.stderr.write("The --p4-client and --p4-port options are not valid "
+ "for the current SCM client.\n")
+ sys.exit(1)
+
+ return (repository_info, tool)
+
+def main():
+ if 'USERPROFILE' in os.environ:
+ homepath = os.path.join(os.environ["USERPROFILE"], "Local Settings",
+ "Application Data")
+ elif 'HOME' in os.environ:
+ homepath = os.environ["HOME"]
+ else:
+ homepath = ''
+
+ # Load the config and cookie files
+ globals()['user_config'] = \
+ load_config_file(os.path.join(homepath, ".reviewboardrc"))
+ cookie_file = os.path.join(homepath, ".post-review-cookies.txt")
+
+ args = parse_options(sys.argv[1:])
+
+ repository_info, tool = determine_client()
+
+ # Try to find a valid Review Board server to use.
+ if options.server:
+ server_url = options.server
+ else:
+ server_url = tool.scan_for_server(repository_info)
+
+ if not server_url:
+ print "Unable to find a Review Board server for this source code tree."
+ sys.exit(1)
+
+ server = ReviewBoardServer(server_url, repository_info, cookie_file)
+
+ if repository_info.supports_changesets:
+ changenum = tool.get_changenum(args)
+ else:
+ changenum = None
+
+ if options.revision_range:
+ diff = tool.diff_between_revisions(options.revision_range, args,
+ repository_info)
+ parent_diff = None
+ elif options.label and isinstance(tool, ClearCaseClient):
+ diff, parent_diff = tool.diff_label(options.label)
+ else:
+ diff, parent_diff = tool.diff(args)
+
+ if options.output_diff_only:
+ print diff
+ sys.exit(0)
+
+ # Let's begin.
+ server.login()
+
+ review_url = tempt_fate(server, tool, changenum, diff_content=diff,
+ parent_diff_content=parent_diff,
+ submit_as=options.submit_as)
+
+ # Load the review up in the browser if requested to:
+ if options.open_browser:
+ try:
+ import webbrowser
+ if 'open_new_tab' in dir(webbrowser):
+ # open_new_tab is only in python 2.5+
+ webbrowser.open_new_tab(review_url)
+ elif 'open_new' in dir(webbrowser):
+ webbrowser.open_new(review_url)
+ else:
+ os.system( 'start %s' % review_url )
+ except:
+ print 'Error opening review URL: %s' % review_url
+
+
+if __name__ == "__main__":
+ main()
diff --git a/test/review b/test/review
new file mode 100755
index 0000000000..e1ccb9c0af
--- /dev/null
+++ b/test/review
@@ -0,0 +1,44 @@
+#!/bin/sh
+
+if [ -z $1 ] || [ "$1" = "-h" ] || [ "$1" = "--help" ] || [ "$1" = "-help" ] || [ "$1" = "-?" ]; then
+ echo "Usage: `basename $0` [rev] [args]\n"
+ echo " [rev] : either the revision number without leading 'r' (post-commit),"
+ echo " or '-loc' to create a review from current local changes (pre-commit)\n"
+ echo " [args] : optional arguments:"
+ echo " -r ID existing review request ID to update\n"
+ exit 1
+fi
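+
+# Example invocations (hypothetical revision and request id):
+#   ./review 19624          # post-commit review of r19624
+#   ./review -loc -r 42     # pre-commit review, updating request 42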
+
+POSTREVIEW=`dirname $0`/postreview.py
+
+if [ "$1" = "-loc" ]; then
+ echo "creating review request from local changes..."
+ REVARG=""
+ LOG=""
+ SUMMARY="local changes"
+ REPO=""
+else
+ REV=$1
+ PREV=`expr $REV - 1`
+ if [ $? -ne 0 ]; then
+ echo "argument revision not a number: $REV"
+ exit 1
+ fi
+
+ echo "creating review request for changeset $REV..."
+
+ LOG="`svn log http://lampsvn.epfl.ch/svn-repos/scala -c $REV`"
+ if [ $? -ne 0 ]; then
+ echo "could not get svn log for revision $REV"
+ exit 1
+ fi
+
+ REVARG="--revision-range=$PREV:$REV"
+ SUMMARY="r$REV"
+ REPO="--repository-url=http://lampsvn.epfl.ch/svn-repos/scala"
+fi
+
+
+shift # remove parameter $1 (revision)
+
+python $POSTREVIEW --server="https://chara2.epfl.ch" $REVARG --summary="$SUMMARY" --description="$LOG" $REPO -o $@
diff --git a/test/simplejson/__init__.py b/test/simplejson/__init__.py
new file mode 100644
index 0000000000..d5b4d39913
--- /dev/null
+++ b/test/simplejson/__init__.py
@@ -0,0 +1,318 @@
+r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
+JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
+interchange format.
+
+:mod:`simplejson` exposes an API familiar to users of the standard library
+:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
+version of the :mod:`json` library contained in Python 2.6, but maintains
+compatibility with Python 2.4 and Python 2.5 and (currently) has
+significant performance advantages, even without using the optional C
+extension for speedups.
+
+Encoding basic Python object hierarchies::
+
+ >>> import simplejson as json
+ >>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
+ '["foo", {"bar": ["baz", null, 1.0, 2]}]'
+ >>> print json.dumps("\"foo\bar")
+ "\"foo\bar"
+ >>> print json.dumps(u'\u1234')
+ "\u1234"
+ >>> print json.dumps('\\')
+ "\\"
+ >>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
+ {"a": 0, "b": 0, "c": 0}
+ >>> from StringIO import StringIO
+ >>> io = StringIO()
+ >>> json.dump(['streaming API'], io)
+ >>> io.getvalue()
+ '["streaming API"]'
+
+Compact encoding::
+
+ >>> import simplejson as json
+ >>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
+ '[1,2,3,{"4":5,"6":7}]'
+
+Pretty printing::
+
+ >>> import simplejson as json
+ >>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
+ >>> print '\n'.join([l.rstrip() for l in s.splitlines()])
+ {
+ "4": 5,
+ "6": 7
+ }
+
+Decoding JSON::
+
+ >>> import simplejson as json
+ >>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
+ >>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
+ True
+ >>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
+ True
+ >>> from StringIO import StringIO
+ >>> io = StringIO('["streaming API"]')
+ >>> json.load(io)[0] == 'streaming API'
+ True
+
+Specializing JSON object decoding::
+
+ >>> import simplejson as json
+ >>> def as_complex(dct):
+ ... if '__complex__' in dct:
+ ... return complex(dct['real'], dct['imag'])
+ ... return dct
+ ...
+ >>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
+ ... object_hook=as_complex)
+ (1+2j)
+ >>> import decimal
+ >>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
+ True
+
+Specializing JSON object encoding::
+
+ >>> import simplejson as json
+ >>> def encode_complex(obj):
+ ... if isinstance(obj, complex):
+ ... return [obj.real, obj.imag]
+ ... raise TypeError(repr(obj) + " is not JSON serializable")
+ ...
+ >>> json.dumps(2 + 1j, default=encode_complex)
+ '[2.0, 1.0]'
+ >>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
+ '[2.0, 1.0]'
+ >>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
+ '[2.0, 1.0]'
+
+
+Using simplejson.tool from the shell to validate and pretty-print::
+
+ $ echo '{"json":"obj"}' | python -m simplejson.tool
+ {
+ "json": "obj"
+ }
+ $ echo '{ 1.2:3.4}' | python -m simplejson.tool
+ Expecting property name: line 1 column 2 (char 2)
+"""
+__version__ = '2.0.9'
+__all__ = [
+ 'dump', 'dumps', 'load', 'loads',
+ 'JSONDecoder', 'JSONEncoder',
+]
+
+__author__ = 'Bob Ippolito <bob@redivi.com>'
+
+from decoder import JSONDecoder
+from encoder import JSONEncoder
+
+_default_encoder = JSONEncoder(
+ skipkeys=False,
+ ensure_ascii=True,
+ check_circular=True,
+ allow_nan=True,
+ indent=None,
+ separators=None,
+ encoding='utf-8',
+ default=None,
+)
+
+def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
+ allow_nan=True, cls=None, indent=None, separators=None,
+ encoding='utf-8', default=None, **kw):
+ """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
+ ``.write()``-supporting file-like object).
+
+ If ``skipkeys`` is true then ``dict`` keys that are not basic types
+ (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
+ will be skipped instead of raising a ``TypeError``.
+
+ If ``ensure_ascii`` is false, then some chunks written to ``fp``
+ may be ``unicode`` instances, subject to normal Python ``str`` to
+ ``unicode`` coercion rules. Unless ``fp.write()`` explicitly
+ understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
+ to cause an error.
+
+ If ``check_circular`` is false, then the circular reference check
+ for container types will be skipped and a circular reference will
+ result in an ``OverflowError`` (or worse).
+
+ If ``allow_nan`` is false, then it will be a ``ValueError`` to
+ serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
+ in strict compliance of the JSON specification, instead of using the
+ JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
+
+ If ``indent`` is a non-negative integer, then JSON array elements and object
+ members will be pretty-printed with that indent level. An indent level
+ of 0 will only insert newlines. ``None`` is the most compact representation.
+
+ If ``separators`` is an ``(item_separator, dict_separator)`` tuple
+ then it will be used instead of the default ``(', ', ': ')`` separators.
+ ``(',', ':')`` is the most compact JSON representation.
+
+ ``encoding`` is the character encoding for str instances, default is UTF-8.
+
+ ``default(obj)`` is a function that should return a serializable version
+ of obj or raise TypeError. The default simply raises TypeError.
+
+ To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
+ ``.default()`` method to serialize additional types), specify it with
+ the ``cls`` kwarg.
+
+ """
+ # cached encoder
+ if (not skipkeys and ensure_ascii and
+ check_circular and allow_nan and
+ cls is None and indent is None and separators is None and
+ encoding == 'utf-8' and default is None and not kw):
+ iterable = _default_encoder.iterencode(obj)
+ else:
+ if cls is None:
+ cls = JSONEncoder
+ iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
+ check_circular=check_circular, allow_nan=allow_nan, indent=indent,
+ separators=separators, encoding=encoding,
+ default=default, **kw).iterencode(obj)
+ # could accelerate with writelines in some versions of Python, at
+ # a debuggability cost
+ for chunk in iterable:
+ fp.write(chunk)
+
+
+def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
+ allow_nan=True, cls=None, indent=None, separators=None,
+ encoding='utf-8', default=None, **kw):
+ """Serialize ``obj`` to a JSON formatted ``str``.
+
+ If ``skipkeys`` is true then ``dict`` keys that are not basic types
+ (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
+ will be skipped instead of raising a ``TypeError``.
+
+ If ``ensure_ascii`` is false, then the return value will be a
+ ``unicode`` instance subject to normal Python ``str`` to ``unicode``
+ coercion rules instead of being escaped to an ASCII ``str``.
+
+ If ``check_circular`` is false, then the circular reference check
+ for container types will be skipped and a circular reference will
+ result in an ``OverflowError`` (or worse).
+
+ If ``allow_nan`` is false, then it will be a ``ValueError`` to
+ serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
+ strict compliance of the JSON specification, instead of using the
+ JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
+
+ If ``indent`` is a non-negative integer, then JSON array elements and
+ object members will be pretty-printed with that indent level. An indent
+ level of 0 will only insert newlines. ``None`` is the most compact
+ representation.
+
+ If ``separators`` is an ``(item_separator, dict_separator)`` tuple
+ then it will be used instead of the default ``(', ', ': ')`` separators.
+ ``(',', ':')`` is the most compact JSON representation.
+
+ ``encoding`` is the character encoding for str instances, default is UTF-8.
+
+ ``default(obj)`` is a function that should return a serializable version
+ of obj or raise TypeError. The default simply raises TypeError.
+
+ To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
+ ``.default()`` method to serialize additional types), specify it with
+ the ``cls`` kwarg.
+
+ """
+ # cached encoder
+ if (not skipkeys and ensure_ascii and
+ check_circular and allow_nan and
+ cls is None and indent is None and separators is None and
+ encoding == 'utf-8' and default is None and not kw):
+ return _default_encoder.encode(obj)
+ if cls is None:
+ cls = JSONEncoder
+ return cls(
+ skipkeys=skipkeys, ensure_ascii=ensure_ascii,
+ check_circular=check_circular, allow_nan=allow_nan, indent=indent,
+ separators=separators, encoding=encoding, default=default,
+ **kw).encode(obj)
+
+
+_default_decoder = JSONDecoder(encoding=None, object_hook=None)
+
+
+def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
+ parse_int=None, parse_constant=None, **kw):
+ """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
+ a JSON document) to a Python object.
+
+ If the contents of ``fp`` are encoded with an ASCII based encoding other
+ than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must
+ be specified. Encodings that are not ASCII based (such as UCS-2) are
+ not allowed; such streams should be wrapped with
+ ``codecs.getreader(encoding)(fp)``, or simply decoded to a ``unicode``
+ object and passed to ``loads()``.
+
+ ``object_hook`` is an optional function that will be called with the
+ result of any object literal decode (a ``dict``). The return value of
+ ``object_hook`` will be used instead of the ``dict``. This feature
+ can be used to implement custom decoders (e.g. JSON-RPC class hinting).
+
+ To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
+ kwarg.
+
+ """
+ return loads(fp.read(),
+ encoding=encoding, cls=cls, object_hook=object_hook,
+ parse_float=parse_float, parse_int=parse_int,
+ parse_constant=parse_constant, **kw)
+
+
+def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
+ parse_int=None, parse_constant=None, **kw):
+ """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
+ document) to a Python object.
+
+ If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
+ other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
+ must be specified. Encodings that are not ASCII based (such as UCS-2)
+ are not allowed and should be decoded to ``unicode`` first.
+
+ ``object_hook`` is an optional function that will be called with the
+ result of any object literal decode (a ``dict``). The return value of
+ ``object_hook`` will be used instead of the ``dict``. This feature
+ can be used to implement custom decoders (e.g. JSON-RPC class hinting).
+
+ ``parse_float``, if specified, will be called with the string
+ of every JSON float to be decoded. By default this is equivalent to
+ float(num_str). This can be used to use another datatype or parser
+ for JSON floats (e.g. decimal.Decimal).
+
+ ``parse_int``, if specified, will be called with the string
+ of every JSON int to be decoded. By default this is equivalent to
+ int(num_str). This can be used to use another datatype or parser
+ for JSON integers (e.g. float).
+
+ ``parse_constant``, if specified, will be called with one of the
+ following strings: -Infinity, Infinity, NaN, null, true, false.
+ This can be used to raise an exception if invalid JSON numbers
+ are encountered.
+
+ To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
+ kwarg.
+
+ """
+ if (cls is None and encoding is None and object_hook is None and
+ parse_int is None and parse_float is None and
+ parse_constant is None and not kw):
+ return _default_decoder.decode(s)
+ if cls is None:
+ cls = JSONDecoder
+ if object_hook is not None:
+ kw['object_hook'] = object_hook
+ if parse_float is not None:
+ kw['parse_float'] = parse_float
+ if parse_int is not None:
+ kw['parse_int'] = parse_int
+ if parse_constant is not None:
+ kw['parse_constant'] = parse_constant
+ return cls(encoding=encoding, **kw).decode(s)
diff --git a/test/simplejson/decoder.py b/test/simplejson/decoder.py
new file mode 100644
index 0000000000..b769ea486c
--- /dev/null
+++ b/test/simplejson/decoder.py
@@ -0,0 +1,354 @@
+"""Implementation of JSONDecoder
+"""
+import re
+import sys
+import struct
+
+from simplejson.scanner import make_scanner
+try:
+ from simplejson._speedups import scanstring as c_scanstring
+except ImportError:
+ c_scanstring = None
+
+__all__ = ['JSONDecoder']
+
+FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
+
+def _floatconstants():
+ _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
+ if sys.byteorder != 'big':
+ _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
+ nan, inf = struct.unpack('dd', _BYTES)
+ return nan, inf, -inf
+
+NaN, PosInf, NegInf = _floatconstants()
+
+
+def linecol(doc, pos):
+ lineno = doc.count('\n', 0, pos) + 1
+ if lineno == 1:
+ colno = pos
+ else:
+ colno = pos - doc.rindex('\n', 0, pos)
+ return lineno, colno
+
+
+def errmsg(msg, doc, pos, end=None):
+ # Note that this function is called from _speedups
+ lineno, colno = linecol(doc, pos)
+ if end is None:
+ #fmt = '{0}: line {1} column {2} (char {3})'
+ #return fmt.format(msg, lineno, colno, pos)
+ fmt = '%s: line %d column %d (char %d)'
+ return fmt % (msg, lineno, colno, pos)
+ endlineno, endcolno = linecol(doc, end)
+ #fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})'
+ #return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end)
+ fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
+ return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
+
+
+_CONSTANTS = {
+ '-Infinity': NegInf,
+ 'Infinity': PosInf,
+ 'NaN': NaN,
+}
+
+STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
+BACKSLASH = {
+ '"': u'"', '\\': u'\\', '/': u'/',
+ 'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
+}
+
+DEFAULT_ENCODING = "utf-8"
+
+def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
+ """Scan the string s for a JSON string. End is the index of the
+ character in s after the quote that started the JSON string.
+ Unescapes all valid JSON string escape sequences and raises ValueError
+ on attempt to decode an invalid string. If strict is False then literal
+ control characters are allowed in the string.
+
+ Returns a tuple of the decoded string and the index of the character in s
+ after the end quote."""
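+ # For example (illustrative): py_scanstring('"abc" tail', 1) returns
+ # (u'abc', 5) -- the decoded string and the index just past the
+ # closing quote.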
+ if encoding is None:
+ encoding = DEFAULT_ENCODING
+ chunks = []
+ _append = chunks.append
+ begin = end - 1
+ while 1:
+ chunk = _m(s, end)
+ if chunk is None:
+ raise ValueError(
+ errmsg("Unterminated string starting at", s, begin))
+ end = chunk.end()
+ content, terminator = chunk.groups()
+ # Content contains zero or more unescaped string characters
+ if content:
+ if not isinstance(content, unicode):
+ content = unicode(content, encoding)
+ _append(content)
+ # Terminator is the end of string, a literal control character,
+ # or a backslash denoting that an escape sequence follows
+ if terminator == '"':
+ break
+ elif terminator != '\\':
+ if strict:
+ msg = "Invalid control character %r at" % (terminator,)
+ #msg = "Invalid control character {0!r} at".format(terminator)
+ raise ValueError(errmsg(msg, s, end))
+ else:
+ _append(terminator)
+ continue
+ try:
+ esc = s[end]
+ except IndexError:
+ raise ValueError(
+ errmsg("Unterminated string starting at", s, begin))
+ # If not a unicode escape sequence, must be in the lookup table
+ if esc != 'u':
+ try:
+ char = _b[esc]
+ except KeyError:
+ msg = "Invalid \\escape: " + repr(esc)
+ raise ValueError(errmsg(msg, s, end))
+ end += 1
+ else:
+ # Unicode escape sequence
+ esc = s[end + 1:end + 5]
+ next_end = end + 5
+ if len(esc) != 4:
+ msg = "Invalid \\uXXXX escape"
+ raise ValueError(errmsg(msg, s, end))
+ uni = int(esc, 16)
+ # Check for surrogate pair on UCS-4 systems
+ if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
+ msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
+ if not s[end + 5:end + 7] == '\\u':
+ raise ValueError(errmsg(msg, s, end))
+ esc2 = s[end + 7:end + 11]
+ if len(esc2) != 4:
+ raise ValueError(errmsg(msg, s, end))
+ uni2 = int(esc2, 16)
+ uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
+ next_end += 6
+ char = unichr(uni)
+ end = next_end
+ # Append the unescaped character
+ _append(char)
+ return u''.join(chunks), end
+
+
+# Use speedup if available
+scanstring = c_scanstring or py_scanstring
+
+WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
+WHITESPACE_STR = ' \t\n\r'
+
+def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
+ pairs = {}
+ # Use a slice to prevent IndexError from being raised; the following
+ # check will raise a more specific ValueError if the string is empty.
+ nextchar = s[end:end + 1]
+ # Normally we expect nextchar == '"'
+ if nextchar != '"':
+ if nextchar in _ws:
+ end = _w(s, end).end()
+ nextchar = s[end:end + 1]
+ # Trivial empty object
+ if nextchar == '}':
+ return pairs, end + 1
+ elif nextchar != '"':
+ raise ValueError(errmsg("Expecting property name", s, end))
+ end += 1
+ while True:
+ key, end = scanstring(s, end, encoding, strict)
+
+ # To skip some function call overhead we optimize the fast paths where
+ # the JSON key separator is ": " or just ":".
+ if s[end:end + 1] != ':':
+ end = _w(s, end).end()
+ if s[end:end + 1] != ':':
+ raise ValueError(errmsg("Expecting : delimiter", s, end))
+
+ end += 1
+
+ try:
+ if s[end] in _ws:
+ end += 1
+ if s[end] in _ws:
+ end = _w(s, end + 1).end()
+ except IndexError:
+ pass
+
+ try:
+ value, end = scan_once(s, end)
+ except StopIteration:
+ raise ValueError(errmsg("Expecting object", s, end))
+ pairs[key] = value
+
+ try:
+ nextchar = s[end]
+ if nextchar in _ws:
+ end = _w(s, end + 1).end()
+ nextchar = s[end]
+ except IndexError:
+ nextchar = ''
+ end += 1
+
+ if nextchar == '}':
+ break
+ elif nextchar != ',':
+ raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
+
+ try:
+ nextchar = s[end]
+ if nextchar in _ws:
+ end += 1
+ nextchar = s[end]
+ if nextchar in _ws:
+ end = _w(s, end + 1).end()
+ nextchar = s[end]
+ except IndexError:
+ nextchar = ''
+
+ end += 1
+ if nextchar != '"':
+ raise ValueError(errmsg("Expecting property name", s, end - 1))
+
+ if object_hook is not None:
+ pairs = object_hook(pairs)
+ return pairs, end
+
+def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
+ values = []
+ nextchar = s[end:end + 1]
+ if nextchar in _ws:
+ end = _w(s, end + 1).end()
+ nextchar = s[end:end + 1]
+ # Look-ahead for trivial empty array
+ if nextchar == ']':
+ return values, end + 1
+ _append = values.append
+ while True:
+ try:
+ value, end = scan_once(s, end)
+ except StopIteration:
+ raise ValueError(errmsg("Expecting object", s, end))
+ _append(value)
+ nextchar = s[end:end + 1]
+ if nextchar in _ws:
+ end = _w(s, end + 1).end()
+ nextchar = s[end:end + 1]
+ end += 1
+ if nextchar == ']':
+ break
+ elif nextchar != ',':
+ raise ValueError(errmsg("Expecting , delimiter", s, end))
+
+ try:
+ if s[end] in _ws:
+ end += 1
+ if s[end] in _ws:
+ end = _w(s, end + 1).end()
+ except IndexError:
+ pass
+
+ return values, end
+
+class JSONDecoder(object):
+ """Simple JSON <http://json.org> decoder
+
+ Performs the following translations in decoding by default:
+
+ +---------------+-------------------+
+ | JSON | Python |
+ +===============+===================+
+ | object | dict |
+ +---------------+-------------------+
+ | array | list |
+ +---------------+-------------------+
+ | string | unicode |
+ +---------------+-------------------+
+ | number (int) | int, long |
+ +---------------+-------------------+
+ | number (real) | float |
+ +---------------+-------------------+
+ | true | True |
+ +---------------+-------------------+
+ | false | False |
+ +---------------+-------------------+
+ | null | None |
+ +---------------+-------------------+
+
+ It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
+ their corresponding ``float`` values, which is outside the JSON spec.
+
+ """
+
+ def __init__(self, encoding=None, object_hook=None, parse_float=None,
+ parse_int=None, parse_constant=None, strict=True):
+ """``encoding`` determines the encoding used to interpret any ``str``
+ objects decoded by this instance (utf-8 by default). It has no
+ effect when decoding ``unicode`` objects.
+
+ Note that currently only encodings that are a superset of ASCII work,
+ strings of other encodings should be passed in as ``unicode``.
+
+ ``object_hook``, if specified, will be called with the result
+ of every JSON object decoded and its return value will be used in
+ place of the given ``dict``. This can be used to provide custom
+ deserializations (e.g. to support JSON-RPC class hinting).
+
+ ``parse_float``, if specified, will be called with the string
+ of every JSON float to be decoded. By default this is equivalent to
+ float(num_str). This can be used to use another datatype or parser
+ for JSON floats (e.g. decimal.Decimal).
+
+ ``parse_int``, if specified, will be called with the string
+ of every JSON int to be decoded. By default this is equivalent to
+ int(num_str). This can be used to use another datatype or parser
+ for JSON integers (e.g. float).
+
+ ``parse_constant``, if specified, will be called with one of the
+ following strings: -Infinity, Infinity, NaN.
+ This can be used to raise an exception if invalid JSON numbers
+ are encountered.
+
+ """
+ self.encoding = encoding
+ self.object_hook = object_hook
+ self.parse_float = parse_float or float
+ self.parse_int = parse_int or int
+ self.parse_constant = parse_constant or _CONSTANTS.__getitem__
+ self.strict = strict
+ self.parse_object = JSONObject
+ self.parse_array = JSONArray
+ self.parse_string = scanstring
+ self.scan_once = make_scanner(self)
+
+ def decode(self, s, _w=WHITESPACE.match):
+ """Return the Python representation of ``s`` (a ``str`` or ``unicode``
+ instance containing a JSON document)
+
+ """
+ obj, end = self.raw_decode(s, idx=_w(s, 0).end())
+ end = _w(s, end).end()
+ if end != len(s):
+ raise ValueError(errmsg("Extra data", s, end, len(s)))
+ return obj
+
+ def raw_decode(self, s, idx=0):
+ """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
+ with a JSON document) and return a 2-tuple of the Python
+ representation and the index in ``s`` where the document ended.
+
+ This can be used to decode a JSON document from a string that may
+ have extraneous data at the end.
+
+ """
+ try:
+ obj, end = self.scan_once(s, idx)
+ except StopIteration:
+ raise ValueError("No JSON object could be decoded")
+ return obj, end
diff --git a/test/simplejson/encoder.py b/test/simplejson/encoder.py
new file mode 100644
index 0000000000..cf58290366
--- /dev/null
+++ b/test/simplejson/encoder.py
@@ -0,0 +1,440 @@
+"""Implementation of JSONEncoder
+"""
+import re
+
+try:
+ from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
+except ImportError:
+ c_encode_basestring_ascii = None
+try:
+ from simplejson._speedups import make_encoder as c_make_encoder
+except ImportError:
+ c_make_encoder = None
+
+ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
+ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
+HAS_UTF8 = re.compile(r'[\x80-\xff]')
+ESCAPE_DCT = {
+ '\\': '\\\\',
+ '"': '\\"',
+ '\b': '\\b',
+ '\f': '\\f',
+ '\n': '\\n',
+ '\r': '\\r',
+ '\t': '\\t',
+}
+for i in range(0x20):
+ #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
+ ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
+
+# Assume this produces an infinity on all machines (probably not guaranteed)
+INFINITY = float('1e66666')
+FLOAT_REPR = repr
+
+def encode_basestring(s):
+ """Return a JSON representation of a Python string
+
+ """
+ def replace(match):
+ return ESCAPE_DCT[match.group(0)]
+ return '"' + ESCAPE.sub(replace, s) + '"'
+
+
+def py_encode_basestring_ascii(s):
+ """Return an ASCII-only JSON representation of a Python string
+
+ """
+ if isinstance(s, str) and HAS_UTF8.search(s) is not None:
+ s = s.decode('utf-8')
+ def replace(match):
+ s = match.group(0)
+ try:
+ return ESCAPE_DCT[s]
+ except KeyError:
+ n = ord(s)
+ if n < 0x10000:
+ #return '\\u{0:04x}'.format(n)
+ return '\\u%04x' % (n,)
+ else:
+ # surrogate pair
+ n -= 0x10000
+ s1 = 0xd800 | ((n >> 10) & 0x3ff)
+ s2 = 0xdc00 | (n & 0x3ff)
+ #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
+ return '\\u%04x\\u%04x' % (s1, s2)
+ return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
+
+
+encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
+
+class JSONEncoder(object):
+ """Extensible JSON <http://json.org> encoder for Python data structures.
+
+ Supports the following objects and types by default:
+
+ +-------------------+---------------+
+ | Python | JSON |
+ +===================+===============+
+ | dict | object |
+ +-------------------+---------------+
+ | list, tuple | array |
+ +-------------------+---------------+
+ | str, unicode | string |
+ +-------------------+---------------+
+ | int, long, float | number |
+ +-------------------+---------------+
+ | True | true |
+ +-------------------+---------------+
+ | False | false |
+ +-------------------+---------------+
+ | None | null |
+ +-------------------+---------------+
+
+ To extend this to recognize other objects, subclass and implement a
+ ``.default()`` method that returns a serializable object for ``o`` if
+ possible; otherwise it should call the superclass implementation
+ (to raise ``TypeError``).
+
+ """
+ item_separator = ', '
+ key_separator = ': '
+ def __init__(self, skipkeys=False, ensure_ascii=True,
+ check_circular=True, allow_nan=True, sort_keys=False,
+ indent=None, separators=None, encoding='utf-8', default=None):
+ """Constructor for JSONEncoder, with sensible defaults.
+
+ If skipkeys is false, then it is a TypeError to attempt
+ encoding of keys that are not str, int, long, float or None. If
+ skipkeys is true, such items are simply skipped.
+
+ If ensure_ascii is true, the output is guaranteed to be str
+ objects with all incoming unicode characters escaped. If
+ ensure_ascii is false, the output will be a unicode object.
+
+ If check_circular is true, then lists, dicts, and custom encoded
+ objects will be checked for circular references during encoding to
+ prevent an infinite recursion (which would cause an OverflowError).
+ Otherwise, no such check takes place.
+
+ If allow_nan is true, then NaN, Infinity, and -Infinity will be
+ encoded as such. This behavior is not JSON specification compliant,
+ but is consistent with most JavaScript based encoders and decoders.
+ Otherwise, it will be a ValueError to encode such floats.
+
+ If sort_keys is true, then the output of dictionaries will be
+ sorted by key; this is useful for regression tests to ensure
+ that JSON serializations can be compared on a day-to-day basis.
+
+ If indent is a non-negative integer, then JSON array
+ elements and object members will be pretty-printed with that
+ indent level. An indent level of 0 will only insert newlines.
+ None is the most compact representation.
+
+ If specified, separators should be an (item_separator, key_separator)
+ tuple. The default is (', ', ': '). To get the most compact JSON
+ representation you should specify (',', ':') to eliminate whitespace.
+
+ If specified, default is a function that gets called for objects
+ that can't otherwise be serialized. It should return a JSON encodable
+ version of the object or raise a ``TypeError``.
+
+ If encoding is not None, then all input strings will be
+ transformed into unicode using that encoding prior to JSON-encoding.
+ The default is UTF-8.
+
+ """
+
+ self.skipkeys = skipkeys
+ self.ensure_ascii = ensure_ascii
+ self.check_circular = check_circular
+ self.allow_nan = allow_nan
+ self.sort_keys = sort_keys
+ self.indent = indent
+ if separators is not None:
+ self.item_separator, self.key_separator = separators
+ if default is not None:
+ self.default = default
+ self.encoding = encoding
+
+ def default(self, o):
+ """Implement this method in a subclass such that it returns
+ a serializable object for ``o``, or calls the base implementation
+ (to raise a ``TypeError``).
+
+ For example, to support arbitrary iterators, you could
+ implement default like this::
+
+ def default(self, o):
+ try:
+ iterable = iter(o)
+ except TypeError:
+ pass
+ else:
+ return list(iterable)
+ return JSONEncoder.default(self, o)
+
+ """
+ raise TypeError(repr(o) + " is not JSON serializable")
+
+ def encode(self, o):
+ """Return a JSON string representation of a Python data structure.
+
+ >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
+ '{"foo": ["bar", "baz"]}'
+
+ """
+ # This is for extremely simple cases and benchmarks.
+ if isinstance(o, basestring):
+ if isinstance(o, str):
+ _encoding = self.encoding
+ if (_encoding is not None
+ and not (_encoding == 'utf-8')):
+ o = o.decode(_encoding)
+ if self.ensure_ascii:
+ return encode_basestring_ascii(o)
+ else:
+ return encode_basestring(o)
+ # This doesn't pass the iterator directly to ''.join() because the
+ # exceptions aren't as detailed. The list call should be roughly
+ # equivalent to the PySequence_Fast that ''.join() would do.
+ chunks = self.iterencode(o, _one_shot=True)
+ if not isinstance(chunks, (list, tuple)):
+ chunks = list(chunks)
+ return ''.join(chunks)
+
+ def iterencode(self, o, _one_shot=False):
+ """Encode the given object and yield each string
+ representation as available.
+
+ For example::
+
+ for chunk in JSONEncoder().iterencode(bigobject):
+ mysocket.write(chunk)
+
+ """
+ if self.check_circular:
+ markers = {}
+ else:
+ markers = None
+ if self.ensure_ascii:
+ _encoder = encode_basestring_ascii
+ else:
+ _encoder = encode_basestring
+ if self.encoding != 'utf-8':
+ def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
+ if isinstance(o, str):
+ o = o.decode(_encoding)
+ return _orig_encoder(o)
+
+ def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
+ # Check for specials. Note that this type of test is processor- and/or
+ # platform-specific, so use tests that don't depend on float internals.
+
+ if o != o:
+ text = 'NaN'
+ elif o == _inf:
+ text = 'Infinity'
+ elif o == _neginf:
+ text = '-Infinity'
+ else:
+ return _repr(o)
+
+ if not allow_nan:
+ raise ValueError(
+ "Out of range float values are not JSON compliant: " +
+ repr(o))
+
+ return text
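+ # Small worked example of the check above (added for clarity): since
+ # float('nan') != float('nan') evaluates to True, the ``o != o`` test
+ # catches NaN without relying on platform-specific repr() output.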
+
+
+ if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys:
+ _iterencode = c_make_encoder(
+ markers, self.default, _encoder, self.indent,
+ self.key_separator, self.item_separator, self.sort_keys,
+ self.skipkeys, self.allow_nan)
+ else:
+ _iterencode = _make_iterencode(
+ markers, self.default, _encoder, self.indent, floatstr,
+ self.key_separator, self.item_separator, self.sort_keys,
+ self.skipkeys, _one_shot)
+ return _iterencode(o, 0)
+
+def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
+ ## HACK: hand-optimized bytecode; turn globals into locals
+ False=False,
+ True=True,
+ ValueError=ValueError,
+ basestring=basestring,
+ dict=dict,
+ float=float,
+ id=id,
+ int=int,
+ isinstance=isinstance,
+ list=list,
+ long=long,
+ str=str,
+ tuple=tuple,
+ ):
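+ # Note on the "globals into locals" hack above (explanatory, not part of
+ # the original sources): binding builtins as keyword defaults makes them
+ # locals of _make_iterencode, so the closures below resolve them through
+ # fast local/closure lookups instead of dictionary-based global lookups.
+ # A minimal sketch of the same idea:
+ #
+ #   def fast_len(seq, len=len):  # 'len' is captured once, looked up locally
+ #       return len(seq)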
+
+ def _iterencode_list(lst, _current_indent_level):
+ if not lst:
+ yield '[]'
+ return
+ if markers is not None:
+ markerid = id(lst)
+ if markerid in markers:
+ raise ValueError("Circular reference detected")
+ markers[markerid] = lst
+ buf = '['
+ if _indent is not None:
+ _current_indent_level += 1
+ newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
+ separator = _item_separator + newline_indent
+ buf += newline_indent
+ else:
+ newline_indent = None
+ separator = _item_separator
+ first = True
+ for value in lst:
+ if first:
+ first = False
+ else:
+ buf = separator
+ if isinstance(value, basestring):
+ yield buf + _encoder(value)
+ elif value is None:
+ yield buf + 'null'
+ elif value is True:
+ yield buf + 'true'
+ elif value is False:
+ yield buf + 'false'
+ elif isinstance(value, (int, long)):
+ yield buf + str(value)
+ elif isinstance(value, float):
+ yield buf + _floatstr(value)
+ else:
+ yield buf
+ if isinstance(value, (list, tuple)):
+ chunks = _iterencode_list(value, _current_indent_level)
+ elif isinstance(value, dict):
+ chunks = _iterencode_dict(value, _current_indent_level)
+ else:
+ chunks = _iterencode(value, _current_indent_level)
+ for chunk in chunks:
+ yield chunk
+ if newline_indent is not None:
+ _current_indent_level -= 1
+ yield '\n' + (' ' * (_indent * _current_indent_level))
+ yield ']'
+ if markers is not None:
+ del markers[markerid]
+
+ def _iterencode_dict(dct, _current_indent_level):
+ if not dct:
+ yield '{}'
+ return
+ if markers is not None:
+ markerid = id(dct)
+ if markerid in markers:
+ raise ValueError("Circular reference detected")
+ markers[markerid] = dct
+ yield '{'
+ if _indent is not None:
+ _current_indent_level += 1
+ newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
+ item_separator = _item_separator + newline_indent
+ yield newline_indent
+ else:
+ newline_indent = None
+ item_separator = _item_separator
+ first = True
+ if _sort_keys:
+ items = dct.items()
+ items.sort(key=lambda kv: kv[0])
+ else:
+ items = dct.iteritems()
+ for key, value in items:
+ if isinstance(key, basestring):
+ pass
+ # JavaScript is weakly typed for these, so it makes sense to
+ # also allow them. Many encoders seem to do something like this.
+ elif isinstance(key, float):
+ key = _floatstr(key)
+ elif key is True:
+ key = 'true'
+ elif key is False:
+ key = 'false'
+ elif key is None:
+ key = 'null'
+ elif isinstance(key, (int, long)):
+ key = str(key)
+ elif _skipkeys:
+ continue
+ else:
+ raise TypeError("key " + repr(key) + " is not a string")
+ if first:
+ first = False
+ else:
+ yield item_separator
+ yield _encoder(key)
+ yield _key_separator
+ if isinstance(value, basestring):
+ yield _encoder(value)
+ elif value is None:
+ yield 'null'
+ elif value is True:
+ yield 'true'
+ elif value is False:
+ yield 'false'
+ elif isinstance(value, (int, long)):
+ yield str(value)
+ elif isinstance(value, float):
+ yield _floatstr(value)
+ else:
+ if isinstance(value, (list, tuple)):
+ chunks = _iterencode_list(value, _current_indent_level)
+ elif isinstance(value, dict):
+ chunks = _iterencode_dict(value, _current_indent_level)
+ else:
+ chunks = _iterencode(value, _current_indent_level)
+ for chunk in chunks:
+ yield chunk
+ if newline_indent is not None:
+ _current_indent_level -= 1
+ yield '\n' + (' ' * (_indent * _current_indent_level))
+ yield '}'
+ if markers is not None:
+ del markers[markerid]
+
+ def _iterencode(o, _current_indent_level):
+ if isinstance(o, basestring):
+ yield _encoder(o)
+ elif o is None:
+ yield 'null'
+ elif o is True:
+ yield 'true'
+ elif o is False:
+ yield 'false'
+ elif isinstance(o, (int, long)):
+ yield str(o)
+ elif isinstance(o, float):
+ yield _floatstr(o)
+ elif isinstance(o, (list, tuple)):
+ for chunk in _iterencode_list(o, _current_indent_level):
+ yield chunk
+ elif isinstance(o, dict):
+ for chunk in _iterencode_dict(o, _current_indent_level):
+ yield chunk
+ else:
+ if markers is not None:
+ markerid = id(o)
+ if markerid in markers:
+ raise ValueError("Circular reference detected")
+ markers[markerid] = o
+ o = _default(o)
+ for chunk in _iterencode(o, _current_indent_level):
+ yield chunk
+ if markers is not None:
+ del markers[markerid]
+
+ return _iterencode
diff --git a/test/simplejson/scanner.py b/test/simplejson/scanner.py
new file mode 100644
index 0000000000..adbc6ec979
--- /dev/null
+++ b/test/simplejson/scanner.py
@@ -0,0 +1,65 @@
+"""JSON token scanner
+"""
+import re
+try:
+ from simplejson._speedups import make_scanner as c_make_scanner
+except ImportError:
+ c_make_scanner = None
+
+__all__ = ['make_scanner']
+
+NUMBER_RE = re.compile(
+ r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
+ (re.VERBOSE | re.MULTILINE | re.DOTALL))
+
+def py_make_scanner(context):
+ parse_object = context.parse_object
+ parse_array = context.parse_array
+ parse_string = context.parse_string
+ match_number = NUMBER_RE.match
+ encoding = context.encoding
+ strict = context.strict
+ parse_float = context.parse_float
+ parse_int = context.parse_int
+ parse_constant = context.parse_constant
+ object_hook = context.object_hook
+
+ def _scan_once(string, idx):
+ try:
+ nextchar = string[idx]
+ except IndexError:
+ raise StopIteration
+
+ if nextchar == '"':
+ return parse_string(string, idx + 1, encoding, strict)
+ elif nextchar == '{':
+ return parse_object((string, idx + 1), encoding, strict, _scan_once, object_hook)
+ elif nextchar == '[':
+ return parse_array((string, idx + 1), _scan_once)
+ elif nextchar == 'n' and string[idx:idx + 4] == 'null':
+ return None, idx + 4
+ elif nextchar == 't' and string[idx:idx + 4] == 'true':
+ return True, idx + 4
+ elif nextchar == 'f' and string[idx:idx + 5] == 'false':
+ return False, idx + 5
+
+ m = match_number(string, idx)
+ if m is not None:
+ integer, frac, exp = m.groups()
+ if frac or exp:
+ res = parse_float(integer + (frac or '') + (exp or ''))
+ else:
+ res = parse_int(integer)
+ return res, m.end()
+ elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
+ return parse_constant('NaN'), idx + 3
+ elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
+ return parse_constant('Infinity'), idx + 8
+ elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
+ return parse_constant('-Infinity'), idx + 9
+ else:
+ raise StopIteration
+
+ return _scan_once
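+# Illustrative use (assumed behaviour, not from the original docs): the
+# scanner built by make_scanner, exposed as JSONDecoder().scan_once, returns
+# the decoded value together with the index just past it, e.g.
+#
+#   from simplejson.decoder import JSONDecoder
+#   JSONDecoder().scan_once('[1, 2.5, null]', 0)
+#   # -> ([1, 2.5, None], 14)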
+
+make_scanner = c_make_scanner or py_make_scanner
diff --git a/test/simplejson/tool.py b/test/simplejson/tool.py
new file mode 100644
index 0000000000..90443317b2
--- /dev/null
+++ b/test/simplejson/tool.py
@@ -0,0 +1,37 @@
+r"""Command-line tool to validate and pretty-print JSON
+
+Usage::
+
+ $ echo '{"json":"obj"}' | python -m simplejson.tool
+ {
+ "json": "obj"
+ }
+ $ echo '{ 1.2:3.4}' | python -m simplejson.tool
+ Expecting property name: line 1 column 2 (char 2)
+
+"""
+import sys
+import simplejson
+
+def main():
+ if len(sys.argv) == 1:
+ infile = sys.stdin
+ outfile = sys.stdout
+ elif len(sys.argv) == 2:
+ infile = open(sys.argv[1], 'rb')
+ outfile = sys.stdout
+ elif len(sys.argv) == 3:
+ infile = open(sys.argv[1], 'rb')
+ outfile = open(sys.argv[2], 'wb')
+ else:
+ raise SystemExit(sys.argv[0] + " [infile [outfile]]")
+ try:
+ obj = simplejson.load(infile)
+ except ValueError, e:
+ raise SystemExit(e)
+ simplejson.dump(obj, outfile, sort_keys=True, indent=4)
+ outfile.write('\n')
+
+
+if __name__ == '__main__':
+ main()