-rwxr-xr-xbuild.xml812
-rw-r--r--docs/examples/plugintemplate/.classpath11
-rw-r--r--docs/examples/plugintemplate/.project18
-rw-r--r--src/build/bnd/scala-parser-combinators.bnd2
-rw-r--r--src/build/bnd/scala-xml.bnd2
-rw-r--r--src/build/maven/maven-deploy.xml191
-rw-r--r--src/build/maven/plugins/continuations-pom.xml95
-rw-r--r--src/build/maven/scala-actors-pom.xml96
-rw-r--r--src/build/maven/scala-compiler-pom.xml163
-rw-r--r--src/build/maven/scala-library-pom.xml86
-rw-r--r--src/build/maven/scala-reflect-pom.xml96
-rw-r--r--src/build/maven/scala-swing-pom.xml100
-rw-r--r--src/build/maven/scalap-pom.xml95
-rw-r--r--src/build/pack.xml263
-rw-r--r--src/compiler/scala/reflect/macros/compiler/Errors.scala21
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Names.scala4
-rw-r--r--src/compiler/scala/reflect/macros/contexts/Parsers.scala2
-rw-r--r--src/compiler/scala/reflect/macros/util/Helpers.scala7
-rw-r--r--src/compiler/scala/tools/ant/Scalac.scala2
-rw-r--r--src/compiler/scala/tools/cmd/FromString.scala19
-rw-r--r--src/compiler/scala/tools/nsc/CompilationUnits.scala10
-rw-r--r--src/compiler/scala/tools/nsc/Global.scala19
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Parsers.scala248
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/Scanners.scala50
-rw-r--r--src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala29
-rw-r--r--src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala3
-rw-r--r--src/compiler/scala/tools/nsc/settings/ScalaSettings.scala2
-rw-r--r--src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala852
-rw-r--r--src/compiler/scala/tools/nsc/transform/Delambdafy.scala449
-rw-r--r--src/compiler/scala/tools/nsc/transform/Erasure.scala187
-rw-r--r--src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/PostErasure.scala2
-rw-r--r--src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala8
-rw-r--r--src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala187
-rw-r--r--src/compiler/scala/tools/nsc/transform/UnCurry.scala127
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Logic.scala15
-rw-r--r--src/compiler/scala/tools/nsc/transform/patmat/Solving.scala14
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Duplicators.scala13
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Implicits.scala35
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Macros.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Namers.scala5
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala2
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/RefChecks.scala6
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala8
-rw-r--r--src/compiler/scala/tools/nsc/typechecker/Typers.scala15
-rw-r--r--src/compiler/scala/tools/nsc/util/FreshNameCreator.scala40
-rw-r--r--src/compiler/scala/tools/reflect/ToolBoxFactory.scala5
-rw-r--r--src/compiler/scala/tools/reflect/quasiquotes/Holes.scala2
-rw-r--r--src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala32
-rw-r--r--src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala21
-rw-r--r--src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala16
-rw-r--r--src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala113
-rw-r--r--src/eclipse/README.md11
-rw-r--r--src/eclipse/partest/.classpath2
-rw-r--r--src/eclipse/scaladoc/.classpath6
-rw-r--r--src/library/scala/collection/IterableLike.scala4
-rw-r--r--src/library/scala/collection/SeqLike.scala6
-rw-r--r--src/library/scala/collection/TraversableLike.scala4
-rw-r--r--src/library/scala/collection/TraversableOnce.scala4
-rw-r--r--src/library/scala/collection/convert/Wrappers.scala9
-rw-r--r--src/library/scala/collection/generic/Growable.scala2
-rw-r--r--src/library/scala/collection/generic/Shrinkable.scala2
-rwxr-xr-xsrc/library/scala/collection/immutable/DefaultMap.scala2
-rw-r--r--src/library/scala/collection/mutable/ArrayStack.scala2
-rw-r--r--src/library/scala/collection/mutable/Stack.scala2
-rw-r--r--src/library/scala/collection/parallel/ParIterableLike.scala4
-rw-r--r--src/library/scala/collection/parallel/Tasks.scala95
-rw-r--r--src/library/scala/collection/parallel/package.scala2
-rw-r--r--src/library/scala/concurrent/ExecutionContext.scala2
-rw-r--r--src/reflect/scala/reflect/api/BuildUtils.scala35
-rw-r--r--src/reflect/scala/reflect/api/Quasiquotes.scala4
-rw-r--r--src/reflect/scala/reflect/api/Trees.scala28
-rw-r--r--src/reflect/scala/reflect/internal/AnnotationInfos.scala2
-rw-r--r--src/reflect/scala/reflect/internal/BaseTypeSeqs.scala2
-rw-r--r--src/reflect/scala/reflect/internal/BuildUtils.scala80
-rw-r--r--src/reflect/scala/reflect/internal/Constants.scala10
-rw-r--r--src/reflect/scala/reflect/internal/Definitions.scala39
-rw-r--r--src/reflect/scala/reflect/internal/Mirrors.scala48
-rw-r--r--src/reflect/scala/reflect/internal/Scopes.scala10
-rw-r--r--src/reflect/scala/reflect/internal/StdNames.scala22
-rw-r--r--src/reflect/scala/reflect/internal/SymbolTable.scala18
-rw-r--r--src/reflect/scala/reflect/internal/Symbols.scala51
-rw-r--r--src/reflect/scala/reflect/internal/TreeGen.scala6
-rw-r--r--src/reflect/scala/reflect/internal/Trees.scala195
-rw-r--r--src/reflect/scala/reflect/internal/Types.scala74
-rw-r--r--src/reflect/scala/reflect/internal/Variances.scala3
-rw-r--r--src/reflect/scala/reflect/internal/pickling/Translations.scala128
-rw-r--r--src/reflect/scala/reflect/internal/pickling/UnPickler.scala514
-rw-r--r--src/reflect/scala/reflect/internal/settings/MutableSettings.scala25
-rw-r--r--src/reflect/scala/reflect/internal/tpe/GlbLubs.scala7
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeComparers.scala64
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala43
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeMaps.scala7
-rw-r--r--src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala4
-rw-r--r--src/reflect/scala/reflect/internal/transform/Erasure.scala7
-rw-r--r--src/reflect/scala/reflect/internal/util/FreshNameCreator.scala27
-rw-r--r--src/reflect/scala/reflect/macros/ExprUtils.scala13
-rw-r--r--src/reflect/scala/reflect/macros/Parsers.scala3
-rw-r--r--src/reflect/scala/reflect/macros/TreeBuilder.scala19
-rw-r--r--src/reflect/scala/reflect/macros/Universe.scala1
-rw-r--r--src/reflect/scala/reflect/runtime/Gil.scala25
-rw-r--r--src/reflect/scala/reflect/runtime/JavaMirrors.scala94
-rw-r--r--src/reflect/scala/reflect/runtime/JavaUniverse.scala74
-rw-r--r--src/reflect/scala/reflect/runtime/JavaUniverseForce.scala496
-rw-r--r--src/reflect/scala/reflect/runtime/ReflectionUtils.scala14
-rw-r--r--src/reflect/scala/reflect/runtime/Settings.scala5
-rw-r--r--src/reflect/scala/reflect/runtime/SymbolLoaders.scala89
-rw-r--r--src/reflect/scala/reflect/runtime/SymbolTable.scala2
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedOps.scala53
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala129
-rw-r--r--src/reflect/scala/reflect/runtime/SynchronizedTypes.scala85
-rw-r--r--src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala28
-rw-r--r--src/reflect/scala/reflect/runtime/TwoWayCaches.scala68
-rw-r--r--src/repl/scala/tools/nsc/interpreter/Phased.scala3
-rw-r--r--src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js71
-rw-r--r--test/disabled/presentation/doc.check (renamed from test/files/presentation/doc.check)0
-rwxr-xr-xtest/disabled/presentation/doc/doc.scala (renamed from test/files/presentation/doc/doc.scala)0
-rwxr-xr-xtest/disabled/presentation/doc/src/Class.scala (renamed from test/files/presentation/doc/src/Class.scala)0
-rwxr-xr-xtest/disabled/presentation/doc/src/p/Base.scala (renamed from test/files/presentation/doc/src/p/Base.scala)0
-rwxr-xr-xtest/disabled/presentation/doc/src/p/Derived.scala (renamed from test/files/presentation/doc/src/p/Derived.scala)0
-rw-r--r--test/disabled/presentation/ide-bug-1000450.check0
-rw-r--r--test/disabled/presentation/ide-bug-1000545.check0
-rw-r--r--test/disabled/run/reflection-sync-subtypes.scala20
-rw-r--r--test/disabled/run/t4602.scala (renamed from test/files/disabled/run/t4602.scala)0
-rw-r--r--test/files/continuations-run/z1673.check0
-rw-r--r--test/files/filters3
-rw-r--r--test/files/instrumented/inline-in-constructors.flags2
-rw-r--r--test/files/jvm/deprecation.check2
-rw-r--r--test/files/jvm/future-spec/FutureTests.scala2
-rw-r--r--test/files/jvm/future-spec/PromiseTests.scala2
-rw-r--r--test/files/jvm/future-spec/TryTests.scala2
-rw-r--r--test/files/jvm/future-spec/main.scala6
-rw-r--r--test/files/jvm/t1116.check0
-rw-r--r--test/files/jvm/t1143.check0
-rw-r--r--test/files/jvm/t1948.check0
-rw-r--r--test/files/jvm/t2104.check0
-rw-r--r--test/files/jvm/t2570.check0
-rw-r--r--test/files/jvm/t2585.check0
-rw-r--r--test/files/jvm/t680.check0
-rw-r--r--test/files/jvm/t7006.check1
-rw-r--r--test/files/neg/delambdafy_t6260_method.check13
-rw-r--r--test/files/neg/delambdafy_t6260_method.flags1
-rw-r--r--test/files/neg/delambdafy_t6260_method.scala17
-rw-r--r--test/files/neg/divergent-implicit.check16
-rw-r--r--test/files/neg/macro-basic-mamdmi.check2
-rw-r--r--test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala9
-rw-r--r--test/files/neg/macro-bundle-object.check1
-rw-r--r--test/files/neg/macro-divergence-controlled/Impls_Macros_1.scala2
-rw-r--r--test/files/neg/macro-invalidimpl.check2
-rw-r--r--test/files/neg/macro-invalidimpl/Impls_1.scala3
-rw-r--r--test/files/neg/macro-invalidret.check2
-rw-r--r--test/files/neg/macro-invalidshape/Macros_Test_2.scala2
-rw-r--r--test/files/neg/macro-invalidsig-params-badtype.check1
-rw-r--r--test/files/neg/macro-invalidsig.check11
-rw-r--r--test/files/neg/macro-invalidsig/Impls_1.scala22
-rw-r--r--test/files/neg/macro-invalidusage-badbounds/Impls_1.scala2
-rw-r--r--test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala3
-rw-r--r--test/files/neg/macro-invalidusage-presuper/Impls_1.scala2
-rw-r--r--test/files/neg/macro-override-method-overrides-macro/Impls_1.scala3
-rw-r--r--test/files/neg/macro-quasiquotes.check3
-rw-r--r--test/files/neg/macro-without-xmacros-a/Impls_1.scala6
-rw-r--r--test/files/neg/macro-without-xmacros-b/Impls_1.scala6
-rw-r--r--test/files/neg/quasiquotes-syntax-error-position.check4
-rw-r--r--test/files/neg/sammy_restrictions.check49
-rw-r--r--test/files/neg/sammy_restrictions.flags1
-rw-r--r--test/files/neg/sammy_restrictions.scala45
-rw-r--r--test/files/neg/t3346b.check4
-rw-r--r--test/files/neg/t3346b.scala15
-rw-r--r--test/files/neg/t3346c.check4
-rw-r--r--test/files/neg/t3346c.scala61
-rw-r--r--test/files/neg/t3346i.check7
-rw-r--r--test/files/neg/t3346i.scala30
-rw-r--r--test/files/neg/t3871.check7
-rw-r--r--test/files/neg/t3871.scala11
-rw-r--r--test/files/neg/t3871b.check97
-rw-r--r--test/files/neg/t3871b.scala127
-rw-r--r--test/files/neg/t5578.check5
-rw-r--r--test/files/neg/t5689.check1
-rw-r--r--test/files/neg/t5845.check7
-rw-r--r--test/files/neg/t6123-explaintypes-macros.check1
-rw-r--r--test/files/neg/t6231.flags1
-rw-r--r--test/files/neg/t6260.flags1
-rw-r--r--test/files/neg/t6260b.check7
-rw-r--r--test/files/neg/t6260b.scala3
-rw-r--r--test/files/neg/t6260c.check7
-rw-r--r--test/files/neg/t6260c.scala4
-rw-r--r--test/files/neg/t6385.check7
-rwxr-xr-xtest/files/neg/t6446-additional.check27
-rwxr-xr-xtest/files/neg/t6446-missing.check23
-rw-r--r--test/files/neg/t6446-show-phases.check23
-rw-r--r--test/files/neg/t6666.flags1
-rw-r--r--test/files/neg/t6666c.flags1
-rw-r--r--test/files/neg/t6680b.flags1
-rw-r--r--test/files/neg/t6680c.check6
-rw-r--r--test/files/neg/t6680c.flags1
-rw-r--r--test/files/neg/t7020.check8
-rw-r--r--test/files/neg/t7157/Impls_Macros_1.scala24
-rw-r--r--test/files/neg/t7494-no-options.check27
-rw-r--r--test/files/neg/t7519-b.check6
-rw-r--r--test/files/neg/t7519-b/Mac_1.scala14
-rw-r--r--test/files/neg/t7519-b/Use_2.scala8
-rw-r--r--test/files/neg/t7519.check8
-rw-r--r--test/files/neg/t7605-deprecation.check12
-rw-r--r--test/files/neg/t7605-deprecation.flags1
-rw-r--r--test/files/neg/t7605-deprecation.scala5
-rw-r--r--test/files/neg/t7694b.check7
-rw-r--r--test/files/neg/t7783.check18
-rw-r--r--test/files/neg/t7783.flags1
-rw-r--r--test/files/neg/t7783.scala15
-rw-r--r--test/files/neg/xmltruncated6.check2
-rw-r--r--test/files/pos/annotated-treecopy.check0
-rw-r--r--test/files/pos/annotated-treecopy/Impls_Macros_1.scala6
-rw-r--r--test/files/pos/attachments-typed-another-ident.check0
-rw-r--r--test/files/pos/attachments-typed-ident.check0
-rw-r--r--test/files/pos/delambdafy-lambdalift.scala8
-rw-r--r--test/files/pos/delambdafy-patterns.scala15
-rw-r--r--test/files/pos/macro-qmarkqmarkqmark.check0
-rw-r--r--test/files/pos/t5692a/Macros_1.scala2
-rw-r--r--test/files/pos/t5692b/Macros_1.scala2
-rw-r--r--test/files/pos/t5692c.check0
-rw-r--r--test/files/pos/t5845.scala (renamed from test/files/neg/t5845.scala)0
-rw-r--r--test/files/pos/t6260a.scala15
-rw-r--r--test/files/pos/t7461.check0
-rw-r--r--test/files/pos/t7461/Macros_1.scala2
-rw-r--r--test/files/pos/t7649.scala2
-rw-r--r--test/files/pos/t7688.scala7
-rw-r--r--test/files/pos/t7928.scala16
-rw-r--r--test/files/pos/t7944.scala24
-rw-r--r--test/files/pos/virtpatmat_anonfun_for.flags0
-rw-r--r--test/files/presentation/partial-fun/partial-fun.check1
-rw-r--r--test/files/run/dead-code-elimination.check0
-rw-r--r--test/files/run/delambdafy-nested-by-name.check2
-rw-r--r--test/files/run/delambdafy-nested-by-name.scala11
-rw-r--r--test/files/run/delambdafy-two-lambdas.check2
-rw-r--r--test/files/run/delambdafy-two-lambdas.scala12
-rw-r--r--test/files/run/delambdafy_t6028.check57
-rw-r--r--test/files/run/delambdafy_t6028.scala21
-rw-r--r--test/files/run/delambdafy_t6555.check15
-rw-r--r--test/files/run/delambdafy_t6555.scala15
-rw-r--r--test/files/run/delambdafy_uncurry_byname_inline.check21
-rw-r--r--test/files/run/delambdafy_uncurry_byname_inline.scala20
-rw-r--r--test/files/run/delambdafy_uncurry_byname_method.check15
-rw-r--r--test/files/run/delambdafy_uncurry_byname_method.scala20
-rw-r--r--test/files/run/delambdafy_uncurry_inline.check23
-rw-r--r--test/files/run/delambdafy_uncurry_inline.scala20
-rw-r--r--test/files/run/delambdafy_uncurry_method.check17
-rw-r--r--test/files/run/delambdafy_uncurry_method.scala20
-rw-r--r--test/files/run/exoticnames.check0
-rw-r--r--test/files/run/intmap.check0
-rw-r--r--test/files/run/longmap.check0
-rw-r--r--test/files/run/macro-abort-fresh.check2
-rw-r--r--test/files/run/macro-bundle-repl.check4
-rw-r--r--test/files/run/macro-bundle-repl.scala4
-rw-r--r--test/files/run/macro-bundle-static/Impls_Macros_1.scala8
-rw-r--r--test/files/run/macro-bundle-toplevel/Impls_Macros_1.scala8
-rw-r--r--test/files/run/macro-divergence-spurious/Impls_Macros_1.scala2
-rw-r--r--test/files/run/macro-enclosures/Impls_Macros_1.scala17
-rw-r--r--test/files/run/macro-expand-tparams-bounds.check0
-rw-r--r--test/files/run/macro-expand-tparams-bounds/Impls_1.scala4
-rw-r--r--test/files/run/macro-expand-tparams-prefix/Impls_1.scala15
-rw-r--r--test/files/run/macro-impl-default-params/Impls_Macros_1.scala13
-rw-r--r--test/files/run/macro-impl-rename-context/Impls_Macros_1.scala5
-rw-r--r--test/files/run/macro-impl-tparam-only-in-impl/Impls_1.scala2
-rw-r--r--test/files/run/macro-openmacros/Impls_Macros_1.scala4
-rw-r--r--test/files/run/macro-reify-nested-a.check0
-rw-r--r--test/files/run/macro-reify-nested-b.check0
-rw-r--r--test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala4
-rw-r--r--test/files/run/macro-settings/Impls_Macros_1.scala7
-rw-r--r--test/files/run/macro-sip19-revised/Impls_Macros_1.scala3
-rw-r--r--test/files/run/macro-sip19/Impls_Macros_1.scala3
-rw-r--r--test/files/run/macro-system-properties.check2
-rw-r--r--test/files/run/macro-system-properties.scala2
-rw-r--r--test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala6
-rw-r--r--test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala4
-rw-r--r--test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala4
-rw-r--r--test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala18
-rw-r--r--test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala5
-rw-r--r--test/files/run/origins.flags2
-rw-r--r--test/files/run/primitive-sigs-2-new.flags1
-rw-r--r--test/files/run/primitive-sigs-2-old.flags1
-rw-r--r--test/files/run/programmatic-main.check9
-rw-r--r--test/files/run/range.check0
-rw-r--r--test/files/run/reflection-fancy-java-classes.check12
-rw-r--r--test/files/run/reflection-fancy-java-classes/Foo_1.java5
-rw-r--r--test/files/run/reflection-fancy-java-classes/Test_2.scala20
-rw-r--r--test/files/run/reflection-sync-potpourri.scala32
-rw-r--r--test/files/run/reify_for1.check0
-rw-r--r--test/files/run/reify_fors_oldpatmat.flags0
-rw-r--r--test/files/run/reify_maps_oldpatmat.flags0
-rw-r--r--test/files/run/repl-term-macros.check6
-rw-r--r--test/files/run/repl-term-macros.scala6
-rw-r--r--test/files/run/static-module-method.check1
-rw-r--r--test/files/run/static-module-method.scala14
-rw-r--r--test/files/run/t0668.check0
-rw-r--r--test/files/run/t1167.flags1
-rw-r--r--test/files/run/t1829.check0
-rw-r--r--test/files/run/t2594_tcpoly.check0
-rw-r--r--test/files/run/t3346a.check1
-rw-r--r--test/files/run/t3346a.scala11
-rw-r--r--test/files/run/t3346d.scala21
-rw-r--r--test/files/run/t3346e.check12
-rw-r--r--test/files/run/t3346e.scala81
-rw-r--r--test/files/run/t3346f.check2
-rw-r--r--test/files/run/t3346f.scala15
-rw-r--r--test/files/run/t3346g.check1
-rw-r--r--test/files/run/t3346g.scala9
-rw-r--r--test/files/run/t3346h.check1
-rw-r--r--test/files/run/t3346h.scala9
-rw-r--r--test/files/run/t3346j.check1
-rw-r--r--test/files/run/t3346j.scala11
-rw-r--r--test/files/run/t3897.flags1
-rw-r--r--test/files/run/t4542.check3
-rw-r--r--test/files/run/t5229_1.check0
-rw-r--r--test/files/run/t5271_4.check0
-rw-r--r--test/files/run/t5272_1_oldpatmat.flags0
-rw-r--r--test/files/run/t5272_2_oldpatmat.flags0
-rw-r--r--test/files/run/t5273_1_oldpatmat.flags0
-rw-r--r--test/files/run/t5273_2a_oldpatmat.flags0
-rw-r--r--test/files/run/t5273_2b_oldpatmat.flags0
-rw-r--r--test/files/run/t5415.check0
-rw-r--r--test/files/run/t5418.check0
-rw-r--r--test/files/run/t5545.check0
-rw-r--r--test/files/run/t5894.scala2
-rw-r--r--test/files/run/t5923a/Macros_1.scala2
-rw-r--r--test/files/run/t5923d.check0
-rw-r--r--test/files/run/t5940.scala4
-rw-r--r--test/files/run/t5942.check0
-rw-r--r--test/files/run/t6028.scala2
-rw-r--r--test/files/run/t6102.check1
-rw-r--r--test/files/run/t6197.check0
-rw-r--r--test/files/run/t6198.check0
-rw-r--r--test/files/run/t6199-toolbox.scala2
-rw-r--r--test/files/run/t6240-universe-code-gen.scala82
-rw-r--r--test/files/run/t6240a.check1
-rw-r--r--test/files/run/t6240a/StepOne.java41
-rw-r--r--test/files/run/t6240a/StepTwo.scala7
-rw-r--r--test/files/run/t6240a/Test.scala15
-rw-r--r--test/files/run/t6240b.check1
-rw-r--r--test/files/run/t6240b/StepOne.java41
-rw-r--r--test/files/run/t6240b/StepThree.scala4
-rw-r--r--test/files/run/t6240b/StepTwo.scala10
-rw-r--r--test/files/run/t6240b/Test.scala15
-rw-r--r--test/files/run/t6260b.scala13
-rw-r--r--test/files/run/t6381.check6
-rw-r--r--test/files/run/t6381.scala6
-rw-r--r--test/files/run/t6385.scala (renamed from test/files/neg/t6385.scala)4
-rw-r--r--test/files/run/t6555.scala2
-rw-r--r--test/files/run/t7008-scala-defined/Impls_Macros_2.scala3
-rw-r--r--test/files/run/t7008/Impls_Macros_2.scala3
-rw-r--r--test/files/run/t7045.check2
-rw-r--r--test/files/run/t7045.scala12
-rw-r--r--test/files/run/t7047/Impls_Macros_1.scala2
-rw-r--r--test/files/run/t7240.check0
-rw-r--r--test/files/run/t7341.check0
-rw-r--r--test/files/run/t7375b/Macros_1.scala2
-rw-r--r--test/files/run/t7510.check0
-rw-r--r--test/files/run/t7852.check0
-rw-r--r--test/files/run/toolbox_parse_package.check8
-rw-r--r--test/files/run/toolbox_parse_package.scala9
-rw-r--r--test/files/run/typetags_without_scala_reflect_manifest_lookup.check0
-rw-r--r--test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala5
-rw-r--r--test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala5
-rw-r--r--test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala5
-rw-r--r--test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala11
-rw-r--r--test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala16
-rw-r--r--test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala49
-rw-r--r--test/files/scalacheck/parallel-collections/ParallelRangeCheck.scala8
-rw-r--r--test/files/scalacheck/parallel-collections/ParallelVectorCheck.scala14
-rw-r--r--test/files/scalacheck/parallel-collections/pc.scala65
-rw-r--r--test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala2
-rw-r--r--test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala54
-rw-r--r--test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala24
-rw-r--r--test/files/scalacheck/quasiquotes/ErrorProps.scala5
-rw-r--r--test/files/scalacheck/quasiquotes/TermConstructionProps.scala19
-rw-r--r--test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala29
-rw-r--r--test/files/specialized/constant_lambda.check2
-rw-r--r--test/files/specialized/constant_lambda.scala16
-rw-r--r--test/junit/scala/collection/convert/MapWrapperTest.scala49
-rw-r--r--test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala2
-rw-r--r--test/pending/run/macro-term-declared-in-anonymous-explicit-import.check0
-rw-r--r--test/pending/run/t5866b.scala17
-rw-r--r--test/pending/run/virtpatmat_anonfun_underscore.check0
-rwxr-xr-xtools/buildcp11
-rwxr-xr-xtools/diffPickled51
-rwxr-xr-xtools/epfl-build28
-rwxr-xr-xtools/epfl-publish32
-rwxr-xr-xtools/locker_scala6
-rwxr-xr-xtools/locker_scalac6
-rwxr-xr-xtools/lockercp4
-rw-r--r--tools/make-release-notes.scala129
-rwxr-xr-xtools/packcp5
-rwxr-xr-xtools/quick_scala6
-rwxr-xr-xtools/quick_scalac6
-rwxr-xr-xtools/quickcp4
-rwxr-xr-xtools/remotetest230
-rwxr-xr-xtools/showPickled32
-rwxr-xr-xtools/starr_scala6
-rwxr-xr-xtools/starr_scalac6
-rwxr-xr-xtools/starrcp5
-rwxr-xr-xtools/strapcp11
-rwxr-xr-xtools/test-renamer82
-rwxr-xr-xtools/updatescalacheck130
403 files changed, 6445 insertions, 4274 deletions
diff --git a/build.xml b/build.xml
index cb159b6c9e..bc7a3cf24b 100755
--- a/build.xml
+++ b/build.xml
@@ -4,6 +4,7 @@
xmlns:artifact="urn:maven-artifact-ant"
xmlns:rsel="antlib:org.apache.tools.ant.types.resources.selectors">
<include file="test/build-partest.xml" as="partest"/>
+ <include file="src/build/maven/maven-deploy.xml" as="maven-deploy"/>
<description>
SuperSabbus for Scala core, builds the scala library and compiler. It can also package it as a simple distribution, tests it for stable bootstrapping and against the Scala test suite.
@@ -28,7 +29,10 @@ scalacArgs examples:
"-Dscalac.args=\"-Yrangepos\" -Dpartest.scalac_opts=\"-Yrangepos\""
targets exercised:
- locker.done build-opt nightly test.suite test.continuations.suite test.scaladoc
+ distpack-maven-opt nightly locker.done build-opt test.suite test.continuations.suite test.scaladoc
+
+NOTE: after distpack-maven-opt, it is expected there's a build file in dists/maven/latest that defines targets deploy and deploy.local
+TODO: get rid of this separate step
-->
<!-- To use Zinc with the ant build:
@@ -65,20 +69,30 @@ TODO:
<target name="fastdist-opt" description="Optimized version of fastdist."> <optimized name="fastdist"/></target>
<!-- packaging -->
- <target name="distpack" depends="dist.done, docs.done">
- <ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/></target>
-
- <target name="distpack-maven" depends="dist.done, docs.done">
- <ant antfile="${src.dir}/build/pack.xml" target="pack-maven.done" inheritall="yes" inheritrefs="yes"/></target>
+ <target name="distpack" depends="pack-archives.done, pack-maven.done"/>
+ <target name="distpack-maven" depends="pack-maven.done"/>
<target name="distpack-opt" description="Builds an optimised distribution."> <optimized name="distpack"/></target>
<target name="distpack-maven-opt" description="Builds an optimised maven distribution."><optimized name="distpack-maven"/></target>
- <target name="all.done" depends="dist.done, test.done"/>
+ <target name="publish-core-signed-opt" description="Builds an untested optimised core (library/reflect/compiler) and publishes to maven, signed.">
+ <optimized name="publish-core-signed"/>
+ </target>
+ <target name="publish-core-signed-opt-nodocs" description="Builds an untested, undocumented optimised core (library/reflect/compiler) and publishes to maven, signed.">
+ <antcall target="publish-core-signed">
+ <param name="docs.skip" value="1"/>
+ <param name="scalac.args.optimise" value="-optimise"/>
+ </antcall>
+ </target>
+ <target name="publish-core-local-nodocs" description="Builds an untested, undocumented core (library/reflect/compiler) and locally publishes to maven">
+ <antcall target="publish-core-local">
+ <param name="docs.skip" value="1"/>
+ </antcall>
+ </target>
+
+ <target name="all.done" depends="test.done, distpack"/>
- <!-- must use depends for all.done, not antcall: need the properties defined in there (dist.dir) -->
- <target name="nightly-nopt" depends="all.done, docs.done">
- <ant antfile="${src.dir}/build/pack.xml" target="pack-all.done" inheritall="yes" inheritrefs="yes"/></target>
+ <target name="nightly-nopt" depends="all.done"/>
<target name="nightly"><optimized name="nightly-nopt"/></target>
<target name="nightly.checkall">
@@ -561,7 +575,8 @@ TODO:
<if><isset property="locker.skip"/><then>
<echo message="Using STARR to build the quick stage (skipping locker)."/>
<path id="locker.compiler.path" refid="starr.compiler.path"/>
- <!-- this is cheating, but should be close enough: -->
+ <!-- this is cheating (we don't know the classpath used to build starr)
+ but should be close enough: -->
<path id="locker.compiler.build.path" refid="starr.compiler.path"/>
<property name="locker.locked" value="locker skipped"/></then>
<else>
@@ -586,11 +601,6 @@ TODO:
<path refid="aux.libs"/>
</path>
- <path id="locker.actors.build.path">
- <path refid="locker.library.build.path"/>
- <pathelement location="${build-locker.dir}/classes/actors"/>
- </path>
-
<path id="locker.reflect.build.path">
<path refid="locker.library.build.path"/>
<pathelement location="${build-locker.dir}/classes/reflect"/>
@@ -753,18 +763,34 @@ TODO:
<path refid="asm.classpath"/>
</path>
- <!-- MISC -->
- <path id="docs.compiler.path">
+ <!-- DOCS -->
+ <path id="docs.library.build.path"> <path refid="quick.library.build.path"/> </path>
+ <path id="docs.reflect.build.path"> <path refid="quick.reflect.build.path"/> </path>
+ <path id="docs.compiler.build.path"> <path refid="quick.compiler.build.path"/> </path>
+ <path id="docs.scalap.build.path"> <path refid="quick.scalap.build.path"/> </path>
+ <path id="docs.continuations-plugin.build.path"> <path refid="quick.plugins.build.path"/> </path>
+ <path id="docs.continuations-library.build.path"> <path refid="quick.plugins.build.path"/> </path>
+ <path id="docs.actors.build.path"> <path refid="quick.actors.build.path"/> </path>
+ <path id="docs.swing.build.path"> <path refid="quick.swing.build.path"/> </path>
+
+ <!-- run-time classpath for scaladoc: should be resolved through maven once it's an actual module -->
+ <path id="scaladoc.classpath">
<path refid="external-modules-nocore"/>
<pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
- <pathelement location="${build-pack.dir}/lib/scalap.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
<pathelement location="${ant.jar}"/>
<path refid="aux.libs"/>
</path>
+ <path id="manual.build.path">
+ <path refid="external-modules-nocore"/> <!-- xml -->
+ <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
+ <pathelement location="${build.dir}/manmaker/classes"/>
+ <path refid="aux.libs"/> <!-- for ant -->
+ </path>
+
+ <!-- MISC -->
<path id="sbt.compile.build.path">
<path refid="quick.compiler.build.path"/>
<pathelement location="${build-quick.dir}/classes/repl"/>
@@ -773,10 +799,6 @@ TODO:
<pathelement location="${sbt.interface.jar}"/>
</path>
- <path id="manual.classpath">
- <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
- <pathelement location="${build.dir}/manmaker/classes"/>
- </path>
<!--
This is the classpath used to run partest, which is what it uses to run the compiler and find other required jars.
@@ -1180,52 +1202,57 @@ TODO:
<attribute name="dir" default="@{project}"/>
<attribute name="title"/>
<attribute name="docroot" default="NOT SET"/>
+ <attribute name="skipPackages" default=""/>
+
<element name="includes" implicit="true"/>
<sequential>
<staged-uptodate stage="docs" project="@{project}">
<check><srcfiles dir="${src.dir}/@{dir}"/></check>
<do>
- <if><not><isset property="docs.skip"/></not><then>
- <stopwatch name="docs.@{project}.timer"/>
- <mkdir dir="${build-docs.dir}/@{project}"/>
- <if><equals arg1="@{docroot}" arg2="NOT SET"/><then>
- <!-- TODO: introduce docs.@{project}.build.path for classpathref -->
- <scaladoc
- destdir="${build-docs.dir}/@{project}"
- doctitle="@{title}"
- docversion="${version.number}"
- sourcepath="${src.dir}"
- classpathref="docs.compiler.path"
- srcdir="${src.dir}/@{dir}"
- addparams="${scalac.args.all}"
- implicits="on"
- diagrams="on"
- groups="on"
- rawOutput="${scaladoc.raw.output}"
- noPrefixes="${scaladoc.no.prefixes}">
- <includes/>
- </scaladoc>
- </then><else>
- <scaladoc
- destdir="${build-docs.dir}/@{project}"
- doctitle="@{title}"
- docversion="${version.number}"
- sourcepath="${src.dir}"
- classpathref="docs.compiler.path"
- srcdir="${src.dir}/@{dir}"
- docRootContent="${src.dir}/@{project}/@{docroot}"
- addparams="${scalac.args.all}"
- implicits="on"
- diagrams="on"
- groups="on"
- rawOutput="${scaladoc.raw.output}"
- noPrefixes="${scaladoc.no.prefixes}">
- <includes/>
- </scaladoc>
- </else></if>
- <stopwatch name="docs.@{project}.timer" action="total"/>
- </then></if>
+ <stopwatch name="docs.@{project}.timer"/>
+ <mkdir dir="${build-docs.dir}/@{project}"/>
+ <if><equals arg1="@{docroot}" arg2="NOT SET"/><then>
+ <scaladoc
+ destdir="${build-docs.dir}/@{project}"
+ doctitle="@{title}"
+ docfooter="epfl"
+ docversion="${version.number}"
+ sourcepath="${src.dir}"
+ classpathref="docs.@{project}.build.path"
+ srcdir="${src.dir}/@{dir}"
+ addparams="${scalac.args.all}"
+ implicits="on"
+ diagrams="on"
+ groups="on"
+ rawOutput="${scaladoc.raw.output}"
+ noPrefixes="${scaladoc.no.prefixes}"
+ docUncompilable="${src.dir}/library-aux"
+ skipPackages="@{skipPackages}">
+ <includes/>
+ </scaladoc>
+ </then><else>
+ <scaladoc
+ destdir="${build-docs.dir}/@{project}"
+ doctitle="@{title}"
+ docfooter="epfl"
+ docversion="${version.number}"
+ sourcepath="${src.dir}"
+ classpathref="docs.@{project}.build.path"
+ srcdir="${src.dir}/@{dir}"
+ docRootContent="${src.dir}/@{project}/@{docroot}"
+ addparams="${scalac.args.all}"
+ implicits="on"
+ diagrams="on"
+ groups="on"
+ rawOutput="${scaladoc.raw.output}"
+ noPrefixes="${scaladoc.no.prefixes}"
+ docUncompilable="${src.dir}/library-aux"
+ skipPackages="@{skipPackages}">
+ <includes/>
+ </scaladoc>
+ </else></if>
+ <stopwatch name="docs.@{project}.timer" action="total"/>
</do>
</staged-uptodate>
</sequential>
@@ -1240,9 +1267,6 @@ TODO:
<target name="locker.lib" depends="locker.start" unless="locker.locked">
<staged-build with="starr" stage="locker" project="library" srcpath="${src.dir}/library" includes="lib.includes"/></target>
- <target name="locker.actors" depends="locker.lib" unless="locker.locked">
- <staged-build with="starr" stage="locker" project="actors"/> </target>
-
<target name="locker.reflect" depends="locker.lib" unless="locker.locked">
<staged-build with="starr" stage="locker" project="reflect"/></target>
@@ -1264,9 +1288,6 @@ TODO:
<target name="quick.lib" depends="quick.start">
<staged-build with="locker" stage="quick" project="library" srcpath="${src.dir}/library" includes="lib.rootdoc.includes"/></target>
- <target name="quick.actors" depends="quick.lib">
- <staged-build with="locker" stage="quick" project="actors"/> </target>
-
<target name="quick.reflect" depends="quick.lib">
<staged-build with="locker" stage="quick" project="reflect"/> </target>
@@ -1276,15 +1297,18 @@ TODO:
<target name="quick.repl" depends="quick.comp">
<staged-build with="locker" stage="quick" project="repl"/> </target>
- <target name="quick.scalap" depends="quick.repl">
- <staged-build with="locker" stage="quick" project="scalap"/> </target>
-
<target name="quick.scaladoc" depends="quick.comp">
<staged-build with="locker" stage="quick" project="scaladoc" version="scaladoc"/> </target>
<target name="quick.interactive" depends="quick.comp, quick.scaladoc">
<staged-build with="locker" stage="quick" project="interactive"/> </target>
+ <target name="quick.scalap" depends="quick.repl">
+ <staged-build with="locker" stage="quick" project="scalap"/> </target>
+
+ <target name="quick.actors" depends="quick.lib">
+ <staged-build with="locker" stage="quick" project="actors"/> </target>
+
<target name="quick.swing" depends="quick.actors, quick.lib" if="has.java6">
<staged-build with="locker" stage="quick" project="swing"/> </target>
@@ -1323,7 +1347,9 @@ TODO:
</staged-uptodate>
</target>
- <target name="quick.bin" depends="quick.lib, quick.reflect, quick.comp, quick.repl, quick.scalap, quick.interactive, quick.swing, quick.plugins, quick.scaladoc, quick.partest-extras">
+ <target name="quick.modules" depends="quick.repl, quick.scaladoc, quick.interactive, quick.scalap, quick.swing, quick.plugins"/>
+
+ <target name="quick.bin" depends="quick.lib, quick.reflect, quick.comp, quick.modules">
<staged-bin stage="quick" classpathref="quick.bin.tool.path"/>
</target>
@@ -1334,17 +1360,9 @@ TODO:
<!-- ===========================================================================
PACKED QUICK BUILD (PACK)
============================================================================ -->
- <target name="pack.lib" depends="quick.lib, quick.plugins, forkjoin.done">
- <staged-pack project="library"/></target>
+ <target name="pack.lib" depends="quick.lib, quick.plugins, forkjoin.done"> <staged-pack project="library"/></target>
- <target name="pack.actors" depends="quick.lib"> <staged-pack project="actors"/> </target>
- <target name="pack.swing" if="has.java6" depends="quick.swing"> <staged-pack project="swing"/> </target>
- <target name="pack.reflect" depends="quick.reflect"> <staged-pack project="reflect"/> </target>
- <target name="pack.partest-extras" depends="quick.partest-extras">
- <staged-pack project="partest-extras"/>
- <staged-pack project="partest-javaagent"
- manifest="${src.dir}/partest-javaagent/scala/tools/partest/javaagent/MANIFEST.MF"/>
- </target>
+ <target name="pack.reflect" depends="quick.reflect"> <staged-pack project="reflect"/> </target>
<target name="pack.comp" depends="quick.comp, quick.scaladoc, quick.interactive, quick.repl, asm.done">
<staged-pack project="compiler" manifest="${build-pack.dir}/META-INF/MANIFEST.MF">
@@ -1372,24 +1390,38 @@ TODO:
</staged-pack>
</target>
+ <target name="pack.actors" depends="quick.actors"> <staged-pack project="actors"/> </target>
+ <target name="pack.swing" if="has.java6" depends="quick.swing"> <staged-pack project="swing"/> </target>
+
<target name="pack.plugins" depends="quick.plugins"> <staged-pack project="plugins" targetdir="misc/scala-devel/plugins" targetjar="continuations.jar"/> </target>
<target name="pack.scalap" depends="quick.scalap"> <staged-pack project="scalap" targetjar="scalap.jar"/> </target>
- <target name="pack.bin" depends="pack.comp, pack.lib, pack.actors, pack.plugins, pack.reflect, pack.scalap, pack.swing, pack.partest-extras">
+ <target name="pack.core" depends="pack.reflect, pack.comp, pack.lib"/>
+
+ <target name="pack.modules" depends="pack.core, pack.actors, pack.swing, pack.plugins, pack.scalap">
<copy todir="${build-pack.dir}/lib">
<path refid="external-modules-nocore" />
<mapper type="flatten" />
</copy>
+ </target>
+
+ <target name="scaladoc.task" depends="pack.modules" unless="docs.skip">
+ <taskdef resource="scala/tools/ant/antlib.xml" classpathref="scaladoc.classpath"/>
+ </target>
+ <target name="pack.partest-extras" depends="quick.partest-extras">
+ <staged-pack project="partest-extras"/>
+ <staged-pack project="partest-javaagent"
+ manifest="${src.dir}/partest-javaagent/scala/tools/partest/javaagent/MANIFEST.MF"/>
+ </target>
+
+ <target name="pack.bin" depends="pack.core, pack.modules, pack.partest-extras">
<staged-bin stage="pack"/>
</target>
<!-- depend on quick.done so quick.bin is run when pack.done is -->
- <target name="pack.done" depends="quick.done, pack.bin">
- <!-- copy dependencies to build/pack/lib, it only takes a second so don't bother with uptodate checks -->
- <taskdef resource="scala/tools/ant/antlib.xml" classpathref="docs.compiler.path"/>
- </target>
+ <target name="pack.done" depends="quick.done, pack.bin"/>
<!-- ===========================================================================
@@ -1431,68 +1463,121 @@ TODO:
<!-- ===========================================================================
OSGi Artifacts
============================================================================ -->
- <target name="osgi.done" depends="pack.done">
+ <macrodef name="make-bundle">
+ <attribute name="name" />
+ <attribute name="bundleName" description="A value for Bundle-Name, usually a textual description"/>
+ <attribute name="jar" default="${build-pack.dir}/lib/@{name}.jar" />
+ <element name="srcs" description="Sources for this bundle" optional="true" implicit="true"/>
+ <attribute name="src" default="true"/>
+ <attribute name="version" default="${osgi.version.number}"/>
+ <attribute name="namesuffix" default=""/>
+ <attribute name="pkg" default=""/>
+
+ <sequential>
+ <copy file="${src.dir}/build/bnd/@{name}.bnd" tofile="${build-osgi.dir}/@{name}.bnd" overwrite="true">
+ <filterset>
+ <filter token="VERSION" value="${osgi.version.number}" />
+ <filter token="SCALA_BINARY_VERSION" value="${scala.binary.version}" />
+ </filterset>
+ </copy>
+ <bnd classpath="@{jar}"
+ eclipse="false"
+ failok="false"
+ exceptions="true"
+ files="${build-osgi.dir}/@{name}.bnd"
+ output="${build-osgi.dir}"/>
+ <if><equals arg1="@{src}" arg2="true"/><then>
+ <!--
+ A jar-like task that creates an OSGi source bundle. It adds the required MANIFEST.MF headers that allow
+ Eclipse to match sources with the corresponding binaries.
+ -->
+ <jar whenmanifestonly="fail" destfile="${build-osgi.dir}/@{name}-src.jar">
+ <srcs/>
+ <manifest>
+ <attribute name="Manifest-Version" value="1.0"/>
+ <attribute name="Bundle-Name" value="@{bundleName} Sources"/>
+ <attribute name="Bundle-SymbolicName" value="org.scala-lang.@{pkg}@{name}@{namesuffix}.source"/>
+ <attribute name="Bundle-Version" value="@{version}"/>
+ <attribute name="Eclipse-SourceBundle" value="org.scala-lang.@{pkg}@{name}@{namesuffix};version=&quot;@{version}&quot;;roots:=&quot;.&quot;" />
+ </manifest>
+ </jar>
+ </then></if>
+ </sequential>
+ </macrodef>
+
+
+ <target name="osgi.core" depends="pack.core">
<mkdir dir="${build-osgi.dir}"/>
- <!-- simplify fixing pom versions -->
- <macrodef name="make-bundle">
- <attribute name="name" />
- <attribute name="version" />
- <attribute name="jar" default="${build-pack.dir}/lib/@{name}.jar" />
- <sequential>
- <copy file="${src.dir}/build/bnd/@{name}.bnd" tofile="${build-osgi.dir}/@{name}.bnd" overwrite="true">
- <filterset>
- <filter token="VERSION" value="@{version}" />
- </filterset>
- </copy>
- <bnd classpath="@{jar}"
- eclipse="false"
- failok="false"
- exceptions="true"
- files="${build-osgi.dir}/@{name}.bnd"
- output="${build-osgi.dir}"/>
- </sequential>
- </macrodef>
- <macrodef name="make-plugin-bundle">
- <attribute name="name" />
- <attribute name="version" />
- <sequential>
- <copy file="${src.dir}/build/bnd/@{name}.bnd" tofile="${build-osgi.dir}/@{name}.bnd" overwrite="true">
- <filterset>
- <filter token="VERSION" value="@{version}" />
- </filterset>
- </copy>
- <bnd classpath="${build-pack.dir}/misc/scala-devel/plugins/@{name}.jar"
- eclipse="false"
- failok="false"
- exceptions="true"
- files="${build-osgi.dir}/@{name}.bnd"
- output="${build-osgi.dir}"/>
- </sequential>
- </macrodef>
+ <uptodate property="osgi.bundles.available" targetfile="${build-osgi.dir}/bundles.core.complete">
+ <srcfiles dir="${basedir}">
+ <include name="build.xml"/>
+ <include name="src/build/bnd/*.bnd"/>
+ <include name="${build-pack.dir}/lib/scala-library.jar"/>
+ <include name="${build-pack.dir}/lib/scala-reflect.jar"/>
+ <include name="${build-pack.dir}/lib/scala-compiler.jar"/>
+ </srcfiles>
+ </uptodate>
+
+ <if><not><isset property="osgi.bundles.available"/></not><then>
+ <stopwatch name="osgi.core.timer"/>
+ <make-bundle name="scala-library" bundleName="Scala Library">
+ <fileset dir="${src.dir}/library"/>
+ <fileset dir="${src.dir}/continuations/library"/>
+ </make-bundle>
+
+ <make-bundle name="scala-reflect" bundleName="Scala Reflect">
+ <fileset dir="${src.dir}/reflect"/>
+ </make-bundle>
+
+ <make-bundle name="scala-compiler" bundleName="Scala Compiler">
+ <fileset dir="${src.dir}/compiler"/>
+ <fileset dir="${src.dir}/repl"/>
+ <fileset dir="${src.dir}/scaladoc"/>
+ <fileset dir="${src.dir}/interactive"/>
+ </make-bundle>
+
+ <touch file="${build-osgi.dir}/bundles.core.complete" verbose="no"/>
+ <stopwatch name="osgi.core.timer" action="total"/>
+ </then></if>
+ </target>
- <uptodate property="osgi.bundles.available" targetfile="${build-osgi.dir}/bundles.complete">
+ <target name="osgi.done" depends="pack.done, osgi.core">
+ <uptodate property="osgi.bundles.available" targetfile="${build-osgi.dir}/bundles.all.complete">
<srcfiles dir="${basedir}">
<include name="build.xml"/>
<include name="src/build/bnd/*.bnd"/>
+ <include name="${build-pack.dir}/lib/scala-actors.jar"/>
+ <include name="${build-pack.dir}/misc/scala-devel/plugins/continuations.jar"/>
+ <include name="${build-pack.dir}/lib/scala-swing.jar"/>
+ <include name="${scala-parser-combinators}"/>
+ <include name="${scala-xml}"/>
</srcfiles>
</uptodate>
<if><not><isset property="osgi.bundles.available"/></not><then>
- <stopwatch name="osgi.bundle.timer"/>
- <make-bundle name="scala-library" version="${osgi.version.number}" />
- <make-bundle name="scala-actors" version="${osgi.version.number}" />
- <make-bundle name="scala-parser-combinators" version="${osgi.version.number}" jar="${scala-parser-combinators}"/>
- <make-bundle name="scala-reflect" version="${osgi.version.number}" />
- <make-bundle name="scala-compiler" version="${osgi.version.number}" />
- <make-plugin-bundle name="continuations" version="${osgi.version.number}" />
- <make-bundle name="scala-xml" version="${osgi.version.number}" jar="${scala-xml}"/>
- <touch file="${build-osgi.dir}/bundles.complete" verbose="no"/>
+ <stopwatch name="osgi.all.timer"/>
+
+
+ <make-bundle name="scala-actors" bundleName="Scala Actors">
+ <fileset dir="${src.dir}/actors"/>
+ </make-bundle>
+
+ <make-bundle name="continuations" pkg="plugins." jar="${build-pack.dir}/misc/scala-devel/plugins/continuations.jar" bundleName="Scala Continuations Plugin">
+ <fileset dir="${src.dir}/continuations/plugin"/>
+ </make-bundle>
<if><isset property="has.java6"/><then>
- <make-bundle name="scala-swing" version="${osgi.version.number}"/></then>
- </if>
- <stopwatch name="osgi.bundle.timer" action="total"/>
+ <make-bundle name="scala-swing" version="${osgi.version.number}" bundleName="Scala Swing">
+ <fileset dir="${src.dir}/swing"/>
+ </make-bundle>
+ </then></if>
+
+ <make-bundle name="scala-parser-combinators" pkg="modules." jar="${scala-parser-combinators}" src="false" bundleName="Scala Parser Combinators"/>
+ <make-bundle name="scala-xml" pkg="modules." jar="${scala-xml}" src="false" bundleName="Scala XML"/>
+
+ <touch file="${build-osgi.dir}/bundles.all.complete" verbose="no"/>
+ <stopwatch name="osgi.all.timer" action="total"/>
</then></if>
</target>
@@ -1537,6 +1622,7 @@ TODO:
<stopwatch name="test.osgi.timer"/>
<mkdir dir="${test.osgi.classes}"/>
+ <echo message="Running OSGi JUnit tests. Output in ${build-osgi.dir}"/>
<junit fork="yes" haltonfailure="yes">
<classpath refid="test.osgi.compiler.build.path"/>
<batchtest fork="yes" todir="${build-osgi.dir}">
@@ -1544,7 +1630,7 @@ TODO:
<include name="**/*Test.class"/>
</fileset>
</batchtest>
- <formatter type="brief" /> <!-- silenced by having it use a file; I tried for an hour to use other formatters but classpath issues drove me to this usefile="false" -->
+ <formatter type="xml" /> <!-- silenced by having it use a file; I tried for an hour to use other formatters but classpath issues drove me to this usefile="false" -->
</junit>
<stopwatch name="test.osgi.timer" action="total"/>
</target>
@@ -1746,7 +1832,7 @@ TODO:
<!-- ===========================================================================
DOCUMENTATION
============================================================================ -->
- <target name="docs.start" depends="pack.done">
+ <target name="docs.start" depends="scaladoc.task" unless="docs.skip">
<!-- Set the github commit scaladoc sources point to -->
<!-- For releases, look for the tag with the same name as the maven version -->
<condition property="scaladoc.git.commit" value="v${maven.version.number}">
@@ -1767,76 +1853,64 @@ TODO:
<property name="scaladoc.no.prefixes" value="no"/>
</target>
- <target name="docs.lib" depends="docs.start">
- <staged-uptodate stage="docs" project="library">
- <check><srcfiles dir="${src.dir}">
- <include name="library/**"/>
- <include name="swing/**"/>
- <include name="actors/**"/>
- <include name="reflect/**"/>
- <include name="continuations/library/**"/>
- </srcfiles></check>
- <do>
- <stopwatch name="docs.lib.timer"/>
- <mkdir dir="${build-docs.dir}/library"/>
- <!-- last three attributes not supported by staged-docs: -->
- <scaladoc
- destdir="${build-docs.dir}/library"
- doctitle="Scala Standard Library API (Scaladoc)"
- docversion="${version.number}"
- docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
- sourcepath="${src.dir}"
- classpathref="docs.compiler.path"
- addparams="${scalac.args.all}"
- docRootContent="${src.dir}/library/rootdoc.txt"
- implicits="on"
- diagrams="on"
- groups="on"
- rawOutput="${scaladoc.raw.output}"
- noPrefixes="${scaladoc.no.prefixes}"
- docfooter="epfl"
- docUncompilable="${src.dir}/library-aux"
- skipPackages="scala.reflect.macros.internal:scala.reflect.internal:scala.reflect.io:scala.concurrent.impl">
- <src>
- <files includes="${src.dir}/actors"/>
- <files includes="${src.dir}/library"/>
- <files includes="${src.dir}/reflect"/>
- <files includes="${src.dir}/swing"/>
- <files includes="${src.dir}/continuations/library"/>
- </src>
- <include name="**/*.scala"/>
- <exclude name="reflect/Code.scala"/>
- <exclude name="reflect/Print.scala"/>
- <exclude name="reflect/Symbol.scala"/>
- <exclude name="reflect/Tree.scala"/>
- <exclude name="reflect/Type.scala"/>
- <exclude name="runtime/*$.scala"/>
- <exclude name="runtime/ScalaRunTime.scala"/>
- <exclude name="runtime/StringAdd.scala"/>
- </scaladoc>
- <stopwatch name="docs.lib.timer" action="total"/>
- </do>
- </staged-uptodate>
+ <target name="docs.lib" depends="docs.start" unless="docs.skip">
+ <staged-docs project="library" title="Scala Standard Library" docroot="rootdoc.txt"
+ skipPackages="scala.concurrent.impl">
+ <include name="**/*.scala"/>
+ <exclude name="runtime/*$.scala"/>
+ <exclude name="runtime/ScalaRunTime.scala"/>
+ <exclude name="runtime/StringAdd.scala"/>
+ </staged-docs>
+ </target>
+
+ <target name="docs.reflect" depends="docs.start" unless="docs.skip">
+ <staged-docs project="reflect" title="Scala Reflection Library"
+ skipPackages="scala.reflect.macros.internal:scala.reflect.internal:scala.reflect.io">
+ <include name="**/*.scala"/>
+ <exclude name="reflect/Code.scala"/>
+ <exclude name="reflect/Print.scala"/>
+ <exclude name="reflect/Symbol.scala"/>
+ <exclude name="reflect/Tree.scala"/>
+ <exclude name="reflect/Type.scala"/>
+ </staged-docs>
</target>
- <target name="docs.comp" depends="docs.start">
+ <target name="docs.comp" depends="docs.start" unless="docs.skip">
<staged-docs project="compiler" title="Scala Compiler" docroot="rootdoc.txt">
<include name="**/*.scala"/>
</staged-docs>
</target>
- <target name="docs.scalap" depends="docs.start">
+ <target name="docs.actors" depends="docs.start" unless="docs.skip">
+ <staged-docs project="actors" title="Scala Actors Library">
+ <include name="**/*.scala"/>
+ </staged-docs>
+ </target>
+
+ <target name="docs.swing" depends="docs.start" unless="docs.skip">
+ <staged-docs project="swing" title="Scala Swing Library">
+ <include name="**/*.scala"/>
+ </staged-docs>
+ </target>
+
+ <target name="docs.scalap" depends="docs.start" unless="docs.skip">
<staged-docs project="scalap" title="Scalap">
<include name="**/*.scala"/>
</staged-docs>
</target>
- <target name="docs.continuations-plugin" depends="docs.start">
+ <target name="docs.continuations-plugin" depends="docs.start" unless="docs.skip">
<staged-docs project="continuations-plugin" dir="continuations/plugin" title="Delimited Continuations Compiler Plugin">
<include name="**/*.scala"/>
</staged-docs>
</target>
+ <target name="docs.continuations-library" depends="docs.start" unless="docs.skip">
+ <staged-docs project="continuations-library" dir="continuations/library" title="Delimited Continuations Library">
+ <include name="**/*.scala"/>
+ </staged-docs>
+ </target>
+
<target name="docs.man" depends="docs.start">
<staged-uptodate stage="docs" project="manual">
<check><srcfiles dir="${src.dir}/manual"/></check>
@@ -1844,16 +1918,16 @@ TODO:
<mkdir dir="${build.dir}/manmaker/classes"/>
<scalac
destdir="${build.dir}/manmaker/classes"
- classpathref="docs.compiler.path"
+ classpathref="manual.build.path"
srcdir="${src.dir}/manual"
includes="**/*.scala"
- addparams="${scalac.args.all}"/>
+ addparams="${scalac.args.all} -language:implicitConversions"/>
<mkdir dir="${build-docs.dir}/manual/man/man1"/>
<mkdir dir="${build-docs.dir}/manual/html"/>
<mkdir dir="${build-docs.dir}/manual/genman/man1"/>
<taskdef name="genman"
classname="scala.tools.docutil.ManMaker"
- classpathref="manual.classpath"/>
+ classpathref="manual.build.path"/>
<genman command="fsc, scala, scalac, scaladoc, scalap"
htmlout="${build-docs.dir}/manual/html"
manout="${build-docs.dir}/manual/genman"/>
@@ -1862,7 +1936,7 @@ TODO:
srcdir="${build-docs.dir}/manual/genman"
destdir="${build-docs.dir}/manual/man"
eol="unix" includes="**/*.1"/>
- <copy todir="${build-docs.dir}/manual/html">
+ <copy todir="${build-docs.dir}/manual/html" overwrite="true">
<fileset dir="${src.dir}/manual/scala/tools/docutil/resources">
<include name="**/*.html"/>
<include name="**/*.css"/>
@@ -1874,55 +1948,60 @@ TODO:
</staged-uptodate>
</target>
- <target name="docs.done" depends="docs.comp, docs.man, docs.lib, docs.scalap, docs.continuations-plugin"/>
+ <target name="docs.core" depends="docs.lib, docs.reflect, docs.comp" unless="docs.skip"/>
+ <target name="docs.done" depends="docs.core, docs.actors, docs.swing, docs.scalap, docs.continuations-plugin, docs.continuations-library" unless="docs.skip"/>
<!-- ===========================================================================
DISTRIBUTION
============================================================================ -->
- <target name="dist.base" depends="pack.done, osgi.done">
+ <target name="dist.base" depends="osgi.done">
<property name="dist.name" value="scala-${version.number}"/>
<property name="dist.dir" value="${dists.dir}/${dist.name}"/>
<macrodef name="copy-bundle">
<attribute name="name" />
+ <attribute name="pkg" default=""/>
+ <attribute name="lib" default="lib/"/>
+ <attribute name="srcjar" default="${build-osgi.dir}/@{name}-src.jar"/>
+
<sequential>
- <copy file="${build-osgi.dir}/org.scala-lang.@{name}.jar"
- tofile="${dist.dir}/lib/@{name}.jar"/>
- </sequential>
- </macrodef>
- <macrodef name="copy-plugin-bundle">
- <attribute name="name" />
- <sequential>
- <copy file="${build-osgi.dir}/org.scala-lang.plugins.@{name}.jar"
- tofile="${dist.dir}/misc/scala-devel/plugins/@{name}.jar"
- overwrite="yes"/>
+ <copy tofile="${dist.dir}/@{lib}@{name}.jar" file="${build-osgi.dir}/org.scala-lang.@{pkg}@{name}.jar" overwrite="true"/>
+ <copy tofile="${dist.dir}/src/@{name}-src.jar" file="@{srcjar}" overwrite="true"/>
</sequential>
</macrodef>
<mkdir dir="${dist.dir}/lib"/>
- <copy toDir="${dist.dir}/lib">
- <fileset dir="${build-pack.dir}/lib">
- <include name="scalap.jar"/>
- </fileset>
- </copy>
+ <mkdir dir="${dist.dir}/misc/scala-devel/plugins"/>
+ <mkdir dir="${dist.dir}/src"/>
- <!-- TODO -->
- <copy todir="${dist.dir}/lib">
+ <copy todir="${dist.dir}/lib" overwrite="true">
<resources refid="repl.fileset"/>
<mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper"
from="${repl.deps.versions}" to="flatten"/>
</copy>
- <mkdir dir="${dist.dir}/bin"/>
- <!-- TODO - Stop being inefficient and don't copy OSGi bundles overtop other jars. -->
+ <!-- copy classfile jars and source jars from osgi build to dist -->
<copy-bundle name="scala-library"/>
- <copy-bundle name="scala-xml"/>
- <copy-bundle name="scala-parser-combinators"/>
<copy-bundle name="scala-reflect"/>
+ <copy-bundle name="scala-compiler"/>
+
<copy-bundle name="scala-swing"/>
<copy-bundle name="scala-actors"/>
- <copy-bundle name="scala-compiler"/>
- <copy toDir="${dist.dir}/bin">
+
+ <copy-bundle pkg="modules." name="scala-xml" srcjar="${scala-xml-sources}"/>
+ <copy-bundle pkg="modules." name="scala-parser-combinators" srcjar="${scala-parser-combinators-sources}"/>
+ <copy-bundle pkg="plugins." name="continuations" lib="misc/scala-devel/plugins/"/>
+
+ <!-- scalap -->
+ <copy toDir="${dist.dir}/lib" overwrite="true">
+ <fileset dir="${build-pack.dir}/lib">
+ <include name="scalap.jar"/>
+ </fileset>
+ </copy>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scalap-src.jar" basedir="${src.dir}/scalap"/>
+
+ <mkdir dir="${dist.dir}/bin"/>
+ <copy toDir="${dist.dir}/bin" overwrite="true">
<fileset dir="${build-pack.dir}/bin"/>
</copy>
<chmod perm="ugo+rx" file="${dist.dir}/bin/scala"/>
@@ -1930,118 +2009,251 @@ TODO:
<chmod perm="ugo+rx" file="${dist.dir}/bin/scaladoc"/>
<chmod perm="ugo+rx" file="${dist.dir}/bin/fsc"/>
<chmod perm="ugo+rx" file="${dist.dir}/bin/scalap"/>
- <mkdir dir="${dist.dir}/misc/scala-devel/plugins"/>
- <copy-plugin-bundle name="continuations"/>
</target>
<target name="dist.doc" depends="dist.base, docs.done">
<mkdir dir="${dist.dir}/doc/scala-devel-docs"/>
- <copy toDir="${dist.dir}/doc/scala-devel-docs">
+ <copy toDir="${dist.dir}/doc/scala-devel-docs" overwrite="true" flatten="true">
<file file="${scala-xml-javadoc}"/>
<file file="${scala-parser-combinators-javadoc}"/>
</copy>
- <copy file="${docs.dir}/LICENSE" toDir="${dist.dir}/doc"/>
- <copy file="${docs.dir}/README" toDir="${dist.dir}/doc"/>
+ <copy file="${docs.dir}/LICENSE" toDir="${dist.dir}/doc" overwrite="true"/>
+ <copy file="${docs.dir}/README" toDir="${dist.dir}/doc" overwrite="true"/>
<mkdir dir="${dist.dir}/doc/scala-devel-docs/api"/>
- <copy toDir="${dist.dir}/doc/scala-devel-docs/api">
+ <copy toDir="${dist.dir}/doc/scala-devel-docs/api" overwrite="true">
<fileset dir="${build-docs.dir}/library"/>
</copy>
<mkdir dir="${dist.dir}/doc/scala-devel-docs/examples"/>
- <copy toDir="${dist.dir}/doc/scala-devel-docs/examples">
+ <copy toDir="${dist.dir}/doc/scala-devel-docs/examples" overwrite="true">
<fileset dir="${docs.dir}/examples">
<exclude name="**/*.desired.sha1"/>
</fileset>
</copy>
- <mkdir dir="${dist.dir}/doc/scala-devel-docs/tools"/>
- <copy toDir="${dist.dir}/doc/scala-devel-docs/tools">
- <fileset dir="${build-docs.dir}/manual/html"/>
- </copy>
<copy file="${src.dir}/swing/doc/README"
- toFile="${dist.dir}/doc/scala-devel-docs/README.scala-swing"/>
+ toFile="${dist.dir}/doc/scala-devel-docs/README.scala-swing"
+ overwrite="true"/>
</target>
- <target name="dist.man" depends="dist.base">
+ <target name="dist.man" depends="dist.base, docs.man">
<mkdir dir="${dist.dir}/man"/>
- <copy toDir="${dist.dir}/man">
+ <copy toDir="${dist.dir}/man" overwrite="true">
<fileset dir="${build-docs.dir}/manual/man"/>
</copy>
+ <mkdir dir="${dist.dir}/doc/scala-devel-docs/tools"/>
+ <copy toDir="${dist.dir}/doc/scala-devel-docs/tools" overwrite="true">
+ <fileset dir="${build-docs.dir}/manual/html"/>
+ </copy>
</target>
- <!--
- A jar-like task that creates an OSGi source bundle. It adds the required MANIFEST.MF headers that allow
- Eclipse to match sources with the corresponding binaries.
- -->
- <macrodef name="osgi.source.bundle">
- <attribute name="destfile" description="The jar file name"/>
- <attribute name="symbolicName" description="The original bundle symbolic name (without .source at the end)"/>
- <attribute name="bundleName" description="A value for Bundle-Name, usually a textual description"/>
- <element name="file-sets" description="A sequence of fileset elements to be included in the jar" optional="true" implicit="true"/>
+
+ <target name="dist.partial" depends="dist.base">
+ <if><not><os family="windows"/></not><then>
+ <symlink link="${dists.dir}/latest" resource="${dist.name}" overwrite="true"/>
+ </then><else> <!-- XXX THIS PROBABLY DOES NOT WORK: copying must happen last during dist.done! is this guaranteed? -->
+ <copydir dest="${dists.dir}/latest" src="${dist.dir}"/>
+ </else></if>
+ </target>
+
+ <target name="dist.done" depends="dist.doc, dist.man, dist.partial"/>
+
+<!-- ===========================================================================
+MAIN DISTRIBUTION PACKAGING
+============================================================================ -->
+
+ <target name="pack-archives.done" depends="dist.done, docs.done">
+ <macrodef name="tarz">
+ <attribute name="name" description="The tar file name (without extension)."/>
+ <element name="file-sets" description="A sequence of fileset elements to be included in the tar balls." optional="false" implicit="true"/>
+
+ <sequential>
+ <tar destfile="@{name}.tar" compression="none" longfile="gnu">
+ <file-sets/>
+ </tar>
+ <gzip src="@{name}.tar" destfile="@{name}.tgz"/>
+ <if>
+ <not><equals arg1="${archives.skipxz}" arg2="true" /></not>
+ <then>
+ <exec executable="xz" failifexecutionfails="false">
+ <arg line="-k -9e -S .xz @{name}.tar"/>
+ </exec>
+ <move file="@{name}.tar.xz" tofile="@{name}.txz" failonerror="false"/>
+ </then>
+ </if>
+ <delete file="@{name}.tar" />
+ </sequential>
+ </macrodef>
+
+ <mkdir dir="${dists.dir}/archives"/>
+ <property name="archive-base" value="${dists.dir}/archives/${dist.name}"/>
+
+ <tarz name="${archive-base}">
+ <tarfileset dir="${dist.dir}" prefix="${dist.name}" includes="bin/**" mode="755"/>
+ <tarfileset dir="${dist.dir}" prefix="${dist.name}" excludes="bin/**"/>
+ </tarz>
+
+ <zip destfile="${archive-base}.zip">
+ <zipfileset prefix="${dist.name}" dir="${dist.dir}"/>
+ </zip>
+
+ <if><not><isset property="docs.skip"/></not><then>
+ <tarz name="${archive-base}-devel-docs">
+ <tarfileset dir="${dist.dir}/doc/scala-devel-docs" prefix="${dist.name}-devel-docs"/>
+ </tarz>
+ </then></if>
+
+ <tarz name="${archive-base}-sources">
+ <tarfileset dir="${basedir}" prefix="${dist.name}-sources">
+ <exclude name="bin/**"/>
+ <exclude name="build/**"/>
+ <exclude name="debian/**"/>
+ <exclude name="dists/**"/>
+ <exclude name="logs/**"/>
+ <exclude name="sandbox/**"/>
+ <exclude name="test/partest"/>
+ <exclude name=".git"/>
+ </tarfileset>
+ <tarfileset dir="${basedir}" prefix="${dist.name}-sources" filemode="755">
+ <include name="test/partest"/>
+ </tarfileset>
+ </tarz>
+
+ <!-- checksum everything -->
+ <checksum fileext=".md5">
+ <fileset dir="${dists.dir}/archives">
+ <include name="${dist.name}*"/>
+ </fileset>
+ </checksum>
+
+    <!-- UNUSED: create a 'scala-latest-sources.tgz' alias (or a copy, on Windows);
+         we rely on GitHub's source download feature instead.
+ <if><isset property="os.win"/><then>
+ <copy tofile="${dists.dir}/archives/scala-latest-sources.tgz" overwrite="true">
+ <fileset dir="${dists.dir}/archives">
+ <include name="scala-${version.number}-sources.tgz"/>
+ </fileset>
+ </copy>
+ </then><else>
+         (be sure to use a relative symlink to make the distribution portable;
+          `resource` is relative to the directory of `link`)
+ <symlink link="${dists.dir}/archives/scala-latest-sources.tgz"
+ resource="scala-${version.number}-sources.tgz"
+ overwrite="true"/>
+ </else></if> -->
+ </target>
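Taken together, pack-archives.done leaves the following files in ${dists.dir}/archives, shown here for a hypothetical dist.name of scala-2.11.0 (the .txz variants are skipped when archives.skipxz=true, and the -devel-docs archives when docs.skip is set):

    scala-2.11.0.tgz              scala-2.11.0.txz              scala-2.11.0.zip
    scala-2.11.0-devel-docs.tgz   scala-2.11.0-devel-docs.txz
    scala-2.11.0-sources.tgz      scala-2.11.0-sources.txz

plus one .md5 checksum file per archive.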
+
+ <macrodef name="mvn-package">
+ <attribute name="dir" default=""/>
+ <attribute name="pkg" default=""/>
+ <attribute name="project"/>
+ <attribute name="name" default="scala-@{project}"/>
+ <attribute name="jarsuffix" default=""/>
<sequential>
- <jar whenmanifestonly="fail" destfile="@{destFile}">
- <file-sets/>
- <manifest>
- <attribute name="Manifest-Version" value="1.0"/>
- <attribute name="Bundle-Name" value="@{bundleName}"/>
- <attribute name="Bundle-SymbolicName" value="@{symbolicName}.source"/>
- <attribute name="Bundle-Version" value="${osgi.version.number}"/>
- <attribute name="Eclipse-SourceBundle" value="@{symbolicName};version=&quot;${osgi.version.number}&quot;;roots:=&quot;.&quot;" />
- </manifest>
- </jar>
+ <local name="artifact-base"/> <property name="artifact-base" value="${maven-base}/@{dir}@{name}/@{name}"/>
+
+ <mkdir dir="${maven-base}/@{dir}@{name}"/>
+ <copy tofile="${artifact-base}.jar" file="${build-osgi.dir}/org.scala-lang.@{pkg}@{name}@{jarsuffix}.jar" overwrite="true"/>
+ <copy tofile="${artifact-base}-src.jar" file="${build-osgi.dir}/@{name}-src.jar" overwrite="true"/>
+ <copy tofile="${artifact-base}-pom.xml" file="${src.dir}/build/maven/@{dir}/@{name}-pom.xml" overwrite="true"/>
+
+ <if><not><isset property="docs.skip"/></not><then>
+ <jar destfile="${artifact-base}-docs.jar" basedir="${build-docs.dir}/@{project}" whenmanifestonly="fail">
+ <include name="**/*"/>
+ </jar>
+ </then></if>
</sequential>
</macrodef>
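To make the macro concrete: with the defaults above, <mvn-package project="library"/> (as invoked in pack-maven.core just below, where maven-base is defined) expands to roughly

    <mkdir dir="${maven-base}/scala-library"/>
    <copy tofile="${maven-base}/scala-library/scala-library.jar"
          file="${build-osgi.dir}/org.scala-lang.scala-library.jar" overwrite="true"/>
    <copy tofile="${maven-base}/scala-library/scala-library-src.jar"
          file="${build-osgi.dir}/scala-library-src.jar" overwrite="true"/>
    <copy tofile="${maven-base}/scala-library/scala-library-pom.xml"
          file="${src.dir}/build/maven/scala-library-pom.xml" overwrite="true"/>
    <!-- only when docs.skip is not set -->
    <jar destfile="${maven-base}/scala-library/scala-library-docs.jar"
         basedir="${build-docs.dir}/library" whenmanifestonly="fail">
      <include name="**/*"/>
    </jar>

which is exactly the jar / -src.jar / -pom.xml / -docs.jar quadruple that deploy-one in maven-deploy.xml later picks up.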
- <target name="dist.src" depends="dist.base">
- <mkdir dir="${dist.dir}/src"/>
- <copy toDir="${dist.dir}/src">
- <file file="${scala-xml-sources}"/>
- <file file="${scala-parser-combinators-sources}"/>
- </copy>
+ <target name="pack-maven.core" depends="osgi.core, docs.core">
+ <property name="maven-base" value="${dists.dir}/maven/${version.number}"/>
+ <mkdir dir="${maven-base}"/>
- <osgi.source.bundle destfile="${dist.dir}/src/scala-library-src.jar"
- symbolicName="org.scala-lang.scala-library"
- bundleName="Scala Library Sources">
- <fileset dir="${src.dir}/library"/>
- <fileset dir="${src.dir}/continuations/library"/>
- </osgi.source.bundle>
- <osgi.source.bundle destfile="${dist.dir}/src/scala-actors-src.jar"
- symbolicName="org.scala-lang.scala-actors"
- bundleName="Scala Actors Sources">
- <fileset dir="${src.dir}/actors"/>
- </osgi.source.bundle>
- <osgi.source.bundle destfile="${dist.dir}/src/scala-compiler-src.jar"
- symbolicName="org.scala-lang.scala-compiler"
- bundleName="Scala Compiler Sources">
- <fileset dir="${src.dir}/compiler"/>
- <fileset dir="${src.dir}/repl"/>
- <fileset dir="${src.dir}/scaladoc"/>
- <fileset dir="${src.dir}/interactive"/>
- <fileset dir="${src.dir}/continuations/plugin"/>
- </osgi.source.bundle>
- <osgi.source.bundle destfile="${dist.dir}/src/scala-swing-src.jar"
- symbolicName="org.scala-lang.scala-swing"
- bundleName="Scala Swing Sources">
- <fileset dir="${src.dir}/swing"/>
- </osgi.source.bundle>
- <osgi.source.bundle destfile="${dist.dir}/src/scala-reflect-src.jar"
- symbolicName="org.scala-lang.scala-reflect"
- bundleName="Scala Reflect Sources">
- <fileset dir="${src.dir}/reflect"/>
- </osgi.source.bundle>
- <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scalap-src.jar" basedir="${src.dir}/scalap"/>
+ <mvn-package project="library"/>
+ <mvn-package project="reflect"/>
+ <mvn-package project="compiler"/>
</target>
- <target name="dist.partial" depends="dist.base">
- <if><not><os family="windows"/></not><then>
- <symlink link="${dists.dir}/latest" resource="${dist.name}" overwrite="yes"/>
- </then><else> <!-- XXX THIS PROBABLY DOES NOT WORK: copying must happen last during dist.done! is this guaranteed? -->
- <copydir dest="${dists.dir}/latest" src="${dist.dir}"/>
+ <target name="pack-maven.base" depends="pack-maven.core, osgi.done, docs.done">
+ <mvn-package project="swing"/>
+ <mvn-package project="actors"/>
+ <mvn-package project="continuations-plugin" name="continuations" dir="plugins/" pkg="plugins."/>
+
+ <!-- don't bother fitting scalap into the mould: it will move out soon -->
+ <copy tofile="${maven-base}/scalap/scalap-pom.xml" file="${src.dir}/build/maven/scalap-pom.xml" overwrite="true"/>
+ <copy tofile="${maven-base}/scalap/scalap.jar" file="${build-pack.dir}/lib/scalap.jar" overwrite="true"/>
+ <jar destfile="${maven-base}/scalap/scalap-src.jar" basedir="${src.dir}/scalap" whenmanifestonly="fail"/>
+ <if><not><isset property="docs.skip"/></not><then>
+ <jar destfile="${maven-base}/scalap/scalap-docs.jar" basedir="${build-docs.dir}/scalap"/>
+ </then></if>
+ </target>
+
+ <target name="pack-maven.done" depends="pack-maven.base">
+    <!-- Create the dists/maven/latest alias (a symlink, or a copy on Windows) and copy the maven-deploy Ant build into ${maven-base}. -->
+ <if><isset property="os.win"/><then>
+ <copy todir="${dists.dir}/maven/latest" overwrite="true">
+ <fileset dir="${maven-base}"/>
+ </copy>
+ </then><else>
+ <symlink link="${dists.dir}/maven/latest"
+ resource="${version.number}"
+ overwrite="true"/>
</else></if>
+ <!-- copy build file and its dependencies -->
+ <copy todir="${maven-base}"
+ file="${lib-ant.dir}/ant-contrib.jar" overwrite="true"/>
+ <copy todir="${maven-base}"
+ file="${lib-ant.dir}/maven-ant-tasks-2.1.1.jar" overwrite="true"/>
+ <copy tofile="${maven-base}/build.xml"
+ file="${src.dir}/build/maven/maven-deploy.xml"/>
+ <!-- export properties for use when deploying -->
+ <echoproperties destfile="${maven-base}/build.properties"/>
</target>
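The echoproperties dump is what lets the copied maven-deploy.xml run standalone: its boot.maven target reads build.properties back in (see <property file="build.properties"/> further down in this patch), so the values referenced by the POM filtersets are available at deploy time. The property names below come from this patch; the values are placeholders only:

    maven.version.number=2.11.0-SNAPSHOT
    scala.binary.version=2.11
    scala-xml.version.number=1.0.0
    scala-parser-combinators.version.number=1.0.0
    jline.version=2.11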
- <target name="dist.done" depends="dist.doc, dist.man, dist.src, dist.partial"/>
+  <!-- These properties live in a separate target, which pack-maven.done does not depend on, so that they stay out of ${maven-base}/build.properties as dumped by pack-maven.done. -->
+ <target name="init.maven" depends="init">
+ <property name="remote.snapshot.repository" value="https://oss.sonatype.org/content/repositories/snapshots" />
+ <property name="remote.release.repository" value="https://oss.sonatype.org/service/local/staging/deploy/maven2" />
+ <property name="local.snapshot.repository" value="${user.home}/.m2/repository" />
+ <property name="local.release.repository" value="${user.home}/.m2/repository" />
+
+ <property name="repository.credentials.id" value="sonatype-nexus" />
+ <property name="settings.file" value="${user.home}/.m2/settings.xml" />
+
+ <if><contains string="${maven.version.number}" substring="-SNAPSHOT"/><then>
+ <property name="remote.repository" value="${remote.snapshot.repository}"/>
+ <property name="local.repository" value="${local.snapshot.repository}"/>
+ </then><else>
+ <property name="remote.repository" value="${remote.release.repository}"/>
+ <property name="local.repository" value="${local.release.repository}"/>
+ </else></if>
+ </target>
+
+
+<!-- ===========================================================================
+ MAVEN PUBLISHING
+============================================================================ -->
+ <!-- TODO: inline maven-deploy.xml here and remove it, once jenkins jobs no longer rely on it -->
+ <target name="publish" depends="pack-maven.base, init.maven" description="Publishes unsigned artifacts to the maven repo."> <deploy dir="${maven-base}/"/> </target>
+ <target name="publish.local" depends="pack-maven.base, init.maven" description="Publishes unsigned artifacts to the local maven repo."> <deploy dir="${maven-base}/" local="true"/> </target>
+ <target name="publish.signed" depends="pack-maven.base, init.maven" description="Publishes signed artifacts to the remote maven repo."> <deploy dir="${maven-base}/" signed="true"/> </target>
+
+ <target name="publish-core" depends="pack-maven.core, init.maven">
+ <deploy-one dir="${maven-base}/" name="scala-compiler" />
+ <deploy-one dir="${maven-base}/" name="scala-library" />
+ <deploy-one dir="${maven-base}/" name="scala-reflect" />
+ </target>
+ <target name="publish-core-local" depends="pack-maven.core, init.maven">
+ <deploy-one dir="${maven-base}/" name="scala-compiler" local="true"/>
+ <deploy-one dir="${maven-base}/" name="scala-library" local="true"/>
+ <deploy-one dir="${maven-base}/" name="scala-reflect" local="true"/>
+ </target>
+ <target name="publish-core-signed" depends="pack-maven.core, init.maven">
+ <deploy-one dir="${maven-base}/" name="scala-compiler" signed="true"/>
+ <deploy-one dir="${maven-base}/" name="scala-library" signed="true"/>
+ <deploy-one dir="${maven-base}/" name="scala-reflect" signed="true"/>
+ </target>
<!-- ===========================================================================
STABLE REFERENCE (STARR)
@@ -2064,7 +2276,7 @@ TODO:
</target>
<target name="starr.jars" depends="starr.start">
- <copy toDir="${lib.dir}" overwrite="yes">
+ <copy toDir="${lib.dir}" overwrite="true">
<fileset dir="${build-pack.dir}/lib">
<include name="scala-library.jar"/>
<include name="scala-reflect.jar"/>
diff --git a/docs/examples/plugintemplate/.classpath b/docs/examples/plugintemplate/.classpath
deleted file mode 100644
index e9069639ae..0000000000
--- a/docs/examples/plugintemplate/.classpath
+++ /dev/null
@@ -1,11 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="src" path="src"/>
- <classpathentry kind="src" path="doc/examples"/>
- <classpathentry kind="src" path="test"/>
- <classpathentry kind="con" path="ch.epfl.lamp.sdt.launching.SCALA_CONTAINER"/>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
- <classpathentry kind="lib" path="/Applications/eclipse-3.4M6a/plugins/ch.epfl.lamp.sdt.core_2.7.1.r14724-b20080421111118/lib/scala-compiler.jar"/>
- <classpathentry kind="lib" path="lib/scalatest.jar"/>
- <classpathentry kind="output" path="build/eclipse"/>
-</classpath>
diff --git a/docs/examples/plugintemplate/.project b/docs/examples/plugintemplate/.project
deleted file mode 100644
index 075b0c580c..0000000000
--- a/docs/examples/plugintemplate/.project
+++ /dev/null
@@ -1,18 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
- <name>plugintemplate</name>
- <comment></comment>
- <projects>
- </projects>
- <buildSpec>
- <buildCommand>
- <name>ch.epfl.lamp.sdt.core.scalabuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- </buildSpec>
- <natures>
- <nature>ch.epfl.lamp.sdt.core.scalanature</nature>
- <nature>org.eclipse.jdt.core.javanature</nature>
- </natures>
-</projectDescription>
diff --git a/src/build/bnd/scala-parser-combinators.bnd b/src/build/bnd/scala-parser-combinators.bnd
index d712a4ba2a..6ffc3b2760 100644
--- a/src/build/bnd/scala-parser-combinators.bnd
+++ b/src/build/bnd/scala-parser-combinators.bnd
@@ -1,5 +1,5 @@
Bundle-Name: Scala Parser Combinators Library
-Bundle-SymbolicName: org.scala-lang.scala-parser-combinators
+Bundle-SymbolicName: org.scala-lang.modules.scala-parser-combinators
ver: @VERSION@
Bundle-Version: ${ver}
Export-Package: *;version=${ver}
diff --git a/src/build/bnd/scala-xml.bnd b/src/build/bnd/scala-xml.bnd
index 6203c57dfe..5d64c05e65 100644
--- a/src/build/bnd/scala-xml.bnd
+++ b/src/build/bnd/scala-xml.bnd
@@ -1,5 +1,5 @@
Bundle-Name: Scala XML Library
-Bundle-SymbolicName: org.scala-lang.scala-xml
+Bundle-SymbolicName: org.scala-lang.modules.scala-xml
ver: @VERSION@
Bundle-Version: ${ver}
Export-Package: *;version=${ver}
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index 946b712b6c..9e4215d297 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -6,10 +6,106 @@
SuperSabbus extension for deploying a distribution to Maven. THIS FILE IS MEANT TO BE RUN STANDALONE IN THE MAVEN "distpack" DIRECTORY
</description>
- <target name="boot">
+ <macrodef name="deploy-one">
+ <attribute name="dir" default=""/>
+ <attribute name="name" />
+ <attribute name="local" default="false"/>
+ <attribute name="signed" default="false"/>
+
+ <sequential>
+ <local name="path"/> <property name="path" value="@{dir}@{name}/@{name}"/>
+
+ <echo>Deploying ${path}-[pom.xml|src.jar|docs.jar].</echo>
+
+ <copy file="${path}-pom.xml" tofile="${path}-pom-filtered.xml" overwrite="true">
+ <filterset>
+ <filter token="VERSION" value="${maven.version.number}" />
+ <filter token="SCALA_BINARY_VERSION" value="${scala.binary.version}" />
+ <filter token="XML_VERSION" value="${scala-xml.version.number}" />
+ <filter token="PARSER_COMBINATORS_VERSION" value="${scala-parser-combinators.version.number}" />
+ <filter token="RELEASE_REPOSITORY" value="${remote.release.repository}" />
+ <filter token="SNAPSHOT_REPOSITORY" value="${remote.snapshot.repository}" />
+ <filter token="JLINE_VERSION" value="${jline.version}" />
+ </filterset>
+ </copy>
+ <artifact:pom id="@{name}.pom" file="${path}-pom-filtered.xml" />
+
+ <if><equals arg1="@{signed}" arg2="false"/><then>
+ <if><equals arg1="@{local}" arg2="false"/><then>
+ <artifact:deploy file="${path}.jar" settingsFile="${settings.file}">
+ <artifact:remoteRepository url="${remote.repository}" id="${repository.credentials.id}" />
+ <artifact:pom refid="@{name}.pom" />
+ <artifact:attach type="jar" file="${path}-src.jar" classifier="sources" />
+ <artifact:attach type="jar" file="${path}-docs.jar" classifier="javadoc" />
+ </artifact:deploy>
+ </then><else>
+ <if><isset property="docs.skip"/><then>
+ <artifact:install file="${path}.jar">
+ <artifact:localRepository path="${local.repository}" id="${repository.credentials.id}" />
+ <artifact:pom refid="@{name}.pom" />
+ <artifact:attach type="jar" file="${path}-src.jar" classifier="sources" />
+ </artifact:install>
+ </then>
+ <else>
+ <artifact:install file="${path}.jar">
+ <artifact:localRepository path="${local.repository}" id="${repository.credentials.id}" />
+ <artifact:pom refid="@{name}.pom" />
+ <artifact:attach type="jar" file="${path}-src.jar" classifier="sources" />
+ <artifact:attach type="jar" file="${path}-docs.jar" classifier="javadoc" />
+ </artifact:install>
+ </else>
+ </if>
+ </else></if>
+ </then><else>
+ <local name="repo"/>
+ <if><equals arg1="@{local}" arg2="false"/><then>
+ <property name="repo" value="${remote.repository}"/>
+ </then><else>
+ <property name="repo" value="${local.repository}"/>
+ </else></if>
+ <artifact:mvn failonerror="true">
+ <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
+ <arg value="-Durl=${repo}" />
+ <arg value="-DrepositoryId=${repository.credentials.id}" />
+ <arg value="-DpomFile=${path}-pom-filtered.xml" />
+ <arg value= "-Dfile=${path}.jar" />
+ <arg value="-Dsources=${path}-src.jar" />
+ <arg value="-Djavadoc=${path}-docs.jar" />
+ <arg value="-Pgpg" />
+ <arg value="-Dgpg.useagent=true" />
+ </artifact:mvn>
+ </else></if>
+ </sequential>
+ </macrodef>
+
+ <macrodef name="deploy">
+ <attribute name="dir" default=""/>
+ <attribute name="local" default="false"/>
+ <attribute name="signed" default="false"/>
+
+ <sequential>
+ <deploy-one dir="@{dir}" name="scala-library" local="@{local}" signed="@{signed}"/>
+ <deploy-one dir="@{dir}" name="scala-reflect" local="@{local}" signed="@{signed}"/>
+ <deploy-one dir="@{dir}" name="scala-compiler" local="@{local}" signed="@{signed}"/>
+ <deploy-one dir="@{dir}" name="scala-actors" local="@{local}" signed="@{signed}"/>
+ <deploy-one dir="@{dir}" name="scala-swing" local="@{local}" signed="@{signed}"/>
+ <deploy-one dir="@{dir}" name="scalap" local="@{local}" signed="@{signed}"/>
+ <deploy-one dir="@{dir}plugins/" name="continuations" local="@{local}" signed="@{signed}"/>
+ </sequential>
+ </macrodef>
+
+ <target name="boot.maven">
<!-- Pull in properties from build -->
<property file="build.properties" />
+ <!-- Set up Ant contrib tasks so we can use <if><then><else> instead of the clunky `unless` attribute -->
+ <taskdef resource="net/sf/antcontrib/antlib.xml" classpath="ant-contrib.jar"/>
+ <!-- Add our maven ant tasks -->
+ <path id="maven-ant-tasks.classpath" path="maven-ant-tasks-2.1.1.jar" />
+ <typedef resource="org/apache/maven/artifact/ant/antlib.xml" uri="urn:maven-artifact-ant" classpathref="maven-ant-tasks.classpath" />
+ </target>
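For readers unfamiliar with ant-contrib: stock Ant's if/unless attributes gate a whole target, whereas ant-contrib's <if> task branches inline inside a target, which is what the comment above calls out. A small illustration with a hypothetical target name:

    <!-- stock Ant: the condition can only select a whole target -->
    <target name="echo-docs" unless="docs.skip">
      <echo message="docs will be deployed"/>
    </target>

    <!-- ant-contrib: branch inline, as done throughout this build -->
    <if><isset property="docs.skip"/><then>
      <echo message="skipping docs"/>
    </then><else>
      <echo message="docs will be deployed"/>
    </else></if>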
+
+ <target name="init.maven" depends="boot.maven">
<property name="remote.snapshot.repository" value="https://oss.sonatype.org/content/repositories/snapshots" />
<property name="remote.release.repository" value="https://oss.sonatype.org/service/local/staging/deploy/maven2" />
@@ -19,15 +115,6 @@
<property name="repository.credentials.id" value="sonatype-nexus" />
<property name="settings.file" value="${user.home}/.m2/settings.xml" />
- <!-- Set up Ant contrib tasks so we can use <if><then><else> instead of the clunky `unless` attribute -->
- <taskdef resource="net/sf/antcontrib/antlib.xml" classpath="ant-contrib.jar"/>
-
- <!-- Add our maven ant tasks -->
- <path id="maven-ant-tasks.classpath" path="maven-ant-tasks-2.1.1.jar" />
- <typedef resource="org/apache/maven/artifact/ant/antlib.xml" uri="urn:maven-artifact-ant" classpathref="maven-ant-tasks.classpath" />
- </target>
-
- <target name="init" depends="boot">
<if><contains string="${maven.version.number}" substring="-SNAPSHOT"/><then>
<property name="remote.repository" value="${remote.snapshot.repository}"/>
<property name="local.repository" value="${local.snapshot.repository}"/>
@@ -39,87 +126,9 @@
<echo>Using server[${repository.credentials.id}] for maven repository credentials.
Please make sure that your ~/.m2/settings.xml has the needed username/password for this server id
</echo>
-
- <macrodef name="deploy-one">
- <attribute name="dir" default=""/>
- <attribute name="name" />
- <attribute name="version" />
- <attribute name="local" />
- <attribute name="signed" />
-
- <sequential>
- <local name="path"/> <property name="path" value="@{dir}@{name}/@{name}"/>
-
- <echo>Deploying ${path}-[pom.xml|src.jar|docs.jar].</echo>
-
- <copy file="${path}-pom.xml" tofile="${path}-pom-filtered.xml" overwrite="true">
- <filterset>
- <filter token="VERSION" value="@{version}" />
- <filter token="SCALA_BINARY_VERSION" value="${scala.binary.version}" />
- <filter token="XML_VERSION" value="${scala-xml.version.number}" />
- <filter token="PARSER_COMBINATORS_VERSION" value="${scala-parser-combinators.version.number}" />
- <filter token="RELEASE_REPOSITORY" value="${remote.release.repository}" />
- <filter token="SNAPSHOT_REPOSITORY" value="${remote.snapshot.repository}" />
- <filter token="JLINE_VERSION" value="${jline.version}" />
- </filterset>
- </copy>
- <artifact:pom id="@{name}.pom" file="${path}-pom-filtered.xml" />
-
- <if><equals arg1="@{signed}" arg2="false"/><then>
- <if><equals arg1="@{local}" arg2="false"/><then>
- <artifact:deploy file="${path}.jar" settingsFile="${settings.file}">
- <artifact:remoteRepository url="${remote.repository}" id="${repository.credentials.id}" />
- <artifact:pom refid="@{name}.pom" />
- <artifact:attach type="jar" file="${path}-src.jar" classifier="sources" />
- <artifact:attach type="jar" file="${path}-docs.jar" classifier="javadoc" />
- </artifact:deploy>
- </then><else>
- <artifact:install file="${path}.jar">
- <artifact:localRepository path="${local.repository}" id="${repository.credentials.id}" />
- <artifact:pom refid="@{name}.pom" />
- <artifact:attach type="jar" file="${path}-src.jar" classifier="sources" />
- <artifact:attach type="jar" file="${path}-docs.jar" classifier="javadoc" />
- </artifact:install>
- </else></if>
- </then><else>
- <local name="repo"/>
- <if><equals arg1="@{local}" arg2="false"/><then>
- <property name="repo" value="${remote.repository}"/>
- </then><else>
- <property name="repo" value="${local.repository}"/>
- </else></if>
- <artifact:mvn>
- <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
- <arg value="-Durl=${repo}" />
- <arg value="-DrepositoryId=${repository.credentials.id}" />
- <arg value="-DpomFile=${path}-pom-filtered.xml" />
- <arg value= "-Dfile=${path}.jar" />
- <arg value="-Dsources=${path}-src.jar" />
- <arg value="-Djavadoc=${path}-docs.jar" />
- <arg value="-Pgpg" />
- <arg value="-Dgpg.useagent=true" />
- </artifact:mvn>
- </else></if>
- </sequential>
- </macrodef>
-
- <macrodef name="deploy">
- <attribute name="local" default="false"/>
- <attribute name="signed" default="false"/>
-
- <sequential>
- <deploy-one name="scala-actors" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
- <deploy-one name="scala-compiler" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
- <deploy-one name="scala-library" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
- <deploy-one name="scala-reflect" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
- <deploy-one name="scala-swing" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
- <deploy-one name="scalap" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
- <deploy-one dir="plugins/" name="continuations" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
- </sequential>
- </macrodef>
</target>
- <target name="deploy" depends="init" description="Deploys unsigned artifacts to the maven repo."> <deploy/> </target>
- <target name="deploy.local" depends="init" description="Deploys unsigned artifacts to the local maven repo."> <deploy local="true"/> </target>
- <target name="deploy.signed" depends="init" description="Deploys signed artifacts to the remote maven repo."> <deploy signed="true"/> </target>
+ <target name="deploy" depends="init.maven" description="Deploys unsigned artifacts to the maven repo."> <deploy/> </target>
+ <target name="deploy.local" depends="init.maven" description="Deploys unsigned artifacts to the local maven repo."> <deploy local="true"/> </target>
+ <target name="deploy.signed" depends="init.maven" description="Deploys signed artifacts to the remote maven repo."> <deploy signed="true"/> </target>
</project>
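The repository.credentials.id used throughout ("sonatype-nexus") must match a <server> entry in the deployer's ~/.m2/settings.xml, as the echo in init.maven reminds. A minimal example of such an entry; the username and password are placeholders:

    <settings>
      <servers>
        <server>
          <id>sonatype-nexus</id>
          <username>your-sonatype-username</username>
          <password>your-sonatype-password</password>
        </server>
      </servers>
    </settings>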
diff --git a/src/build/maven/plugins/continuations-pom.xml b/src/build/maven/plugins/continuations-pom.xml
index 9abb0a36f0..8dc79c8664 100644
--- a/src/build/maven/plugins/continuations-pom.xml
+++ b/src/build/maven/plugins/continuations-pom.xml
@@ -1,54 +1,51 @@
-<project
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang.plugins</groupId>
- <artifactId>continuations</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang.plugins</groupId>
+ <artifactId>continuations</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
<name>Scala Continuations Plugin</name>
<description>Delimited continuations compilation for Scala</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2010</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html
- </url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
-
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <version>@VERSION@</version>
- </dependency>
- </dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2010</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD 3-Clause</name>
+ <url>http://www.scala-lang.org/license.html</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
+ </scm>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
+ </issueManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-compiler</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-actors-pom.xml b/src/build/maven/scala-actors-pom.xml
index 3d37ef8174..424ac2898c 100644
--- a/src/build/maven/scala-actors-pom.xml
+++ b/src/build/maven/scala-actors-pom.xml
@@ -1,56 +1,54 @@
-<project
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-actors</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-actors</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
<name>Scala Actors library</name>
<description>Deprecated Actors Library for Scala</description>
- <url>http://www.scala-lang.org/</url>
+ <url>http://www.scala-lang.org/</url>
<inceptionYear>2006</inceptionYear>
<organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html
- </url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
- <properties>
- <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
- </properties>
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <version>@VERSION@</version>
- </dependency>
- </dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD 3-Clause</name>
+ <url>http://www.scala-lang.org/license.html</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
+ </scm>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
+ </issueManagement>
+ <properties>
+ <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
+ </properties>
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-compiler-pom.xml b/src/build/maven/scala-compiler-pom.xml
index 8cc42c22ae..442fe6a8d5 100644
--- a/src/build/maven/scala-compiler-pom.xml
+++ b/src/build/maven/scala-compiler-pom.xml
@@ -1,83 +1,82 @@
-<project
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
- <name>Scala Compiler</name>
- <description>Compiler for the Scala Programming Language</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html
- </url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
-
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <version>@VERSION@</version>
- </dependency>
- <dependency> <!-- for scaladoc -->
- <groupId>org.scala-lang.modules</groupId>
- <artifactId>scala-xml_@SCALA_BINARY_VERSION@</artifactId>
- <version>@XML_VERSION@</version>
- </dependency>
- <dependency> <!-- for scaladoc -->
- <groupId>org.scala-lang.modules</groupId>
- <artifactId>scala-parser-combinators_@SCALA_BINARY_VERSION@</artifactId>
- <version>@PARSER_COMBINATORS_VERSION@</version>
- </dependency>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-reflect</artifactId>
- <version>@VERSION@</version>
- </dependency>
- <dependency>
- <groupId>jline</groupId>
- <artifactId>jline</artifactId>
- <version>@JLINE_VERSION@</version>
- <optional>true</optional>
- </dependency>
- </dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Typesafe</id>
- <name>Typesafe, Inc.</name>
- </developer>
- </developers>
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-compiler</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
+ <name>Scala Compiler</name>
+ <description>Compiler for the Scala Programming Language</description>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2002</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD 3-Clause</name>
+ <url>http://www.scala-lang.org/license.html</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
+ </scm>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
+ </issueManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ <dependency>
+ <!-- for scaladoc -->
+ <groupId>org.scala-lang.modules</groupId>
+ <artifactId>scala-xml_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@XML_VERSION@</version>
+ </dependency>
+ <dependency>
+ <!-- for scaladoc -->
+ <groupId>org.scala-lang.modules</groupId>
+ <artifactId>scala-parser-combinators_@SCALA_BINARY_VERSION@</artifactId>
+ <version>@PARSER_COMBINATORS_VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-reflect</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ <dependency>
+ <groupId>jline</groupId>
+ <artifactId>jline</artifactId>
+ <version>@JLINE_VERSION@</version>
+ <optional>true</optional>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
</project>
diff --git a/src/build/maven/scala-library-pom.xml b/src/build/maven/scala-library-pom.xml
index 684474e79a..d40cee2656 100644
--- a/src/build/maven/scala-library-pom.xml
+++ b/src/build/maven/scala-library-pom.xml
@@ -1,50 +1,48 @@
-<project
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
<name>Scala Library</name>
<description>Standard library for the Scala Programming Language</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html
- </url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
- <properties>
- <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
- </properties>
- <dependencies>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2002</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD 3-Clause</name>
+ <url>http://www.scala-lang.org/license.html</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
+ </scm>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
+ </issueManagement>
+ <properties>
+ <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
+ </properties>
+ <dependencies>
</dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
</distributionManagement>
<developers>
<developer>
diff --git a/src/build/maven/scala-reflect-pom.xml b/src/build/maven/scala-reflect-pom.xml
index 56d2ffc57c..d0a9c0e274 100644
--- a/src/build/maven/scala-reflect-pom.xml
+++ b/src/build/maven/scala-reflect-pom.xml
@@ -1,56 +1,54 @@
-<project
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-reflect</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-reflect</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
<name>Scala Compiler</name>
<description>Compiler for the Scala Programming Language</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html
- </url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2002</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD 3-Clause</name>
+ <url>http://www.scala-lang.org/license.html</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
+ </scm>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
+ </issueManagement>
<properties>
- <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
+ <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
</properties>
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <version>@VERSION@</version>
- </dependency>
- </dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scala-swing-pom.xml b/src/build/maven/scala-swing-pom.xml
index 5099fe11dc..01c89f9bea 100644
--- a/src/build/maven/scala-swing-pom.xml
+++ b/src/build/maven/scala-swing-pom.xml
@@ -1,56 +1,54 @@
-<project
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-swing</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-swing</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
<name>Scala Swing library</name>
<description>Swing for Scala</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html
- </url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
- <properties>
- <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
- </properties>
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-library</artifactId>
- <version>@VERSION@</version>
- </dependency>
- </dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2002</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD 3-Clause</name>
+ <url>http://www.scala-lang.org/license.html</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
+ </scm>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
+ </issueManagement>
+ <properties>
+ <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
+ </properties>
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-library</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/maven/scalap-pom.xml b/src/build/maven/scalap-pom.xml
index 50c08e899c..88cfce08d8 100644
--- a/src/build/maven/scalap-pom.xml
+++ b/src/build/maven/scalap-pom.xml
@@ -1,54 +1,51 @@
-<project
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scalap</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
+<?xml version="1.0"?>
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scalap</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
<name>Scalap</name>
<description>bytecode analysis tool</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2002</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html
- </url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
-
- <dependencies>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-compiler</artifactId>
- <version>@VERSION@</version>
- </dependency>
- </dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2002</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD 3-Clause</name>
+ <url>http://www.scala-lang.org/license.html</url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
+ </scm>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
+ </issueManagement>
+ <dependencies>
+ <dependency>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-compiler</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
<developers>
<developer>
<id>lamp</id>
diff --git a/src/build/pack.xml b/src/build/pack.xml
deleted file mode 100644
index ed628726fb..0000000000
--- a/src/build/pack.xml
+++ /dev/null
@@ -1,263 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project name="sabbus-pack">
-
- <description>
- SuperSabbus extension for packaging a distribution. THIS FILE IS NOT STAND-ALONE AND SHOULD ONLY BE USED THROUGH ENTRY POINTS IN SUPERSABBUS.
- </description>
-
-<!-- ===========================================================================
-PROPERTIES
-============================================================================ -->
-
- <!-- the maven stuff requires version.major, version.minor and version.patch properties.
- the "get-scala-revision" script only returns "version.number" -->
- <property file="${basedir}/build.number"/>
- <!-- also need to know scala binary version and versions for xml and parsers -->
- <property file="${basedir}/versions.properties"/>
-
-
-<!-- ===========================================================================
-MAIN DISTRIBUTION PACKAGING
-============================================================================ -->
-
- <target name="pack-archives.start">
- <mkdir dir="${dists.dir}/archives"/>
- </target>
-
- <target name="pack-archives.tar" depends="pack-archives.start">
- <tar destfile="${dists.dir}/archives/${dist.name}.tar"
- compression="none" longfile="gnu">
- <tarfileset dir="${dist.dir}" prefix="${dist.name}" includes="bin/**" mode="755"/>
- <tarfileset dir="${dist.dir}" prefix="${dist.name}" excludes="bin/**"/>
- </tar>
- <gzip src="${dists.dir}/archives/${dist.name}.tar" destfile="${dists.dir}/archives/${dist.name}.tgz"/>
- <if>
- <not><equals arg1="${archives.skipxz}" arg2="true" /></not>
- <then>
- <exec executable="xz" failifexecutionfails="false">
- <arg line="-k -9e -S .xz ${dists.dir}/archives/${dist.name}.tar"/>
- </exec>
- <move file="${dists.dir}/archives/${dist.name}.tar.xz" tofile="${dists.dir}/archives/${dist.name}.txz" failonerror="false"/>
- </then>
- </if>
- <delete file="${dists.dir}/archives/${dist.name}.tar" />
- <checksum fileext=".md5">
- <fileset dir="${dists.dir}/archives">
- <include name="${dist.name}.t?z"/>
- </fileset>
- </checksum>
- </target>
-
- <target name="pack-archives.zip" depends="pack-archives.tar">
- <zip destfile="${dists.dir}/archives/${dist.name}.zip">
- <zipfileset prefix="${dist.name}" dir="${dist.dir}"/>
- </zip>
- <checksum file="${dists.dir}/archives/${dist.name}.zip" fileext=".md5"/>
- </target>
-
- <target name="pack-devel-docs.tar" depends="pack-archives.zip">
- <tar destfile="${dists.dir}/archives/${dist.name}-devel-docs.tar"
- compression="none" longfile="gnu">
- <tarfileset dir="${dist.dir}/doc/scala-devel-docs" prefix="${dist.name}-devel-docs"/>
- </tar>
- <gzip src="${dists.dir}/archives/${dist.name}-devel-docs.tar" destfile="${dists.dir}/archives/${dist.name}-devel-docs.tgz"/>
- <if>
- <not><equals arg1="${archives.skipxz}" arg2="true" /></not>
- <then>
- <exec executable="xz" failifexecutionfails="false">
- <arg line="-k -9e -S .xz ${dists.dir}/archives/${dist.name}-devel-docs.tar"/>
- </exec>
- <move file="${dists.dir}/archives/${dist.name}-devel-docs.tar.xz" tofile="${dists.dir}/archives/${dist.name}-devel-docs.txz" failonerror="false"/>
- </then>
- </if>
- <delete file="${dists.dir}/archives/${dist.name}-devel-docs.tar" />
- <checksum fileext=".md5">
- <fileset dir="${dists.dir}/archives">
- <include name="${dist.name}-devel-docs.t?z"/>
- </fileset>
- </checksum>
- </target>
-
- <target name="pack-archives.src" depends="pack-devel-docs.tar">
- <tar destfile="${dists.dir}/archives/${dist.name}-sources.tar"
- compression="none" longfile="gnu">
- <tarfileset dir="${basedir}" prefix="${dist.name}-sources">
- <exclude name="bin/**"/>
- <exclude name="build/**"/>
- <exclude name="debian/**"/>
- <exclude name="dists/**"/>
- <exclude name="logs/**"/>
- <exclude name="sandbox/**"/>
- <exclude name="test/partest"/>
- <exclude name=".git"/>
- </tarfileset>
- <tarfileset dir="${basedir}" prefix="${dist.name}-sources" filemode="755">
- <include name="test/partest"/>
- </tarfileset>
- </tar>
- <gzip src="${dists.dir}/archives/${dist.name}-sources.tar" destfile="${dists.dir}/archives/${dist.name}-sources.tgz"/>
- <if>
- <not><equals arg1="${archives.skipxz}" arg2="true" /></not>
- <then>
- <exec executable="xz" failifexecutionfails="false">
- <arg line="-k -9e -S .xz ${dists.dir}/archives/${dist.name}-sources.tar"/>
- </exec>
- <move file="${dists.dir}/archives/${dist.name}-sources.tar.xz" tofile="${dists.dir}/archives/${dist.name}-sources.txz" failonerror="false"/>
- </then>
- </if>
- <delete file="${dists.dir}/archives/${dist.name}-sources.tar" />
- <checksum fileext=".md5">
- <fileset dir="${dists.dir}/archives">
- <include name="${dist.name}-sources.t?z"/>
- </fileset>
- </checksum>
- </target>
-
- <target name="pack-archives.latest.unix" depends="pack-archives.src" unless="os.win">
- <!-- be sure to use a relative symlink to make the distribution portable,
- `resource` is relative to directory of `link` -->
- <symlink link="${dists.dir}/archives/scala-latest-sources.tgz"
- resource="scala-${version.number}-sources.tgz"
- overwrite="yes"/>
- </target>
-
- <target name="pack-archives.latest.win" depends="pack-archives.src" if="os.win">
- <copy tofile="${dists.dir}/archives/scala-latest-sources.tgz">
- <fileset dir="${dists.dir}/archives">
- <include name="scala-${version.number}-sources.tgz"/>
- </fileset>
- </copy>
- </target>
-
- <target name="pack-archives.done" depends="pack-archives.src, pack-archives.latest.win, pack-archives.latest.unix"/>
-
- <target name="pack-maven.start">
- <mkdir dir="${dists.dir}/maven/${version.number}"/>
- </target>
-
- <target name="pack-maven.libs" depends="pack-maven.start">
- <macrodef name="mvn-copy-lib">
- <attribute name="mvn.artifact.name"/>
- <sequential>
- <mkdir dir="${dists.dir}/maven/${version.number}/@{mvn.artifact.name}"/>
- <copy todir="${dists.dir}/maven/${version.number}/@{mvn.artifact.name}">
- <fileset dir="${dist.dir}/lib/">
- <filename name="@{mvn.artifact.name}.jar"/>
- </fileset>
- <fileset dir="${src.dir}/build/maven/">
- <filename name="@{mvn.artifact.name}-pom.xml"/>
- </fileset>
- <fileset dir="${dist.dir}/src/">
- <filename name="@{mvn.artifact.name}-src.jar"/>
- </fileset>
- </copy>
- </sequential>
- </macrodef>
- <mvn-copy-lib mvn.artifact.name="scala-library"/>
- <mvn-copy-lib mvn.artifact.name="scala-reflect"/>
- <mvn-copy-lib mvn.artifact.name="scala-compiler"/>
- <mvn-copy-lib mvn.artifact.name="scala-swing"/>
- <mvn-copy-lib mvn.artifact.name="scala-actors"/>
- <mvn-copy-lib mvn.artifact.name="scalap"/>
- </target>
-
- <target name="pack-maven.plugins" depends="pack-maven.start">
- <macrodef name="mvn-copy-plugin">
- <attribute name="mvn.artifact.name"/>
- <sequential>
- <mkdir dir="${dists.dir}/maven/${version.number}/plugins/@{mvn.artifact.name}"/>
- <copy todir="${dists.dir}/maven/${version.number}/plugins/@{mvn.artifact.name}">
- <fileset dir="${dist.dir}/misc/scala-devel/plugins/">
- <filename name="@{mvn.artifact.name}.jar"/>
- </fileset>
- <fileset dir="${src.dir}/build/maven/plugins/">
- <filename name="@{mvn.artifact.name}-pom.xml"/>
- </fileset>
- </copy>
- </sequential>
- </macrodef>
- <mvn-copy-plugin mvn.artifact.name="continuations"/>
- </target>
-
- <target name="pack-maven.srcs" depends="pack-maven.libs">
- <!-- Add missing src jars. -->
- <!-- Continuations plugin -->
- <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-src.jar"
- basedir="${src.dir}/continuations/plugin">
- <include name="**/*"/>
- </jar>
- </target>
-
- <target name="pack-maven.docs" depends="pack-maven.libs, pack-maven.plugins">
- <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"
- basedir="${build-docs.dir}/library">
- <include name="**/*"/>
- </jar>
- <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-compiler/scala-compiler-docs.jar"
- basedir="${build-docs.dir}/compiler">
- <include name="**/*"/>
- </jar>
- <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scalap/scalap-docs.jar"
- basedir="${build-docs.dir}/scalap">
- <include name="**/*"/>
- </jar>
- <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-docs.jar"
- basedir="${build-docs.dir}/continuations-plugin">
- <include name="**/*"/>
- </jar>
-
-    <!-- TODO - Scala swing and actors should maybe have their own jar, but creating it is SLOW. -->
- <copy tofile="${dists.dir}/maven/${version.number}/scala-swing/scala-swing-docs.jar"
- file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
- <copy tofile="${dists.dir}/maven/${version.number}/scala-actors/scala-actors-docs.jar"
- file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
- <copy tofile="${dists.dir}/maven/${version.number}/scala-reflect/scala-reflect-docs.jar"
- file="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"/>
- </target>
-
- <target name="pack-maven.latest.unix" depends="pack-maven.docs" unless="os.win">
- <symlink link="${dists.dir}/maven/latest"
- resource="${version.number}"
- overwrite="yes"/>
- </target>
-
- <target name="pack-maven.latest.win" depends="pack-maven.docs" if="os.win">
- <copy todir="${dists.dir}/maven/latest">
- <fileset dir="${dists.dir}/maven/${version.number}"/>
- </copy>
- </target>
-
- <target name="pack-maven.scripts" depends="pack-maven.latest.unix,pack-maven.latest.win,pack-maven.srcs">
- <copy todir="${dists.dir}/maven/${version.number}"
- file="${lib-ant.dir}/ant-contrib.jar"/>
- <copy todir="${dists.dir}/maven/${version.number}"
- file="${lib-ant.dir}/maven-ant-tasks-2.1.1.jar"/>
- <copy tofile="${dists.dir}/maven/${version.number}/build.xml"
- file="${src.dir}/build/maven/maven-deploy.xml"/>
- <!-- export properties for use when deploying -->
- <echoproperties destfile="${dists.dir}/maven/${version.number}/build.properties"/>
- </target>
-
- <target name="pack-maven.done" depends="pack-maven.scripts"/>
-
-<!-- ===========================================================================
-MISCELLANEOUS
-============================================================================ -->
-
- <target name="pack-all.done" depends="pack-archives.done, pack-maven.done"/>
-
-<!-- ===========================================================================
-MISCELLANEOUS
-============================================================================ -->
-
- <target name="graph.init">
- <echo message="${basedir}/lib/ant/vizant.jar"/>
- <taskdef name="vizant" classname="vizant.Vizant" classpath="${basedir}/../../lib/ant/vizant.jar"/>
- </target>
-
- <target name="graph.pack" depends="graph.init">
- <vizant antfile="${ant.file}" outfile="${ant.project.name}.dot"/>
- </target>
-
-</project>
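
For orientation: the deleted pack-maven targets assemble one directory per artifact under dists/maven/${version.number}, holding the binary jar, the -pom.xml taken from src/build/maven, the -src.jar, and a -docs.jar. A purely illustrative Scala sketch of that layout follows (object, method names and the example version are invented for the illustration, not part of the build):

    // Purely illustrative -- not part of the build. It spells out the per-artifact
    // file set that mvn-copy-lib and pack-maven.docs assemble under dists/maven.
    object MavenLayout {
      def artifactFiles(version: String, artifact: String): Seq[String] = {
        val dir = s"dists/maven/$version/$artifact"
        Seq(
          s"$dir/$artifact.jar",      // copied from the dist lib directory
          s"$dir/$artifact-pom.xml",  // copied from src/build/maven
          s"$dir/$artifact-src.jar",  // copied from the dist src directory
          s"$dir/$artifact-docs.jar"  // produced (or copied) by pack-maven.docs
        )
      }
      def main(args: Array[String]): Unit =
        artifactFiles("2.11.0", "scala-library") foreach println
    }
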
diff --git a/src/compiler/scala/reflect/macros/compiler/Errors.scala b/src/compiler/scala/reflect/macros/compiler/Errors.scala
index 9b56e417e2..45bb87fc47 100644
--- a/src/compiler/scala/reflect/macros/compiler/Errors.scala
+++ b/src/compiler/scala/reflect/macros/compiler/Errors.scala
@@ -44,14 +44,18 @@ trait Errors extends Traces {
message + suffix
}
- private def abbreviateCoreAliases(s: String): String = List("WeakTypeTag", "Expr").foldLeft(s)((res, x) => res.replace("c.universe." + x, "c." + x))
+ private def abbreviateCoreAliases(s: String): String = {
+ val coreAliases = List("WeakTypeTag", "Expr", "Tree")
+ coreAliases.foldLeft(s)((res, x) => res.replace("c.universe." + x, "c." + x))
+ }
- private def showMeth(pss: List[List[Symbol]], restpe: Type, abbreviate: Boolean) = {
- var argsPart = (pss map (ps => ps map (_.defString) mkString ("(", ", ", ")"))).mkString
- if (abbreviate) argsPart = abbreviateCoreAliases(argsPart)
- var retPart = restpe.toString
+ private def showMeth(pss: List[List[Symbol]], restpe: Type, abbreviate: Boolean, untype: Boolean) = {
+ def preprocess(tpe: Type) = if (untype) untypeMetalevel(tpe) else tpe
+ var pssPart = (pss map (ps => ps map (p => p.defStringSeenAs(preprocess(p.info))) mkString ("(", ", ", ")"))).mkString
+ if (abbreviate) pssPart = abbreviateCoreAliases(pssPart)
+ var retPart = preprocess(restpe).toString
if (abbreviate || macroDdef.tpt.tpe == null) retPart = abbreviateCoreAliases(retPart)
- argsPart + ": " + retPart
+ pssPart + ": " + retPart
}
// not exactly an error generator, but very related
@@ -86,8 +90,9 @@ trait Errors extends Traces {
private def compatibilityError(message: String) =
implRefError(
"macro implementation has wrong shape:"+
- "\n required: " + showMeth(rparamss, rret, abbreviate = true) +
- "\n found : " + showMeth(aparamss, aret, abbreviate = false) +
+ "\n required: " + showMeth(rparamss, rret, abbreviate = true, untype = false) +
+ "\n or : " + showMeth(rparamss, rret, abbreviate = true, untype = true) +
+ "\n found : " + showMeth(aparamss, aret, abbreviate = false, untype = false) +
"\n" + message)
def MacroImplNonTagImplicitParameters(params: List[Symbol]) = compatibilityError("macro implementations cannot have implicit parameters other than WeakTypeTag evidences")
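
The added "or :" line reflects that a macro implementation may now be written against c.Tree instead of c.Expr, so the error advertises both accepted shapes. A hedged sketch of those two shapes, assuming the pre-split def-macro API (scala.reflect.macros.Context); adjust the import if this branch already uses a different Context hierarchy:

    // Sketch only: the two implementation shapes that "required:" and "or :" describe.
    import scala.language.experimental.macros
    import scala.reflect.macros.Context

    object IdMacro {
      // Expr-based implementation (the "required:" shape)
      def implExpr(c: Context)(x: c.Expr[Int]): c.Expr[Int] = x
      // Tree-based implementation (the new "or :" alternative)
      def implTree(c: Context)(x: c.Tree): c.Tree = x

      def idE(x: Int): Int = macro implExpr
      def idT(x: Int): Int = macro implTree
    }
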
diff --git a/src/compiler/scala/reflect/macros/contexts/Names.scala b/src/compiler/scala/reflect/macros/contexts/Names.scala
index e535754a98..c2f14cf0f1 100644
--- a/src/compiler/scala/reflect/macros/contexts/Names.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Names.scala
@@ -4,7 +4,7 @@ package contexts
trait Names {
self: Context =>
- lazy val freshNameCreator = callsiteTyper.context.unit.fresh
+ def freshNameCreator = callsiteTyper.context.unit.fresh
def fresh(): String =
freshName()
@@ -16,7 +16,7 @@ trait Names {
freshName[NameType](name)
def freshName(): String =
- freshNameCreator.newName()
+ freshName("fresh$")
def freshName(name: String): String =
freshNameCreator.newName(name)
diff --git a/src/compiler/scala/reflect/macros/contexts/Parsers.scala b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
index ae6488b5a8..88cfea8157 100644
--- a/src/compiler/scala/reflect/macros/contexts/Parsers.scala
+++ b/src/compiler/scala/reflect/macros/contexts/Parsers.scala
@@ -11,7 +11,7 @@ trait Parsers {
val sreporter = new StoreReporter()
val unit = new CompilationUnit(newSourceFile(code, "<macro>")) { override def reporter = sreporter }
val parser = newUnitParser(unit)
- val tree = gen.mkTreeOrBlock(parser.parseStats())
+ val tree = gen.mkTreeOrBlock(parser.parseStatsOrPackages())
sreporter.infos.foreach {
case sreporter.Info(pos, msg, sreporter.ERROR) => throw ParseException(pos, msg)
}
diff --git a/src/compiler/scala/reflect/macros/util/Helpers.scala b/src/compiler/scala/reflect/macros/util/Helpers.scala
index f40c6bb7e6..dd23b0fc32 100644
--- a/src/compiler/scala/reflect/macros/util/Helpers.scala
+++ b/src/compiler/scala/reflect/macros/util/Helpers.scala
@@ -55,6 +55,13 @@ trait Helpers {
case tp => typeRef(pre, MacroContextExprClass, List(tp))
}
+ /** Transforms c.Expr[T] types into c.Tree and leaves the rest unchanged.
+ */
+ def untypeMetalevel(tp: Type): Type = transparentShallowTransform(RepeatedParamClass, tp) {
+ case ExprClassOf(_) => typeRef(tp.prefix, TreesTreeType, Nil)
+ case tp => tp
+ }
+
/** Decreases metalevel of the type, i.e. transforms:
* * c.Expr[T] to T
* * Anything else to Any
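
untypeMetalevel leaves types untouched except that c.Expr[T] collapses to c.Tree. A standalone toy analogue, purely illustrative (the real code pattern-matches compiler Types via transparentShallowTransform, as shown above; the Tpe ADT below is invented for the example):

    // Toy analogue of untypeMetalevel, for illustration only.
    object UntypeDemo {
      sealed trait Tpe
      case class ExprOf(arg: Tpe)    extends Tpe // stands for c.Expr[T]
      case object TreeTpe            extends Tpe // stands for c.Tree
      case class Plain(name: String) extends Tpe // any other type

      def untype(tp: Tpe): Tpe = tp match {
        case ExprOf(_) => TreeTpe // c.Expr[T]  ~>  c.Tree
        case other     => other   // everything else is left unchanged
      }

      def main(args: Array[String]): Unit = {
        println(untype(ExprOf(Plain("Int")))) // TreeTpe
        println(untype(Plain("String")))      // Plain(String)
      }
    }
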
diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala
index b2cedf6338..1747405f03 100644
--- a/src/compiler/scala/tools/ant/Scalac.scala
+++ b/src/compiler/scala/tools/ant/Scalac.scala
@@ -91,7 +91,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared {
val values = List("namer", "typer", "pickler", "refchecks",
"uncurry", "tailcalls", "specialize", "explicitouter",
"erasure", "lazyvals", "lambdalift", "constructors",
- "flatten", "mixin", "cleanup", "icode", "inliner",
+ "flatten", "mixin", "delambdafy", "cleanup", "icode", "inliner",
"closelim", "dce", "jvm", "terminal")
}
diff --git a/src/compiler/scala/tools/cmd/FromString.scala b/src/compiler/scala/tools/cmd/FromString.scala
index 433bbb167e..0b074efc0f 100644
--- a/src/compiler/scala/tools/cmd/FromString.scala
+++ b/src/compiler/scala/tools/cmd/FromString.scala
@@ -6,21 +6,20 @@
package scala.tools
package cmd
-import scala.tools.nsc.io.{ File, Directory }
-import scala.reflect.runtime.{universe => ru}
-import scala.tools.reflect.StdRuntimeTags._
+import nsc.io.{ Path, File, Directory }
+import scala.reflect.OptManifest
/** A general mechanism for defining how a command line argument
* (always a String) is transformed into an arbitrary type. A few
* example instances are in the companion object, but in general
* either IntFromString will suffice or you'll want custom transformers.
*/
-abstract class FromString[+T](implicit t: ru.TypeTag[T]) extends PartialFunction[String, T] {
+abstract class FromString[+T](implicit m: OptManifest[T]) extends PartialFunction[String, T] {
def apply(s: String): T
def isDefinedAt(s: String): Boolean = true
def zero: T = apply("")
- def targetString: String = t.toString
+ def targetString: String = m.toString
}
object FromString {
@@ -29,13 +28,13 @@ object FromString {
/** Path related stringifiers.
*/
- val ExistingDir: FromString[Directory] = new FromString[Directory]()(tagOfDirectory) {
+ val ExistingDir: FromString[Directory] = new FromString[Directory] {
override def isDefinedAt(s: String) = toDir(s).isDirectory
def apply(s: String): Directory =
if (isDefinedAt(s)) toDir(s)
else cmd.runAndExit(println("'%s' is not an existing directory." format s))
}
- def ExistingDirRelativeTo(root: Directory) = new FromString[Directory]()(tagOfDirectory) {
+ def ExistingDirRelativeTo(root: Directory) = new FromString[Directory] {
private def resolve(s: String) = (toDir(s) toAbsoluteWithRoot root).toDirectory
override def isDefinedAt(s: String) = resolve(s).isDirectory
def apply(s: String): Directory =
@@ -46,19 +45,19 @@ object FromString {
/** Argument expander, i.e. turns single argument "foo bar baz" into argument
* list "foo", "bar", "baz".
*/
- val ArgumentsFromString: FromString[List[String]] = new FromString[List[String]]()(tagOfListOfString) {
+ val ArgumentsFromString: FromString[List[String]] = new FromString[List[String]] {
def apply(s: String) = toArgs(s)
}
/** Identity.
*/
- implicit val StringFromString: FromString[String] = new FromString[String]()(tagOfString) {
+ implicit val StringFromString: FromString[String] = new FromString[String] {
def apply(s: String): String = s
}
/** Implicit as the most likely to be useful as-is.
*/
- implicit val IntFromString: FromString[Int] = new FromString[Int]()(tagOfInt) {
+ implicit val IntFromString: FromString[Int] = new FromString[Int] {
override def isDefinedAt(s: String) = safeToInt(s).isDefined
def apply(s: String) = safeToInt(s).get
def safeToInt(s: String): Option[Int] = try Some(java.lang.Integer.parseInt(s)) catch { case _: NumberFormatException => None }
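
With the TypeTag plumbing gone, a custom transformer is just an anonymous subclass in the style of the instances above. A sketch, assuming scala.tools.cmd (scala-compiler) is on the classpath and that an OptManifest is materialized implicitly, as for the built-ins:

    // Sketch of a user-defined transformer in the new OptManifest-based style.
    import scala.tools.cmd.FromString

    object BooleanFromStringDemo {
      val BooleanFromString: FromString[Boolean] = new FromString[Boolean] {
        override def isDefinedAt(s: String) = s == "true" || s == "false"
        def apply(s: String): Boolean = s.toBoolean
      }

      def main(args: Array[String]): Unit = {
        println(BooleanFromString("true"))            // true
        println(BooleanFromString.isDefinedAt("yes")) // false
      }
    }
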
diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala
index 1de5c1f626..df5952a4cf 100644
--- a/src/compiler/scala/tools/nsc/CompilationUnits.scala
+++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala
@@ -5,8 +5,7 @@
package scala.tools.nsc
-import util.FreshNameCreator
-import scala.reflect.internal.util.{ SourceFile, NoSourceFile }
+import scala.reflect.internal.util.{ SourceFile, NoSourceFile, FreshNameCreator }
import scala.collection.mutable
import scala.collection.mutable.{ LinkedHashSet, ListBuffer }
import scala.tools.nsc.reporters.Reporter
@@ -27,10 +26,9 @@ trait CompilationUnits { global: Global =>
class CompilationUnit(val source: SourceFile) extends CompilationUnitContextApi { self =>
/** the fresh name creator */
- val fresh: FreshNameCreator = new FreshNameCreator.Default
-
- def freshTermName(prefix: String): TermName = newTermName(fresh.newName(prefix))
- def freshTypeName(prefix: String): TypeName = newTypeName(fresh.newName(prefix))
+ implicit val fresh: FreshNameCreator = new FreshNameCreator
+ def freshTermName(prefix: String = "x$") = global.freshTermName(prefix)
+ def freshTypeName(prefix: String) = global.freshTypeName(prefix)
/** the content of the compilation unit in tree form */
var body: Tree = EmptyTree
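
The unit's FreshNameCreator is now implicit and shared with parsers and tree builders, and freshTermName defaults its prefix to "x$". A minimal sketch of that pattern, assuming the relocated scala.reflect.internal.util.FreshNameCreator exposes newName(prefix) as the removed compiler version did:

    // Minimal sketch of the implicit fresh-name pattern (assumption: newName(prefix)
    // exists on the relocated FreshNameCreator, mirroring the removed code above).
    import scala.reflect.internal.util.FreshNameCreator

    object FreshNameDemo {
      implicit val fresh: FreshNameCreator = new FreshNameCreator

      def freshTermName(prefix: String = "x$")(implicit cr: FreshNameCreator): String =
        cr.newName(prefix)

      def main(args: Array[String]): Unit =
        println(List(freshTermName(), freshTermName(), freshTermName("catchExpr")))
        // e.g. List(x$1, x$2, catchExpr1)
    }
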
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index 1cd3e0ec4b..1852e670e4 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -110,9 +110,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
}
/** A spare instance of TreeBuilder left for backwards compatibility. */
- lazy val treeBuilder: TreeBuilder { val global: Global.this.type } = new UnitTreeBuilder {
+ lazy val treeBuilder: TreeBuilder { val global: Global.this.type } = new TreeBuilder {
val global: Global.this.type = Global.this;
- val unit = currentUnit
+ def unit = currentUnit
+ def source = currentUnit.source
}
/** Fold constants */
@@ -574,6 +575,13 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val runsRightAfter = None
} with CleanUp
+ // phaseName = "delambdafy"
+ object delambdafy extends {
+ val global: Global.this.type = Global.this
+ val runsAfter = List("cleanup")
+ val runsRightAfter = None
+ } with Delambdafy
+
// phaseName = "icode"
object genicode extends {
val global: Global.this.type = Global.this
@@ -687,13 +695,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
uncurry -> "uncurry, translate function values to anonymous classes",
tailCalls -> "replace tail calls by jumps",
specializeTypes -> "@specialized-driven class and method specialization",
- explicitOuter -> "this refs to outer pointers, translate patterns",
+ explicitOuter -> "this refs to outer pointers",
erasure -> "erase types, add interfaces for traits",
postErasure -> "clean up erased inline classes",
lazyVals -> "allocate bitmaps, translate lazy vals into lazified defs",
lambdaLift -> "move nested functions to top level",
constructors -> "move field definitions into constructors",
mixer -> "mixin composition",
+ delambdafy -> "remove lambdas",
cleanup -> "platform-specific cleanups, generate reflective calls",
genicode -> "generate portable intermediate code",
inliner -> "optimization: do inlining",
@@ -1049,6 +1058,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
def currentRun: Run = curRun
def currentUnit: CompilationUnit = if (currentRun eq null) NoCompilationUnit else currentRun.currentUnit
def currentSource: SourceFile = if (currentUnit.exists) currentUnit.source else lastSeenSourceFile
+ def currentFreshNameCreator = currentUnit.fresh
def isGlobalInitialized = (
definitions.isDefinitionsInitialized
@@ -1066,6 +1076,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
@inline final def exitingExplicitOuter[T](op: => T): T = exitingPhase(currentRun.explicitouterPhase)(op)
@inline final def exitingFlatten[T](op: => T): T = exitingPhase(currentRun.flattenPhase)(op)
@inline final def exitingMixin[T](op: => T): T = exitingPhase(currentRun.mixinPhase)(op)
+ @inline final def exitingDelambdafy[T](op: => T): T = exitingPhase(currentRun.delambdafyPhase)(op)
@inline final def exitingPickler[T](op: => T): T = exitingPhase(currentRun.picklerPhase)(op)
@inline final def exitingRefchecks[T](op: => T): T = exitingPhase(currentRun.refchecksPhase)(op)
@inline final def exitingSpecialize[T](op: => T): T = exitingPhase(currentRun.specializePhase)(op)
@@ -1076,6 +1087,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
@inline final def enteringFlatten[T](op: => T): T = enteringPhase(currentRun.flattenPhase)(op)
@inline final def enteringIcode[T](op: => T): T = enteringPhase(currentRun.icodePhase)(op)
@inline final def enteringMixin[T](op: => T): T = enteringPhase(currentRun.mixinPhase)(op)
+ @inline final def enteringDelambdafy[T](op: => T): T = enteringPhase(currentRun.delambdafyPhase)(op)
@inline final def enteringPickler[T](op: => T): T = enteringPhase(currentRun.picklerPhase)(op)
@inline final def enteringSpecialize[T](op: => T): T = enteringPhase(currentRun.specializePhase)(op)
@inline final def enteringTyper[T](op: => T): T = enteringPhase(currentRun.typerPhase)(op)
@@ -1413,6 +1425,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
// val constructorsPhase = phaseNamed("constructors")
val flattenPhase = phaseNamed("flatten")
val mixinPhase = phaseNamed("mixin")
+ val delambdafyPhase = phaseNamed("delambdafy")
val cleanupPhase = phaseNamed("cleanup")
val icodePhase = phaseNamed("icode")
val inlinerPhase = phaseNamed("inliner")
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index 34f3fcce9f..07938ec3df 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -13,9 +13,8 @@ import scala.collection.{ mutable, immutable }
import mutable.{ ListBuffer, StringBuilder }
import scala.reflect.internal.{ ModifierFlags => Flags }
import scala.reflect.internal.Chars.{ isScalaLetter }
-import scala.reflect.internal.util.{ SourceFile, Position }
+import scala.reflect.internal.util.{ SourceFile, Position, FreshNameCreator }
import Tokens._
-import util.FreshNameCreator
/** Historical note: JavaParsers started life as a direct copy of Parsers
* but at a time when that Parsers had been replaced by a different one.
@@ -41,11 +40,8 @@ trait ParsersCommon extends ScannersCommon { self =>
*/
abstract class ParserCommon {
val in: ScannerCommon
- def freshName(prefix: String): Name
- def freshTermName(prefix: String): TermName
- def freshTypeName(prefix: String): TypeName
- def deprecationWarning(off: Int, msg: String): Unit
- def accept(token: Int): Int
+ def deprecationWarning(off: Offset, msg: String): Unit
+ def accept(token: Token): Int
/** Methods inParensOrError and similar take a second argument which, should
* the next token not be the expected opener (e.g. LPAREN) will be returned
@@ -164,21 +160,13 @@ self =>
val in = newScanner()
in.init()
- private val globalFresh = new FreshNameCreator.Default
-
def unit = global.currentUnit
- def freshName(prefix: String): Name = freshTermName(prefix)
- def freshTermName(prefix: String): TermName = newTermName(globalFresh.newName(prefix))
- def freshTypeName(prefix: String): TypeName = newTypeName(globalFresh.newName(prefix))
-
- def o2p(offset: Int): Position = Position.offset(source, offset)
- def r2p(start: Int, mid: Int, end: Int): Position = rangePos(source, start, mid, end)
// suppress warnings; silent abort on errors
- def warning(offset: Int, msg: String) {}
- def deprecationWarning(offset: Int, msg: String) {}
+ def warning(offset: Offset, msg: String) {}
+ def deprecationWarning(offset: Offset, msg: String) {}
- def syntaxError(offset: Int, msg: String): Unit = throw new MalformedInput(offset, msg)
+ def syntaxError(offset: Offset, msg: String): Unit = throw new MalformedInput(offset, msg)
def incompleteInputError(msg: String): Unit = throw new MalformedInput(source.content.length - 1, msg)
object symbXMLBuilder extends SymbolicXMLBuilder(this, preserveWS = true) { // DEBUG choices
@@ -225,14 +213,11 @@ self =>
override def newScanner() = new UnitScanner(unit, patches)
- override def freshTermName(prefix: String): TermName = unit.freshTermName(prefix)
- override def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix)
-
- override def warning(offset: Int, msg: String) {
+ override def warning(offset: Offset, msg: String) {
unit.warning(o2p(offset), msg)
}
- override def deprecationWarning(offset: Int, msg: String) {
+ override def deprecationWarning(offset: Offset, msg: String) {
unit.deprecationWarning(o2p(offset), msg)
}
@@ -250,7 +235,7 @@ self =>
for ((offset, msg) <- syntaxErrors)
unit.error(o2p(offset), msg)
- override def syntaxError(offset: Int, msg: String) {
+ override def syntaxError(offset: Offset, msg: String) {
if (smartParsing) syntaxErrors += ((offset, msg))
else unit.error(o2p(offset), msg)
}
@@ -274,9 +259,10 @@ self =>
}
}
- final val Local = 0
- final val InBlock = 1
- final val InTemplate = 2
+ type Location = Int
+ final val Local: Location = 0
+ final val InBlock: Location = 1
+ final val InTemplate: Location = 2
// These symbols may not yet be loaded (e.g. in the ide) so don't go
// through definitions to obtain the names.
@@ -295,23 +281,26 @@ self =>
abstract class Parser extends ParserCommon { parser =>
val in: Scanner
-
def unit: CompilationUnit
- def freshName(prefix: String): Name
- def freshTermName(prefix: String): TermName
- def freshTypeName(prefix: String): TypeName
- def o2p(offset: Int): Position
- def r2p(start: Int, mid: Int, end: Int): Position
-
- /** whether a non-continuable syntax error has been seen */
- private var lastErrorOffset : Int = -1
+ def source: SourceFile
- class ParserTreeBuilder extends UnitTreeBuilder {
+ class ParserTreeBuilder extends TreeBuilder {
val global: self.global.type = self.global
def unit = parser.unit
+ def source = parser.source
}
val treeBuilder = new ParserTreeBuilder
- import treeBuilder.{global => _, unit => _, _}
+ import treeBuilder.{global => _, unit => _, source => _, fresh => _, _}
+
+ implicit def fresh: FreshNameCreator = unit.fresh
+
+ def o2p(offset: Offset): Position = Position.offset(source, offset)
+ def r2p(start: Offset, mid: Offset, end: Offset): Position = rangePos(source, start, mid, end)
+ def r2p(start: Offset, mid: Offset): Position = r2p(start, mid, in.lastOffset max start)
+ def r2p(offset: Offset): Position = r2p(offset, offset)
+
+ /** whether a non-continuable syntax error has been seen */
+ private var lastErrorOffset : Int = -1
/** The types of the context bounds of type parameters of the surrounding class
*/
@@ -344,9 +333,10 @@ self =>
*/
def parse(): Tree = parseRule(_.parseStartRule())
- /** This is alternative entry point for repl, script runner, toolbox and quasiquotes.
+ /** These are alternative entry points for repl, script runner, toolbox and parsing in macros.
*/
def parseStats(): List[Tree] = parseRule(_.templateStats())
+ def parseStatsOrPackages(): List[Tree] = parseRule(_.templateOrTopStatSeq())
/** This is the parse entry point for code which is not self-contained, e.g.
* a script which is a series of template statements. They will be
@@ -507,7 +497,7 @@ self =>
finally inFunReturnType = saved
}
- protected def skip(targetToken: Int) {
+ protected def skip(targetToken: Token) {
var nparens = 0
var nbraces = 0
while (true) {
@@ -535,17 +525,17 @@ self =>
in.nextToken()
}
}
- def warning(offset: Int, msg: String): Unit
+ def warning(offset: Offset, msg: String): Unit
def incompleteInputError(msg: String): Unit
private def syntaxError(pos: Position, msg: String, skipIt: Boolean) {
syntaxError(pos pointOrElse in.offset, msg, skipIt)
}
- def syntaxError(offset: Int, msg: String): Unit
+ def syntaxError(offset: Offset, msg: String): Unit
def syntaxError(msg: String, skipIt: Boolean) {
syntaxError(in.offset, msg, skipIt)
}
- def syntaxError(offset: Int, msg: String, skipIt: Boolean) {
+ def syntaxError(offset: Offset, msg: String, skipIt: Boolean) {
if (offset > lastErrorOffset) {
syntaxError(offset, msg)
// no more errors on this token.
@@ -569,10 +559,10 @@ self =>
}
def expectedMsgTemplate(exp: String, fnd: String) = s"$exp expected but $fnd found."
- def expectedMsg(token: Int): String = expectedMsgTemplate(token2string(token), token2string(in.token))
+ def expectedMsg(token: Token): String = expectedMsgTemplate(token2string(token), token2string(in.token))
/** Consume one token of the specified type, or signal an error if it is not there. */
- def accept(token: Int): Int = {
+ def accept(token: Token): Offset = {
val offset = in.offset
if (in.token != token) {
syntaxErrorOrIncomplete(expectedMsg(token), skipIt = false)
@@ -632,8 +622,6 @@ self =>
def isAnnotation: Boolean = in.token == AT
- def isCaseDefStart: Boolean = in.token == CASE
-
def isLocalModifier: Boolean = in.token match {
case ABSTRACT | FINAL | SEALED | IMPLICIT | LAZY => true
case _ => false
@@ -660,14 +648,14 @@ self =>
def isIdent = in.token == IDENTIFIER || in.token == BACKQUOTED_IDENT
- def isLiteralToken(token: Int) = token match {
+ def isLiteralToken(token: Token) = token match {
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT |
STRINGLIT | INTERPOLATIONID | SYMBOLLIT | TRUE | FALSE | NULL => true
case _ => false
}
def isLiteral = isLiteralToken(in.token)
- def isExprIntroToken(token: Int): Boolean = isLiteralToken(token) || (token match {
+ def isExprIntroToken(token: Token): Boolean = isLiteralToken(token) || (token match {
case IDENTIFIER | BACKQUOTED_IDENT |
THIS | SUPER | IF | FOR | NEW | USCORE | TRY | WHILE |
DO | RETURN | THROW | LPAREN | LBRACE | XMLSTART => true
@@ -676,7 +664,7 @@ self =>
def isExprIntro: Boolean = isExprIntroToken(in.token)
- def isTypeIntroToken(token: Int): Boolean = token match {
+ def isTypeIntroToken(token: Token): Boolean = token match {
case IDENTIFIER | BACKQUOTED_IDENT | THIS |
SUPER | USCORE | LPAREN | AT => true
case _ => false
@@ -684,7 +672,9 @@ self =>
def isStatSeqEnd = in.token == RBRACE || in.token == EOF
- def isStatSep(token: Int): Boolean =
+ def isCaseDefEnd = in.token == RBRACE || in.token == CASE || in.token == EOF
+
+ def isStatSep(token: Token): Boolean =
token == NEWLINE || token == NEWLINES || token == SEMI
def isStatSep: Boolean = isStatSep(in.token)
@@ -699,10 +689,10 @@ self =>
/* ---------- TREE CONSTRUCTION ------------------------------------------- */
- def atPos[T <: Tree](offset: Int)(t: T): T = atPos(r2p(offset, offset, in.lastOffset max offset))(t)
- def atPos[T <: Tree](start: Int, point: Int)(t: T): T = atPos(r2p(start, point, in.lastOffset max start))(t)
- def atPos[T <: Tree](start: Int, point: Int, end: Int)(t: T): T = atPos(r2p(start, point, end))(t)
- def atPos[T <: Tree](pos: Position)(t: T): T = global.atPos(pos)(t)
+ def atPos[T <: Tree](offset: Offset)(t: T): T = atPos(r2p(offset))(t)
+ def atPos[T <: Tree](start: Offset, point: Offset)(t: T): T = atPos(r2p(start, point))(t)
+ def atPos[T <: Tree](start: Offset, point: Offset, end: Offset)(t: T): T = atPos(r2p(start, point, end))(t)
+ def atPos[T <: Tree](pos: Position)(t: T): T = global.atPos(pos)(t)
def atInPos[T <: Tree](t: T): T = atPos(o2p(in.offset))(t)
def setInPos[T <: Tree](t: T): T = t setPos o2p(in.offset)
@@ -740,7 +730,7 @@ self =>
}
/** {{{ part { `sep` part } }}},or if sepFirst is true, {{{ { `sep` part } }}}. */
- final def tokenSeparated[T](separator: Int, sepFirst: Boolean, part: => T): List[T] = {
+ final def tokenSeparated[T](separator: Token, sepFirst: Boolean, part: => T): List[T] = {
val ts = new ListBuffer[T]
if (!sepFirst)
ts += part
@@ -783,7 +773,7 @@ self =>
}
}
- def checkAssoc(offset: Int, op: Name, leftAssoc: Boolean) =
+ def checkAssoc(offset: Offset, op: Name, leftAssoc: Boolean) =
if (treeInfo.isLeftAssoc(op) != leftAssoc)
syntaxError(
offset, "left- and right-associative operators with same precedence may not be mixed", skipIt = false)
@@ -823,7 +813,7 @@ self =>
def argType(): Tree
def functionArgType(): Tree
- private def tupleInfixType(start: Int) = {
+ private def tupleInfixType(start: Offset) = {
in.nextToken()
if (in.token == RPAREN) {
in.nextToken()
@@ -1060,7 +1050,7 @@ self =>
t
}
- def selectors(t: Tree, typeOK: Boolean, dotOffset: Int): Tree =
+ def selectors(t: Tree, typeOK: Boolean, dotOffset: Offset): Tree =
if (typeOK && in.token == TYPE) {
in.nextToken()
atPos(t.pos.start, dotOffset) { SingletonTypeTree(t) }
@@ -1118,7 +1108,7 @@ self =>
* | null
* }}}
*/
- def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Int = in.offset): Tree = atPos(start) {
+ def literal(isNegated: Boolean = false, inPattern: Boolean = false, start: Offset = in.offset): Tree = atPos(start) {
def finish(value: Any): Tree = try newLiteral(value) finally in.nextToken()
if (in.token == SYMBOLLIT)
Apply(scalaDot(nme.Symbol), List(finish(in.strVal)))
@@ -1160,7 +1150,7 @@ self =>
/** Consume a USCORE and create a fresh synthetic placeholder param. */
private def freshPlaceholder(): Tree = {
val start = in.offset
- val pname = freshName("x$")
+ val pname = freshTermName()
in.nextToken()
val id = atPos(start)(Ident(pname))
val param = atPos(id.pos.focus)(gen.mkSyntheticParam(pname.toTermName))
@@ -1215,12 +1205,12 @@ self =>
in.nextToken()
}
- def newLineOptWhenFollowedBy(token: Int) {
+ def newLineOptWhenFollowedBy(token: Offset) {
// note: next is defined here because current == NEWLINE
if (in.token == NEWLINE && in.next.token == token) newLineOpt()
}
- def newLineOptWhenFollowing(p: Int => Boolean) {
+ def newLineOptWhenFollowing(p: Token => Boolean) {
// note: next is defined here because current == NEWLINE
if (in.token == NEWLINE && p(in.next.token)) newLineOpt()
}
@@ -1235,7 +1225,7 @@ self =>
if (in.token == COLON) { in.nextToken(); typ() }
else TypeTree()
- def typeOrInfixType(location: Int): Tree =
+ def typeOrInfixType(location: Location): Tree =
if (location == Local) typ()
else startInfixType()
@@ -1246,7 +1236,7 @@ self =>
* WildcardType ::= `_' TypeBounds
* }}}
*/
- def wildcardType(start: Int) = {
+ def wildcardType(start: Offset) = {
val pname = freshTypeName("_$")
val t = atPos(start)(Ident(pname))
val bounds = typeBounds()
@@ -1272,7 +1262,7 @@ self =>
/* hook for IDE, unlike expression can be stubbed
* don't use for any tree that can be inspected in the parser!
*/
- def statement(location: Int): Tree = expr(location) // !!! still needed?
+ def statement(location: Location): Tree = expr(location) // !!! still needed?
/** {{{
* Expr ::= (Bindings | [`implicit'] Id | `_') `=>' Expr
@@ -1299,9 +1289,9 @@ self =>
*/
def expr(): Tree = expr(Local)
- def expr(location: Int): Tree = withPlaceholders(expr0(location), isAny = false)
+ def expr(location: Location): Tree = withPlaceholders(expr0(location), isAny = false)
- def expr0(location: Int): Tree = (in.token: @scala.annotation.switch) match {
+ def expr0(location: Location): Tree = (in.token: @scala.annotation.switch) match {
case IF =>
def parseIf = atPos(in.skipToken()) {
val cond = condExpr()
@@ -1326,7 +1316,7 @@ self =>
in.nextToken()
if (in.token != LBRACE) catchFromExpr()
else inBracesOrNil {
- if (isCaseDefStart) caseClauses()
+ if (in.token == CASE) caseClauses()
else catchFromExpr()
}
}
@@ -1459,7 +1449,7 @@ self =>
* }}}
*/
- def implicitClosure(start: Int, location: Int): Tree = {
+ def implicitClosure(start: Offset, location: Location): Tree = {
val param0 = convertToParam {
atPos(in.offset) {
Ident(ident()) match {
@@ -1637,7 +1627,7 @@ self =>
*/
def blockExpr(): Tree = atPos(in.offset) {
inBraces {
- if (isCaseDefStart) Match(EmptyTree, caseClauses())
+ if (in.token == CASE) Match(EmptyTree, caseClauses())
else block()
}
}
@@ -1723,7 +1713,7 @@ self =>
while (in.token == IF) enums += makeFilter(in.offset, guard())
}
- def makeFilter(start: Int, tree: Tree) = Filter(r2p(start, tree.pos.point, tree.pos.end), tree)
+ def makeFilter(start: Offset, tree: Tree) = Filter(r2p(start, tree.pos.point, tree.pos.end), tree)
/* -------- PATTERNS ------------------------------------------- */
@@ -2245,7 +2235,7 @@ self =>
}
}
val nameOffset = in.offset
- // TODO AM: freshName(o2p(in.skipToken()), "_$$"), will need to update test suite
+ // TODO AM: freshTermName(o2p(in.skipToken()), "_$$"), will need to update test suite
val pname: TypeName = wildcardOrIdent().toTypeName
val param = atPos(start, nameOffset) {
val tparams = typeParamClauseOpt(pname, null) // @M TODO null --> no higher-order context bounds for now
@@ -2287,7 +2277,7 @@ self =>
t setPos o2p(in.offset)
}
- def bound(tok: Int): Tree = if (in.token == tok) { in.nextToken(); typ() } else EmptyTree
+ def bound(tok: Token): Tree = if (in.token == tok) { in.nextToken(); typ() } else EmptyTree
/* -------- DEFS ------------------------------------------- */
@@ -2406,7 +2396,7 @@ self =>
* | type [nl] TypeDcl
* }}}
*/
- def defOrDcl(pos: Int, mods: Modifiers): List[Tree] = {
+ def defOrDcl(pos: Offset, mods: Modifiers): List[Tree] = {
if (mods.isLazy && in.token != VAL)
syntaxError("lazy not allowed here. Only vals can be lazy", skipIt = false)
in.token match {
@@ -2457,7 +2447,6 @@ self =>
EmptyTree
}
def mkDefs(p: Tree, tp: Tree, rhs: Tree): List[Tree] = {
- //Console.println("DEBUG: p = "+p.toString()); // DEBUG
val trees =
makePatDef(newmods,
if (tp.isEmpty) p
@@ -2523,7 +2512,11 @@ self =>
val vparamss = paramClauses(nme.CONSTRUCTOR, classContextBounds map (_.duplicate), ofCaseClass = false)
newLineOptWhenFollowedBy(LBRACE)
val rhs = in.token match {
- case LBRACE => atPos(in.offset) { constrBlock(vparamss) }
+ case LBRACE => {
+ if (settings.future)
+ deprecationWarning(in.offset, "Procedure syntax is deprecated. Convert procedure to method by adding `: Unit =`.")
+ atPos(in.offset) { constrBlock(vparamss) }
+ }
case _ => accept(EQUALS) ; atPos(in.offset) { constrExpr(vparamss) }
}
DefDef(mods, nme.CONSTRUCTOR, List(), vparamss, TypeTree(), rhs)
@@ -2536,7 +2529,7 @@ self =>
}
}
- def funDefRest(start: Int, nameOffset: Int, mods: Modifiers, name: Name): Tree = {
+ def funDefRest(start: Offset, nameOffset: Offset, mods: Modifiers, name: Name): Tree = {
val result = atPos(start, if (name.toTermName == nme.ERROR) start else nameOffset) {
var newmods = mods
// contextBoundBuf is for context bounded type parameters of the form
@@ -2549,10 +2542,14 @@ self =>
var restype = fromWithinReturnType(typedOpt())
val rhs =
if (isStatSep || in.token == RBRACE) {
+ if (settings.future)
+ deprecationWarning(in.lastOffset, "Procedure syntax is deprecated. Convert procedure to method by adding `: Unit`.")
if (restype.isEmpty) restype = scalaUnitConstr
newmods |= Flags.DEFERRED
EmptyTree
} else if (restype.isEmpty && in.token == LBRACE) {
+ if (settings.future)
+ deprecationWarning(in.offset, "Procedure syntax is deprecated. Convert procedure to method by adding `: Unit =`.")
restype = scalaUnitConstr
blockExpr()
} else {
@@ -2619,7 +2616,7 @@ self =>
* TypeDcl ::= type Id [TypeParamClause] TypeBounds
* }}}
*/
- def typeDefOrDcl(start: Int, mods: Modifiers): Tree = {
+ def typeDefOrDcl(start: Offset, mods: Modifiers): Tree = {
in.nextToken()
newLinesOpt()
atPos(start, in.offset) {
@@ -2652,7 +2649,7 @@ self =>
* | [override] trait TraitDef
* }}}
*/
- def tmplDef(pos: Int, mods: Modifiers): Tree = {
+ def tmplDef(pos: Offset, mods: Modifiers): Tree = {
if (mods.isLazy) syntaxError("classes cannot be lazy", skipIt = false)
in.token match {
case TRAIT =>
@@ -2676,7 +2673,7 @@ self =>
* TraitDef ::= Id [TypeParamClause] RequiresTypeOpt TraitTemplateOpt
* }}}
*/
- def classDef(start: Int, mods: Modifiers): ClassDef = {
+ def classDef(start: Offset, mods: Modifiers): ClassDef = {
in.nextToken()
val nameOffset = in.offset
val name = identForType()
@@ -2716,7 +2713,7 @@ self =>
* ObjectDef ::= Id ClassTemplateOpt
* }}}
*/
- def objectDef(start: Int, mods: Modifiers): ModuleDef = {
+ def objectDef(start: Offset, mods: Modifiers): ModuleDef = {
in.nextToken()
val nameOffset = in.offset
val name = ident()
@@ -2741,10 +2738,9 @@ self =>
*/
def packageObjectDef(start: Offset): PackageDef = {
val defn = objectDef(in.offset, NoMods)
- val module = copyModuleDef(defn)(name = nme.PACKAGEkw)
- val pid = atPos(o2p(defn.pos.start))(Ident(defn.name))
-
- makePackaging(start, pid, module :: Nil)
+ val pidPos = o2p(defn.pos.startOrPoint)
+ val pkgPos = r2p(start, pidPos.point)
+ gen.mkPackageObject(defn, pidPos, pkgPos)
}
def packageOrPackageObject(start: Offset): Tree = (
if (in.token == OBJECT)
@@ -2756,7 +2752,7 @@ self =>
)
// TODO - eliminate this and use "def packageObjectDef" (see call site of this
// method for small elaboration.)
- def makePackageObject(start: Int, objDef: ModuleDef): PackageDef = objDef match {
+ def makePackageObject(start: Offset, objDef: ModuleDef): PackageDef = objDef match {
case ModuleDef(mods, name, impl) =>
makePackaging(
start, atPos(o2p(objDef.pos.start)){ Ident(name) }, List(ModuleDef(mods, nme.PACKAGEkw, impl)))
@@ -2831,7 +2827,7 @@ self =>
* TraitExtends ::= `extends' | `<:'
* }}}
*/
- def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Int): Template = {
+ def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Offset): Template = {
val (parents, self, body) = (
if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait) {
in.nextToken()
@@ -2894,14 +2890,26 @@ self =>
/* -------- STATSEQS ------------------------------------------- */
/** Create a tree representing a packaging. */
- def makePackaging(start: Int, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
+ def makePackaging(start: Offset, pkg: Tree, stats: List[Tree]): PackageDef = pkg match {
case x: RefTree => atPos(start, pkg.pos.point)(PackageDef(x, stats))
}
- def makeEmptyPackage(start: Int, stats: List[Tree]): PackageDef = (
+ def makeEmptyPackage(start: Offset, stats: List[Tree]): PackageDef = (
makePackaging(start, atPos(start, start, start)(Ident(nme.EMPTY_PACKAGE_NAME)), stats)
)
+ def statSeq(stat: PartialFunction[Token, List[Tree]], errorMsg: String = "illegal start of definition"): List[Tree] = {
+ val stats = new ListBuffer[Tree]
+ def default(tok: Token) =
+ if (isStatSep) Nil
+ else syntaxErrorOrIncompleteAnd(errorMsg, skipIt = true)(Nil)
+ while (!isStatSeqEnd) {
+ stats ++= stat.applyOrElse(in.token, default)
+ acceptStatSepOpt()
+ }
+ stats.toList
+ }
+
/** {{{
* TopStatSeq ::= TopStat {semi TopStat}
* TopStat ::= Annotations Modifiers TmplDef
@@ -2911,24 +2919,15 @@ self =>
* |
* }}}
*/
- def topStatSeq(): List[Tree] = {
- val stats = new ListBuffer[Tree]
- while (!isStatSeqEnd) {
- stats ++= (in.token match {
- case PACKAGE =>
- packageOrPackageObject(in.skipToken()) :: Nil
- case IMPORT =>
- in.flushDoc
- importClause()
- case x if isAnnotation || isTemplateIntro || isModifier =>
- joinComment(topLevelTmplDef :: Nil)
- case _ =>
- if (isStatSep) Nil
- else syntaxErrorOrIncompleteAnd("expected class or object definition", skipIt = true)(Nil)
- })
- acceptStatSepOpt()
- }
- stats.toList
+ def topStatSeq(): List[Tree] = statSeq(topStat, errorMsg = "expected class or object definition")
+ def topStat: PartialFunction[Token, List[Tree]] = {
+ case PACKAGE =>
+ packageOrPackageObject(in.skipToken()) :: Nil
+ case IMPORT =>
+ in.flushDoc
+ importClause()
+ case _ if isAnnotation || isTemplateIntro || isModifier =>
+ joinComment(topLevelTmplDef :: Nil)
}
/** {{{
@@ -2972,25 +2971,20 @@ self =>
* |
* }}}
*/
- def templateStats(): List[Tree] = {
- val stats = new ListBuffer[Tree]
- while (!isStatSeqEnd) {
- if (in.token == IMPORT) {
- in.flushDoc
- stats ++= importClause()
- } else if (isDefIntro || isModifier || isAnnotation) {
- stats ++= joinComment(nonLocalDefOrDcl)
- } else if (isExprIntro) {
- in.flushDoc
- stats += statement(InTemplate)
- } else if (!isStatSep) {
- syntaxErrorOrIncomplete("illegal start of definition", skipIt = true)
- }
- acceptStatSepOpt()
- }
- stats.toList
+ def templateStats(): List[Tree] = statSeq(templateStat)
+ def templateStat: PartialFunction[Token, List[Tree]] = {
+ case IMPORT =>
+ in.flushDoc
+ importClause()
+ case _ if isDefIntro || isModifier || isAnnotation =>
+ joinComment(nonLocalDefOrDcl)
+ case _ if isExprIntro =>
+ in.flushDoc
+ statement(InTemplate) :: Nil
}
+ def templateOrTopStatSeq(): List[Tree] = statSeq(templateStat.orElse(topStat))
+
/** {{{
* RefineStatSeq ::= RefineStat {semi RefineStat}
* RefineStat ::= Dcl
@@ -3057,14 +3051,14 @@ self =>
*/
def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders {
val stats = new ListBuffer[Tree]
- while (!isStatSeqEnd && !isCaseDefStart) {
+ while (!isStatSeqEnd && !isCaseDefEnd) {
if (in.token == IMPORT) {
stats ++= importClause()
acceptStatSepOpt()
}
else if (isExprIntro) {
stats += statement(InBlock)
- if (in.token != RBRACE && !isCaseDefStart) acceptStatSep()
+ if (!isCaseDefEnd) acceptStatSep()
}
else if (isDefIntro || isLocalModifier || isAnnotation) {
if (in.token == IMPLICIT) {
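
The statSeq refactor replaces the hand-rolled while/match loops with one driver over PartialFunction[Token, List[Tree]] producers, so templateOrTopStatSeq is simply statSeq(templateStat.orElse(topStat)). A standalone toy analogue of that composition (tokens and trees below are invented stand-ins):

    // Toy analogue of statSeq/templateOrTopStatSeq: stat producers are partial
    // functions over tokens, alternatives compose with orElse, and unmatched
    // tokens fall through to a default that yields no trees.
    object StatSeqDemo {
      type Token = Int
      type Tree  = String
      val CLASS = 1; val EXPR = 2; val SEMI = 3

      def statSeq(stat: PartialFunction[Token, List[Tree]])(tokens: List[Token]): List[Tree] =
        tokens.flatMap(t => stat.applyOrElse(t, (_: Token) => Nil))

      val templateStat: PartialFunction[Token, List[Tree]] = { case EXPR  => List("statement") }
      val topStat:      PartialFunction[Token, List[Tree]] = { case CLASS => List("tmplDef") }

      def main(args: Array[String]): Unit =
        println(statSeq(templateStat orElse topStat)(List(CLASS, EXPR, SEMI)))
        // prints: List(tmplDef, statement)
    }
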
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 6957f85689..b12be1a056 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -21,19 +21,24 @@ trait ScannersCommon {
val global : Global
import global._
+ /** Offset into source character array */
+ type Offset = Int
+
+ type Token = Int
+
trait CommonTokenData {
- def token: Int
+ def token: Token
def name: TermName
}
trait ScannerCommon extends CommonTokenData {
// things to fill in, in addition to buf, decodeUni which come from CharArrayReader
- def error (off: Int, msg: String): Unit
- def incompleteInputError(off: Int, msg: String): Unit
- def deprecationWarning(off: Int, msg: String): Unit
+ def error(off: Offset, msg: String): Unit
+ def incompleteInputError(off: Offset, msg: String): Unit
+ def deprecationWarning(off: Offset, msg: String): Unit
}
- def createKeywordArray(keywords: Seq[(Name, Int)], defaultToken: Int): (Int, Array[Int]) = {
+ def createKeywordArray(keywords: Seq[(Name, Token)], defaultToken: Token): (Token, Array[Token]) = {
val names = keywords sortBy (_._1.start) map { case (k, v) => (k.start, v) }
val low = names.head._1
val high = names.last._1
@@ -48,13 +53,10 @@ trait Scanners extends ScannersCommon {
val global : Global
import global._
- /** Offset into source character array */
- type Offset = Int
-
trait TokenData extends CommonTokenData {
/** the next token */
- var token: Int = EMPTY
+ var token: Token = EMPTY
/** the offset of the first character of the current token */
var offset: Offset = 0
@@ -169,7 +171,7 @@ trait Scanners extends ScannersCommon {
def isAtEnd = charOffset >= buf.length
- def resume(lastCode: Int) = {
+ def resume(lastCode: Token) = {
token = lastCode
if (next.token != EMPTY && !reporter.hasErrors)
syntaxError("unexpected end of input: possible missing '}' in XML block")
@@ -194,7 +196,7 @@ trait Scanners extends ScannersCommon {
protected def emitIdentifierDeprecationWarnings = true
/** Clear buffer and set name and token */
- private def finishNamed(idtoken: Int = IDENTIFIER) {
+ private def finishNamed(idtoken: Token = IDENTIFIER) {
name = newTermName(cbuf.toString)
cbuf.clear()
token = idtoken
@@ -225,7 +227,7 @@ trait Scanners extends ScannersCommon {
* (the STRINGLIT appears twice in succession on the stack iff the
* expression is a multiline string literal).
*/
- var sepRegions: List[Int] = List()
+ var sepRegions: List[Token] = List()
// Get next token ------------------------------------------------------------
@@ -583,7 +585,7 @@ trait Scanners extends ScannersCommon {
}
/** Can token start a statement? */
- def inFirstOfStat(token: Int) = token match {
+ def inFirstOfStat(token: Token) = token match {
case EOF | CATCH | ELSE | EXTENDS | FINALLY | FORSOME | MATCH | WITH | YIELD |
COMMA | SEMI | NEWLINE | NEWLINES | DOT | COLON | EQUALS | ARROW | LARROW |
SUBTYPE | VIEWBOUND | SUPERTYPE | HASH | RPAREN | RBRACKET | RBRACE | LBRACKET =>
@@ -593,7 +595,7 @@ trait Scanners extends ScannersCommon {
}
/** Can token end a statement? */
- def inLastOfStat(token: Int) = token match {
+ def inLastOfStat(token: Token) = token match {
case CHARLIT | INTLIT | LONGLIT | FLOATLIT | DOUBLELIT | STRINGLIT | SYMBOLLIT |
IDENTIFIER | BACKQUOTED_IDENT | THIS | NULL | TRUE | FALSE | RETURN | USCORE |
TYPE | XMLSTART | RPAREN | RBRACKET | RBRACE =>
@@ -1122,7 +1124,7 @@ trait Scanners extends ScannersCommon {
def applyBracePatch(): Boolean = false
/** overridden in UnitScanners */
- def parenBalance(token: Int) = 0
+ def parenBalance(token: Token) = 0
/** overridden in UnitScanners */
def healBraces(): List[BracePatch] = List()
@@ -1137,7 +1139,7 @@ trait Scanners extends ScannersCommon {
// ------------- keyword configuration -----------------------------------
- private val allKeywords = List[(Name, Int)](
+ private val allKeywords = List[(Name, Token)](
nme.ABSTRACTkw -> ABSTRACT,
nme.CASEkw -> CASE,
nme.CATCHkw -> CATCH,
@@ -1191,8 +1193,8 @@ trait Scanners extends ScannersCommon {
nme.MACROkw -> IDENTIFIER,
nme.THENkw -> IDENTIFIER)
- private var kwOffset: Int = -1
- private val kwArray: Array[Int] = {
+ private var kwOffset: Offset = -1
+ private val kwArray: Array[Token] = {
val (offset, arr) = createKeywordArray(allKeywords, IDENTIFIER)
kwOffset = offset
arr
@@ -1203,7 +1205,7 @@ trait Scanners extends ScannersCommon {
// Token representation ----------------------------------------------------
/** Returns the string representation of given token. */
- def token2string(token: Int): String = (token: @switch) match {
+ def token2string(token: Token): String = (token: @switch) match {
case IDENTIFIER | BACKQUOTED_IDENT => "identifier"
case CHARLIT => "character literal"
case INTLIT => "integer literal"
@@ -1234,7 +1236,7 @@ trait Scanners extends ScannersCommon {
}
}
- class MalformedInput(val offset: Int, val msg: String) extends Exception
+ class MalformedInput(val offset: Offset, val msg: String) extends Exception
/** A scanner for a given source file not necessarily attached to a compilation unit.
  * Useful for looking inside source files that are not currently compiled to see what's there
@@ -1262,7 +1264,7 @@ trait Scanners extends ScannersCommon {
lazy val parensAnalyzer = new ParensAnalyzer(unit, List())
- override def parenBalance(token: Int) = parensAnalyzer.balance(token)
+ override def parenBalance(token: Token) = parensAnalyzer.balance(token)
override def healBraces(): List[BracePatch] = {
var patches: List[BracePatch] = List()
@@ -1412,7 +1414,7 @@ trait Scanners extends ScannersCommon {
var tabSeen = false
- def line(offset: Int): Int = {
+ def line(offset: Offset): Int = {
def findLine(lo: Int, hi: Int): Int = {
val mid = (lo + hi) / 2
if (offset < lineStart(mid)) findLine(lo, mid - 1)
@@ -1423,7 +1425,7 @@ trait Scanners extends ScannersCommon {
else findLine(0, lineStart.length - 1)
}
- def column(offset: Int): Int = {
+ def column(offset: Offset): Int = {
var col = 0
var i = offset - 1
while (i >= 0 && buf(i) != CR && buf(i) != LF) {
@@ -1485,6 +1487,6 @@ trait Scanners extends ScannersCommon {
// when skimming through the source file trying to heal braces
override def emitIdentifierDeprecationWarnings = false
- override def error(offset: Int, msg: String) {}
+ override def error(offset: Offset, msg: String) {}
}
}
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 59abf99844..28d5aefc2b 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -8,24 +8,21 @@ package ast.parser
import symtab.Flags._
import scala.collection.mutable.ListBuffer
-import scala.reflect.internal.util.Position
+import scala.reflect.internal.util.{Position, SourceFile, FreshNameCreator}
/** Methods for building trees, used in the parser. All the trees
* returned by this class must be untyped.
*/
abstract class TreeBuilder {
-
val global: Global
import global._
- def freshName(): Name = freshName("x$")
- def freshTermName(): TermName = freshTermName("x$")
+ def unit: CompilationUnit
+ def source: SourceFile
- def freshName(prefix: String): Name
- def freshTermName(prefix: String): TermName
- def freshTypeName(prefix: String): TypeName
- def o2p(offset: Int): Position
- def r2p(start: Int, point: Int, end: Int): Position
+ implicit def fresh: FreshNameCreator = unit.fresh
+ def o2p(offset: Int): Position = Position.offset(source, offset)
+ def r2p(start: Int, mid: Int, end: Int): Position = rangePos(source, start, mid, end)
def rootScalaDot(name: Name) = gen.rootScalaDot(name)
def scalaDot(name: Name) = gen.scalaDot(name)
@@ -325,7 +322,7 @@ abstract class TreeBuilder {
/* If `pat` is not yet a `Bind` wrap it in one with a fresh name */
def makeBind(pat: Tree): Tree = pat match {
case Bind(_, _) => pat
- case _ => Bind(freshName(), pat) setPos pat.pos
+ case _ => Bind(freshTermName(), pat) setPos pat.pos
}
/* A reference to the name bound in Bind `pat`. */
@@ -416,7 +413,7 @@ abstract class TreeBuilder {
* }
*/
def makeCatchFromExpr(catchExpr: Tree): CaseDef = {
- val binder = freshTermName("x")
+ val binder = freshTermName()
val pat = Bind(binder, Typed(Ident(nme.WILDCARD), Ident(tpnme.Throwable)))
val catchDef = ValDef(Modifiers(ARTIFACT), freshTermName("catchExpr"), TypeTree(), catchExpr)
val catchFn = Ident(catchDef.name)
@@ -520,13 +517,3 @@ abstract class TreeBuilder {
}
}
}
-
-abstract class UnitTreeBuilder extends TreeBuilder {
- import global._
- def unit: CompilationUnit
- def freshName(prefix: String): Name = freshTermName(prefix)
- def freshTermName(prefix: String): TermName = unit.freshTermName(prefix)
- def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix)
- def o2p(offset: Int): Position = Position.offset(unit.source, offset)
- def r2p(start: Int, mid: Int, end: Int): Position = rangePos(unit.source, start, mid, end)
-}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
index 2f6f9620a8..8bbc382251 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala
@@ -2370,7 +2370,8 @@ abstract class GenASM extends SubComponent with BytecodeWriters with GenJVMASM {
case LOAD_MODULE(module) =>
// assert(module.isModule, "Expected module: " + module)
debuglog("generating LOAD_MODULE for: " + module + " flags: " + module.flagString)
- if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString) {
+ def inStaticMethod = this.method != null && this.method.symbol.isStaticMember
+ if (clasz.symbol == module.moduleClass && jMethodName != nme.readResolve.toString && !inStaticMethod) {
jmethod.visitVarInsn(Opcodes.ALOAD, 0)
} else {
jmethod.visitFieldInsn(
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index 01d5791f60..b8ca4adc14 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -184,6 +184,8 @@ trait ScalaSettings extends AbsScalaSettings
val YnoLoadImplClass = BooleanSetting ("-Yno-load-impl-class", "Do not load $class.class files.")
val exposeEmptyPackage = BooleanSetting("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly()
+ // the current standard is "inline" but we are moving towards "method"
+ val Ydelambdafy = ChoiceSetting ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method"), "inline")
private def removalIn212 = "This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug."
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
index 9ac1ce1b9c..ce3e7b1bb5 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala
@@ -11,6 +11,7 @@ import java.lang.Float.floatToIntBits
import java.lang.Double.doubleToLongBits
import scala.io.Codec
import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat }
+import scala.reflect.internal.util.shortClassOfInstance
import scala.collection.mutable.LinkedHashMap
import PickleFormat._
import Flags._
@@ -80,7 +81,7 @@ abstract class Pickler extends SubComponent {
private var entries = new Array[AnyRef](256)
private var ep = 0
private val index = new LinkedHashMap[AnyRef, Int]
- private lazy val nonClassRoot = findOrElse(root.ownersIterator)(!_.isClass)(NoSymbol)
+ private lazy val nonClassRoot = findSymbol(root.ownersIterator)(!_.isClass)
private def isRootSym(sym: Symbol) =
sym.name.toTermName == rootName && sym.owner == rootOwner
@@ -106,13 +107,14 @@ abstract class Pickler extends SubComponent {
* anyway? This is the case if symbol is a refinement class,
* an existentially bound variable, or a higher-order type parameter.
*/
- private def isLocal(sym: Symbol): Boolean =
- !sym.isPackageClass && sym != NoSymbol &&
- (isRootSym(sym) ||
- sym.isRefinementClass ||
- sym.isAbstractType && sym.hasFlag(EXISTENTIAL) || // existential param
- sym.isParameter ||
- isLocal(sym.owner))
+ private def isLocal(sym: Symbol): Boolean = (sym != NoSymbol) && !sym.isPackageClass && (
+ isRootSym(sym)
+ || sym.isRefinementClass
+ || sym.isAbstractType && sym.hasFlag(EXISTENTIAL) // existential param
+ || sym.isParameter
+ || isLocal(sym.owner)
+ )
+ private def isExternalSymbol(sym: Symbol): Boolean = (sym != NoSymbol) && !isLocal(sym)
// Phase 1 methods: Populate entries/index ------------------------------------
@@ -135,13 +137,18 @@ abstract class Pickler extends SubComponent {
true
}
+ private def deskolemizeTypeSymbols(ref: AnyRef): AnyRef = ref match {
+ case sym: Symbol => deskolemize(sym)
+ case _ => ref
+ }
+
/** If the symbol is a type skolem, deskolemize and log it.
* If we fail to deskolemize, in a method like
* trait Trait[+A] { def f[CC[X]] : CC[A] }
* the applied type CC[A] will hold a different CC symbol
* than the type-constructor type-parameter CC.
*/
- private def deskolemize(sym: Symbol) = {
+ private def deskolemize(sym: Symbol): Symbol = {
if (sym.isTypeSkolem) {
val sym1 = sym.deSkolemize
log({
@@ -169,7 +176,7 @@ abstract class Pickler extends SubComponent {
putSymbol(sym.owner)
putSymbol(sym.privateWithin)
putType(sym.info)
- if (sym.thisSym.tpeHK != sym.tpeHK)
+ if (sym.hasSelfType)
putType(sym.typeOfThis)
putSymbol(sym.alias)
if (!sym.children.isEmpty) {
@@ -202,252 +209,65 @@ abstract class Pickler extends SubComponent {
case ThisType(sym) =>
putSymbol(sym)
case SingleType(pre, sym) =>
- putType(pre); putSymbol(sym)
+ putType(pre)
+ putSymbol(sym)
case SuperType(thistpe, supertpe) =>
putType(thistpe)
putType(supertpe)
case ConstantType(value) =>
putConstant(value)
case TypeRef(pre, sym, args) =>
-// if (sym.isAbstractType && (sym hasFlag EXISTENTIAL))
-// if (!(boundSyms contains sym))
-// println("unbound existential: "+sym+sym.locationString)
- putType(pre); putSymbol(sym); putTypes(args)
+ putType(pre)
+ putSymbol(sym)
+ putTypes(args)
case TypeBounds(lo, hi) =>
- putType(lo); putType(hi)
- case RefinedType(parents, decls) =>
- val rclazz = tp.typeSymbol
- for (m <- decls.iterator)
- if (m.owner != rclazz) abort("bad refinement member "+m+" of "+tp+", owner = "+m.owner)
- putSymbol(rclazz); putTypes(parents); putSymbols(decls.toList)
- case ClassInfoType(parents, decls, clazz) =>
- putSymbol(clazz); putTypes(parents); putSymbols(decls.toList)
+ putType(lo)
+ putType(hi)
+ case tp: CompoundType =>
+ putSymbol(tp.typeSymbol)
+ putTypes(tp.parents)
+ putSymbols(tp.decls.toList)
case MethodType(params, restpe) =>
- putType(restpe); putSymbols(params)
+ putType(restpe)
+ putSymbols(params)
case NullaryMethodType(restpe) =>
putType(restpe)
case PolyType(tparams, restpe) =>
- /* no longer needed since all params are now local
- tparams foreach { tparam =>
- if (!isLocal(tparam)) locals += tparam // similar to existential types, these tparams are local
- }
- */
- putType(restpe); putSymbols(tparams)
+ putType(restpe)
+ putSymbols(tparams)
case ExistentialType(tparams, restpe) =>
-// val savedBoundSyms = boundSyms // boundSyms are known to be local based on the EXISTENTIAL flag (see isLocal)
-// boundSyms = tparams ::: boundSyms
-// try {
- putType(restpe)
- // } finally {
-// boundSyms = savedBoundSyms
-// }
+ putType(restpe)
putSymbols(tparams)
- case AnnotatedType(annotations, underlying, selfsym) =>
+ case AnnotatedType(_, underlying, selfsym) =>
putType(underlying)
if (settings.selfInAnnots) putSymbol(selfsym)
- putAnnotations(annotations filter (_.isStatic))
+ tp.staticAnnotations foreach putAnnotation
case _ =>
throw new FatalError("bad type: " + tp + "(" + tp.getClass + ")")
}
}
private def putTypes(tps: List[Type]) { tps foreach putType }
- private def putTree(tree: Tree): Unit = if (putEntry(tree)) {
- if (tree != EmptyTree)
- putType(tree.tpe)
- if (tree.hasSymbolField)
- putSymbol(tree.symbol)
-
- tree match {
- case EmptyTree =>
-
- case tree@PackageDef(pid, stats) =>
- putTree(pid)
- putTrees(stats)
-
- case ClassDef(mods, name, tparams, impl) =>
- putMods(mods)
- putEntry(name)
- putTree(impl)
- putTrees(tparams)
-
- case ModuleDef(mods, name, impl) =>
- putMods(mods)
- putEntry(name)
- putTree(impl)
-
- case ValDef(mods, name, tpt, rhs) =>
- putMods(mods)
- putEntry(name)
- putTree(tpt)
- putTree(rhs)
-
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- putMods(mods)
- putEntry(name)
- putTrees(tparams)
- putTreess(vparamss)
- putTree(tpt)
- putTree(rhs)
-
- case TypeDef(mods, name, tparams, rhs) =>
- putMods(mods)
- putEntry(name)
- putTree(rhs)
- putTrees(tparams)
-
- case LabelDef(name, params, rhs) =>
- putEntry(name)
- putTree(rhs)
- putTrees(params)
-
- case Import(expr, selectors) =>
- putTree(expr)
- for (ImportSelector(from, _, to, _) <- selectors) {
- putEntry(from)
- putEntry(to)
- }
-/*
- case DocDef(comment, definition) => should not be needed
- putConstant(Constant(comment))
- putTree(definition)
-*/
- case Template(parents, self, body) =>
- putTrees(parents)
- putTree(self)
- putTrees(body)
-
- case Block(stats, expr) =>
- putTree(expr)
- putTrees(stats)
-
- case CaseDef(pat, guard, body) =>
- putTree(pat)
- putTree(guard)
- putTree(body)
-
- case Alternative(trees) =>
- putTrees(trees)
-
- case Star(elem) =>
- putTree(elem)
-
- case Bind(name, body) =>
- putEntry(name)
- putTree(body)
-
- case UnApply(fun: Tree, args) =>
- putTree(fun)
- putTrees(args)
-
- case ArrayValue(elemtpt, trees) =>
- putTree(elemtpt)
- putTrees(trees)
-
-
- case Function(vparams, body) =>
- putTree(body)
- putTrees(vparams)
-
- case Assign(lhs, rhs) =>
- putTree(lhs)
- putTree(rhs)
-
- case If(cond, thenp, elsep) =>
- putTree(cond)
- putTree(thenp)
- putTree(elsep)
-
- case Match(selector, cases) =>
- putTree(selector)
- putTrees(cases)
-
- case Return(expr) =>
- putTree(expr)
-
- case Try(block, catches, finalizer) =>
- putTree(block)
- putTree(finalizer)
- putTrees(catches)
-
- case Throw(expr) =>
- putTree(expr)
-
- case New(tpt) =>
- putTree(tpt)
-
- case Typed(expr, tpt) =>
- putTree(expr)
- putTree(tpt)
-
- case TypeApply(fun, args) =>
- putTree(fun)
- putTrees(args)
-
- case Apply(fun, args) =>
- putTree(fun)
- putTrees(args)
-
- case ApplyDynamic(qual, args) =>
- putTree(qual)
- putTrees(args)
-
- case Super(qual, mix) =>
- putTree(qual)
- putEntry(mix:Name)
-
- case This(qual) =>
- putEntry(qual)
-
- case Select(qualifier, selector) =>
- putTree(qualifier)
- putEntry(selector)
-
- case Ident(name) =>
- putEntry(name)
-
- case Literal(value) =>
- putEntry(value)
-
- case TypeTree() =>
-
- case Annotated(annot, arg) =>
- putTree(annot)
- putTree(arg)
-
- case SingletonTypeTree(ref) =>
- putTree(ref)
-
- case SelectFromTypeTree(qualifier, selector) =>
- putTree(qualifier)
- putEntry(selector)
-
- case CompoundTypeTree(templ: Template) =>
- putTree(templ)
-
- case AppliedTypeTree(tpt, args) =>
- putTree(tpt)
- putTrees(args)
-
- case TypeBoundsTree(lo, hi) =>
- putTree(lo)
- putTree(hi)
-
- case ExistentialTypeTree(tpt, whereClauses) =>
- putTree(tpt)
- putTrees(whereClauses)
+ private object putTreeTraverser extends Traverser {
+ // Only used when pickling trees, i.e. in an argument of some Annotation
+ // annotations in Modifiers are removed by the typechecker
+ override def traverseModifiers(mods: Modifiers): Unit = if (putEntry(mods)) putEntry(mods.privateWithin)
+ override def traverseName(name: Name): Unit = putEntry(name)
+ override def traverseConstant(const: Constant): Unit = putEntry(const)
+ override def traverse(tree: Tree): Unit = putTree(tree)
+
+ def put(tree: Tree): Unit = {
+ if (tree.canHaveAttrs)
+ putType(tree.tpe)
+ if (tree.hasSymbolField)
+ putSymbol(tree.symbol)
+
+ super.traverse(tree)
}
}
-
- private def putTrees(trees: List[Tree]) = trees foreach putTree
- private def putTreess(treess: List[List[Tree]]) = treess foreach putTrees
-
- /** only used when pickling trees, i.e. in an
- * argument of some Annotation */
- private def putMods(mods: Modifiers) = if (putEntry(mods)) {
- // annotations in Modifiers are removed by the typechecker
- val Modifiers(_, privateWithin, Nil) = mods
- putEntry(privateWithin)
+ private def putTree(tree: Tree) {
+ if (putEntry(tree))
+ putTreeTraverser put tree
}
/** Store a constant in map index, along with anything it references.
@@ -461,7 +281,7 @@ abstract class Pickler extends SubComponent {
}
private def putChildren(sym: Symbol, children: List[Symbol]) {
- assert(putEntry((sym, children)))
+ putEntry(sym -> children)
children foreach putSymbol
}
@@ -469,14 +289,10 @@ abstract class Pickler extends SubComponent {
private def putAnnotation(sym: Symbol, annot: AnnotationInfo) {
// if an annotation with the same arguments is applied to the
// same symbol multiple times, it's only pickled once.
- if (putEntry((sym, annot)))
+ if (putEntry(sym -> annot))
putAnnotationBody(annot)
}
- /** used in AnnotatedType only, i.e. annotations on types */
- private def putAnnotations(annots: List[AnnotationInfo]) {
- annots foreach putAnnotation
- }
private def putAnnotation(annot: AnnotationInfo) {
if (putEntry(annot))
putAnnotationBody(annot)
@@ -510,14 +326,11 @@ abstract class Pickler extends SubComponent {
/** Write a reference to object, i.e., the object's number in the map index.
*/
- private def writeRef(ref0: AnyRef) {
- val ref = ref0 match {
- case sym: Symbol => deskolemize(sym)
- case _ => ref0
- }
- writeNat(index(ref))
+ private def writeRef(ref: AnyRef) {
+ writeNat(index(deskolemizeTypeSymbols(ref)))
}
- private def writeRefs(refs: List[AnyRef]) { refs foreach writeRef }
+ private def writeRefs(refs: List[AnyRef]): Unit = refs foreach writeRef
+
private def writeRefsWithLength(refs: List[AnyRef]) {
writeNat(refs.length)
writeRefs(refs)
@@ -567,446 +380,137 @@ abstract class Pickler extends SubComponent {
}
}
- /** Write an entry */
- private def writeEntry(entry: AnyRef) {
- def writeBody(entry: AnyRef): Int = entry match {
- case name: Name =>
- writeName(name)
- if (name.isTermName) TERMname else TYPEname
- case NoSymbol =>
- NONEsym
- case sym: Symbol if !isLocal(sym) =>
- val tag =
- if (sym.isModuleClass) {
- writeRef(sym.name.toTermName); EXTMODCLASSref
- } else {
- writeRef(sym.name); EXTref
- }
- if (!sym.owner.isRoot) writeRef(sym.owner)
- tag
- case sym: ClassSymbol =>
- writeSymInfo(sym)
- if (sym.thisSym.tpe_* != sym.tpe_*) writeRef(sym.typeOfThis)
- CLASSsym
- case sym: TypeSymbol =>
- writeSymInfo(sym)
- if (sym.isAbstractType) TYPEsym else ALIASsym
- case sym: TermSymbol =>
- writeSymInfo(sym)
- if (sym.alias != NoSymbol) writeRef(sym.alias)
- if (sym.isModule) MODULEsym else VALsym
- case NoType =>
- NOtpe
- case NoPrefix =>
- NOPREFIXtpe
- case ThisType(sym) =>
- writeRef(sym); THIStpe
- case SingleType(pre, sym) =>
- writeRef(pre); writeRef(sym); SINGLEtpe
- case SuperType(thistpe, supertpe) =>
- writeRef(thistpe); writeRef(supertpe); SUPERtpe
- case ConstantType(value) =>
- writeRef(value); CONSTANTtpe
- case TypeRef(pre, sym, args) =>
- writeRef(pre); writeRef(sym); writeRefs(args); TYPEREFtpe
- case TypeBounds(lo, hi) =>
- writeRef(lo); writeRef(hi); TYPEBOUNDStpe
- case tp @ RefinedType(parents, decls) =>
- writeRef(tp.typeSymbol); writeRefs(parents); REFINEDtpe
- case ClassInfoType(parents, decls, clazz) =>
- writeRef(clazz); writeRefs(parents); CLASSINFOtpe
- case mt @ MethodType(formals, restpe) =>
- writeRef(restpe); writeRefs(formals) ; METHODtpe
- case mt @ NullaryMethodType(restpe) =>
- // reuse POLYtpe since those can never have an empty list of tparams.
- // TODO: is there any way this can come back and bite us in the bottom?
- // ugliness and thrift aside, this should make this somewhat more backward compatible
- // (I'm not sure how old scalac's would deal with nested PolyTypes, as these used to be folded into one)
- writeRef(restpe); writeRefs(Nil); POLYtpe
- case PolyType(tparams, restpe) => // invar: tparams nonEmpty
- writeRef(restpe); writeRefs(tparams); POLYtpe
- case ExistentialType(tparams, restpe) =>
- writeRef(restpe); writeRefs(tparams); EXISTENTIALtpe
- case c @ Constant(_) =>
- if (c.tag == BooleanTag) writeLong(if (c.booleanValue) 1 else 0)
- else if (ByteTag <= c.tag && c.tag <= LongTag) writeLong(c.longValue)
- else if (c.tag == FloatTag) writeLong(floatToIntBits(c.floatValue).toLong)
- else if (c.tag == DoubleTag) writeLong(doubleToLongBits(c.doubleValue))
- else if (c.tag == StringTag) writeRef(newTermName(c.stringValue))
- else if (c.tag == ClazzTag) writeRef(c.typeValue)
- else if (c.tag == EnumTag) writeRef(c.symbolValue)
- LITERAL + c.tag // also treats UnitTag, NullTag; no value required
- case AnnotatedType(annotations, tp, selfsym) =>
- annotations filter (_.isStatic) match {
- case Nil => writeBody(tp) // write the underlying type if there are no annotations
- case staticAnnots =>
- if (settings.selfInAnnots && selfsym != NoSymbol)
- writeRef(selfsym)
- writeRef(tp)
- writeRefs(staticAnnots)
- ANNOTATEDtpe
- }
- // annotations attached to a symbol (i.e. annots on terms)
- case (target: Symbol, annot@AnnotationInfo(_, _, _)) =>
- writeRef(target)
- writeAnnotation(annot)
- SYMANNOT
-
- case ArrayAnnotArg(args) =>
- args foreach writeClassfileAnnotArg
- ANNOTARGARRAY
-
- case (target: Symbol, children: List[_]) =>
- writeRef(target)
- writeRefs(children.asInstanceOf[List[Symbol]])
- CHILDREN
-
- case EmptyTree =>
- writeNat(EMPTYtree)
- TREE
-
- case tree@PackageDef(pid, stats) =>
- writeNat(PACKAGEtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(tree.mods)
- writeRef(pid)
- writeRefs(stats)
- TREE
-
- case tree@ClassDef(mods, name, tparams, impl) =>
- writeNat(CLASStree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(mods)
- writeRef(name)
- writeRef(impl)
- writeRefs(tparams)
- TREE
-
- case tree@ModuleDef(mods, name, impl) =>
- writeNat(MODULEtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(mods)
- writeRef(name)
- writeRef(impl)
- TREE
-
- case tree@ValDef(mods, name, tpt, rhs) =>
- writeNat(VALDEFtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(mods)
- writeRef(name)
- writeRef(tpt)
- writeRef(rhs)
- TREE
-
- case tree@DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- writeNat(DEFDEFtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(mods)
- writeRef(name)
- writeRefsWithLength(tparams)
- writeNat(vparamss.length)
- vparamss foreach writeRefsWithLength
- writeRef(tpt)
- writeRef(rhs)
- TREE
-
- case tree@TypeDef(mods, name, tparams, rhs) =>
- writeNat(TYPEDEFtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(mods)
- writeRef(name)
- writeRef(rhs)
- writeRefs(tparams)
- TREE
-
- case tree@LabelDef(name, params, rhs) =>
- writeNat(LABELtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(name)
- writeRef(rhs)
- writeRefs(params)
- TREE
-
- case tree@Import(expr, selectors) =>
- writeNat(IMPORTtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(expr)
- for (ImportSelector(from, _, to, _) <- selectors) {
- writeRef(from)
- writeRef(to)
- }
- TREE
-
- case tree@DocDef(comment, definition) =>
- writeNat(DOCDEFtree)
- writeRef(tree.tpe)
- writeRef(Constant(comment))
- writeRef(definition)
- TREE
-
- case tree@Template(parents, self, body) =>
- writeNat(TEMPLATEtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRefsWithLength(parents)
- writeRef(self)
- writeRefs(body)
- TREE
-
- case tree@Block(stats, expr) =>
- writeNat(BLOCKtree)
- writeRef(tree.tpe)
- writeRef(expr)
- writeRefs(stats)
- TREE
-
- case tree@CaseDef(pat, guard, body) =>
- writeNat(CASEtree)
- writeRef(tree.tpe)
- writeRef(pat)
- writeRef(guard)
- writeRef(body)
- TREE
-
- case tree@Alternative(trees) =>
- writeNat(ALTERNATIVEtree)
- writeRef(tree.tpe)
- writeRefs(trees)
- TREE
-
- case tree@Star(elem) =>
- writeNat(STARtree)
- writeRef(tree.tpe)
- writeRef(elem)
- TREE
-
- case tree@Bind(name, body) =>
- writeNat(BINDtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(name)
- writeRef(body)
- TREE
-
- case tree@UnApply(fun: Tree, args) =>
- writeNat(UNAPPLYtree)
- writeRef(tree.tpe)
- writeRef(fun)
- writeRefs(args)
- TREE
-
- case tree@ArrayValue(elemtpt, trees) =>
- writeNat(ARRAYVALUEtree)
- writeRef(tree.tpe)
- writeRef(elemtpt)
- writeRefs(trees)
- TREE
-
- case tree@Function(vparams, body) =>
- writeNat(FUNCTIONtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(body)
- writeRefs(vparams)
- TREE
-
- case tree@Assign(lhs, rhs) =>
- writeNat(ASSIGNtree)
- writeRef(tree.tpe)
- writeRef(lhs)
- writeRef(rhs)
- TREE
-
- case tree@If(cond, thenp, elsep) =>
- writeNat(IFtree)
- writeRef(tree.tpe)
- writeRef(cond)
- writeRef(thenp)
- writeRef(elsep)
- TREE
-
- case tree@Match(selector, cases) =>
- writeNat(MATCHtree)
- writeRef(tree.tpe)
- writeRef(selector)
- writeRefs(cases)
- TREE
-
- case tree@Return(expr) =>
- writeNat(RETURNtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(expr)
- TREE
-
- case tree@Try(block, catches, finalizer) =>
- writeNat(TREtree)
- writeRef(tree.tpe)
- writeRef(block)
- writeRef(finalizer)
- writeRefs(catches)
- TREE
-
- case tree@Throw(expr) =>
- writeNat(THROWtree)
- writeRef(tree.tpe)
- writeRef(expr)
- TREE
-
- case tree@New(tpt) =>
- writeNat(NEWtree)
- writeRef(tree.tpe)
- writeRef(tpt)
- TREE
-
- case tree@Typed(expr, tpt) =>
- writeNat(TYPEDtree)
- writeRef(tree.tpe)
- writeRef(expr)
- writeRef(tpt)
- TREE
-
- case tree@TypeApply(fun, args) =>
- writeNat(TYPEAPPLYtree)
- writeRef(tree.tpe)
- writeRef(fun)
- writeRefs(args)
- TREE
-
- case tree@Apply(fun, args) =>
- writeNat(APPLYtree)
- writeRef(tree.tpe)
- writeRef(fun)
- writeRefs(args)
- TREE
-
- case tree@ApplyDynamic(qual, args) =>
- writeNat(APPLYDYNAMICtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(qual)
- writeRefs(args)
- TREE
-
- case tree@Super(qual, mix) =>
- writeNat(SUPERtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(qual)
- writeRef(mix)
- TREE
-
- case tree@This(qual) =>
- writeNat(THIStree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(qual)
- TREE
-
- case tree@Select(qualifier, selector) =>
- writeNat(SELECTtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(qualifier)
- writeRef(selector)
- TREE
-
- case tree@Ident(name) =>
- writeNat(IDENTtree)
- writeRef(tree.tpe)
- writeRef(tree.symbol)
- writeRef(name)
- TREE
-
- case tree@Literal(value) =>
- writeNat(LITERALtree)
- writeRef(tree.tpe)
- writeRef(value)
- TREE
-
- case tree@TypeTree() =>
- writeNat(TYPEtree)
- writeRef(tree.tpe)
- TREE
-
- case tree@Annotated(annot, arg) =>
- writeNat(ANNOTATEDtree)
- writeRef(tree.tpe)
- writeRef(annot)
- writeRef(arg)
- TREE
-
- case tree@SingletonTypeTree(ref) =>
- writeNat(SINGLETONTYPEtree)
+ private object writeTreeBodyTraverser extends Traverser {
+ private var refs = false
+ @inline private def asRefs[T](body: => T): T = {
+ val saved = refs
+ refs = true
+ try body finally refs = saved
+ }
+ override def traverseModifiers(mods: Modifiers): Unit = if (refs) writeRef(mods) else super.traverseModifiers(mods)
+ override def traverseName(name: Name): Unit = writeRef(name)
+ override def traverseConstant(const: Constant): Unit = writeRef(const)
+ override def traverseParams(params: List[Tree]): Unit = writeRefsWithLength(params)
+ override def traverseParamss(vparamss: List[List[Tree]]): Unit = {
+ writeNat(vparamss.length)
+ super.traverseParamss(vparamss)
+ }
+ override def traverse(tree: Tree): Unit = {
+ if (refs)
+ writeRef(tree)
+ else {
writeRef(tree.tpe)
- writeRef(ref)
- TREE
+ if (tree.hasSymbolField)
+ writeRef(tree.symbol)
- case tree@SelectFromTypeTree(qualifier, selector) =>
- writeNat(SELECTFROMTYPEtree)
- writeRef(tree.tpe)
- writeRef(qualifier)
- writeRef(selector)
- TREE
+ asRefs(super.traverse(tree))
+ }
+ }
+ }
- case tree@CompoundTypeTree(templ: Template) =>
- writeNat(COMPOUNDTYPEtree)
- writeRef(tree.tpe)
- writeRef(templ)
- TREE
+ /** Write an entry */
+ private def writeEntry(entry: AnyRef) {
+ def writeLocalSymbolBody(sym: Symbol) {
+ writeSymInfo(sym)
+ sym match {
+ case _: ClassSymbol if sym.hasSelfType => writeRef(sym.typeOfThis)
+ case _: TermSymbol if sym.alias.exists => writeRef(sym.alias)
+ case _ =>
+ }
+ }
+ def writeExtSymbolBody(sym: Symbol) {
+ val name = if (sym.isModuleClass) sym.name.toTermName else sym.name
+ writeRef(name)
+ if (!sym.owner.isRoot)
+ writeRef(sym.owner)
+ }
+ def writeSymbolBody(sym: Symbol) {
+ if (sym ne NoSymbol) {
+ if (isLocal(sym))
+ writeLocalSymbolBody(sym)
+ else
+ writeExtSymbolBody(sym)
+ }
+ }
- case tree@AppliedTypeTree(tpt, args) =>
- writeNat(APPLIEDTYPEtree)
- writeRef(tree.tpe)
- writeRef(tpt)
- writeRefs(args)
- TREE
+ // NullaryMethodType reuses POLYtpe since those can never have an empty list of tparams.
+ // TODO: is there any way this can come back and bite us in the bottom?
+ // ugliness and thrift aside, this should make this somewhat more backward compatible
+ // (I'm not sure how old scalac's would deal with nested PolyTypes, as these used to be folded into one)
+ def writeTypeBody(tpe: Type): Unit = tpe match {
+ case NoType | NoPrefix =>
+ case ThisType(sym) => writeRef(sym)
+ case SingleType(pre, sym) => writeRef(pre) ; writeRef(sym)
+ case SuperType(thistpe, supertpe) => writeRef(thistpe) ; writeRef(supertpe)
+ case ConstantType(value) => writeRef(value)
+ case TypeBounds(lo, hi) => writeRef(lo) ; writeRef(hi)
+ case TypeRef(pre, sym, args) => writeRef(pre) ; writeRef(sym); writeRefs(args)
+ case MethodType(formals, restpe) => writeRef(restpe) ; writeRefs(formals)
+ case NullaryMethodType(restpe) => writeRef(restpe); writeRefs(Nil)
+ case PolyType(tparams, restpe) => writeRef(restpe); writeRefs(tparams)
+ case ExistentialType(tparams, restpe) => writeRef(restpe); writeRefs(tparams)
+ case StaticallyAnnotatedType(annots, tp) => writeRef(tp) ; writeRefs(annots)
+ case AnnotatedType(_, tp, _) => writeTypeBody(tp) // write the underlying type if there are no static annotations
+ case CompoundType(parents, _, clazz) => writeRef(clazz); writeRefs(parents)
+ }
- case tree@TypeBoundsTree(lo, hi) =>
- writeNat(TYPEBOUNDStree)
- writeRef(tree.tpe)
- writeRef(lo)
- writeRef(hi)
- TREE
+ def writeTreeBody(tree: Tree) {
+ writeNat(picklerSubTag(tree))
+ if (!tree.isEmpty)
+ writeTreeBodyTraverser traverse tree
+ }
- case tree@ExistentialTypeTree(tpt, whereClauses) =>
- writeNat(EXISTENTIALTYPEtree)
- writeRef(tree.tpe)
- writeRef(tpt)
- writeRefs(whereClauses)
- TREE
+ def writeConstant(c: Constant): Unit = c.tag match {
+ case BooleanTag => writeLong(if (c.booleanValue) 1 else 0)
+ case FloatTag => writeLong(floatToIntBits(c.floatValue).toLong)
+ case DoubleTag => writeLong(doubleToLongBits(c.doubleValue))
+ case StringTag => writeRef(newTermName(c.stringValue))
+ case ClazzTag => writeRef(c.typeValue)
+ case EnumTag => writeRef(c.symbolValue)
+ case tag => if (ByteTag <= tag && tag <= LongTag) writeLong(c.longValue)
+ }
- case Modifiers(flags, privateWithin, _) =>
- val pflags = rawToPickledFlags(flags)
- writeNat((pflags >> 32).toInt)
- writeNat((pflags & 0xFFFFFFFF).toInt)
- writeRef(privateWithin)
- MODIFIERS
+ def writeModifiers(mods: Modifiers) {
+ val pflags = rawToPickledFlags(mods.flags)
+ writeNat((pflags >> 32).toInt)
+ writeNat((pflags & 0xFFFFFFFF).toInt)
+ writeRef(mods.privateWithin)
+ }
- // annotations on types (not linked to a symbol)
- case annot@AnnotationInfo(_, _, _) =>
- writeAnnotation(annot)
- ANNOTINFO
+ def writeSymbolTuple(target: Symbol, other: Any) {
+ writeRef(target)
+ other match {
+ case annot: AnnotationInfo => writeAnnotation(annot)
+ case children: List[Symbol @unchecked] => writeRefs(children)
+ case _ =>
+ }
+ }
- case _ =>
- throw new FatalError("bad entry: " + entry + " " + entry.getClass)
+ def writeBody(entry: AnyRef): Unit = entry match {
+ case tree: Tree => writeTreeBody(tree)
+ case sym: Symbol => writeSymbolBody(sym)
+ case tpe: Type => writeTypeBody(tpe)
+ case name: Name => writeName(name)
+ case const: Constant => writeConstant(const)
+ case mods: Modifiers => writeModifiers(mods)
+ case annot: AnnotationInfo => writeAnnotation(annot)
+ case (target: Symbol, other) => writeSymbolTuple(target, other)
+ case ArrayAnnotArg(args) => args foreach writeClassfileAnnotArg
+ case _ => devWarning(s"Unexpected entry to pickler ${shortClassOfInstance(entry)} $entry")
}
// begin writeEntry
- val startpos = writeIndex
- // reserve some space so that the patchNat's most likely won't need to shift
- writeByte(0); writeByte(0)
- patchNat(startpos, writeBody(entry))
- patchNat(startpos + 1, writeIndex - (startpos + 2))
+ // The picklerTag method can't determine if it's an external symbol reference
+ val tag = entry match {
+ case sym: Symbol if isExternalSymbol(sym) => if (sym.isModuleClass) EXTMODCLASSref else EXTref
+ case _ => picklerTag(entry)
+ }
+ writeNat(tag)
+ writeByte(0) // reserve a place to record the number of bytes written
+ val start = writeIndex
+ writeBody(entry)
+ val length = writeIndex - start
+ patchNat(start - 1, length) // patch bytes written over the placeholder
}
/** Write byte array */
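
The rewritten writeEntry above lays out each pickle entry as a tag, a single reserved length slot, and then the body, patching the length once the body size is known. A minimal standalone sketch of that layout, using plain Ints in place of the compiler's PickleBuffer and variable-length nats (hypothetical names; unlike the real patchNat, patching here never needs to shift data):

    import scala.collection.mutable.ArrayBuffer

    object EntryLayoutSketch {
      val out = ArrayBuffer[Int]()

      def writeEntry(tag: Int, body: Seq[Int]): Unit = {
        out += tag                            // picklerTag, or EXTref/EXTMODCLASSref for external symbols
        out += 0                              // placeholder for the body length
        val start = out.length
        out ++= body                          // writeBody(entry)
        out(start - 1) = out.length - start   // patch the recorded length
      }

      def main(args: Array[String]): Unit = {
        writeEntry(tag = 9, body = Seq(1, 2, 3))
        println(out)                          // ArrayBuffer(9, 3, 1, 2, 3)
      }
    }
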
diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
new file mode 100644
index 0000000000..c546c21d48
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala
@@ -0,0 +1,449 @@
+package scala.tools.nsc
+package transform
+
+import symtab._
+import Flags._
+import scala.collection._
+import scala.language.postfixOps
+import scala.reflect.internal.Symbols
+import scala.collection.mutable.LinkedHashMap
+
+/**
+ * This transformer is responsible for turning lambdas into anonymous classes.
+ * The main assumption it makes is that a lambda {args => body} has been turned into
+ * {args => liftedBody()} where liftedBody is a top-level method that implements the body of the lambda.
+ * Currently Uncurry is responsible for that transformation.
+ *
+ * From a lambda, Delambdafy will create
+ * 1) a static forwarder at the top level of the class that contained the lambda
+ * 2) a new top level class that
+ *    a) has fields and a constructor taking the captured environment (including possibly the "this"
+ * reference)
+ * b) an apply method that calls the static forwarder
+ * c) if needed a bridge method for the apply method
+ * 3) an instantiation of the newly created class which replaces the lambda
+ *
+ * TODO the main work left to be done is to plug into specialization. Primarily that means choosing a
+ * specialized FunctionN trait instead of the generic FunctionN trait as a parent and creating the
+ * appropriately named applysp method
+ */
+abstract class Delambdafy extends Transform with TypingTransformers with ast.TreeDSL with TypeAdaptingTransformer {
+ import global._
+ import definitions._
+ import CODE._
+
+ val analyzer: global.analyzer.type = global.analyzer
+
+ /** the following two members override abstract members in Transform */
+ val phaseName: String = "delambdafy"
+
+ protected def newTransformer(unit: CompilationUnit): Transformer =
+ new DelambdafyTransformer(unit)
+
+ class DelambdafyTransformer(unit: CompilationUnit) extends TypingTransformer(unit) with TypeAdapter {
+ private val lambdaClassDefs = new mutable.LinkedHashMap[Symbol, List[Tree]] withDefaultValue Nil
+
+
+ val typer = localTyper
+
+ // we need to know which methods refer to the 'this' reference so that we can determine
+ // which lambdas need access to it
+ val thisReferringMethods: Set[Symbol] = {
+ val thisReferringMethodsTraverser = new ThisReferringMethodsTraverser()
+ thisReferringMethodsTraverser traverse unit.body
+ val methodReferringMap = thisReferringMethodsTraverser.liftedMethodReferences
+ val referrers = thisReferringMethodsTraverser.thisReferringMethods
+ // recursively find methods that refer to 'this' directly or indirectly via references to other methods
+ // for each method found add it to the referrers set
+ def refersToThis(symbol: Symbol): Boolean = {
+ if (referrers contains symbol) true
+ else if (methodReferringMap(symbol) exists refersToThis) {
+ // add it early to memoize
+ debuglog(s"$symbol indirectly refers to 'this'")
+ referrers += symbol
+ true
+ } else false
+ }
+ methodReferringMap.keys foreach refersToThis
+ referrers
+ }
+
+ val accessorMethods = mutable.ArrayBuffer[Tree]()
+
+ // the result of the transformFunction method. A class definition for the lambda, an expression
+ // instantiating the lambda class, and an accessor method for the lambda class to be able to
+ // call the implementation
+ case class TransformedFunction(lambdaClassDef: ClassDef, newExpr: Tree, accessorMethod: Tree)
+
+ // here's the main entry point of the transform
+ override def transform(tree: Tree): Tree = tree match {
+ // the main thing we care about is lambdas
+ case fun @ Function(_, _) =>
+ // a lambda becomes a new class, an instantiation expression, and an
+ // accessor method
+ val TransformedFunction(lambdaClassDef, newExpr, accessorMethod) = transformFunction(fun)
+ // we'll add accessor methods to the current template later
+ accessorMethods += accessorMethod
+ val pkg = lambdaClassDef.symbol.owner
+
+ // we'll add the lambda class to the package later
+ lambdaClassDefs(pkg) = lambdaClassDef :: lambdaClassDefs(pkg)
+
+ super.transform(newExpr)
+ // when we encounter a template (basically the thing that holds the body of a class/trait)
+ // we need to update it to include the newly created accessor methods after transforming it
+ case Template(_, _, _) =>
+ try {
+ // during this call accessorMethods will be populated from the Function case
+ val Template(parents, self, body) = super.transform(tree)
+ Template(parents, self, body ++ accessorMethods)
+ } finally accessorMethods.clear()
+ case _ => super.transform(tree)
+ }
+
+ // this entry point is aimed at the statements in the compilation unit.
+ // after working on the entire compilation unit we'll have a set of
+ // new class definitions to add to the top level
+ override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = {
+ super.transformStats(stats, exprOwner) ++ lambdaClassDefs(exprOwner)
+ }
+
+ private def optionSymbol(sym: Symbol): Option[Symbol] = if (sym.exists) Some(sym) else None
+
+ // turns a lambda into a new class def, a New expression instantiating that class, and an
+ // accessor method for the body of the lambda
+ private def transformFunction(originalFunction: Function): TransformedFunction = {
+ val functionTpe = originalFunction.tpe
+ val targs = functionTpe.typeArgs
+ val formals :+ restpe = targs
+ val oldClass = originalFunction.symbol.enclClass
+
+ // find which variables are free in the lambda because those are captures that need to be
+ // passed into the constructor of the anonymous function class
+ val captures = FreeVarTraverser.freeVarsOf(originalFunction)
+
+ /**
+ * Creates the accessor method in the original class that forwards to the lifted lambda body
+ */
+ def createAccessorMethod(thisProxy: Symbol, fun: Function): DefDef = {
+ val target = targetMethod(fun)
+ if (!thisProxy.exists) {
+ target setFlag STATIC
+ }
+ val params = ((optionSymbol(thisProxy) map {proxy:Symbol => ValDef(proxy)}) ++ (target.paramss.flatten map ValDef)).toList
+
+ val methSym = oldClass.newMethod(unit.freshTermName(nme.accessor.toString()), target.pos, FINAL | BRIDGE | SYNTHETIC | PROTECTED | STATIC)
+
+ val paramSyms = params map {param => methSym.newSyntheticValueParam(param.symbol.tpe, param.name) }
+
+ params zip paramSyms foreach { case (valdef, sym) => valdef.symbol = sym }
+ params foreach (_.symbol.owner = methSym)
+
+ val methodType = MethodType(paramSyms, restpe)
+ methSym setInfo methodType
+
+ oldClass.info.decls enter methSym
+
+ val body = localTyper.typed {
+ val newTarget = Select(if (thisProxy.exists) gen.mkAttributedRef(paramSyms(0)) else gen.mkAttributedThis(oldClass), target)
+ val newParams = paramSyms drop (if (thisProxy.exists) 1 else 0) map Ident
+ Apply(newTarget, newParams)
+ } setPos fun.pos
+ val methDef = DefDef(methSym, List(params), body)
+
+ // Have to repack the type to avoid mismatches when existentials
+ // appear in the result - see SI-4869.
+ // TODO probably don't need packedType
+ methDef.tpt setType localTyper.packedType(body, methSym)
+ methDef
+ }
+
+ /**
+ * Creates the apply method for the anonymous subclass of FunctionN
+ */
+ def createApplyMethod(newClass: Symbol, fun: Function, accessor: DefDef, thisProxy: Symbol): DefDef = {
+ val methSym = newClass.newMethod(nme.apply, fun.pos, FINAL | SYNTHETIC)
+ val params = fun.vparams map (_.duplicate)
+
+ val paramSyms = map2(formals, params) {
+ (tp, vparam) => methSym.newSyntheticValueParam(tp, vparam.name)
+ }
+ params zip paramSyms foreach { case (valdef, sym) => valdef.symbol = sym }
+ params foreach (_.symbol.owner = methSym)
+
+ val methodType = MethodType(paramSyms, restpe)
+ methSym setInfo methodType
+
+ newClass.info.decls enter methSym
+
+ val Apply(_, oldParams) = fun.body
+
+ val body = localTyper typed Apply(Select(gen.mkAttributedThis(oldClass), accessor.symbol), (optionSymbol(thisProxy) map {tp => Select(gen.mkAttributedThis(newClass), tp)}).toList ++ oldParams)
+ body.substituteSymbols(fun.vparams map (_.symbol), params map (_.symbol))
+ body changeOwner (fun.symbol -> methSym)
+
+ val methDef = DefDef(methSym, List(params), body)
+
+ // Have to repack the type to avoid mismatches when existentials
+ // appear in the result - see SI-4869.
+ // TODO probably don't need packedType
+ methDef.tpt setType localTyper.packedType(body, methSym)
+ methDef
+ }
+
+ /**
+ * Creates the constructor on the newly created class. It will handle
+ * initialization of members that represent the captured environment
+ */
+ def createConstructor(newClass: Symbol, members: List[ValDef]): DefDef = {
+ val constrSym = newClass.newConstructor(originalFunction.pos, SYNTHETIC)
+
+ val (paramSymbols, params, assigns) = (members map {member =>
+ val paramSymbol = newClass.newVariable(member.symbol.name.toTermName, newClass.pos, 0)
+ paramSymbol.setInfo(member.symbol.info)
+ val paramVal = ValDef(paramSymbol)
+ val paramIdent = Ident(paramSymbol)
+ val assign = Assign(Select(gen.mkAttributedThis(newClass), member.symbol), paramIdent)
+
+ (paramSymbol, paramVal, assign)
+ }).unzip3
+
+ val constrType = MethodType(paramSymbols, newClass.thisType)
+ constrSym setInfoAndEnter constrType
+
+ val body =
+ Block(
+ List(
+ Apply(Select(Super(gen.mkAttributedThis(newClass), tpnme.EMPTY) setPos newClass.pos, nme.CONSTRUCTOR) setPos newClass.pos, Nil) setPos newClass.pos
+ ) ++ assigns,
+ Literal(Constant(())): Tree
+ ) setPos newClass.pos
+
+ (localTyper typed DefDef(constrSym, List(params), body) setPos newClass.pos).asInstanceOf[DefDef]
+ }
+
+ val pkg = oldClass.owner
+
+ // Parent for anonymous class def
+ val abstractFunctionErasedType = AbstractFunctionClass(formals.length).tpe
+
+ // anonymous subclass of FunctionN with an apply method
+ def makeAnonymousClass = {
+ val parents = addSerializable(abstractFunctionErasedType)
+ val funOwner = originalFunction.symbol.owner
+
+ val suffix = "$lambda$" + (
+ if (funOwner.isPrimaryConstructor) ""
+ else "$" + funOwner.name
+ )
+ val name = unit.freshTypeName(s"${oldClass.name.decode}$suffix")
+
+ val anonClass = pkg newClassSymbol(name, originalFunction.pos, FINAL | SYNTHETIC) addAnnotation SerialVersionUIDAnnotation
+ anonClass setInfo ClassInfoType(parents, newScope, anonClass)
+
+ val captureProxies2 = new LinkedHashMap[Symbol, TermSymbol]
+ captures foreach {capture =>
+ val sym = anonClass.newVariable(capture.name.toTermName, capture.pos, SYNTHETIC)
+ sym setInfo capture.info
+ captureProxies2 += ((capture, sym))
+ }
+
+ // the Optional proxy that will hold a reference to the 'this'
+ // object used by the lambda, if any. NoSymbol if there is no this proxy
+ val thisProxy = {
+ val target = targetMethod(originalFunction)
+ if (thisReferringMethods contains target) {
+ val sym = anonClass.newVariable(nme.FAKE_LOCAL_THIS, originalFunction.pos, SYNTHETIC)
+ sym.info = oldClass.tpe
+ sym
+ } else NoSymbol
+ }
+
+ val decapturify = new DeCapturifyTransformer(captureProxies2, unit, oldClass, anonClass, originalFunction.symbol.pos, thisProxy)
+
+ val accessorMethod = createAccessorMethod(thisProxy, originalFunction)
+
+ val decapturedFunction = decapturify.transform(originalFunction).asInstanceOf[Function]
+
+ val members = (optionSymbol(thisProxy).toList ++ (captureProxies2 map (_._2))) map {member =>
+ anonClass.info.decls enter member
+ ValDef(member, gen.mkZero(member.tpe)) setPos decapturedFunction.pos
+ }
+
+ // constructor
+ val constr = createConstructor(anonClass, members)
+
+ // apply method with same arguments and return type as original lambda.
+ val applyMethodDef = createApplyMethod(anonClass, decapturedFunction, accessorMethod, thisProxy)
+
+ val bridgeMethod = createBridgeMethod(anonClass, originalFunction, applyMethodDef)
+
+ def fulldef(sym: Symbol) =
+ if (sym == NoSymbol) sym.toString
+ else s"$sym: ${sym.tpe} in ${sym.owner}"
+
+ def clashError(bm: Symbol) = {
+ unit.error(
+ applyMethodDef.symbol.pos,
+ sm"""bridge generated for member ${fulldef(applyMethodDef.symbol)}
+ |which overrides ${fulldef(getMember(abstractFunctionErasedType.typeSymbol, nme.apply))}
+ |clashes with definition of the member itself;
+ |both have erased type ${exitingPostErasure(bm.tpe)}""")
+ }
+
+ bridgeMethod foreach (bm =>
+ if (bm.symbol.tpe =:= applyMethodDef.symbol.tpe)
+ clashError(bm.symbol)
+ )
+
+ val body = members ++ List(constr, applyMethodDef) ++ bridgeMethod
+
+ // TODO if member fields are private this complains that they're not accessible
+ (localTyper.typedPos(decapturedFunction.pos)(ClassDef(anonClass, body)).asInstanceOf[ClassDef], thisProxy, accessorMethod)
+ }
+
+ val (anonymousClassDef, thisProxy, accessorMethod) = makeAnonymousClass
+
+ pkg.info.decls enter anonymousClassDef.symbol
+
+ val thisArg = optionSymbol(thisProxy) map (_ => gen.mkAttributedThis(oldClass) setPos originalFunction.pos)
+ val captureArgs = captures map (capture => Ident(capture) setPos originalFunction.pos)
+
+ val newStat =
+ Typed(New(anonymousClassDef.symbol, (thisArg.toList ++ captureArgs): _*), TypeTree(abstractFunctionErasedType))
+
+ val typedNewStat = localTyper.typedPos(originalFunction.pos)(newStat)
+
+ TransformedFunction(anonymousClassDef, typedNewStat, accessorMethod)
+ }
+
+ /**
+ * Creates a bridge method if needed. The bridge method forwards from apply(x1: Object, x2: Object...xn: Object): Object to
+ * apply(x1: T1, x2: T2...xn: Tn): T0 using type adaptation on each input and output. The only time a bridge isn't needed
+ * is when the original lambda is already erased to type Object, Object, Object... => Object
+ */
+ def createBridgeMethod(newClass:Symbol, originalFunction: Function, applyMethod: DefDef): Option[DefDef] = {
+ val bridgeMethSym = newClass.newMethod(nme.apply, applyMethod.pos, FINAL | SYNTHETIC | BRIDGE)
+ val originalParams = applyMethod.vparamss(0)
+ val bridgeParams = originalParams map { originalParam =>
+ val bridgeSym = bridgeMethSym.newSyntheticValueParam(ObjectTpe, originalParam.name)
+ ValDef(bridgeSym)
+ }
+
+ val bridgeSyms = bridgeParams map (_.symbol)
+
+ val methodType = MethodType(bridgeSyms, ObjectTpe)
+ bridgeMethSym setInfo methodType
+
+ def adapt(tree: Tree, expectedTpe: Type): (Boolean, Tree) = {
+ if (tree.tpe =:= expectedTpe) (false, tree)
+ else (true, adaptToType(tree, expectedTpe))
+ }
+
+ enteringPhase(currentRun.posterasurePhase) {
+ val liftedBodyDefTpe: MethodType = {
+ val liftedBodySymbol = {
+ val Apply(method, _) = originalFunction.body
+ method.symbol
+ }
+ liftedBodySymbol.info.asInstanceOf[MethodType]
+ }
+ val (paramNeedsAdaptation, adaptedParams) = (bridgeSyms zip liftedBodyDefTpe.params map {case (bridgeSym, param) => adapt(Ident(bridgeSym) setType bridgeSym.tpe, param.tpe)}).unzip
+ val body = Apply(gen.mkAttributedSelect(gen.mkAttributedThis(newClass), applyMethod.symbol), adaptedParams) setType applyMethod.symbol.tpe.resultType
+ val (needsReturnAdaptation, adaptedBody) = adapt(typer.typed(body), ObjectTpe)
+ val needsBridge = (paramNeedsAdaptation contains true) || needsReturnAdaptation
+ if (needsBridge) {
+ val methDef = DefDef(bridgeMethSym, List(bridgeParams), adaptedBody)
+ newClass.info.decls enter bridgeMethSym
+ Some((localTyper typed methDef).asInstanceOf[DefDef])
+ } else None
+ }
+ }
+ } // DelambdafyTransformer
+
+ // A traverser that finds symbols used but not defined in the given Tree
+ // TODO freeVarTraverser in LambdaLift does a very similar task. With some
+ // analysis this could probably be unified with it
+ class FreeVarTraverser extends Traverser {
+ val freeVars = mutable.LinkedHashSet[Symbol]()
+ val declared = mutable.LinkedHashSet[Symbol]()
+
+ override def traverse(tree: Tree) = {
+ tree match {
+ case Function(args, _) =>
+ args foreach {arg => declared += arg.symbol}
+ case ValDef(_, _, _, _) =>
+ declared += tree.symbol
+ case _: Bind =>
+ declared += tree.symbol
+ case Ident(_) =>
+ val sym = tree.symbol
+ if ((sym != NoSymbol) && sym.isLocal && sym.isTerm && !sym.isMethod && !declared.contains(sym)) freeVars += sym
+ case _ =>
+ }
+ super.traverse(tree)
+ }
+ }
+
+ object FreeVarTraverser {
+ def freeVarsOf(function: Function) = {
+ val freeVarsTraverser = new FreeVarTraverser
+ freeVarsTraverser.traverse(function)
+ freeVarsTraverser.freeVars
+ }
+ }
+
+ // A transformer that converts specified captured symbols into other symbols
+ // TODO this transform could look more like ThisSubstituter and TreeSymSubstituter. It's not clear that it needs that level of sophistication since the types
+ // at this point are always very simple flattened/erased types, but it would probably be more robust if it tried to take more complicated types into account
+ class DeCapturifyTransformer(captureProxies: Map[Symbol, TermSymbol], unit: CompilationUnit, oldClass: Symbol, newClass:Symbol, pos: Position, thisProxy: Symbol) extends TypingTransformer(unit) {
+ override def transform(tree: Tree) = tree match {
+ case tree@This(encl) if tree.symbol == oldClass && thisProxy.exists =>
+ gen mkAttributedSelect (gen mkAttributedThis newClass, thisProxy)
+ case Ident(name) if (captureProxies contains tree.symbol) =>
+ gen mkAttributedSelect (gen mkAttributedThis newClass, captureProxies(tree.symbol))
+ case _ => super.transform(tree)
+ }
+ }
+
+ /**
+ * Get the symbol of the target lifted lambda body method from a function. I.e. if
+ * the function is {args => anonfun(args)} then this method returns anonfun's symbol
+ */
+ private def targetMethod(fun: Function): Symbol = fun match {
+ case Function(_, Apply(target, _)) =>
+ target.symbol
+ case _ =>
+ // any other shape of Function is unexpected at this point
+ abort(s"could not understand function with tree $fun")
+ }
+
+ // finds all methods that reference 'this'
+ class ThisReferringMethodsTraverser() extends Traverser {
+ private var currentMethod: Symbol = NoSymbol
+ // the set of methods that refer to this
+ val thisReferringMethods = mutable.Set[Symbol]()
+ // the set of lifted lambda body methods that each method refers to
+ val liftedMethodReferences = mutable.Map[Symbol, Set[Symbol]]().withDefault(_ => mutable.Set())
+ override def traverse(tree: Tree) = tree match {
+ case DefDef(_, _, _, _, _, _) =>
+ // we don't expect defs within defs. At this phase trees should be very flat
+ if (currentMethod.exists) devWarning("Found a def within a def at a phase where defs are expected to be flattened out.")
+ currentMethod = tree.symbol
+ super.traverse(tree)
+ currentMethod = NoSymbol
+ case fun@Function(_, _) =>
+ // we don't drill into functions because at the beginning of this phase they will always refer to 'this'.
+ // They'll be of the form {(args...) => this.anonfun(args...)}
+ // but we do need to make note of the lifted body method in case it refers to 'this'
+ if (currentMethod.exists) liftedMethodReferences(currentMethod) += targetMethod(fun)
+ case This(_) =>
+ if (currentMethod.exists && tree.symbol == currentMethod.enclClass) {
+ debuglog(s"$currentMethod directly refers to 'this'")
+ thisReferringMethods add currentMethod
+ }
+ case _ =>
+ super.traverse(tree)
+ }
+ }
+}
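
The header comment of the new Delambdafy phase describes the pieces it produces for each lambda. As a rough, self-contained illustration of that shape (hypothetical names, not actual compiler output), a function literal such as x => x + 1 whose body UnCurry has already lifted ends up roughly as:

    object DelambdafySketch {
      // the lifted lambda body produced by UnCurry
      def liftedBody(x: Int): Int = x + 1

      // the forwarder ("accessor method") added to the class that held the lambda
      def accessor(x: Int): Int = liftedBody(x)

      // the anonymous-function class generated by Delambdafy; captured values
      // would become constructor parameters stored in fields
      class Lambda1 extends scala.runtime.AbstractFunction1[Int, Int] with Serializable {
        def apply(x: Int): Int = accessor(x)
      }

      def main(args: Array[String]): Unit =
        println(List(1, 2, 3).map(new Lambda1))   // List(2, 3, 4)
    }
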
diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index e2902a74b8..68f1c81c59 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -17,11 +17,15 @@ abstract class Erasure extends AddInterfaces
with typechecker.Analyzer
with TypingTransformers
with ast.TreeDSL
+ with TypeAdaptingTransformer
{
import global._
import definitions._
import CODE._
+ val analyzer: typechecker.Analyzer { val global: Erasure.this.global.type } =
+ this.asInstanceOf[typechecker.Analyzer { val global: Erasure.this.global.type }]
+
val phaseName: String = "erasure"
def newTransformer(unit: CompilationUnit): Transformer =
@@ -352,13 +356,6 @@ abstract class Erasure extends AddInterfaces
override def newTyper(context: Context) = new Eraser(context)
- private def isSafelyRemovableUnbox(fn: Tree, arg: Tree): Boolean = {
- isUnbox(fn.symbol) && {
- val cls = arg.tpe.typeSymbol
- (cls == definitions.NullClass) || isBoxedValueClass(cls)
- }
- }
-
class ComputeBridges(unit: CompilationUnit, root: Symbol) {
assert(phase == currentRun.erasurePhase, phase)
@@ -522,161 +519,8 @@ abstract class Erasure extends AddInterfaces
}
/** The modifier typer which retypes with erased types. */
- class Eraser(_context: Context) extends Typer(_context) {
-
- private def isPrimitiveValueType(tpe: Type) = isPrimitiveValueClass(tpe.typeSymbol)
-
- private def isDifferentErasedValueType(tpe: Type, other: Type) =
- isErasedValueType(tpe) && (tpe ne other)
-
- private def isPrimitiveValueMember(sym: Symbol) = isPrimitiveValueClass(sym.owner)
-
- @inline private def box(tree: Tree, target: => String): Tree = {
- val result = box1(tree)
- if (tree.tpe =:= UnitTpe) ()
- else log(s"boxing ${tree.summaryString}: ${tree.tpe} into $target: ${result.tpe}")
- result
- }
-
- /** Box `tree` of unboxed type */
- private def box1(tree: Tree): Tree = tree match {
- case LabelDef(_, _, _) =>
- val ldef = deriveLabelDef(tree)(box1)
- ldef setType ldef.rhs.tpe
- case _ =>
- val tree1 = tree.tpe match {
- case ErasedValueType(tref) =>
- val clazz = tref.sym
- New(clazz, cast(tree, underlyingOfValueClass(clazz)))
- case _ =>
- tree.tpe.typeSymbol match {
- case UnitClass =>
- if (treeInfo isExprSafeToInline tree) REF(BoxedUnit_UNIT)
- else BLOCK(tree, REF(BoxedUnit_UNIT))
- case NothingClass => tree // a non-terminating expression doesn't need boxing
- case x =>
- assert(x != ArrayClass)
- tree match {
- /* Can't always remove a Box(Unbox(x)) combination because the process of boxing x
- * may lead to throwing an exception.
- *
- * This is important for specialization: calls to the super constructor should not box/unbox specialized
- * fields (see TupleX). (ID)
- */
- case Apply(boxFun, List(arg)) if isSafelyRemovableUnbox(tree, arg) =>
- log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}")
- arg
- case _ =>
- (REF(boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectTpe
- }
- }
- }
- typedPos(tree.pos)(tree1)
- }
-
- private def unbox(tree: Tree, pt: Type): Tree = {
- val result = unbox1(tree, pt)
- log(s"unboxing ${tree.shortClass}: ${tree.tpe} as a ${result.tpe}")
- result
- }
-
- /** Unbox `tree` of boxed type to expected type `pt`.
- *
- * @param tree the given tree
- * @param pt the expected type.
- * @return the unboxed tree
- */
- private def unbox1(tree: Tree, pt: Type): Tree = tree match {
-/*
- case Boxed(unboxed) =>
- println("unbox shorten: "+tree) // this never seems to kick in during build and test; therefore disabled.
- adaptToType(unboxed, pt)
- */
- case LabelDef(_, _, _) =>
- val ldef = deriveLabelDef(tree)(unbox(_, pt))
- ldef setType ldef.rhs.tpe
- case _ =>
- val tree1 = pt match {
- case ErasedValueType(tref) =>
- val clazz = tref.sym
- lazy val underlying = underlyingOfValueClass(clazz)
- val tree0 =
- if (tree.tpe.typeSymbol == NullClass &&
- isPrimitiveValueClass(underlying.typeSymbol)) {
- // convert `null` directly to underlying type, as going
- // via the unboxed type would yield a NPE (see SI-5866)
- unbox1(tree, underlying)
- } else
- Apply(Select(adaptToType(tree, clazz.tpe), clazz.derivedValueClassUnbox), List())
- cast(tree0, pt)
- case _ =>
- pt.typeSymbol match {
- case UnitClass =>
- if (treeInfo isExprSafeToInline tree) UNIT
- else BLOCK(tree, UNIT)
- case x =>
- assert(x != ArrayClass)
- // don't `setType pt` the Apply tree, as the Apply's fun won't be typechecked if the Apply tree already has a type
- Apply(unboxMethod(pt.typeSymbol), tree)
- }
- }
- typedPos(tree.pos)(tree1)
- }
-
- /** Generate a synthetic cast operation from tree.tpe to pt.
- * @pre pt eq pt.normalize
- */
- private def cast(tree: Tree, pt: Type): Tree = {
- if ((tree.tpe ne null) && !(tree.tpe =:= ObjectTpe)) {
- def word = (
- if (tree.tpe <:< pt) "upcast"
- else if (pt <:< tree.tpe) "downcast"
- else if (pt weak_<:< tree.tpe) "coerce"
- else if (tree.tpe weak_<:< pt) "widen"
- else "cast"
- )
- log(s"erasure ${word}s from ${tree.tpe} to $pt")
- }
- if (pt =:= UnitTpe) {
- // See SI-4731 for one example of how this occurs.
- log("Attempted to cast to Unit: " + tree)
- tree.duplicate setType pt
- } else if (tree.tpe != null && tree.tpe.typeSymbol == ArrayClass && pt.typeSymbol == ArrayClass) {
- // See SI-2386 for one example of when this might be necessary.
- val needsExtraCast = isPrimitiveValueType(tree.tpe.typeArgs.head) && !isPrimitiveValueType(pt.typeArgs.head)
- val tree1 = if (needsExtraCast) gen.mkRuntimeCall(nme.toObjectArray, List(tree)) else tree
- gen.mkAttributedCast(tree1, pt)
- } else gen.mkAttributedCast(tree, pt)
- }
-
- /** Adapt `tree` to expected type `pt`.
- *
- * @param tree the given tree
- * @param pt the expected type
- * @return the adapted tree
- */
- private def adaptToType(tree: Tree, pt: Type): Tree = {
- if (settings.debug && pt != WildcardType)
- log("adapting " + tree + ":" + tree.tpe + " : " + tree.tpe.parents + " to " + pt)//debug
- if (tree.tpe <:< pt)
- tree
- else if (isDifferentErasedValueType(tree.tpe, pt))
- adaptToType(box(tree, pt.toString), pt)
- else if (isDifferentErasedValueType(pt, tree.tpe))
- adaptToType(unbox(tree, pt), pt)
- else if (isPrimitiveValueType(tree.tpe) && !isPrimitiveValueType(pt)) {
- adaptToType(box(tree, pt.toString), pt)
- } else if (isMethodTypeWithEmptyParams(tree.tpe)) {
- // [H] this assert fails when trying to typecheck tree !(SomeClass.this.bitmap) for single lazy val
- //assert(tree.symbol.isStable, "adapt "+tree+":"+tree.tpe+" to "+pt)
- adaptToType(Apply(tree, List()) setPos tree.pos setType tree.tpe.resultType, pt)
-// } else if (pt <:< tree.tpe)
-// cast(tree, pt)
- } else if (isPrimitiveValueType(pt) && !isPrimitiveValueType(tree.tpe))
- adaptToType(unbox(tree, pt), pt)
- else
- cast(tree, pt)
- }
+ class Eraser(_context: Context) extends Typer(_context) with TypeAdapter {
+ val typer = this.asInstanceOf[analyzer.Typer]
/** Replace member references as follows:
*
@@ -697,13 +541,11 @@ abstract class Erasure extends AddInterfaces
case Apply(ta @ TypeApply(sel @ Select(qual, name), List(targ)), List())
if tree.symbol == Any_asInstanceOf =>
val qual1 = typedQualifier(qual, NOmode, ObjectTpe) // need to have an expected type, see #3037
-
+ // !!! Make pending/run/t5866b.scala work. The fix might be here and/or in unbox1.
if (isPrimitiveValueType(targ.tpe) || isErasedValueType(targ.tpe)) {
val noNullCheckNeeded = targ.tpe match {
- case ErasedValueType(tref) =>
- enteringPhase(currentRun.erasurePhase) {
- isPrimitiveValueClass(erasedValueClassArg(tref).typeSymbol)
- }
+ case ErasedValueType(_, underlying) =>
+ isPrimitiveValueClass(underlying.typeSymbol)
case _ =>
true
}
@@ -724,7 +566,7 @@ abstract class Erasure extends AddInterfaces
case Apply(TypeApply(sel @ Select(qual, name), List(targ)), List())
if tree.symbol == Any_isInstanceOf =>
targ.tpe match {
- case ErasedValueType(tref) => targ.setType(tref.sym.tpe)
+ case ErasedValueType(clazz, _) => targ.setType(clazz.tpe)
case _ =>
}
tree
@@ -787,11 +629,11 @@ abstract class Erasure extends AddInterfaces
(tree.attachments.get[TypeRefAttachment]: @unchecked) match {
case Some(itype) =>
val tref = itype.tpe
- val argPt = enteringPhase(currentRun.erasurePhase)(erasedValueClassArg(tref))
+ val argPt = enteringErasure(erasedValueClassArg(tref))
log(s"transforming inject $arg -> $tref/$argPt")
val result = typed(arg, mode, argPt)
log(s"transformed inject $arg -> $tref/$argPt = $result:${result.tpe}")
- return result setType ErasedValueType(tref)
+ return result setType ErasedValueType(tref.sym, result.tpe)
}
case _ =>
@@ -839,11 +681,6 @@ abstract class Erasure extends AddInterfaces
tree1
}
}
-
- private def isMethodTypeWithEmptyParams(tpe: Type) = tpe match {
- case MethodType(Nil, _) => true
- case _ => false
- }
}
/** The erasure transformer */
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 9e8cbe6c03..2235a93ca4 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -129,7 +129,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
if (seen contains clazz)
unit.error(pos, "value class may not unbox to itself")
else {
- val unboxed = erasure.underlyingOfValueClass(clazz).typeSymbol
+ val unboxed = definitions.underlyingOfValueClass(clazz).typeSymbol
if (unboxed.isDerivedValueClass) checkNonCyclic(pos, seen + clazz, unboxed)
}
diff --git a/src/compiler/scala/tools/nsc/transform/PostErasure.scala b/src/compiler/scala/tools/nsc/transform/PostErasure.scala
index 96263f3c0c..cc78e27282 100644
--- a/src/compiler/scala/tools/nsc/transform/PostErasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/PostErasure.scala
@@ -22,7 +22,7 @@ trait PostErasure extends InfoTransform with TypingTransformers {
object elimErasedValueType extends TypeMap {
def apply(tp: Type) = tp match {
case ConstantType(Constant(tp: Type)) => ConstantType(Constant(apply(tp)))
- case ErasedValueType(tref) => enteringErasure(erasure.erasedValueClassArg(tref))
+ case ErasedValueType(_, underlying) => underlying
case _ => mapOver(tp)
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index 2d4269a3bc..4cf3bef939 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -55,6 +55,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
import definitions._
import Flags._
+ private val inlineFunctionExpansion = settings.Ydelambdafy.value == "inline"
+
/** the name of the phase: */
val phaseName: String = "specialize"
@@ -1318,7 +1320,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
private def isAccessible(sym: Symbol): Boolean =
- (currentClass == sym.owner.enclClass) && (currentClass != targetClass)
+ if (currentOwner.isAnonymousFunction) {
+ if (inlineFunctionExpansion) devWarning("anonymous function made it to specialization even though inline expansion is set.")
+ false
+ }
+ else (currentClass == sym.owner.enclClass) && (currentClass != targetClass)
private def shouldMakePublic(sym: Symbol): Boolean =
sym.hasFlag(PRIVATE | PROTECTED) && (addressFields || !nme.isLocalName(sym.name))
diff --git a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
new file mode 100644
index 0000000000..41b8461c46
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala
@@ -0,0 +1,187 @@
+package scala.tools.nsc
+package transform
+
+import scala.reflect.internal._
+import scala.tools.nsc.ast.TreeDSL
+import scala.tools.nsc.Global
+
+/**
+ * A trait usable by transforms that need to adapt trees of one type to another type
+ */
+trait TypeAdaptingTransformer {
+ self: TreeDSL =>
+
+ val analyzer: typechecker.Analyzer { val global: self.global.type }
+
+ trait TypeAdapter {
+ val typer: analyzer.Typer
+ import global._
+ import definitions._
+ import CODE._
+
+ def isMethodTypeWithEmptyParams(tpe: Type) = tpe match {
+ case MethodType(Nil, _) => true
+ case _ => false
+ }
+
+ private def isSafelyRemovableUnbox(fn: Tree, arg: Tree): Boolean = {
+ isUnbox(fn.symbol) && {
+ val cls = arg.tpe.typeSymbol
+ (cls == definitions.NullClass) || isBoxedValueClass(cls)
+ }
+ }
+
+ private def isPrimitiveValueType(tpe: Type) = isPrimitiveValueClass(tpe.typeSymbol)
+
+ private def isErasedValueType(tpe: Type) = tpe.isInstanceOf[ErasedValueType]
+
+ private def isDifferentErasedValueType(tpe: Type, other: Type) =
+ isErasedValueType(tpe) && (tpe ne other)
+
+ def isPrimitiveValueMember(sym: Symbol) = isPrimitiveValueClass(sym.owner)
+
+ @inline def box(tree: Tree, target: => String): Tree = {
+ val result = box1(tree)
+ if (tree.tpe =:= UnitTpe) ()
+ else log(s"boxing ${tree.summaryString}: ${tree.tpe} into $target: ${result.tpe}")
+ result
+ }
+
+ /** Box `tree` of unboxed type */
+ private def box1(tree: Tree): Tree = tree match {
+ case LabelDef(_, _, _) =>
+ val ldef = deriveLabelDef(tree)(box1)
+ ldef setType ldef.rhs.tpe
+ case _ =>
+ val tree1 = tree.tpe match {
+ case ErasedValueType(clazz, _) =>
+ New(clazz, cast(tree, underlyingOfValueClass(clazz)))
+ case _ =>
+ tree.tpe.typeSymbol match {
+ case UnitClass =>
+ if (treeInfo isExprSafeToInline tree) REF(BoxedUnit_UNIT)
+ else BLOCK(tree, REF(BoxedUnit_UNIT))
+ case NothingClass => tree // a non-terminating expression doesn't need boxing
+ case x =>
+ assert(x != ArrayClass)
+ tree match {
+ /* Can't always remove a Box(Unbox(x)) combination because the process of boxing x
+ * may lead to throwing an exception.
+ *
+ * This is important for specialization: calls to the super constructor should not box/unbox specialized
+ * fields (see TupleX). (ID)
+ */
+ case Apply(boxFun, List(arg)) if isSafelyRemovableUnbox(tree, arg) =>
+ log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}")
+ arg
+ case _ =>
+ (REF(boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectTpe
+ }
+ }
+ }
+ typer.typedPos(tree.pos)(tree1)
+ }
+
+ def unbox(tree: Tree, pt: Type): Tree = {
+ val result = unbox1(tree, pt)
+ log(s"unboxing ${tree.shortClass}: ${tree.tpe} as a ${result.tpe}")
+ result
+ }
+
+ /** Unbox `tree` of boxed type to expected type `pt`.
+ *
+ * @param tree the given tree
+ * @param pt the expected type.
+ * @return the unboxed tree
+ */
+ private def unbox1(tree: Tree, pt: Type): Tree = tree match {
+/*
+ case Boxed(unboxed) =>
+ println("unbox shorten: "+tree) // this never seems to kick in during build and test; therefore disabled.
+ adaptToType(unboxed, pt)
+ */
+ case LabelDef(_, _, _) =>
+ val ldef = deriveLabelDef(tree)(unbox(_, pt))
+ ldef setType ldef.rhs.tpe
+ case _ =>
+ val tree1 = pt match {
+ case ErasedValueType(clazz, underlying) =>
+ val tree0 =
+ if (tree.tpe.typeSymbol == NullClass &&
+ isPrimitiveValueClass(underlying.typeSymbol)) {
+ // convert `null` directly to underlying type, as going
+ // via the unboxed type would yield a NPE (see SI-5866)
+ unbox1(tree, underlying)
+ } else
+ Apply(Select(adaptToType(tree, clazz.tpe), clazz.derivedValueClassUnbox), List())
+ cast(tree0, pt)
+ case _ =>
+ pt.typeSymbol match {
+ case UnitClass =>
+ if (treeInfo isExprSafeToInline tree) UNIT
+ else BLOCK(tree, UNIT)
+ case x =>
+ assert(x != ArrayClass)
+ // don't `setType pt` the Apply tree, as the Apply's fun won't be typechecked if the Apply tree already has a type
+ Apply(unboxMethod(pt.typeSymbol), tree)
+ }
+ }
+ typer.typedPos(tree.pos)(tree1)
+ }
+
+ /** Generate a synthetic cast operation from tree.tpe to pt.
+ * @pre pt eq pt.normalize
+ */
+ def cast(tree: Tree, pt: Type): Tree = {
+ if ((tree.tpe ne null) && !(tree.tpe =:= ObjectTpe)) {
+ def word = (
+ if (tree.tpe <:< pt) "upcast"
+ else if (pt <:< tree.tpe) "downcast"
+ else if (pt weak_<:< tree.tpe) "coerce"
+ else if (tree.tpe weak_<:< pt) "widen"
+ else "cast"
+ )
+ log(s"erasure ${word}s from ${tree.tpe} to $pt")
+ }
+ if (pt =:= UnitTpe) {
+ // See SI-4731 for one example of how this occurs.
+ log("Attempted to cast to Unit: " + tree)
+ tree.duplicate setType pt
+ } else if (tree.tpe != null && tree.tpe.typeSymbol == ArrayClass && pt.typeSymbol == ArrayClass) {
+ // See SI-2386 for one example of when this might be necessary.
+ val needsExtraCast = isPrimitiveValueType(tree.tpe.typeArgs.head) && !isPrimitiveValueType(pt.typeArgs.head)
+ val tree1 = if (needsExtraCast) gen.mkRuntimeCall(nme.toObjectArray, List(tree)) else tree
+ gen.mkAttributedCast(tree1, pt)
+ } else gen.mkAttributedCast(tree, pt)
+ }
+
+ /** Adapt `tree` to expected type `pt`.
+ *
+ * @param tree the given tree
+ * @param pt the expected type
+ * @return the adapted tree
+ */
+ def adaptToType(tree: Tree, pt: Type): Tree = {
+ if (settings.debug && pt != WildcardType)
+ log("adapting " + tree + ":" + tree.tpe + " : " + tree.tpe.parents + " to " + pt)//debug
+ if (tree.tpe <:< pt)
+ tree
+ else if (isDifferentErasedValueType(tree.tpe, pt))
+ adaptToType(box(tree, pt.toString), pt)
+ else if (isDifferentErasedValueType(pt, tree.tpe))
+ adaptToType(unbox(tree, pt), pt)
+ else if (isPrimitiveValueType(tree.tpe) && !isPrimitiveValueType(pt)) {
+ adaptToType(box(tree, pt.toString), pt)
+ } else if (isMethodTypeWithEmptyParams(tree.tpe)) {
+ // [H] this assert fails when trying to typecheck tree !(SomeClass.this.bitmap) for single lazy val
+ //assert(tree.symbol.isStable, "adapt "+tree+":"+tree.tpe+" to "+pt)
+ adaptToType(Apply(tree, List()) setPos tree.pos setType tree.tpe.resultType, pt)
+// } else if (pt <:< tree.tpe)
+// cast(tree, pt)
+ } else if (isPrimitiveValueType(pt) && !isPrimitiveValueType(tree.tpe))
+ adaptToType(unbox(tree, pt), pt)
+ else
+ cast(tree, pt)
+ }
+ }
+} \ No newline at end of file
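For context on the box/unbox adaptation above: it is what lets user-defined value classes erase to their underlying type while still boxing at generic call sites. A minimal sketch with illustrative names, not code from the patch:

class Meters(val value: Int) extends AnyVal {
  def +(other: Meters): Meters = new Meters(value + other.value)
}

object ValueClassDemo {
  def describe(m: Meters): String = s"${m.value}m"  // erases to describe(value: Int), no wrapper allocated
  def generic[T](t: T): T = t                       // a generic context forces boxing to the wrapper class
  def main(args: Array[String]): Unit = {
    val d = new Meters(3) + new Meters(4)
    println(describe(d))                            // unboxed path
    println(generic(d).value)                       // boxed on entry, unboxed again on exit
  }
}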
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
index ccf2266540..3d648ccbac 100644
--- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala
+++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala
@@ -63,6 +63,7 @@ abstract class UnCurry extends InfoTransform
// uncurry and uncurryType expand type aliases
class UnCurryTransformer(unit: CompilationUnit) extends TypingTransformer(unit) {
+ private val inlineFunctionExpansion = settings.Ydelambdafy.value == "inline"
private var needTryLift = false
private var inConstructorFlag = 0L
private val byNameArgs = mutable.HashSet[Tree]()
@@ -223,34 +224,110 @@ abstract class UnCurry extends InfoTransform
val targs = fun.tpe.typeArgs
val (formals, restpe) = (targs.init, targs.last)
- val applyMethodDef = {
- val methSym = anonClass.newMethod(nme.apply, fun.pos, FINAL)
- val paramSyms = map2(formals, fun.vparams) {
- (tp, param) => methSym.newSyntheticValueParam(tp, param.name)
+ if (inlineFunctionExpansion) {
+ val applyMethodDef = {
+ val methSym = anonClass.newMethod(nme.apply, fun.pos, FINAL)
+ val paramSyms = map2(formals, fun.vparams) {
+ (tp, param) => methSym.newSyntheticValueParam(tp, param.name)
+ }
+ methSym setInfoAndEnter MethodType(paramSyms, restpe)
+
+ fun.vparams foreach (_.symbol.owner = methSym)
+ fun.body changeOwner (fun.symbol -> methSym)
+
+ val body = localTyper.typedPos(fun.pos)(fun.body)
+ val methDef = DefDef(methSym, List(fun.vparams), body)
+
+ // Have to repack the type to avoid mismatches when existentials
+ // appear in the result - see SI-4869.
+ methDef.tpt setType localTyper.packedType(body, methSym)
+ methDef
}
- methSym setInfoAndEnter MethodType(paramSyms, restpe)
- fun.vparams foreach (_.symbol.owner = methSym)
- fun.body changeOwner (fun.symbol -> methSym)
+ localTyper.typedPos(fun.pos) {
+ Block(
+ List(ClassDef(anonClass, NoMods, ListOfNil, List(applyMethodDef), fun.pos)),
+ Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
+ }
+ } else {
+ /**
+ * Abstracts away the common functionality required to create both
+ * the lifted function and the apply method on the anonymous class.
+ * It creates a method definition with value params cloned from the
+ * original lambda. Then it calls a supplied function to create
+ * the body and types the result. Finally,
+ * everything is wrapped up in a MethodDef.
+ *
+ * TODO it is intended that this common functionality be used
+ * whether inlineFunctionExpansion is true or not. However, it
+ * seems to introduce subtle ownership changes that produce
+ * binary-incompatible changes, so it is completely
+ * hidden behind the inlineFunctionExpansion flag for now.
+ *
+ * @param owner the owner for the new method
+ * @param name name for the new method
+ * @param additionalFlags flags to be put on the method in addition to FINAL
+ * @param bodyF function that turns the method symbol and list of value params
+ * into a body for the method
+ */
+ def createMethod(owner: Symbol, name: TermName, additionalFlags: Long)(bodyF: (Symbol, List[ValDef]) => Tree) = {
+ val methSym = owner.newMethod(name, fun.pos, FINAL | additionalFlags)
+ val vparams = fun.vparams map (_.duplicate)
+
+ val paramSyms = map2(formals, vparams) {
+ (tp, vparam) => methSym.newSyntheticValueParam(tp, vparam.name)
+ }
+ foreach2(vparams, paramSyms){(valdef, sym) => valdef.symbol = sym}
+ vparams foreach (_.symbol.owner = methSym)
- val body = localTyper.typedPos(fun.pos)(fun.body)
- val methDef = DefDef(methSym, List(fun.vparams), body)
+ val methodType = MethodType(paramSyms, restpe.deconst)
+ methSym setInfo methodType
- // Have to repack the type to avoid mismatches when existentials
- // appear in the result - see SI-4869.
- methDef.tpt setType localTyper.packedType(body, methSym)
- methDef
- }
+ // TODO this is probably cleaner if bodyF only works with symbols rather than parameter ValDefs
+ val tempBody = bodyF(methSym, vparams)
+ val body = localTyper.typedPos(fun.pos)(tempBody)
+ val methDef = DefDef(methSym, List(vparams), body)
- localTyper.typedPos(fun.pos) {
- Block(
- List(ClassDef(anonClass, NoMods, ListOfNil, List(applyMethodDef), fun.pos)),
- Typed(New(anonClass.tpe), TypeTree(fun.tpe)))
- }
+ // Have to repack the type to avoid mismatches when existentials
+ // appear in the result - see SI-4869.
+ methDef.tpt setType localTyper.packedType(body, methSym).deconst
+ methDef
+ }
- }
+ val methodFlags = ARTIFACT
+ // method definition with the same arguments, return type, and body as the original lambda
+ val liftedMethod = createMethod(fun.symbol.owner, tpnme.ANON_FUN_NAME.toTermName, methodFlags){
+ case(methSym, vparams) =>
+ fun.body.substituteSymbols(fun.vparams map (_.symbol), vparams map (_.symbol))
+ fun.body changeOwner (fun.symbol -> methSym)
+ }
+
+ // callsite for the lifted method
+ val args = fun.vparams map { vparam =>
+ val ident = Ident(vparam.symbol)
+ // if -Yeta-expand-keeps-star is turned on then T* types can get through. In order
+ // to forward them we need to forward x: T* ascribed as "x:_*"
+ if (settings.etaExpandKeepsStar && definitions.isRepeatedParamType(vparam.tpt.tpe))
+ gen.wildcardStar(ident)
+ else
+ ident
+ }
+
+ val funTyper = localTyper.typedPos(fun.pos) _
+
+ val liftedMethodCall = funTyper(Apply(liftedMethod.symbol, args:_*))
+
+ // new function whose body is just a call to the lifted method
+ val newFun = treeCopy.Function(fun, fun.vparams, liftedMethodCall)
+ funTyper(Block(
+ List(funTyper(liftedMethod)),
+ super.transform(newFun)
+ ))
+ }
+ }
}
+
def transformArgs(pos: Position, fun: Symbol, args: List[Tree], formals: List[Type]) = {
val isJava = fun.isJavaDefined
def transformVarargs(varargsElemType: Type) = {
@@ -381,7 +458,7 @@ abstract class UnCurry extends InfoTransform
deriveDefDef(dd)(_ => body)
case _ => tree
}
- def isNonLocalReturn(ret: Return) = ret.symbol != currentOwner.enclMethod || currentOwner.isLazy
+ def isNonLocalReturn(ret: Return) = ret.symbol != currentOwner.enclMethod || currentOwner.isLazy || currentOwner.isAnonymousFunction
// ------ The tree transformers --------------------------------------------------------
@@ -413,6 +490,10 @@ abstract class UnCurry extends InfoTransform
}
val sym = tree.symbol
+
+ // true if the target is a lambda body that's been lifted into a method
+ def isLiftedLambdaBody(target: Tree) = target.symbol.isLocal && target.symbol.isArtifact && target.symbol.name.containsName(nme.ANON_FUN_NAME)
+
val result = (
// TODO - settings.noassertions.value temporarily retained to avoid
// breakage until a reasonable interface is settled upon.
@@ -494,6 +575,10 @@ abstract class UnCurry extends InfoTransform
val pat1 = transform(pat)
treeCopy.CaseDef(tree, pat1, transform(guard), transform(body))
+ // if a lambda is already the right shape we don't need to transform it again
+ case fun @ Function(_, Apply(target, _)) if (!inlineFunctionExpansion) && isLiftedLambdaBody(target) =>
+ super.transform(fun)
+
case fun @ Function(_, _) =>
mainTransform(transformFunction(fun))
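The method-based expansion introduced above lifts the lambda body into a separate method and leaves a thin Function that forwards to it. A rough, hand-written sketch of the resulting shape (names are illustrative; the real lifted method is an ARTIFACT-flagged $anonfun):

object DelambdafyShapeDemo {
  // Hand-written equivalent of what UnCurry now produces for `(x: Int) => x + 1`
  // under -Ydelambdafy:method.
  def liftedBody(x: Int): Int = x + 1
  val f: Int => Int = new Function1[Int, Int] {
    def apply(x: Int): Int = liftedBody(x)   // the anonymous class only forwards to the lifted method
  }
  def main(args: Array[String]): Unit = println(f(41))   // 42
}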
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
index f7b194a6ca..e0bc478fad 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala
@@ -113,7 +113,7 @@ trait Logic extends Debugging {
// symbols are propositions
abstract case class Sym(variable: Var, const: Const) extends Prop {
- private[this] val id = Sym.nextSymId
+ private val id: Int = Sym.nextSymId
override def toString = variable +"="+ const +"#"+ id
}
@@ -125,6 +125,7 @@ trait Logic extends Debugging {
(uniques findEntryOrUpdate newSym)
}
private def nextSymId = {_symId += 1; _symId}; private var _symId = 0
+ implicit val SymOrdering: Ordering[Sym] = Ordering.by(_.id)
}
def /\(props: Iterable[Prop]) = if (props.isEmpty) True else props.reduceLeft(And(_, _))
@@ -161,13 +162,17 @@ trait Logic extends Debugging {
// to govern how much time we spend analyzing matches for unreachability/exhaustivity
object AnalysisBudget {
- import scala.tools.cmd.FromString.IntFromString
- val max = sys.props.get("scalac.patmat.analysisBudget").collect(IntFromString.orElse{case "off" => Integer.MAX_VALUE}).getOrElse(256)
+ private val budgetProp = scala.sys.Prop[Int]("scalac.patmat.analysisBudget")
+ private val budgetOff = "off"
+ val max: Int = {
+ val DefaultBudget = 256
+ budgetProp.option.getOrElse(if (budgetProp.get.equalsIgnoreCase("off")) Integer.MAX_VALUE else DefaultBudget)
+ }
abstract class Exception(val advice: String) extends RuntimeException("CNF budget exceeded")
object exceeded extends Exception(
- s"(The analysis required more space than allowed. Please try with scalac -Dscalac.patmat.analysisBudget=${AnalysisBudget.max*2} or -Dscalac.patmat.analysisBudget=off.)")
+ s"(The analysis required more space than allowed. Please try with scalac -D${budgetProp.key}=${AnalysisBudget.max*2} or -D${budgetProp.key}=${budgetOff}.)")
}
@@ -279,7 +284,7 @@ trait Logic extends Debugging {
def eqFreePropToSolvable(p: Prop): Formula
def cnfString(f: Formula): String
- type Model = Map[Sym, Boolean]
+ type Model = collection.immutable.SortedMap[Sym, Boolean]
val EmptyModel: Model
val NoModel: Model
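The budget is now read through the standard scala.sys.Prop API instead of the removed FromString helper. A small standalone sketch of how that API behaves; the property name is the one the patch reads, the default of 256 mirrors the patch:

object BudgetPropDemo {
  def main(args: Array[String]): Unit = {
    val budgetProp = scala.sys.Prop[Int]("scalac.patmat.analysisBudget")
    // `option` is None when the JVM property is unset; when it is set, the string
    // value is parsed as an Int (run with -Dscalac.patmat.analysisBudget=512 to see it).
    println(s"effective budget: ${budgetProp.option.getOrElse(256)}")
  }
}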
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
index 6267585ea8..1902606d86 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala
@@ -26,9 +26,12 @@ trait Solving extends Logic {
type Formula = FormulaBuilder
def formula(c: Clause*): Formula = ArrayBuffer(c: _*)
- type Clause = Set[Lit]
+ type Clause = collection.Set[Lit]
// a clause is a disjunction of distinct literals
- def clause(l: Lit*): Clause = l.toSet
+ def clause(l: Lit*): Clause = (
+ // neg/t7020.scala changes output 1% of the time; the non-determinism is quelled with this linked set
+ mutable.LinkedHashSet(l: _*)
+ )
type Lit
def Lit(sym: Sym, pos: Boolean = true): Lit
@@ -134,7 +137,7 @@ trait Solving extends Logic {
def cnfString(f: Formula) = alignAcrossRows(f map (_.toList) toList, "\\/", " /\\\n")
// adapted from http://lara.epfl.ch/w/sav10:simple_sat_solver (original by Hossein Hojjat)
- val EmptyModel = Map.empty[Sym, Boolean]
+ val EmptyModel = collection.immutable.SortedMap.empty[Sym, Boolean]
val NoModel: Model = null
// returns all solutions, if any (TODO: better infinite recursion backstop -- detect fixpoint??)
@@ -229,9 +232,8 @@ trait Solving extends Logic {
}
}
- if (Statistics.canEnable) Statistics.stopTimer(patmatAnaDPLL, start)
-
- satisfiableWithModel
+ if (Statistics.canEnable) Statistics.stopTimer(patmatAnaDPLL, start)
+ satisfiableWithModel
}
}
}
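The LinkedHashSet switch above exists only to make literal iteration order deterministic; a quick standalone illustration:

import scala.collection.mutable

object ClauseOrderDemo {
  def main(args: Array[String]): Unit = {
    // LinkedHashSet keeps insertion order, so iterating a clause's literals is deterministic.
    val linked = mutable.LinkedHashSet("p3", "p1", "p2")
    println(linked.toList)                 // List(p3, p1, p2) on every run
    // A plain Set makes no such ordering promise, which is what
    // occasionally perturbed the solver's output.
    println(Set("p3", "p1", "p2").toList)
  }
}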
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index 396f3407f3..69ae6ec0c8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -136,8 +136,8 @@ abstract class Duplicators extends Analyzer {
sym
private def invalidate(tree: Tree, owner: Symbol = NoSymbol) {
- debuglog("attempting to invalidate " + tree.symbol)
- if (tree.isDef && tree.symbol != NoSymbol) {
+ debuglog(s"attempting to invalidate symbol = ${tree.symbol}")
+ if ((tree.isDef || tree.isInstanceOf[Function]) && tree.symbol != NoSymbol) {
debuglog("invalid " + tree.symbol)
invalidSyms(tree.symbol) = tree
@@ -166,6 +166,11 @@ abstract class Duplicators extends Analyzer {
invalidateAll(tparams ::: vparamss.flatten)
tree.symbol = NoSymbol
+ case Function(vparams, _) =>
+ // invalidate parameters
+ invalidateAll(vparams)
+ tree.symbol = NoSymbol
+
case _ =>
tree.symbol = NoSymbol
}
@@ -226,6 +231,10 @@ abstract class Duplicators extends Analyzer {
ddef.tpt modifyType fixType
super.typed(ddef.clearType(), mode, pt)
+ case fun: Function =>
+ debuglog("Clearing the type and retyping Function: " + fun)
+ super.typed(fun.clearType, mode, pt)
+
case vdef @ ValDef(mods, name, tpt, rhs) =>
// log("vdef fixing tpe: " + tree.tpe + " with sym: " + tree.tpe.typeSymbol + " and " + invalidSyms)
//if (mods.hasFlag(Flags.LAZY)) vdef.symbol.resetFlag(Flags.MUTABLE) // Martin to Iulian: lazy vars can now appear because they are no longer boxed; Please check that deleting this statement is OK.
diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
index b30ae917d9..1e89e79cdd 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala
@@ -78,7 +78,11 @@ trait Implicits {
})
debuglog("update buffer: " + implicitSearchContext.reportBuffer.errors)
}
- context.undetparams = context.undetparams filterNot result.subst.from.contains
+ // SI-7944 undetermined type parameters that result from inference within typedImplicit land in
+ // `implicitSearchContext.undetparams`, *not* in `context.undetparams`
+ // Here, we copy them up to parent context (analogously to the way the errors are copied above),
+ // and then filter out any which *were* inferred and are part of the substitutor in the implicit search result.
+ context.undetparams = ((context.undetparams ++ implicitSearchContext.undetparams) filterNot result.subst.from.contains).distinct
if (Statistics.canEnable) Statistics.stopTimer(implicitNanos, start)
if (Statistics.canEnable) Statistics.stopCounter(rawTypeImpl, rawTypeStart)
@@ -216,7 +220,8 @@ trait Implicits {
case NullaryMethodType(restpe) =>
containsError(restpe)
case mt @ MethodType(_, restpe) =>
- (mt.paramTypes exists typeIsError) || containsError(restpe)
+ // OPT avoiding calling `mt.paramTypes` which creates a new list.
+ (mt.params exists symTypeIsError) || containsError(restpe)
case _ =>
tp.isError
}
@@ -579,10 +584,10 @@ trait Implicits {
private def typedImplicit1(info: ImplicitInfo, isLocal: Boolean): SearchResult = {
if (Statistics.canEnable) Statistics.incCounter(matchingImplicits)
- val itree = atPos(pos.focus) {
- // workaround for deficient context provided by ModelFactoryImplicitSupport#makeImplicitConstraints
- val isScalaDoc = context.tree == EmptyTree
+ // workaround for deficient context provided by ModelFactoryImplicitSupport#makeImplicitConstraints
+ val isScalaDoc = context.tree == EmptyTree
+ val itree = atPos(pos.focus) {
if (isLocal && !isScalaDoc) {
// SI-4270 SI-5376 Always use an unattributed Ident for implicits in the local scope,
// rather than an attributed Select, to detect shadowing.
@@ -605,7 +610,23 @@ trait Implicits {
atPos(itree.pos)(Apply(itree, List(Ident("<argument>") setType approximate(arg1)))),
EXPRmode,
approximate(arg2)
- )
+ ) match {
+ // try to infer implicit parameters immediately in order to:
+ // 1) guide type inference for implicit views
+ // 2) discard ineligible views right away instead of risking spurious ambiguous implicits
+ //
+ // this is an improvement of the state of the art that brings consistency to implicit resolution rules
+ // (and also helps fundep materialization to be applicable to implicit views)
+ //
+ // there's one caveat though. we need to turn this behavior off for scaladoc
+ // because scaladoc usually doesn't know the entire story
+ // and is just interested in views that are potentially applicable
+ // for instance, if we have `class C[T]` and `implicit def conv[T: Numeric](c: C[T]) = ???`
+ // then Scaladoc will give us something of type `C[T]`, and it would like to know
+ // that `conv` is potentially available under such and such conditions
+ case tree if isImplicitMethodType(tree.tpe) && !isScalaDoc => applyImplicitArgs(tree)
+ case tree => tree
+ }
case _ => fallback
}
context.firstError match { // using match rather than foreach to avoid non local return.
@@ -617,7 +638,7 @@ trait Implicits {
if (Statistics.canEnable) Statistics.incCounter(typedImplicits)
- val itree2 = if (isView) (itree1: @unchecked) match { case Apply(fun, _) => fun }
+ val itree2 = if (isView) treeInfo.dissectApplied(itree1).callee
else adapt(itree1, EXPRmode, wildPt)
typingStack.showAdapt(itree, itree2, pt, context)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 4765c301dd..d1045757a5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -111,7 +111,7 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
* with synthetic content that carries the payload described in `MacroImplBinding`.
*
* For example, for a pair of macro definition and macro implementation:
- * def impl(c: scala.reflect.macros.Context): c.Expr[Unit] = c.literalUnit;
+ * def impl(c: scala.reflect.macros.Context): c.Expr[Unit] = ???
* def foo: Unit = macro impl
*
* We will have the following annotation added on the macro definition `foo`:
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 599969598e..e3d7bfd4f8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -425,11 +425,10 @@ trait Namers extends MethodSynthesis {
sym
}
- /** Enter a module symbol. The tree parameter can be either
- * a module definition or a class definition.
+ /** Enter a module symbol.
*/
def enterModuleSymbol(tree : ModuleDef): Symbol = {
- var m: Symbol = context.scope lookupAll tree.name find (_.isModule) getOrElse NoSymbol
+ var m: Symbol = context.scope lookupModule tree.name
val moduleFlags = tree.mods.flags | MODULE
if (m.isModule && !m.isPackage && inCurrentScope(m) && (currentRun.canRedefine(m) || m.isSynthetic)) {
updatePosFlags(m, tree.pos, moduleFlags)
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
index dea4c46e79..03aad71165 100644
--- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala
@@ -289,7 +289,7 @@ trait NamesDefaults { self: Analyzer =>
arg.tpe
}
).widen // have to widen or types inferred from literal defaults will be singletons
- val s = context.owner.newValue(unit.freshTermName("x$"), arg.pos, newFlags = ARTIFACT) setInfo {
+ val s = context.owner.newValue(unit.freshTermName(), arg.pos, newFlags = ARTIFACT) setInfo {
val tp = if (byName) functionType(Nil, argTpe) else argTpe
uncheckedBounds(tp)
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
index f3e8ac64f4..f69b8a9697 100644
--- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala
@@ -281,7 +281,7 @@ trait PatternTypers {
else TypeBounds.lower(tpSym.tpeHK)
)
// origin must be the type param so we can deskolemize
- val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?"+tpSym.name), tpSym, bounds)
+ val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?" + tpSym.name), tpSym, bounds)
skolemBuffer += skolem
logResult(s"Created gadt skolem $skolem: ${skolem.tpe_*} to stand in for $tpSym")(skolem.tpe_*)
case tp1 => tp1
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index a9a7f6a954..36f889f8a4 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -1414,7 +1414,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans
private def checkTypeRef(tp: Type, tree: Tree, skipBounds: Boolean) = tp match {
case TypeRef(pre, sym, args) =>
- checkDeprecated(sym, tree.pos)
+ tree match {
+ case tt: TypeTree if tt.original == null => // SI-7783 don't warn about inferred types
+ case _ =>
+ checkDeprecated(sym, tree.pos)
+ }
if(sym.isJavaDefined)
sym.typeParams foreach (_.cookJavaRawInfo())
if (!tp.isHigherKinded && !skipBounds)
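Roughly the scenario SI-7783 covers, with hypothetical names: only types the user wrote out should warn, not types the compiler inferred.

@deprecated("use NewThing instead", "1.0")
class OldThing

object Factory {
  def make: OldThing = new OldThing   // explicit reference: deprecation warning as before
}

object Client {
  val cached = Factory.make           // OldThing is only inferred here: no longer warned about
}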
diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
index 1b726c37b9..fd8f9bebba 100644
--- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala
@@ -283,6 +283,9 @@ abstract class TreeCheckers extends Analyzer {
}
private def traverseInternal(tree: Tree) {
+ if (!tree.canHaveAttrs)
+ return
+
checkSymbolRefsRespectScope(enclosingMemberDefs takeWhile (md => !md.symbol.hasPackageFlag), tree)
checkReturnReferencesDirectlyEnclosingDef(tree)
@@ -329,10 +332,9 @@ abstract class TreeCheckers extends Analyzer {
return
case _ =>
}
-
- if (tree.canHaveAttrs && tree.pos == NoPosition)
+ if (tree.pos == NoPosition)
noPos(tree)
- else if (tree.tpe == null && phase.id > currentRun.typerPhase.id)
+ else if (tree.tpe == null && isPastTyper)
noType(tree)
else if (tree.isDef) {
checkSym(tree)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index e93c0938e3..c385e7533a 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -13,7 +13,7 @@ package scala
package tools.nsc
package typechecker
-import scala.collection.mutable
+import scala.collection.{ mutable, immutable }
import scala.reflect.internal.util.{ BatchSourceFile, Statistics, shortClassOfInstance }
import mutable.ListBuffer
import symtab.Flags._
@@ -3459,8 +3459,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
// SI-7877 `isTerm` needed to exclude `class T[A] { def unapply(..) }; ... case T[X] =>`
case HasUnapply(unapply) if mode.inPatternMode && fun.isTerm =>
- if (unapply == QuasiquoteClass_api_unapply) macroExpandUnapply(this, tree, fun, unapply, args, mode, pt)
- else doTypedUnapply(tree, fun0, fun, args, mode, pt)
+ doTypedUnapply(tree, fun0, fun, args, mode, pt)
case _ =>
if (treeInfo.isMacroApplication(tree)) duplErrorTree(MacroTooManyArgumentListsError(tree, fun.symbol))
@@ -3590,7 +3589,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
else {
val annScope = annType.decls
.filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined)
- val names = new scala.collection.mutable.HashSet[Symbol]
+ val names = mutable.Set[Symbol]()
names ++= (if (isJava) annScope.iterator
else typedFun.tpe.params.iterator)
@@ -3603,7 +3602,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
val nvPairs = args map {
case arg @ AssignOrNamedArg(Ident(name), rhs) =>
val sym = if (isJava) annScope.lookup(name)
- else typedFun.tpe.params.find(p => p.name == name).getOrElse(NoSymbol)
+ else findSymbol(typedFun.tpe.params)(_.name == name)
if (sym == NoSymbol) {
reportAnnotationError(UnknownAnnotationNameError(arg, name))
(nme.ERROR, None)
@@ -3730,8 +3729,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
while (o != owner && o != NoSymbol && !o.hasPackageFlag) o = o.owner
o == owner && !isVisibleParameter(sym)
}
- var localSyms = scala.collection.immutable.Set[Symbol]()
- var boundSyms = scala.collection.immutable.Set[Symbol]()
+ var localSyms = immutable.Set[Symbol]()
+ var boundSyms = immutable.Set[Symbol]()
def isLocal(sym: Symbol): Boolean =
if (sym == NoSymbol || sym.isRefinementClass || sym.isLocalDummy) false
else if (owner == NoSymbol) tree exists (defines(_, sym))
@@ -4311,7 +4310,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper
NotAMemberError(tpt, TypeTree(tp), nme.CONSTRUCTOR)
setError(tpt)
}
- else if (!( tp == sym.thisSym.tpe_* // when there's no explicit self type -- with (#3612) or without self variable
+ else if (!( tp == sym.typeOfThis // when there's no explicit self type -- with (#3612) or without self variable
// sym.thisSym.tpe == tp.typeOfThis (except for objects)
|| narrowRhs(tp) <:< tp.typeOfThis
|| phase.erasedTypes
diff --git a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala b/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
deleted file mode 100644
index e877c990f0..0000000000
--- a/src/compiler/scala/tools/nsc/util/FreshNameCreator.scala
+++ /dev/null
@@ -1,40 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package util
-
-import scala.collection.mutable
-
-trait FreshNameCreator {
- /** Do not call before after type checking ends.
- * PP: I think that directive needs to lose a word somewhere.
- */
- def newName(): String
- def newName(prefix: String): String
-}
-
-object FreshNameCreator {
- class Default extends FreshNameCreator {
- protected var counter = 0
- protected val counters = mutable.HashMap[String, Int]() withDefaultValue 0
-
- /**
- * Create a fresh name with the given prefix. It is guaranteed
- * that the returned name has never been returned by a previous
- * call to this function (provided the prefix does not end in a digit).
- */
- def newName(prefix: String): String = {
- val safePrefix = prefix.replaceAll("""[<>]""", """\$""")
- counters(safePrefix) += 1
-
- safePrefix + counters(safePrefix)
- }
- def newName(): String = {
- counter += 1
- "$" + counter + "$"
- }
- }
-}
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index fdc2613810..e94b7725cd 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -280,7 +280,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
def parse(code: String): Tree = {
reporter.reset()
- val tree = gen.mkTreeOrBlock(newUnitParser(code, "<toolbox>").parseStats())
+ val tree = gen.mkTreeOrBlock(newUnitParser(code, "<toolbox>").parseStatsOrPackages())
throwIfErrors()
tree
}
@@ -340,7 +340,8 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
lazy val exporter = importer.reverse
}
- def apply[T](f: CompilerApi => T): T = {
+ private val toolBoxLock = new Object
+ def apply[T](f: CompilerApi => T): T = toolBoxLock.synchronized {
try f(api)
catch { case ex: FatalError => throw ToolBoxError(s"fatal compiler error", ex) }
finally api.compiler.cleanupCaches()
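A hedged sketch of the kind of concurrent use the new lock is meant to make safe (standard ToolBox API; the threading here is only illustrative):

import scala.reflect.runtime.currentMirror
import scala.tools.reflect.ToolBox

object ToolBoxLockDemo {
  def main(args: Array[String]): Unit = {
    val tb = currentMirror.mkToolBox()
    // Several threads sharing one ToolBox: with the synchronized `apply`,
    // their parse/eval calls run one at a time against the shared compiler.
    val threads = (1 to 4).map { i =>
      new Thread(new Runnable {
        def run(): Unit = println(tb.eval(tb.parse(s"$i + $i")))
      })
    }
    threads.foreach(_.start())
    threads.foreach(_.join())
  }
}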
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
index dd849f2bca..f92c9aa845 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
@@ -154,7 +154,7 @@ trait Holes { self: Quasiquotes =>
object Hole {
def apply(splicee: Tree, holeCard: Cardinality): Hole = {
- if (splicee.tpe == null) return new Hole(splicee, UnknownLocation, holeCard)
+ if (method == nme.unapply) return new Hole(splicee, UnknownLocation, holeCard)
val (spliceeCard, elementTpe) = parseCardinality(splicee.tpe)
def cantSplice() = {
val holeCardMsg = if (holeCard != NoDot) s" with $holeCard" else ""
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
index 5a1a25cfa1..0b5ade0b4c 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
@@ -4,15 +4,16 @@ package quasiquotes
import scala.tools.nsc.ast.parser.{Parsers => ScalaParser}
import scala.tools.nsc.ast.parser.Tokens._
import scala.compat.Platform.EOL
-import scala.reflect.internal.util.{BatchSourceFile, SourceFile}
+import scala.reflect.internal.util.{BatchSourceFile, SourceFile, FreshNameCreator}
import scala.collection.mutable.ListBuffer
+import scala.util.Try
/** Builds upon the vanilla Scala parser and teams up together with Placeholders.scala to emulate holes.
* A principled solution to splicing into Scala syntax would be a parser that natively supports holes.
* Unfortunately, that's outside of our reach in Scala 2.11, so we have to emulate.
*/
trait Parsers { self: Quasiquotes =>
- import global._
+ import global.{Try => _, _}
abstract class Parser extends {
val global: self.global.type = self.global
@@ -54,7 +55,13 @@ trait Parsers { self: Quasiquotes =>
def isHole(name: Name): Boolean = holeMap.contains(name)
+ override implicit def fresh: FreshNameCreator = new FreshNameCreator {
+ override def newName(prefix: String) = super.newName(nme.QUASIQUOTE_PREFIX + prefix)
+ }
+
override val treeBuilder = new ParserTreeBuilder {
+ override implicit def fresh: FreshNameCreator = parser.fresh
+
// q"(..$xs)"
override def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree =
Apply(Ident(nme.QUASIQUOTE_TUPLE), trees)
@@ -94,8 +101,6 @@ trait Parsers { self: Quasiquotes =>
override def isAnnotation: Boolean = super.isAnnotation || (isHole && lookingAhead { isAnnotation })
- override def isCaseDefStart: Boolean = super.isCaseDefStart || (in.token == EOF)
-
override def isModifier: Boolean = super.isModifier || (isHole && lookingAhead { isModifier })
override def isLocalModifier: Boolean = super.isLocalModifier || (isHole && lookingAhead { isLocalModifier })
@@ -140,11 +145,18 @@ trait Parsers { self: Quasiquotes =>
case Ident(name) if isHole(name) => true
case _ => false
})
+
+ override def topStat = super.topStat.orElse {
+ case _ if isHole =>
+ val stats = ValDef(NoMods, in.name, Ident(tpnme.QUASIQUOTE_PACKAGE_STAT), EmptyTree) :: Nil
+ in.nextToken()
+ stats
+ }
}
}
object TermParser extends Parser {
- def entryPoint = { parser => gen.mkTreeOrBlock(parser.templateStats()) }
+ def entryPoint = { parser => gen.mkTreeOrBlock(parser.templateOrTopStatSeq()) }
}
object TypeParser extends Parser {
@@ -161,4 +173,14 @@ trait Parsers { self: Quasiquotes =>
parser.treeBuilder.patvarTransformer.transform(pat)
}
}
+
+ object FreshName {
+ def unapply(name: Name): Option[String] =
+ name.toString.split("\\$") match {
+ case Array(qq, left, right) if qq + "$" == nme.QUASIQUOTE_PREFIX && Try(right.toInt).isSuccess =>
+ Some(left + "$")
+ case _ =>
+ None
+ }
+ }
} \ No newline at end of file
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
index c2b219ee31..c31d1fcd12 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
@@ -32,10 +32,17 @@ trait Placeholders { self: Quasiquotes =>
def appendHole(tree: Tree, cardinality: Cardinality) = {
val placeholderName = c.freshName(TermName(nme.QUASIQUOTE_PREFIX + sessionSuffix))
sb.append(placeholderName)
- holeMap(placeholderName) = Hole(tree, cardinality)
+ val holeTree = if (method == nme.unapply) Bind(placeholderName, Ident(nme.WILDCARD)) else tree
+ holeMap(placeholderName) = Hole(holeTree, cardinality)
}
- foreach2(args, parts.init) { case (tree, (p, pos)) =>
+ val iargs = method match {
+ case nme.apply => args
+ case nme.unapply => List.fill(parts.length - 1)(EmptyTree)
+ case _ => global.abort("unreachable")
+ }
+
+ foreach2(iargs, parts.init) { case (tree, (p, pos)) =>
val (part, cardinality) = parseDots(p)
appendPart(part, pos)
appendHole(tree, cardinality)
@@ -47,7 +54,7 @@ trait Placeholders { self: Quasiquotes =>
}
class HoleMap {
- private val underlying = mutable.ListMap[String, Hole]()
+ private var underlying = immutable.SortedMap[String, Hole]()
private val accessed = mutable.Set[String]()
def unused: Set[Name] = (underlying.keys.toSet -- accessed).map(TermName(_))
def contains(key: Name) = underlying.contains(key.toString)
@@ -64,6 +71,7 @@ trait Placeholders { self: Quasiquotes =>
accessed += s
underlying.get(s)
}
+ def toList = underlying.toList
}
// Step 2: Transform vanilla Scala AST into an AST with holes
@@ -146,4 +154,11 @@ trait Placeholders { self: Quasiquotes =>
case _ => None
}
}
+
+ object PackageStatPlaceholder {
+ def unapply(tree: Tree): Option[(Tree, Location, Cardinality)] = tree match {
+ case ValDef(NoMods, Placeholder(tree, location, card), Ident(tpnme.QUASIQUOTE_PACKAGE_STAT), EmptyTree) => Some((tree, location, card))
+ case _ => None
+ }
+ }
} \ No newline at end of file
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
index 1305e25240..f4d6b39d02 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
@@ -14,8 +14,9 @@ abstract class Quasiquotes extends Parsers
def debug(msg: String): Unit =
if (settings.Yquasiquotedebug.value) println(msg)
- lazy val (universe: Tree, args, parts, parse, reify) = c.macroApplication match {
- case Apply(Select(Select(Apply(Select(universe0, _), List(Apply(_, parts0))), interpolator0), method0), args0) =>
+ lazy val (universe: Tree, args, parts, parse, reify, method) = c.macroApplication match {
+ case Apply(build.SyntacticTypeApplied(Select(Select(Apply(Select(universe0, _), List(Apply(_, parts0))), interpolator0), method0), _), args0) =>
+ debug(s"\nparse prefix:\nuniverse=$universe0\nparts=$parts0\ninterpolator=$interpolator0\nmethod=$method0\nargs=$args0\n")
val parts1 = parts0.map {
case lit @ Literal(Constant(s: String)) => s -> lit.pos
case part => c.abort(part.pos, "Quasiquotes can only be used with literal strings")
@@ -32,7 +33,7 @@ abstract class Quasiquotes extends Parsers
case nme.pq => PatternParser.parse(_)
case other => global.abort(s"Unknown quasiquote flavor: $other")
}
- (universe0, args0, parts1, parse0, reify0)
+ (universe0, args0, parts1, parse0, reify0, method0)
case _ =>
global.abort(s"Couldn't parse call prefix tree ${c.macroApplication}.")
}
@@ -41,11 +42,18 @@ abstract class Quasiquotes extends Parsers
lazy val universeTypes = new definitions.UniverseDependentTypes(universe)
def expandQuasiquote = {
+ debug(s"\nmacro application:\n${c.macroApplication}\n")
debug(s"\ncode to parse:\n$code\n")
val tree = parse(code)
debug(s"parsed:\n${showRaw(tree)}\n$tree\n")
val reified = reify(tree)
- debug(s"reified tree:\n$reified\n")
+ val sreified =
+ reified
+ .toString
+ .replace("scala.reflect.runtime.`package`.universe.build.", "")
+ .replace("scala.reflect.runtime.`package`.universe.", "")
+ .replace("scala.collection.immutable.", "")
+ debug(s"reified tree:\n$sreified\n")
reified
}
}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
index 18999e8267..3d1ecf95b2 100644
--- a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
@@ -7,10 +7,8 @@ import scala.reflect.internal.Flags._
trait Reifiers { self: Quasiquotes =>
import global._
- import global.build.{SyntacticClassDef, SyntacticTraitDef, SyntacticModuleDef,
- SyntacticDefDef, SyntacticValDef, SyntacticVarDef,
- SyntacticBlock, SyntacticApplied, SyntacticTypeApplied,
- SyntacticFunction, SyntacticNew, SyntacticAssign}
+ import global.build.{Select => _, Ident => _, TypeTree => _, _}
+ import global.treeInfo._
import global.definitions._
import Cardinality._
import universeTypes._
@@ -29,12 +27,89 @@ trait Reifiers { self: Quasiquotes =>
def action = if (isReifyingExpressions) "splice" else "extract"
def holesHaveTypes = isReifyingExpressions
+ /** Map that stores freshly generated names linked to the corresponding names in the reified tree.
+ * This information is used to reify names created by calls to freshTermName and freshTypeName.
+ */
+ var nameMap = collection.mutable.HashMap.empty[Name, Set[TermName]].withDefault { _ => Set() }
+
+ /** Wraps expressions into:
+ * a sequence of nested withFreshTermName/withFreshTypeName calls which are required
+ * to force regeneration of randomly generated names on every evaluation of quasiquote.
+ *
+ * Wraps patterns into:
+ * a call into anonymous class' unapply method required by unapply macro expansion:
+ *
+ * new {
+ * def unapply(tree) = tree match {
+ * case pattern if guard => Some(result)
+ * case _ => None
+ * }
+ * }.unapply(<unapply-selector>)
+ *
+ * where pattern corresponds to reified tree and guard represents conjunction of equalities
+ * which check that pairs of names in nameMap.values are equal to each other.
+ */
+ def wrap(tree: Tree) =
+ if (isReifyingExpressions) {
+ nameMap.foldLeft(tree) {
+ case (t, (origname, names)) =>
+ assert(names.size == 1)
+ val FreshName(prefix) = origname
+ val ctor = TermName("withFresh" + (if (origname.isTermName) "TermName" else "TypeName"))
+ // q"$u.build.$ctor($prefix) { ${names.head} => $t }"
+ Apply(Apply(Select(Select(u, nme.build), ctor), List(Literal(Constant(prefix)))),
+ List(Function(List(ValDef(Modifiers(PARAM), names.head, TypeTree(), EmptyTree)), t)))
+ }
+ } else {
+ val freevars = holeMap.toList.map { case (name, _) => Ident(name) }
+ val isVarPattern = tree match { case Bind(name, Ident(nme.WILDCARD)) => true case _ => false }
+ val cases =
+ if(isVarPattern) {
+ val Ident(name) :: Nil = freevars
+ // cq"$name: $treeType => $SomeModule($name)" :: Nil
+ CaseDef(Bind(name, Typed(Ident(nme.WILDCARD), TypeTree(treeType))),
+ EmptyTree, Apply(Ident(SomeModule), List(Ident(name)))) :: Nil
+ } else {
+ val (succ, fail) = freevars match {
+ case Nil =>
+ // (q"true", q"false")
+ (Literal(Constant(true)), Literal(Constant(false)))
+ case head :: Nil =>
+ // (q"$SomeModule($head)", q"$NoneModule")
+ (Apply(Ident(SomeModule), List(head)), Ident(NoneModule))
+ case vars =>
+ // (q"$SomeModule((..$vars))", q"$NoneModule")
+ (Apply(Ident(SomeModule), List(SyntacticTuple(vars))), Ident(NoneModule))
+ }
+ val guard =
+ nameMap.collect { case (_, nameset) if nameset.size >= 2 =>
+ nameset.toList.sliding(2).map { case List(n1, n2) =>
+ // q"$n1 == $n2"
+ Apply(Select(Ident(n1), nme.EQ), List(Ident(n2)))
+ }
+ }.flatten.reduceOption[Tree] { (l, r) =>
+ // q"$l && $r"
+ Apply(Select(l, nme.ZAND), List(r))
+ }.getOrElse { EmptyTree }
+ // cq"$tree if $guard => $succ" :: cq"_ => $fail" :: Nil
+ CaseDef(tree, guard, succ) :: CaseDef(Ident(nme.WILDCARD), EmptyTree, fail) :: Nil
+ }
+ // q"new { def unapply(tree: $AnyClass) = tree match { case ..$cases } }.unapply(..$args)"
+ Apply(
+ Select(
+ SyntacticNew(Nil, Nil, noSelfType, List(
+ DefDef(NoMods, nme.unapply, Nil, List(List(ValDef(NoMods, nme.tree, TypeTree(AnyClass.toType), EmptyTree))), TypeTree(),
+ Match(Ident(nme.tree), cases)))),
+ nme.unapply),
+ args)
+ }
+
def reifyFillingHoles(tree: Tree): Tree = {
val reified = reifyTree(tree)
holeMap.unused.foreach { hole =>
c.abort(holeMap(hole).tree.pos, s"Don't know how to $action here")
}
- reified
+ wrap(reified)
}
override def reifyTree(tree: Tree): Tree =
@@ -51,6 +126,7 @@ trait Reifiers { self: Quasiquotes =>
case CasePlaceholder(tree, location, _) => reifyCase(tree, location)
case RefineStatPlaceholder(tree, _, _) => reifyRefineStat(tree)
case EarlyDefPlaceholder(tree, _, _) => reifyEarlyDef(tree)
+ case PackageStatPlaceholder(tree, _, _) => reifyPackageStat(tree)
case _ => EmptyTree
}
@@ -60,18 +136,23 @@ trait Reifiers { self: Quasiquotes =>
case SyntacticClassDef(mods, name, tparams, constrmods, vparamss, earlyDefs, parents, selfdef, body) =>
reifyBuildCall(nme.SyntacticClassDef, mods, name, tparams, constrmods, vparamss,
earlyDefs, parents, selfdef, body)
- case SyntacticModuleDef(mods, name, earlyDefs, parents, selfdef, body) =>
- reifyBuildCall(nme.SyntacticModuleDef, mods, name, earlyDefs, parents, selfdef, body)
+ case SyntacticPackageObjectDef(name, earlyDefs, parents, selfdef, body) =>
+ reifyBuildCall(nme.SyntacticPackageObjectDef, name, earlyDefs, parents, selfdef, body)
+ case SyntacticObjectDef(mods, name, earlyDefs, parents, selfdef, body) =>
+ reifyBuildCall(nme.SyntacticObjectDef, mods, name, earlyDefs, parents, selfdef, body)
case SyntacticNew(earlyDefs, parents, selfdef, body) =>
reifyBuildCall(nme.SyntacticNew, earlyDefs, parents, selfdef, body)
case SyntacticDefDef(mods, name, tparams, vparamss, tpt, rhs) =>
reifyBuildCall(nme.SyntacticDefDef, mods, name, tparams, vparamss, tpt, rhs)
- case SyntacticValDef(mods, name, tpt, rhs) =>
+ case SyntacticValDef(mods, name, tpt, rhs) if tree != noSelfType =>
reifyBuildCall(nme.SyntacticValDef, mods, name, tpt, rhs)
case SyntacticVarDef(mods, name, tpt, rhs) =>
reifyBuildCall(nme.SyntacticVarDef, mods, name, tpt, rhs)
case SyntacticAssign(lhs, rhs) =>
reifyBuildCall(nme.SyntacticAssign, lhs, rhs)
+ case SyntacticApplied(fun, List(args))
+ if args.forall { case Placeholder(_, _, DotDotDot) => false case _ => true } =>
+ reifyBuildCall(nme.SyntacticApply, fun, args)
case SyntacticApplied(fun, argss) if argss.nonEmpty =>
reifyBuildCall(nme.SyntacticApplied, fun, argss)
case SyntacticTypeApplied(fun, targs) if targs.nonEmpty =>
@@ -94,6 +175,12 @@ trait Reifiers { self: Quasiquotes =>
case Placeholder(tree, location, _) =>
if (holesHaveTypes && !(location.tpe <:< nameType)) c.abort(tree.pos, s"$nameType expected but ${location.tpe} found")
tree
+ case FreshName(prefix) if prefix != nme.QUASIQUOTE_NAME_PREFIX =>
+ def fresh() = c.freshName[TermName](nme.QUASIQUOTE_NAME_PREFIX)
+ def introduceName() = { val n = fresh(); nameMap(name) += n; n}
+ def result(n: Name) = if (isReifyingExpressions) Ident(n) else Bind(n, Ident(nme.WILDCARD))
+ if (isReifyingPatterns) result(introduceName())
+ else result(nameMap.get(name).map { _.head }.getOrElse { introduceName() })
case _ =>
super.reifyName(name)
}
@@ -131,6 +218,8 @@ trait Reifiers { self: Quasiquotes =>
def reifyAnnotation(tree: Tree) = tree
+ def reifyPackageStat(tree: Tree) = tree
+
/** Splits list into a list of groups where subsequent elements are considered
* similar by the corresponding function.
*
@@ -185,6 +274,8 @@ trait Reifiers { self: Quasiquotes =>
case CasePlaceholder(tree, _, DotDot) => tree
case RefineStatPlaceholder(tree, _, DotDot) => reifyRefineStat(tree)
case EarlyDefPlaceholder(tree, _, DotDot) => reifyEarlyDef(tree)
+ case PackageStatPlaceholder(tree, _, DotDot) => reifyPackageStat(tree)
+
case List(Placeholder(tree, _, DotDotDot)) => tree
} {
reify(_)
@@ -201,13 +292,13 @@ trait Reifiers { self: Quasiquotes =>
// to overload the same tree for two different concepts:
// - MUTABLE that is used to override ValDef for vars
// - TRAIT that is used to override ClassDef for traits
- val nonoverloadedExplicitFlags = ExplicitFlags & ~MUTABLE & ~TRAIT
+ val nonOverloadedExplicitFlags = ExplicitFlags & ~MUTABLE & ~TRAIT
def ensureNoExplicitFlags(m: Modifiers, pos: Position) = {
// Traits automatically have ABSTRACT flag assigned to
// them so in that case it's not an explicit flag
val flags = if (m.isTrait) m.flags & ~ABSTRACT else m.flags
- if ((flags & nonoverloadedExplicitFlags) != 0L)
+ if ((flags & nonOverloadedExplicitFlags) != 0L)
c.abort(pos, s"Can't $action modifiers together with flags, consider merging flags into modifiers")
}
@@ -280,6 +371,8 @@ trait Reifiers { self: Quasiquotes =>
override def reifyEarlyDef(tree: Tree) = mirrorBuildCall(nme.mkEarlyDef, tree)
override def reifyAnnotation(tree: Tree) = mirrorBuildCall(nme.mkAnnotation, tree)
+
+ override def reifyPackageStat(tree: Tree) = mirrorBuildCall(nme.mkPackageStat, tree)
}
class UnapplyReifier extends Reifier {
diff --git a/src/eclipse/README.md b/src/eclipse/README.md
index a1dc01e122..d23e10ca1c 100644
--- a/src/eclipse/README.md
+++ b/src/eclipse/README.md
@@ -61,6 +61,17 @@ consider them unchanged:
git update-index --no-assume-unchanged `find src/eclipse -iname .classpath -or -iname .project`
+If it doesn’t compile
+=====================
+
+The likely reason is that the build path of the imported projects isn’t correct. This can happen, for instance,
+when the [versions.properties](https://github.com/scala/scala/blob/master/versions.properties) file is updated
+and the Eclipse `.classpath` of the different projects isn’t updated accordingly. The fix is simple: manually inspect
+the build path of each project and make sure the version of each declared dependency is in sync with the version
+declared in the `versions.properties` file. If it isn’t, update it manually and, when done, don’t forget to share
+your changes via a pull request.
+(We are aware this manual process is cumbersome. If you feel like scripting it, pull requests are of course welcome.)
+
DETAILS
=======
diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath
index 2214517d92..abe92cebb5 100644
--- a/src/eclipse/partest/.classpath
+++ b/src/eclipse/partest/.classpath
@@ -5,7 +5,7 @@
<classpathentry combineaccessrules="false" kind="src" path="/repl"/>
<classpathentry kind="var" path="M2_REPO/com/googlecode/java-diff-utils/diffutils/1.3.0/diffutils-1.3.0.jar"/>
<classpathentry kind="var" path="M2_REPO/org/scala-tools/testing/test-interface/0.5/test-interface-0.5.jar"/>
- <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11.0-M4/1.0-RC4/scala-partest_2.11.0-M4-1.0-RC4.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11.0-M5/1.0-RC5/scala-partest_2.11.0-M5-1.0-RC5.jar"/>
<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/>
diff --git a/src/eclipse/scaladoc/.classpath b/src/eclipse/scaladoc/.classpath
index 1135a5c311..c5b8a02c0b 100644
--- a/src/eclipse/scaladoc/.classpath
+++ b/src/eclipse/scaladoc/.classpath
@@ -6,8 +6,8 @@
<classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
<classpathentry combineaccessrules="false" kind="src" path="/partest-extras"/>
- <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-xml_2.11.0-M4/1.0-RC3/scala-xml_2.11.0-M4-1.0-RC3.jar"/>
- <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-parser-combinators_2.11.0-M4/1.0-RC1/scala-parser-combinators_2.11.0-M4-1.0-RC1.jar"/>
- <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11.0-M4/1.0-RC4/scala-partest_2.11.0-M4-1.0-RC4.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-xml_2.11.0-M5/1.0-RC4/scala-xml_2.11.0-M5-1.0-RC4.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-parser-combinators_2.11.0-M5/1.0-RC2/scala-parser-combinators_2.11.0-M5-1.0-RC2.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-lang/modules/scala-partest_2.11.0-M5/1.0-RC5/scala-partest_2.11.0-M5-1.0-RC5.jar"/>
<classpathentry kind="output" path="build-quick-scaladoc"/>
</classpath>
diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala
index f79b5afce9..d5e1dd53db 100644
--- a/src/library/scala/collection/IterableLike.scala
+++ b/src/library/scala/collection/IterableLike.scala
@@ -152,7 +152,7 @@ self =>
*
* @param size the number of elements per group
* @return An iterator producing ${coll}s of size `size`, except the
- * last will be truncated if the elements don't divide evenly.
+ * last will have fewer than `size` elements if the elements don't divide evenly.
*/
def grouped(size: Int): Iterator[Repr] =
for (xs <- iterator grouped size) yield {
@@ -202,7 +202,7 @@ self =>
b.sizeHintBounded(n, this)
val lead = this.iterator drop n
var go = false
- for (x <- this.seq) {
+ for (x <- this) {
if (lead.hasNext) lead.next()
else go = true
if (go) b += x
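The doc tweak above is about the size of the final group; for example:

object GroupedDemo {
  def main(args: Array[String]): Unit = {
    // Five elements grouped by two: the last group simply has fewer elements.
    println(List(1, 2, 3, 4, 5).grouped(2).toList)   // List(List(1, 2), List(3, 4), List(5))
  }
}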
diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala
index c02ea98914..189dce49b7 100644
--- a/src/library/scala/collection/SeqLike.scala
+++ b/src/library/scala/collection/SeqLike.scala
@@ -273,7 +273,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
def reverseMap[B, That](f: A => B)(implicit bf: CanBuildFrom[Repr, B, That]): That = {
var xs: List[A] = List()
- for (x <- this.seq)
+ for (x <- this)
xs = x :: xs
val b = bf(repr)
for (x <- xs)
@@ -478,7 +478,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
private def occCounts[B](sq: Seq[B]): mutable.Map[B, Int] = {
val occ = new mutable.HashMap[B, Int] { override def default(k: B) = 0 }
- for (y <- sq.seq) occ(y) += 1
+ for (y <- sq) occ(y) += 1
occ
}
@@ -608,7 +608,7 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[
val len = this.length
val arr = new ArraySeq[A](len)
var i = 0
- for (x <- this.seq) {
+ for (x <- this) {
arr(i) = x
i += 1
}
diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala
index 00f4de82cd..f02c00a312 100644
--- a/src/library/scala/collection/TraversableLike.scala
+++ b/src/library/scala/collection/TraversableLike.scala
@@ -481,7 +481,7 @@ trait TraversableLike[+A, +Repr] extends Any
var follow = false
val b = newBuilder
b.sizeHint(this, -1)
- for (x <- this.seq) {
+ for (x <- this) {
if (follow) b += lst
else follow = true
lst = x
@@ -506,7 +506,7 @@ trait TraversableLike[+A, +Repr] extends Any
private[this] def sliceInternal(from: Int, until: Int, b: Builder[A, Repr]): Repr = {
var i = 0
breakable {
- for (x <- this.seq) {
+ for (x <- this) {
if (i >= from) b += x
i += 1
if (i >= until) break
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 2fdad0f8f9..26af32046c 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -97,7 +97,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
// for internal use
protected[this] def reversed = {
var elems: List[A] = Nil
- self.seq foreach (elems ::= _)
+ self foreach (elems ::= _)
elems
}
@@ -140,7 +140,7 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
def foldLeft[B](z: B)(op: (B, A) => B): B = {
var result = z
- this.seq foreach (x => result = op(result, x))
+ this foreach (x => result = op(result, x))
result
}
diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala
index 4410ddc7d8..56f1802509 100644
--- a/src/library/scala/collection/convert/Wrappers.scala
+++ b/src/library/scala/collection/convert/Wrappers.scala
@@ -212,6 +212,15 @@ private[collection] trait Wrappers {
}
}
}
+
+ override def containsKey(key: AnyRef): Boolean = try {
+ // Note: A subclass of collection.Map with a specific key type may redirect the generic
+ // contains to a type-specific contains, which will throw a ClassCastException if the
+ // wrong type is passed. This is why we need the cast to A inside a try/catch.
+ underlying.contains(key.asInstanceOf[A])
+ } catch {
+ case ex: ClassCastException => false
+ }
}
case class MutableMapWrapper[A, B](underlying: mutable.Map[A, B]) extends MapWrapper[A, B](underlying) {
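A hedged sketch of the failure mode the containsKey override guards against, using a sorted map whose typed `contains` compares keys (converter names as in 2.11's JavaConverters):

import scala.collection.JavaConverters._
import scala.collection.immutable.TreeMap

object ContainsKeyDemo {
  def main(args: Array[String]): Unit = {
    val wrapped: java.util.Map[String, Int] = TreeMap("a" -> 1, "b" -> 2).asJava
    println(wrapped.containsKey("a"))                  // true
    // java.util.Map#containsKey accepts any Object; the String ordering inside the
    // TreeMap would throw a ClassCastException, which the wrapper now turns into false.
    println(wrapped.containsKey(Integer.valueOf(42)))  // false instead of an exception
  }
}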
diff --git a/src/library/scala/collection/generic/Growable.scala b/src/library/scala/collection/generic/Growable.scala
index 254d4566be..a223c0c8a8 100644
--- a/src/library/scala/collection/generic/Growable.scala
+++ b/src/library/scala/collection/generic/Growable.scala
@@ -54,7 +54,7 @@ trait Growable[-A] extends Clearable {
loop(xs.tail)
}
}
- xs.seq match {
+ xs match {
case xs: scala.collection.LinearSeq[_] => loop(xs)
case xs => xs foreach +=
}
diff --git a/src/library/scala/collection/generic/Shrinkable.scala b/src/library/scala/collection/generic/Shrinkable.scala
index b5ec568667..dea5bb7217 100644
--- a/src/library/scala/collection/generic/Shrinkable.scala
+++ b/src/library/scala/collection/generic/Shrinkable.scala
@@ -46,5 +46,5 @@ trait Shrinkable[-A] {
* @param xs the iterator producing the elements to remove.
* @return the $coll itself
*/
- def --=(xs: TraversableOnce[A]): this.type = { xs.seq foreach -= ; this }
+ def --=(xs: TraversableOnce[A]): this.type = { xs foreach -= ; this }
}
diff --git a/src/library/scala/collection/immutable/DefaultMap.scala b/src/library/scala/collection/immutable/DefaultMap.scala
index ce34b84486..e9b277b9c4 100755
--- a/src/library/scala/collection/immutable/DefaultMap.scala
+++ b/src/library/scala/collection/immutable/DefaultMap.scala
@@ -46,7 +46,7 @@ trait DefaultMap[A, +B] extends Map[A, B] { self =>
*/
override def - (key: A): Map[A, B] = {
val b = newBuilder
- for (kv <- this.seq ; if kv._1 != key) b += kv
+ for (kv <- this ; if kv._1 != key) b += kv
b.result()
}
}
diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala
index e05d668519..fec2da8839 100644
--- a/src/library/scala/collection/mutable/ArrayStack.scala
+++ b/src/library/scala/collection/mutable/ArrayStack.scala
@@ -157,7 +157,7 @@ extends AbstractSeq[T]
* @param xs The source of elements to push.
* @return A reference to this stack.
*/
- override def ++=(xs: TraversableOnce[T]): this.type = { xs.seq foreach += ; this }
+ override def ++=(xs: TraversableOnce[T]): this.type = { xs foreach += ; this }
/** Does the same as `push`, but returns the updated stack.
*
diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala
index 34c6d1fbb9..53b6c59939 100644
--- a/src/library/scala/collection/mutable/Stack.scala
+++ b/src/library/scala/collection/mutable/Stack.scala
@@ -119,7 +119,7 @@ extends AbstractSeq[A]
* @param xs the traversable object.
* @return the stack with the new elements on top.
*/
- def pushAll(xs: TraversableOnce[A]): this.type = { xs.seq foreach push ; this }
+ def pushAll(xs: TraversableOnce[A]): this.type = { xs foreach push ; this }
/** Returns the top element of the stack. This method will not remove
* the element from the stack. An error is signaled if there is no
diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala
index 332d132ad0..5ec0238c69 100644
--- a/src/library/scala/collection/parallel/ParIterableLike.scala
+++ b/src/library/scala/collection/parallel/ParIterableLike.scala
@@ -416,8 +416,8 @@ self: ParIterableLike[T, Repr, Sequential] =>
* may be invoked arbitrary number of times.
*
* For example, one might want to process some elements and then produce a `Set`. In this
- * case, `seqop` would process an element and append it to the list, while `combop`
- * would concatenate two lists from different partitions together. The initial value
+ * case, `seqop` would process an element and append it to the set, while `combop`
+ * would concatenate two sets from different partitions together. The initial value
* `z` would be an empty set.
*
* {{{
diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala
index f8d0c6043a..fcf0dff846 100644
--- a/src/library/scala/collection/parallel/Tasks.scala
+++ b/src/library/scala/collection/parallel/Tasks.scala
@@ -52,7 +52,7 @@ trait Task[R, +Tp] {
signalAbort()
}
} catch {
- case thr: Exception =>
+ case thr: Throwable =>
result = result // ensure that effects of `leaf` are visible
throwable = thr
signalAbort()
@@ -433,9 +433,7 @@ trait ForkJoinTasks extends Tasks with HavingForkJoinPool {
}
object ForkJoinTasks {
- val defaultForkJoinPool: ForkJoinPool = new ForkJoinPool() // scala.parallel.forkjoinpool
- // defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors)
- // defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors)
+ lazy val defaultForkJoinPool: ForkJoinPool = new ForkJoinPool()
}
/* Some boilerplate due to no deep mixin composition. Not sure if it can be done differently without them.
@@ -461,19 +459,98 @@ trait AdaptiveWorkStealingThreadPoolTasks extends ThreadPoolTasks with AdaptiveW
def newWrappedTask[R, Tp](b: Task[R, Tp]) = new WrappedTask[R, Tp](b)
}
+/** An implementation of `Tasks` that uses Scala `Future`s to compute
+ * the work encapsulated in each task.
+ */
+private[parallel] final class FutureTasks(executor: ExecutionContext) extends Tasks {
+ import scala.concurrent._
+ import scala.util._
+
+ private val maxdepth = (math.log(parallelismLevel) / math.log(2) + 1).toInt
+
+ val environment: ExecutionContext = executor
+
+ /** Divides this task into a lot of small tasks and executes them asynchronously
+ * using futures.
+ * Folds the futures and merges them asynchronously.
+ */
+ private def exec[R, Tp](topLevelTask: Task[R, Tp]): Future[R] = {
+ implicit val ec = environment
+
+ /** Constructs a tree of futures where tasks can be reasonably split.
+ */
+ def compute(task: Task[R, Tp], depth: Int): Future[Task[R, Tp]] = {
+ if (task.shouldSplitFurther && depth < maxdepth) {
+ val subtasks = task.split
+ val subfutures = for (subtask <- subtasks.iterator) yield compute(subtask, depth + 1)
+ subfutures.reduceLeft { (firstFuture, nextFuture) =>
+ for {
+ firstTask <- firstFuture
+ nextTask <- nextFuture
+ } yield {
+ firstTask tryMerge nextTask.repr
+ firstTask
+ }
+ } andThen {
+ case Success(firstTask) =>
+ task.throwable = firstTask.throwable
+ task.result = firstTask.result
+ case Failure(exception) =>
+ task.throwable = exception
+ }
+ } else Future {
+ task.tryLeaf(None)
+ task
+ }
+ }
+
+ compute(topLevelTask, 0) map { t =>
+ t.forwardThrowable()
+ t.result
+ }
+ }
+
+ def execute[R, Tp](task: Task[R, Tp]): () => R = {
+ val future = exec(task)
+ val callback = () => {
+ Await.result(future, scala.concurrent.duration.Duration.Inf)
+ }
+ callback
+ }
+
+ def executeAndWaitResult[R, Tp](task: Task[R, Tp]): R = {
+ execute(task)()
+ }
+
+ def parallelismLevel = Runtime.getRuntime.availableProcessors
+}
+
+/** This tasks implementation uses execution contexts to spawn a parallel computation.
+ *
+ * As an optimization, it internally checks whether the execution context is the
+ * standard implementation based on fork/join pools, and if it is, creates a
+ * `ForkJoinTaskSupport` that shares the same pool to forward its request to it.
+ *
+ * Otherwise, it uses a `Tasks` implementation exclusive to that execution context to
+ * divide the tasks into smaller chunks and execute operations on them.
+ */
trait ExecutionContextTasks extends Tasks {
def executionContext = environment
val environment: ExecutionContext
- // this part is a hack which allows switching
- val driver: Tasks = executionContext match {
+ /** A driver serves as a target for this proxy `Tasks` object.
+ *
+ * If the execution context has the standard implementation and uses fork/join pools,
+ * the driver is `ForkJoinTaskSupport` with the same pool, as an optimization.
+ * Otherwise, the driver will be a Scala `Future`-based implementation.
+ */
+ private val driver: Tasks = executionContext match {
case eci: scala.concurrent.impl.ExecutionContextImpl => eci.executor match {
case fjp: ForkJoinPool => new ForkJoinTaskSupport(fjp)
- case tpe: ThreadPoolExecutor => new ThreadPoolTaskSupport(tpe)
- case _ => ???
+ case _ => new FutureTasks(environment)
}
- case _ => ???
+ case _ => new FutureTasks(environment)
}
def execute[R, Tp](task: Task[R, Tp]): () => R = driver execute task
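
As a point of reference, a minimal self-contained sketch (not part of the patch) of the fold-and-merge pattern that `FutureTasks` uses above: each chunk of work runs in its own `Future`, and neighbouring results are merged as the futures complete. The list of chunks and the summing merge are toy stand-ins for `Task.split` and `tryMerge`.

    import scala.concurrent.{ Await, ExecutionContext, Future }
    import scala.concurrent.duration.Duration
    import ExecutionContext.Implicits.global

    object FoldFuturesSketch extends App {
      // Toy stand-in for Task.split: each chunk is evaluated in its own future.
      val chunks: List[List[Int]] = List(List(1, 2), List(3, 4), List(5, 6))
      val subfutures = chunks.iterator.map(chunk => Future(chunk.sum))
      // Analogous to `firstTask tryMerge nextTask.repr`: fold neighbouring futures together.
      val merged = subfutures.reduceLeft { (first, next) =>
        for (a <- first; b <- next) yield a + b
      }
      println(Await.result(merged, Duration.Inf)) // prints 21
    }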
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index b25553d2c8..923e21e5a7 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -41,7 +41,7 @@ package object parallel {
private[parallel] def outofbounds(idx: Int) = throw new IndexOutOfBoundsException(idx.toString)
- private[parallel] def getTaskSupport: TaskSupport = new ForkJoinTaskSupport
+ private[parallel] def getTaskSupport: TaskSupport = new ExecutionContextTaskSupport
val defaultTaskSupport: TaskSupport = getTaskSupport
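
For comparison, the explicit user-level form of what `getTaskSupport` now produces by default: a sketch (e.g. from the REPL) that installs an `ExecutionContextTaskSupport` backed by the global execution context on a parallel collection; the collection and the doubling operation are arbitrary choices.

    import scala.collection.parallel.ExecutionContextTaskSupport
    import scala.concurrent.ExecutionContext

    val numbers = (1 to 1000).toArray.par
    // Explicitly install the execution-context-backed task support that is now the default.
    numbers.tasksupport = new ExecutionContextTaskSupport(ExecutionContext.global)
    println(numbers.map(_ * 2).sum)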
diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala
index fa264e5d7f..d3c5a6b019 100644
--- a/src/library/scala/concurrent/ExecutionContext.scala
+++ b/src/library/scala/concurrent/ExecutionContext.scala
@@ -16,7 +16,7 @@ import scala.util.Try
/**
* An `ExecutionContext` is an abstraction over an entity that can execute program logic.
*/
-@implicitNotFound("Cannot find an implicit ExecutionContext, either require one yourself or import ExecutionContext.Implicits.global")
+@implicitNotFound("Cannot find an implicit ExecutionContext, either import scala.concurrent.ExecutionContext.Implicits.global or use a custom one")
trait ExecutionContext {
/** Runs a block of code on this execution context.
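
A small usage sketch of what the reworded message points users at: bringing the global `ExecutionContext` into implicit scope so that `Future.apply` can resolve its implicit parameter.

    import scala.concurrent.Future
    import scala.concurrent.ExecutionContext.Implicits.global

    // Without the import above, this line triggers the @implicitNotFound message.
    val answer: Future[Int] = Future(21 * 2)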
diff --git a/src/reflect/scala/reflect/api/BuildUtils.scala b/src/reflect/scala/reflect/api/BuildUtils.scala
index 551c27bf9c..28551b1dcd 100644
--- a/src/reflect/scala/reflect/api/BuildUtils.scala
+++ b/src/reflect/scala/reflect/api/BuildUtils.scala
@@ -80,12 +80,20 @@ private[reflect] trait BuildUtils { self: Universe =>
def mkRefineStat(stats: List[Tree]): List[Tree]
+ def mkPackageStat(stat: Tree): Tree
+
+ def mkPackageStat(stats: List[Tree]): List[Tree]
+
def mkEarlyDef(defn: Tree): Tree
def mkEarlyDef(defns: List[Tree]): List[Tree]
def RefTree(qual: Tree, sym: Symbol): Tree
+ def withFreshTermName[T](prefix: String)(f: TermName => T): T
+
+ def withFreshTypeName[T](prefix: String)(f: TypeName => T): T
+
val ScalaDot: ScalaDotExtractor
trait ScalaDotExtractor {
@@ -114,12 +122,19 @@ private[reflect] trait BuildUtils { self: Universe =>
def unapply(tree: Tree): Some[(Tree, List[List[Tree]])]
}
+ val SyntacticApply: SyntacticApplyExtractor
+
+ trait SyntacticApplyExtractor {
+ def apply(tree: Tree, args: List[Tree]): Tree
+ def unapply(tree: Tree): Some[(Tree, List[Tree])]
+ }
+
val SyntacticClassDef: SyntacticClassDefExtractor
trait SyntacticClassDefExtractor {
def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef],
constrMods: Modifiers, vparamss: List[List[ValDef]], earlyDefs: List[Tree],
- parents: List[Tree], selfdef: ValDef, body: List[Tree]): ClassDef
+ parents: List[Tree], selfType: ValDef, body: List[Tree]): ClassDef
def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef], Modifiers, List[List[ValDef]],
List[Tree], List[Tree], ValDef, List[Tree])]
}
@@ -128,19 +143,27 @@ private[reflect] trait BuildUtils { self: Universe =>
trait SyntacticTraitDefExtractor {
def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef],
- earlyDefs: List[Tree], parents: List[Tree], selfdef: ValDef, body: List[Tree]): ClassDef
+ earlyDefs: List[Tree], parents: List[Tree], selfType: ValDef, body: List[Tree]): ClassDef
def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef],
List[Tree], List[Tree], ValDef, List[Tree])]
}
- val SyntacticModuleDef: SyntacticModuleDefExtractor
+ val SyntacticObjectDef: SyntacticObjectDefExtractor
- trait SyntacticModuleDefExtractor {
+ trait SyntacticObjectDefExtractor {
def apply(mods: Modifiers, name: TermName, earlyDefs: List[Tree],
- parents: List[Tree], selfdef: ValDef, body: List[Tree]): Tree
+ parents: List[Tree], selfType: ValDef, body: List[Tree]): Tree
def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[Tree], ValDef, List[Tree])]
}
+ val SyntacticPackageObjectDef: SyntacticPackageObjectDefExtractor
+
+ trait SyntacticPackageObjectDefExtractor {
+ def apply(name: TermName, earlyDefs: List[Tree],
+ parents: List[Tree], selfType: ValDef, body: List[Tree]): Tree
+ def unapply(tree: Tree): Option[(TermName, List[Tree], List[Tree], ValDef, List[Tree])]
+ }
+
val SyntacticTuple: SyntacticTupleExtractor
val SyntacticTupleType: SyntacticTupleExtractor
@@ -159,7 +182,7 @@ private[reflect] trait BuildUtils { self: Universe =>
val SyntacticNew: SyntacticNewExtractor
trait SyntacticNewExtractor {
- def apply(earlyDefs: List[Tree], parents: List[Tree], selfdef: ValDef, body: List[Tree]): Tree
+ def apply(earlyDefs: List[Tree], parents: List[Tree], selfType: ValDef, body: List[Tree]): Tree
def unapply(tree: Tree): Option[(List[Tree], List[Tree], ValDef, List[Tree])]
}
diff --git a/src/reflect/scala/reflect/api/Quasiquotes.scala b/src/reflect/scala/reflect/api/Quasiquotes.scala
index 08d3274ca5..3687ccba63 100644
--- a/src/reflect/scala/reflect/api/Quasiquotes.scala
+++ b/src/reflect/scala/reflect/api/Quasiquotes.scala
@@ -7,8 +7,8 @@ trait Quasiquotes { self: Universe =>
// using the mechanism implemented in `scala.tools.reflect.FastTrack`
implicit class Quasiquote(ctx: StringContext) {
protected trait api {
- def apply(args: Any*): Any = macro ???
- def unapply(subpatterns: Any*): Option[Any] = macro ???
+ def apply[T](args: T*): Any = macro ???
+ def unapply(scrutinee: Any): Any = macro ???
}
object q extends api
object tq extends api
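
For orientation, a hedged sketch of the two roles these signatures correspond to, using the runtime universe as shipped with quasiquotes in 2.11: `apply` builds a tree from an interpolated string, and `unapply` deconstructs one in pattern position.

    import scala.reflect.runtime.universe._

    val plus = q"1 + 2"          // apply: construct a Tree
    val q"$lhs + $rhs" = plus    // unapply: deconstruct the Tree in a pattern
    println(showRaw(lhs))        // Literal(Constant(1))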
diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala
index fa7d41f0fc..241747e6d8 100644
--- a/src/reflect/scala/reflect/api/Trees.scala
+++ b/src/reflect/scala/reflect/api/Trees.scala
@@ -2544,18 +2544,32 @@ trait Trees { self: Universe =>
class Traverser {
protected[scala] var currentOwner: Symbol = rootMirror.RootClass
+ /** Traverse something which Trees contain, but which isn't a Tree itself. */
+ def traverseName(name: Name): Unit = ()
+ def traverseConstant(c: Constant): Unit = ()
+ def traverseImportSelector(sel: ImportSelector): Unit = ()
+ def traverseModifiers(mods: Modifiers): Unit = traverseAnnotations(mods.annotations)
+
/** Traverses a single tree. */
- def traverse(tree: Tree): Unit = itraverse(this, tree)
+ def traverse(tree: Tree): Unit = itraverse(this, tree)
+ def traversePattern(pat: Tree): Unit = traverse(pat)
+ def traverseGuard(guard: Tree): Unit = traverse(guard)
+ def traverseTypeAscription(tpt: Tree): Unit = traverse(tpt)
+ // Special handling of noSelfType necessary for backward compat: existing
+ // traversers break down when they see the unexpected tree.
+ def traverseSelfType(self: ValDef): Unit = if (self ne noSelfType) traverse(self)
/** Traverses a list of trees. */
- def traverseTrees(trees: List[Tree]) {
- trees foreach traverse
- }
+ def traverseTrees(trees: List[Tree]): Unit = trees foreach traverse
+ def traverseTypeArgs(args: List[Tree]): Unit = traverseTrees(args)
+ def traverseParents(parents: List[Tree]): Unit = traverseTrees(parents)
+ def traverseCases(cases: List[CaseDef]): Unit = traverseTrees(cases)
+ def traverseAnnotations(annots: List[Tree]): Unit = traverseTrees(annots)
/** Traverses a list of lists of trees. */
- def traverseTreess(treess: List[List[Tree]]) {
- treess foreach traverseTrees
- }
+ def traverseTreess(treess: List[List[Tree]]): Unit = treess foreach traverseTrees
+ def traverseParams(params: List[Tree]): Unit = traverseTrees(params)
+ def traverseParamss(vparamss: List[List[Tree]]): Unit = vparamss foreach traverseParams
/** Traverses a list of trees with a given owner symbol. */
def traverseStats(stats: List[Tree], exprOwner: Symbol) {
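
To illustrate the new fine-grained hooks (a sketch assuming they land as declared above), a client `Traverser` can override `traverseName` to observe every `Name` the default traversal visits, without pattern-matching the trees itself:

    import scala.reflect.runtime.universe._
    import scala.collection.mutable.ListBuffer

    val seen = ListBuffer[Name]()
    val nameCollector = new Traverser {
      override def traverseName(name: Name): Unit = seen += name
    }
    nameCollector.traverse(q"class C { def f(x: Int) = x + 1 }")
    println(seen.distinct)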
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index 215ab6abd6..f45fa40f89 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -27,6 +27,8 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
def filterAnnotations(p: AnnotationInfo => Boolean): Self // Retain only annotations meeting the condition.
def withoutAnnotations: Self // Remove all annotations from this type.
+ def staticAnnotations = annotations filter (_.isStatic)
+
/** Symbols of any @throws annotations on this symbol.
*/
def throwsAnnotations(): List[Symbol] = annotations collect {
diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
index 05aaa462c4..19c67879f5 100644
--- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
+++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala
@@ -38,7 +38,7 @@ trait BaseTypeSeqs {
* This is necessary because when run from reflection every base type sequence needs to have a
* SynchronizedBaseTypeSeq as mixin.
*/
- class BaseTypeSeq protected[BaseTypeSeqs] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) {
+ class BaseTypeSeq protected[reflect] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) {
self =>
if (Statistics.canEnable) Statistics.incCounter(baseTypeSeqCount)
if (Statistics.canEnable) Statistics.incCounter(baseTypeSeqLenTotal, elems.length)
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
index 951efd90ed..fc6b26db3f 100644
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -98,6 +98,18 @@ trait BuildUtils { self: SymbolTable =>
def mkRefineStat(stats: List[Tree]): List[Tree] = stats.map(mkRefineStat)
+ def mkPackageStat(stat: Tree): Tree = {
+ stat match {
+ case cd: ClassDef =>
+ case md: ModuleDef =>
+ case pd: PackageDef =>
+ case _ => throw new IllegalArgumentException(s"not legal package stat: $stat")
+ }
+ stat
+ }
+
+ def mkPackageStat(stats: List[Tree]): List[Tree] = stats.map(mkPackageStat)
+
object ScalaDot extends ScalaDotExtractor {
def apply(name: Name): Tree = gen.scalaDot(name)
def unapply(tree: Tree): Option[Name] = tree match {
@@ -110,7 +122,7 @@ trait BuildUtils { self: SymbolTable =>
case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred =>
copyValDef(vdef)(mods = mods | PRESUPER)
case tdef @ TypeDef(mods, _, _, _) =>
- copyTypeDef(tdef)(mods = mods | PRESUPER)
+ copyTypeDef(tdef)(mods = mods | PRESUPER)
case _ =>
throw new IllegalArgumentException(s"not legal early def: $defn")
}
@@ -119,6 +131,12 @@ trait BuildUtils { self: SymbolTable =>
def RefTree(qual: Tree, sym: Symbol) = self.RefTree(qual, sym.name) setSymbol sym
+ def withFreshTermName[T](prefix: String)(f: TermName => T): T = f(freshTermName(prefix))
+
+ def withFreshTypeName[T](prefix: String)(f: TypeName => T): T = f(freshTypeName(prefix))
+
+ private implicit def fresh: FreshNameCreator = self.currentFreshNameCreator
+
object FlagsRepr extends FlagsReprExtractor {
def apply(bits: Long): FlagSet = bits
def unapply(flags: Long): Some[Long] = Some(flags)
@@ -148,6 +166,15 @@ trait BuildUtils { self: SymbolTable =>
}
}
+ object SyntacticApply extends SyntacticApplyExtractor {
+ def apply(tree: Tree, args: List[Tree]): Tree = SyntacticApplied(tree, List(args))
+
+ def unapply(tree: Tree): Some[(Tree, List[Tree])] = tree match {
+ case Apply(fun, args) => Some((fun, args))
+ case other => Some((other, Nil))
+ }
+ }
+
private object UnCtor {
def unapply(tree: Tree): Option[(Modifiers, List[List[ValDef]], List[Tree])] = tree match {
case DefDef(mods, nme.MIXIN_CONSTRUCTOR, _, _, _, Block(lvdefs, _)) =>
@@ -160,9 +187,9 @@ trait BuildUtils { self: SymbolTable =>
private object UnMkTemplate {
def unapply(templ: Template): Option[(List[Tree], ValDef, Modifiers, List[List[ValDef]], List[Tree], List[Tree])] = {
- val Template(parents, selfdef, tbody) = templ
+ val Template(parents, selfType, tbody) = templ
def result(ctorMods: Modifiers, vparamss: List[List[ValDef]], edefs: List[Tree], body: List[Tree]) =
- Some((parents, selfdef, ctorMods, vparamss, edefs, body))
+ Some((parents, selfType, ctorMods, vparamss, edefs, body))
def indexOfCtor(trees: List[Tree]) =
trees.indexWhere { case UnCtor(_, _, _) => true ; case _ => false }
@@ -202,7 +229,7 @@ trait BuildUtils { self: SymbolTable =>
object SyntacticClassDef extends SyntacticClassDefExtractor {
def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef],
constrMods: Modifiers, vparamss: List[List[ValDef]], earlyDefs: List[Tree],
- parents: List[Tree], selfdef: ValDef, body: List[Tree]): ClassDef = {
+ parents: List[Tree], selfType: ValDef, body: List[Tree]): ClassDef = {
val extraFlags = PARAMACCESSOR | (if (mods.isCase) CASEACCESSOR else 0L)
val vparamss0 = vparamss.map { _.map { vd => copyValDef(vd)(mods = (vd.mods | extraFlags) & (~DEFERRED)) } }
val tparams0 = mkTparams(tparams)
@@ -213,15 +240,15 @@ trait BuildUtils { self: SymbolTable =>
} else parents
)
val body0 = earlyDefs ::: body
- val templ = gen.mkTemplate(parents0, selfdef, constrMods, vparamss0, body0)
+ val templ = gen.mkTemplate(parents0, selfType, constrMods, vparamss0, body0)
gen.mkClassDef(mods, name, tparams0, templ)
}
def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef], Modifiers, List[List[ValDef]],
List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
- case ClassDef(mods, name, tparams, UnMkTemplate(parents, selfdef, ctorMods, vparamss, earlyDefs, body))
+ case ClassDef(mods, name, tparams, UnMkTemplate(parents, selfType, ctorMods, vparamss, earlyDefs, body))
if !ctorMods.isTrait && !ctorMods.hasFlag(JAVA) =>
- Some((mods, name, tparams, ctorMods, vparamss, earlyDefs, parents, selfdef, body))
+ Some((mods, name, tparams, ctorMods, vparamss, earlyDefs, parents, selfType, body))
case _ =>
None
}
@@ -229,29 +256,42 @@ trait BuildUtils { self: SymbolTable =>
object SyntacticTraitDef extends SyntacticTraitDefExtractor {
def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef], earlyDefs: List[Tree],
- parents: List[Tree], selfdef: ValDef, body: List[Tree]): ClassDef = {
+ parents: List[Tree], selfType: ValDef, body: List[Tree]): ClassDef = {
val mods0 = mods | TRAIT | ABSTRACT
- val templ = gen.mkTemplate(parents, selfdef, Modifiers(TRAIT), Nil, earlyDefs ::: body)
+ val templ = gen.mkTemplate(parents, selfType, Modifiers(TRAIT), Nil, earlyDefs ::: body)
gen.mkClassDef(mods0, name, mkTparams(tparams), templ)
}
def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef],
List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
- case ClassDef(mods, name, tparams, UnMkTemplate(parents, selfdef, ctorMods, vparamss, earlyDefs, body))
+ case ClassDef(mods, name, tparams, UnMkTemplate(parents, selfType, ctorMods, vparamss, earlyDefs, body))
if mods.isTrait =>
- Some((mods, name, tparams, earlyDefs, parents, selfdef, body))
+ Some((mods, name, tparams, earlyDefs, parents, selfType, body))
case _ => None
}
}
- object SyntacticModuleDef extends SyntacticModuleDefExtractor {
+ object SyntacticObjectDef extends SyntacticObjectDefExtractor {
def apply(mods: Modifiers, name: TermName, earlyDefs: List[Tree],
- parents: List[Tree], selfdef: ValDef, body: List[Tree]) =
- ModuleDef(mods, name, gen.mkTemplate(parents, selfdef, NoMods, Nil, earlyDefs ::: body))
+ parents: List[Tree], selfType: ValDef, body: List[Tree]) =
+ ModuleDef(mods, name, gen.mkTemplate(parents, selfType, NoMods, Nil, earlyDefs ::: body))
def unapply(tree: Tree): Option[(Modifiers, TermName, List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
- case ModuleDef(mods, name, UnMkTemplate(parents, selfdef, _, _, earlyDefs, body)) =>
- Some((mods, name, earlyDefs, parents, selfdef, body))
+ case ModuleDef(mods, name, UnMkTemplate(parents, selfType, _, _, earlyDefs, body)) =>
+ Some((mods, name, earlyDefs, parents, selfType, body))
+ case _ =>
+ None
+ }
+ }
+
+ object SyntacticPackageObjectDef extends SyntacticPackageObjectDefExtractor {
+ def apply(name: TermName, earlyDefs: List[Tree],
+ parents: List[Tree], selfType: ValDef, body: List[Tree]): Tree =
+ gen.mkPackageObject(SyntacticObjectDef(NoMods, name, earlyDefs, parents, selfType, body))
+
+ def unapply(tree: Tree): Option[(TermName, List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
+ case PackageDef(Ident(name: TermName), List(SyntacticObjectDef(NoMods, nme.PACKAGEkw, earlyDefs, parents, selfType, body))) =>
+ Some((name, earlyDefs, parents, selfType, body))
case _ =>
None
}
@@ -359,15 +399,15 @@ trait BuildUtils { self: SymbolTable =>
}
object SyntacticNew extends SyntacticNewExtractor {
- def apply(earlyDefs: List[Tree], parents: List[Tree], selfdef: ValDef, body: List[Tree]): Tree =
- gen.mkNew(parents, selfdef, earlyDefs ::: body, NoPosition, NoPosition)
+ def apply(earlyDefs: List[Tree], parents: List[Tree], selfType: ValDef, body: List[Tree]): Tree =
+ gen.mkNew(parents, selfType, earlyDefs ::: body, NoPosition, NoPosition)
def unapply(tree: Tree): Option[(List[Tree], List[Tree], ValDef, List[Tree])] = tree match {
case SyntacticApplied(Select(New(SyntacticTypeApplied(ident, targs)), nme.CONSTRUCTOR), argss) =>
Some((Nil, SyntacticApplied(SyntacticTypeApplied(ident, targs), argss) :: Nil, noSelfType, Nil))
- case SyntacticBlock(SyntacticClassDef(_, tpnme.ANON_CLASS_NAME, Nil, _, ListOfNil, earlyDefs, parents, selfdef, body) ::
+ case SyntacticBlock(SyntacticClassDef(_, tpnme.ANON_CLASS_NAME, Nil, _, ListOfNil, earlyDefs, parents, selfType, body) ::
Apply(Select(New(Ident(tpnme.ANON_CLASS_NAME)), nme.CONSTRUCTOR), Nil) :: Nil) =>
- Some((earlyDefs, parents, selfdef, body))
+ Some((earlyDefs, parents, selfType, body))
case _ =>
None
}
diff --git a/src/reflect/scala/reflect/internal/Constants.scala b/src/reflect/scala/reflect/internal/Constants.scala
index 511b39b8c6..85d0efdcba 100644
--- a/src/reflect/scala/reflect/internal/Constants.scala
+++ b/src/reflect/scala/reflect/internal/Constants.scala
@@ -223,7 +223,15 @@ trait Constants extends api.Constants {
case ClazzTag =>
def show(tpe: Type) = "classOf[" + signature(tpe) + "]"
typeValue match {
- case ErasedValueType(orig) => show(orig)
+ case ErasedValueType(clazz, underlying) =>
+ // A note on tpe_* usage here:
+ //
+ // We've intentionally erased the type arguments to the value class so that different
+ // instantiations of a particular value class that erase to the same underlying type
+ // don't result in spurious bridges (e.g. run/t6385.scala). I don't think that matters;
+ // printing trees of `classOf[ValueClass[String]]` shows `classOf[ValueClass]` at phase
+ // erasure both before and after the use of `tpe_*` here.
+ show(clazz.tpe_*)
case _ => show(typeValue)
}
case CharTag => "'" + escapedChar(charValue) + "'"
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 99aad4f057..ea680867da 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -153,6 +153,10 @@ trait Definitions extends api.StandardDefinitions {
DoubleClass
)
def ScalaPrimitiveValueClasses: List[ClassSymbol] = ScalaValueClasses
+
+ def underlyingOfValueClass(clazz: Symbol): Type =
+ clazz.derivedValueClassUnbox.tpe.resultType
+
}
abstract class DefinitionsClass extends DefinitionsApi with ValueClassDefinitions {
@@ -289,12 +293,6 @@ trait Definitions extends api.StandardDefinitions {
lazy val ConstantFalse = ConstantType(Constant(false))
lazy val ConstantNull = ConstantType(Constant(null))
- // Note: this is not the type alias AnyRef, it's a companion-like
- // object used by the @specialize annotation.
- lazy val AnyRefModule = getMemberModule(ScalaPackageClass, nme.AnyRef)
- @deprecated("Use AnyRefModule", "2.10.0")
- def Predef_AnyRef = AnyRefModule
-
lazy val AnyValClass: ClassSymbol = (ScalaPackageClass.info member tpnme.AnyVal orElse {
val anyval = enterNewClass(ScalaPackageClass, tpnme.AnyVal, AnyTpe :: Nil, ABSTRACT)
val av_constr = anyval.newClassConstructor(NoPosition)
@@ -1180,14 +1178,21 @@ trait Definitions extends api.StandardDefinitions {
}
lazy val AnnotationDefaultAttr: ClassSymbol = {
- val attr = enterNewClass(RuntimePackageClass, tpnme.AnnotationDefaultATTR, List(AnnotationClass.tpe))
- // This attribute needs a constructor so that modifiers in parsed Java code make sense
- attr.info.decls enter attr.newClassConstructor(NoPosition)
- attr
+ val sym = RuntimePackageClass.newClassSymbol(tpnme.AnnotationDefaultATTR, NoPosition, 0L)
+ sym setInfo ClassInfoType(List(AnnotationClass.tpe), newScope, sym)
+ RuntimePackageClass.info.decls.toList.filter(_.name == sym.name) match {
+ case existing :: _ =>
+ existing.asInstanceOf[ClassSymbol]
+ case _ =>
+ RuntimePackageClass.info.decls enter sym
+ // This attribute needs a constructor so that modifiers in parsed Java code make sense
+ sym.info.decls enter sym.newClassConstructor(NoPosition)
+ sym
+ }
}
- private def fatalMissingSymbol(owner: Symbol, name: Name, what: String = "member") = {
- throw new FatalError(owner + " does not have a " + what + " " + name)
+ private def fatalMissingSymbol(owner: Symbol, name: Name, what: String = "member", addendum: String = "") = {
+ throw new FatalError(owner + " does not have a " + what + " " + name + addendum)
}
def getLanguageFeature(name: String, owner: Symbol = languageFeatureModule): Symbol = getMember(owner, newTypeName(name))
@@ -1222,7 +1227,8 @@ trait Definitions extends api.StandardDefinitions {
def getMemberModule(owner: Symbol, name: Name): ModuleSymbol = {
getMember(owner, name.toTermName) match {
case x: ModuleSymbol => x
- case _ => fatalMissingSymbol(owner, name, "member object")
+ case NoSymbol => fatalMissingSymbol(owner, name, "member object")
+ case other => fatalMissingSymbol(owner, name, "member object", addendum = s". A symbol ${other} of kind ${other.accurateKindString} already exists.")
}
}
def getTypeMember(owner: Symbol, name: Name): TypeSymbol = {
@@ -1388,10 +1394,13 @@ trait Definitions extends api.StandardDefinitions {
else flatNameString(etp.typeSymbol, '.')
}
+ // documented in JavaUniverse.init
def init() {
if (isInitialized) return
- // force initialization of every symbol that is synthesized or hijacked by the compiler
- val _ = symbolsNotPresentInBytecode
+ ObjectClass.initialize
+ ScalaPackageClass.initialize
+ val forced1 = symbolsNotPresentInBytecode
+ val forced2 = NoSymbol
isInitialized = true
} //init
diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala
index 6ed9de8e20..e122fa498b 100644
--- a/src/reflect/scala/reflect/internal/Mirrors.scala
+++ b/src/reflect/scala/reflect/internal/Mirrors.scala
@@ -250,6 +250,19 @@ trait Mirrors extends api.Mirrors {
RootClass.info.decls enter EmptyPackage
RootClass.info.decls enter RootPackage
+ if (rootOwner != NoSymbol) {
+ // Synthetic core classes are only present in root mirrors,
+ // because Definitions.scala, which initializes and enters them, only affects rootMirror;
+ // therefore we need to enter them manually for non-root mirrors.
+ definitions.syntheticCoreClasses foreach (theirSym => {
+ val theirOwner = theirSym.owner
+ assert(theirOwner.isPackageClass, s"theirSym = $theirSym, theirOwner = $theirOwner")
+ val ourOwner = staticPackage(theirOwner.fullName).moduleClass
+ val ourSym = theirSym // just copy the symbol into our branch of the symbol table
+ ourOwner.info.decls enterIfNew ourSym
+ })
+ }
+
initialized = true
}
}
@@ -274,34 +287,45 @@ trait Mirrors extends api.Mirrors {
def mirror = thisMirror.asInstanceOf[Mirror]
}
- // This is the package _root_. The actual root cannot be referenced at
- // the source level, but _root_ is essentially a function => <root>.
- final object RootPackage extends ModuleSymbol(rootOwner, NoPosition, nme.ROOTPKG) with RootSymbol {
+ class RootPackage extends ModuleSymbol(rootOwner, NoPosition, nme.ROOTPKG) with RootSymbol {
this setInfo NullaryMethodType(RootClass.tpe)
RootClass.sourceModule = this
override def isRootPackage = true
}
+
+ // This is the package _root_. The actual root cannot be referenced at
+ // the source level, but _root_ is essentially a function => <root>.
+ lazy val RootPackage = new RootPackage
+
+ class RootClass extends PackageClassSymbol(rootOwner, NoPosition, tpnme.ROOT) with RootSymbol {
+ this setInfo rootLoader
+
+ override def isRoot = true
+ override def isEffectiveRoot = true
+ override def isNestedClass = false
+ }
+
// This is <root>, the actual root of everything except the package _root_.
// <root> and _root_ (RootPackage and RootClass) should be the only "well known"
// symbols owned by NoSymbol. All owner chains should go through RootClass,
// although it is probable that some symbols are created as direct children
// of NoSymbol to ensure they will not be stumbled upon. (We should designate
// a better encapsulated place for that.)
- final object RootClass extends PackageClassSymbol(rootOwner, NoPosition, tpnme.ROOT) with RootSymbol {
- this setInfo rootLoader
+ lazy val RootClass = new RootClass
- override def isRoot = true
- override def isEffectiveRoot = true
- override def isNestedClass = false
- }
- // The empty package, which holds all top level types without given packages.
- final object EmptyPackage extends ModuleSymbol(RootClass, NoPosition, nme.EMPTY_PACKAGE_NAME) with WellKnownSymbol {
+ class EmptyPackage extends ModuleSymbol(RootClass, NoPosition, nme.EMPTY_PACKAGE_NAME) with WellKnownSymbol {
override def isEmptyPackage = true
}
- final object EmptyPackageClass extends PackageClassSymbol(RootClass, NoPosition, tpnme.EMPTY_PACKAGE_NAME) with WellKnownSymbol {
+
+ // The empty package, which holds all top level types without given packages.
+ lazy val EmptyPackage = new EmptyPackage
+
+ class EmptyPackageClass extends PackageClassSymbol(RootClass, NoPosition, tpnme.EMPTY_PACKAGE_NAME) with WellKnownSymbol {
override def isEffectiveRoot = true
override def isEmptyPackageClass = true
}
+
+ lazy val EmptyPackageClass = new EmptyPackageClass
}
}
diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala
index 485d4d5ddd..b7a1681838 100644
--- a/src/reflect/scala/reflect/internal/Scopes.scala
+++ b/src/reflect/scala/reflect/internal/Scopes.scala
@@ -139,6 +139,12 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
enter(sym)
}
+ def enterIfNew[T <: Symbol](sym: T): T = {
+ val existing = lookupEntry(sym.name)
+ if (existing == null) enter(sym)
+ else existing.sym.asInstanceOf[T]
+ }
+
private def createHash() {
hashtable = new Array[ScopeEntry](HASHSIZE)
enterAllInHash(elems)
@@ -221,8 +227,8 @@ trait Scopes extends api.Scopes { self: SymbolTable =>
/** Lookup a module or a class, filtering out matching names in scope
* which do not match that requirement.
*/
- def lookupModule(name: Name): Symbol = lookupAll(name.toTermName) find (_.isModule) getOrElse NoSymbol
- def lookupClass(name: Name): Symbol = lookupAll(name.toTypeName) find (_.isClass) getOrElse NoSymbol
+ def lookupModule(name: Name): Symbol = findSymbol(lookupAll(name.toTermName))(_.isModule)
+ def lookupClass(name: Name): Symbol = findSymbol(lookupAll(name.toTypeName))(_.isClass)
/** True if the name exists in this scope, false otherwise. */
def containsName(name: Name) = lookupEntry(name) != null
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index af26253802..02f22a16f6 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -247,11 +247,12 @@ trait StdNames {
final val Quasiquote: NameType = "Quasiquote"
// quasiquote-specific names
- final val QUASIQUOTE_MODS: NameType = "$quasiquote$mods$"
- final val QUASIQUOTE_TUPLE: NameType = "$quasiquote$tuple$"
- final val QUASIQUOTE_FUNCTION: NameType = "$quasiquote$function$"
- final val QUASIQUOTE_REFINE_STAT: NameType = "$quasiquote$refine$stat$"
- final val QUASIQUOTE_EARLY_DEF: NameType = "$quasiquote$early$def$"
+ final val QUASIQUOTE_MODS: NameType = "$quasiquote$mods$"
+ final val QUASIQUOTE_TUPLE: NameType = "$quasiquote$tuple$"
+ final val QUASIQUOTE_FUNCTION: NameType = "$quasiquote$function$"
+ final val QUASIQUOTE_REFINE_STAT: NameType = "$quasiquote$refine$stat$"
+ final val QUASIQUOTE_EARLY_DEF: NameType = "$quasiquote$early$def$"
+ final val QUASIQUOTE_PACKAGE_STAT: NameType = "$quasiquote$package$stat$"
// Annotation simple names, used in Namer
final val BeanPropertyAnnot: NameType = "BeanProperty"
@@ -273,6 +274,8 @@ trait StdNames {
final val SourceFileATTR: NameType = "SourceFile"
final val SyntheticATTR: NameType = "Synthetic"
+ final val scala_ : NameType = "scala"
+
def dropSingletonName(name: Name): TypeName = (name dropRight SINGLETON_SUFFIX.length).toTypeName
def singletonName(name: Name): TypeName = (name append SINGLETON_SUFFIX).toTypeName
def implClassName(name: Name): TypeName = (name append IMPL_CLASS_SUFFIX).toTypeName
@@ -319,6 +322,7 @@ trait StdNames {
val REIFY_FREE_VALUE_SUFFIX: NameType = "$value"
val REIFY_SYMDEF_PREFIX: NameType = "symdef$"
val QUASIQUOTE_PREFIX: String = "qq$"
+ val QUASIQUOTE_NAME_PREFIX: String = "nn$"
val QUASIQUOTE_FILE: String = "<quasiquote>"
val QUASIQUOTE_TUPLE: NameType = "$quasiquote$tuple$"
val QUASIQUOTE_CASE: NameType = "$quasiquote$case$"
@@ -582,13 +586,15 @@ trait StdNames {
val Select: NameType = "Select"
val SelectFromTypeTree: NameType = "SelectFromTypeTree"
val SyntacticApplied: NameType = "SyntacticApplied"
+ val SyntacticApply: NameType = "SyntacticApply"
val SyntacticAssign: NameType = "SyntacticAssign"
val SyntacticBlock: NameType = "SyntacticBlock"
val SyntacticClassDef: NameType = "SyntacticClassDef"
val SyntacticDefDef: NameType = "SyntacticDefDef"
val SyntacticFunction: NameType = "SyntacticFunction"
- val SyntacticFunctionType: NameType= "SyntacticFunctionType"
- val SyntacticModuleDef: NameType = "SyntacticModuleDef"
+ val SyntacticFunctionType: NameType = "SyntacticFunctionType"
+ val SyntacticPackageObjectDef: NameType = "SyntacticPackageObjectDef"
+ val SyntacticObjectDef: NameType = "SyntacticObjectDef"
val SyntacticNew: NameType = "SyntacticNew"
val SyntacticTraitDef: NameType = "SyntacticTraitDef"
val SyntacticTuple: NameType = "SyntacticTuple"
@@ -603,6 +609,7 @@ trait StdNames {
val TypeRef: NameType = "TypeRef"
val TypeTree: NameType = "TypeTree"
val UNIT : NameType = "UNIT"
+ val accessor: NameType = "accessor"
val add_ : NameType = "add"
val annotation: NameType = "annotation"
val anyValClass: NameType = "anyValClass"
@@ -683,6 +690,7 @@ trait StdNames {
val mkAnnotation: NameType = "mkAnnotation"
val mkRefineStat: NameType = "mkRefineStat"
val mkEarlyDef: NameType = "mkEarlyDef"
+ val mkPackageStat: NameType = "mkPackageStat"
val ne: NameType = "ne"
val newArray: NameType = "newArray"
val newFreeTerm: NameType = "newFreeTerm"
diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala
index a6f9dfc164..8386d02b7c 100644
--- a/src/reflect/scala/reflect/internal/SymbolTable.scala
+++ b/src/reflect/scala/reflect/internal/SymbolTable.scala
@@ -41,6 +41,7 @@ abstract class SymbolTable extends macros.Universe
with StdCreators
with BuildUtils
with PrivateWithin
+ with pickling.Translations
{
val gen = new TreeGen { val global: SymbolTable.this.type = SymbolTable.this }
@@ -124,6 +125,10 @@ abstract class SymbolTable extends macros.Universe
result
}
+ @inline final def findSymbol(xs: TraversableOnce[Symbol])(p: Symbol => Boolean): Symbol = {
+ xs find p getOrElse NoSymbol
+ }
+
// For too long have we suffered in order to sort NAMES.
// I'm pretty sure there's a reasonable default for that.
// Notice challenge created by Ordering's invariance.
@@ -237,12 +242,20 @@ abstract class SymbolTable extends macros.Universe
finally popPhase(saved)
}
+ def slowButSafeEnteringPhase[T](ph: Phase)(op: => T): T = {
+ if (isCompilerUniverse) enteringPhase(ph)(op)
+ else op
+ }
+
@inline final def exitingPhase[T](ph: Phase)(op: => T): T = enteringPhase(ph.next)(op)
@inline final def enteringPrevPhase[T](op: => T): T = enteringPhase(phase.prev)(op)
@inline final def enteringPhaseNotLaterThan[T](target: Phase)(op: => T): T =
if (isAtPhaseAfter(target)) enteringPhase(target)(op) else op
+ def slowButSafeEnteringPhaseNotLaterThan[T](target: Phase)(op: => T): T =
+ if (isCompilerUniverse) enteringPhaseNotLaterThan(target)(op) else op
+
final def isValid(period: Period): Boolean =
period != 0 && runId(period) == currentRunId && {
val pid = phaseId(period)
@@ -370,6 +383,11 @@ abstract class SymbolTable extends macros.Universe
* Adds the `sm` String interpolator to a [[scala.StringContext]].
*/
implicit val StringContextStripMarginOps: StringContext => StringContextStripMarginOps = util.StringContextStripMarginOps
+
+ // fresh name creation
+ def currentFreshNameCreator: FreshNameCreator
+ def freshTermName(prefix: String = "x$")(implicit creator: FreshNameCreator): TermName = newTermName(creator.newName(prefix))
+ def freshTypeName(prefix: String)(implicit creator: FreshNameCreator): TypeName = newTypeName(creator.newName(prefix))
}
object SymbolTableStats {
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index efdc8f7435..ba785c14bd 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -30,13 +30,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
//protected var lockedSyms = scala.collection.immutable.Set[Symbol]()
/** Used to keep track of the recursion depth on locked symbols */
- private var recursionTable = immutable.Map.empty[Symbol, Int]
+ private var _recursionTable = immutable.Map.empty[Symbol, Int]
+ def recursionTable = _recursionTable
+ def recursionTable_=(value: immutable.Map[Symbol, Int]) = _recursionTable = value
- private var nextexid = 0
- protected def freshExistentialName(suffix: String) = {
- nextexid += 1
- newTypeName("_" + nextexid + suffix)
- }
+ private var existentialIds = 0
+ protected def nextExistentialId() = { existentialIds += 1; existentialIds }
+ protected def freshExistentialName(suffix: String) = newTypeName("_" + nextExistentialId() + suffix)
// Set the fields which point companions at one another. Returns the module.
def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol = {
@@ -110,10 +110,14 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
children
}
+ def selfType = {
+ if (!isCompilerUniverse && needsInitialize(isFlagRelated = false, mask = 0)) initialize
+ typeOfThis
+ }
+
def baseClasses = info.baseClasses
def module = sourceModule
def thisPrefix: Type = thisType
- def selfType: Type = typeOfThis
def typeSignature: Type = { fullyInitializeSymbol(this); info }
def typeSignatureIn(site: Type): Type = { fullyInitializeSymbol(this); site memberInfo this }
@@ -127,6 +131,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def setter: Symbol = setter(owner)
}
+ private[reflect] case class SymbolKind(accurate: String, sanitized: String, abbreviation: String)
+
/** The class for all symbols */
abstract class Symbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: Name)
extends SymbolContextApiImpl
@@ -800,7 +806,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*
* Stability and volatility are checked separately to allow volatile paths in patterns that amount to equality checks. SI-6815
*/
- final def isStable = isTerm && !isMutable && !(hasFlag(BYNAMEPARAM)) && (!isMethod || hasStableFlag)
+ def isStable = isTerm && !isMutable && !(hasFlag(BYNAMEPARAM)) && (!isMethod || hasStableFlag)
final def hasVolatileType = tpe.isVolatile && !hasAnnotation(uncheckedStableClass)
/** Does this symbol denote the primary constructor of its enclosing class? */
@@ -949,6 +955,13 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
final def isInitialized: Boolean =
validTo != NoPeriod
+ /** Some completers call sym.setInfo when still in-flight and then proceed with initialization (e.g. see LazyPackageType).
+ * setInfo sets _validTo to the current period, which means that after a call to setInfo isInitialized will start returning true.
+ * Unfortunately, this doesn't mean that info becomes ready to be used, because subsequent initialization might change the info.
+ * Therefore we need this method to distinguish between initialized and really initialized symbol states.
+ */
+ final def isFullyInitialized: Boolean = _validTo != NoPeriod && (flags & LOCKED) == 0
+
/** Can this symbol be loaded by a reflective mirror?
*
* Scalac relies on `ScalaSignature' annotation to retain symbols across compilation runs.
@@ -1563,6 +1576,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* assumption: if a type starts out as monomorphic, it will not acquire
* type parameters later.
*/
+ // NOTE: overridden in SynchronizedSymbols with the code copy/pasted
+ // don't forget to modify the code over there if you modify this method
def unsafeTypeParams: List[Symbol] =
if (isMonomorphicType) Nil
else enteringPhase(unsafeTypeParamPhase)(rawInfo.typeParams)
@@ -1571,6 +1586,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
* assumption: if a type starts out as monomorphic, it will not acquire
* type parameters later.
*/
+ // NOTE: overridden in SynchronizedSymbols with the code copy/pasted
+ // don't forget to modify the code over there if you modify this method
def typeParams: List[Symbol] =
if (isMonomorphicType) Nil
else {
@@ -1859,6 +1876,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
*/
def thisSym: Symbol = this
+ def hasSelfType = thisSym.tpeHK != this.tpeHK
+
/** The type of `this` in a class, or else the type of the symbol itself. */
def typeOfThis = thisSym.tpe_*
@@ -2391,7 +2410,6 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
else if (isTerm && (!isParameter || isParamAccessor)) "val"
else ""
- private case class SymbolKind(accurate: String, sanitized: String, abbreviation: String)
private def symbolKind: SymbolKind = {
var kind =
if (isTermMacro) ("term macro", "macro method", "MACM")
@@ -3122,8 +3140,8 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def thisType: Type = {
val period = thisTypePeriod
if (period != currentPeriod) {
- thisTypePeriod = currentPeriod
if (!isValid(period)) thisTypeCache = ThisType(this)
+ thisTypePeriod = currentPeriod
}
thisTypeCache
}
@@ -3211,9 +3229,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
override def typeOfThis = {
val period = typeOfThisPeriod
if (period != currentPeriod) {
- typeOfThisPeriod = currentPeriod
if (!isValid(period))
typeOfThisCache = singleType(owner.thisType, sourceModule)
+ typeOfThisPeriod = currentPeriod
}
typeOfThisCache
}
@@ -3224,9 +3242,9 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
// Skip a package object class, because the members are also in
// the package and we wish to avoid spurious ambiguities as in pos/t3999.
if (!isPackageObjectClass) {
+ implicitMembersCacheValue = tp.implicitMembers
implicitMembersCacheKey1 = tp
implicitMembersCacheKey2 = tp.decls.elems
- implicitMembersCacheValue = tp.implicitMembers
}
}
implicitMembersCacheValue
@@ -3334,10 +3352,11 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
def name = nme.NO_NAME
override def name_=(n: Name) = abort("Cannot set NoSymbol's name to " + n)
- synchronized {
- setInfo(NoType)
- privateWithin = this
- }
+ // Syncnote: no need to synchronize this, because NoSymbol's initialization is triggered by JavaUniverse.init
+ // which is called in the universe's constructor, something that's inherently single-threaded
+ setInfo(NoType)
+ privateWithin = this
+
override def info_=(info: Type) = {
infos = TypeHistory(1, NoType, null)
unlock()
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index 720d8bfe4a..cf7c729a6a 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -448,4 +448,10 @@ abstract class TreeGen extends macros.TreeBuilder {
case _ =>
Assign(lhs, rhs)
}
+
+ def mkPackageObject(defn: ModuleDef, pidPos: Position = NoPosition, pkgPos: Position = NoPosition) = {
+ val module = copyModuleDef(defn)(name = nme.PACKAGEkw)
+ val pid = atPos(pidPos)(Ident(defn.name))
+ atPos(pkgPos)(PackageDef(pid, module :: Nil))
+ }
}
diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala
index 54a1913b96..743c674eea 100644
--- a/src/reflect/scala/reflect/internal/Trees.scala
+++ b/src/reflect/scala/reflect/internal/Trees.scala
@@ -8,10 +8,12 @@ package reflect
package internal
import Flags._
-import scala.collection.mutable.{ListBuffer, LinkedHashSet}
+import pickling.PickleFormat._
+import scala.collection.{ mutable, immutable }
import util.Statistics
-trait Trees extends api.Trees { self: SymbolTable =>
+trait Trees extends api.Trees {
+ self: SymbolTable =>
private[scala] var nodeCount = 0
@@ -160,7 +162,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
override def freeTypes: List[FreeTypeSymbol] = freeSyms[FreeTypeSymbol](_.isFreeType, _.typeSymbol)
private def freeSyms[S <: Symbol](isFree: Symbol => Boolean, symOfType: Type => Symbol): List[S] = {
- val s = scala.collection.mutable.LinkedHashSet[S]()
+ val s = mutable.LinkedHashSet[S]()
def addIfFree(sym: Symbol): Unit = if (sym != null && isFree(sym)) s += sym.asInstanceOf[S]
for (t <- this) {
addIfFree(t.symbol)
@@ -999,7 +1001,8 @@ trait Trees extends api.Trees { self: SymbolTable =>
// ---- values and creators ---------------------------------------
/** @param sym the class symbol
- * @return the implementation template
+ * @param impl the implementation template
+ * @return the class definition
*/
def ClassDef(sym: Symbol, impl: Template): ClassDef =
atPos(sym.pos) {
@@ -1009,6 +1012,25 @@ trait Trees extends api.Trees { self: SymbolTable =>
impl) setSymbol sym
}
+ /** @param sym the class symbol
+ * @param body trees that constitute the body of the class
+ * @return the class definition
+ */
+ def ClassDef(sym: Symbol, body: List[Tree]): ClassDef =
+ ClassDef(sym, Template(sym, body))
+
+ /** @param sym the template's symbol
+ * @param body trees that constitute the body of the template
+ * @return the template
+ */
+ def Template(sym: Symbol, body: List[Tree]): Template = {
+ atPos(sym.pos) {
+ Template(sym.info.parents map TypeTree,
+ if (sym.thisSym == sym) noSelfType else ValDef(sym),
+ body)
+ }
+ }
+
/**
* @param sym the class symbol
* @param impl the implementation template
@@ -1019,18 +1041,22 @@ trait Trees extends api.Trees { self: SymbolTable =>
}
trait CannotHaveAttrs extends Tree {
- override def canHaveAttrs = false
-
- private def requireLegal(value: Any, allowed: Any, what: String) =
- require(value == allowed, s"can't set $what for $self to value other than $allowed")
-
super.setPos(NoPosition)
+ super.setType(NoType)
+
+ override def canHaveAttrs = false
override def setPos(pos: Position) = { requireLegal(pos, NoPosition, "pos"); this }
override def pos_=(pos: Position) = setPos(pos)
-
- super.setType(NoType)
override def setType(t: Type) = { requireLegal(t, NoType, "tpe"); this }
override def tpe_=(t: Type) = setType(t)
+
+ private def requireLegal(value: Any, allowed: Any, what: String) = (
+ if (value != allowed) {
+ log(s"can't set $what for $self to value other than $allowed")
+ if (settings.debug && settings.developer)
+ (new Throwable).printStackTrace
+ }
+ )
}
case object EmptyTree extends TermTree with CannotHaveAttrs { override def isEmpty = true; val asList = List(this) }
@@ -1159,113 +1185,136 @@ trait Trees extends api.Trees { self: SymbolTable =>
override protected def itraverse(traverser: Traverser, tree: Tree): Unit = {
import traverser._
- tree match {
- case EmptyTree =>
- ;
- case PackageDef(pid, stats) =>
- traverse(pid)
- atOwner(mclass(tree.symbol)) {
- traverseTrees(stats)
- }
- case ClassDef(mods, name, tparams, impl) =>
- atOwner(tree.symbol) {
- traverseTrees(mods.annotations); traverseTrees(tparams); traverse(impl)
- }
- case ModuleDef(mods, name, impl) =>
- atOwner(mclass(tree.symbol)) {
- traverseTrees(mods.annotations); traverse(impl)
- }
- case ValDef(mods, name, tpt, rhs) =>
- atOwner(tree.symbol) {
- traverseTrees(mods.annotations); traverse(tpt); traverse(rhs)
- }
- case DefDef(mods, name, tparams, vparamss, tpt, rhs) =>
- atOwner(tree.symbol) {
- traverseTrees(mods.annotations); traverseTrees(tparams); traverseTreess(vparamss); traverse(tpt); traverse(rhs)
- }
- case TypeDef(mods, name, tparams, rhs) =>
- atOwner(tree.symbol) {
- traverseTrees(mods.annotations); traverseTrees(tparams); traverse(rhs)
- }
+
+ def traverseMemberDef(md: MemberDef, owner: Symbol): Unit = atOwner(owner) {
+ traverseModifiers(md.mods)
+ traverseName(md.name)
+ md match {
+ case ClassDef(_, _, tparams, impl) => traverseParams(tparams) ; traverse(impl)
+ case ModuleDef(_, _, impl) => traverse(impl)
+ case ValDef(_, _, tpt, rhs) => traverseTypeAscription(tpt) ; traverse(rhs)
+ case TypeDef(_, _, tparams, rhs) => traverseParams(tparams) ; traverse(rhs)
+ case DefDef(_, _, tparams, vparamss, tpt, rhs) =>
+ traverseParams(tparams)
+ traverseParamss(vparamss)
+ traverseTypeAscription(tpt)
+ traverse(rhs)
+ }
+ }
+ def traverseComponents(): Unit = tree match {
case LabelDef(name, params, rhs) =>
- traverseTrees(params); traverse(rhs)
+ traverseName(name)
+ traverseParams(params)
+ traverse(rhs)
case Import(expr, selectors) =>
traverse(expr)
+ selectors foreach traverseImportSelector
case Annotated(annot, arg) =>
- traverse(annot); traverse(arg)
+ traverse(annot)
+ traverse(arg)
case Template(parents, self, body) =>
- traverseTrees(parents)
- if (self ne noSelfType) traverse(self)
+ traverseParents(parents)
+ traverseSelfType(self)
traverseStats(body, tree.symbol)
case Block(stats, expr) =>
- traverseTrees(stats); traverse(expr)
+ traverseTrees(stats)
+ traverse(expr)
case CaseDef(pat, guard, body) =>
- traverse(pat); traverse(guard); traverse(body)
+ traversePattern(pat)
+ traverseGuard(guard)
+ traverse(body)
case Alternative(trees) =>
traverseTrees(trees)
case Star(elem) =>
traverse(elem)
case Bind(name, body) =>
+ traverseName(name)
traverse(body)
case UnApply(fun, args) =>
- traverse(fun); traverseTrees(args)
+ traverse(fun)
+ traverseTrees(args)
case ArrayValue(elemtpt, trees) =>
- traverse(elemtpt); traverseTrees(trees)
- case Function(vparams, body) =>
- atOwner(tree.symbol) {
- traverseTrees(vparams); traverse(body)
- }
+ traverse(elemtpt)
+ traverseTrees(trees)
case Assign(lhs, rhs) =>
- traverse(lhs); traverse(rhs)
+ traverse(lhs)
+ traverse(rhs)
case AssignOrNamedArg(lhs, rhs) =>
- traverse(lhs); traverse(rhs)
+ traverse(lhs)
+ traverse(rhs)
case If(cond, thenp, elsep) =>
- traverse(cond); traverse(thenp); traverse(elsep)
+ traverse(cond)
+ traverse(thenp)
+ traverse(elsep)
case Match(selector, cases) =>
- traverse(selector); traverseTrees(cases)
+ traverse(selector)
+ traverseCases(cases)
case Return(expr) =>
traverse(expr)
case Try(block, catches, finalizer) =>
- traverse(block); traverseTrees(catches); traverse(finalizer)
+ traverse(block)
+ traverseCases(catches)
+ traverse(finalizer)
case Throw(expr) =>
traverse(expr)
case New(tpt) =>
traverse(tpt)
case Typed(expr, tpt) =>
- traverse(expr); traverse(tpt)
+ traverse(expr)
+ traverseTypeAscription(tpt)
case TypeApply(fun, args) =>
- traverse(fun); traverseTrees(args)
+ traverse(fun)
+ traverseTypeArgs(args)
case Apply(fun, args) =>
- traverse(fun); traverseTrees(args)
+ traverse(fun)
+ traverseTrees(args)
case ApplyDynamic(qual, args) =>
- traverse(qual); traverseTrees(args)
- case Super(qual, _) =>
traverse(qual)
- case This(_) =>
- ;
+ traverseTrees(args)
+ case Super(qual, mix) =>
+ traverse(qual)
+ traverseName(mix)
+ case This(qual) =>
+ traverseName(qual)
case Select(qualifier, selector) =>
traverse(qualifier)
- case Ident(_) =>
- ;
+ traverseName(selector)
+ case Ident(name) =>
+ traverseName(name)
case ReferenceToBoxed(idt) =>
traverse(idt)
- case Literal(_) =>
- ;
+ case Literal(const) =>
+ traverseConstant(const)
case TypeTree() =>
;
case SingletonTypeTree(ref) =>
traverse(ref)
case SelectFromTypeTree(qualifier, selector) =>
traverse(qualifier)
+ traverseName(selector)
case CompoundTypeTree(templ) =>
traverse(templ)
case AppliedTypeTree(tpt, args) =>
- traverse(tpt); traverseTrees(args)
+ traverse(tpt)
+ traverseTypeArgs(args)
case TypeBoundsTree(lo, hi) =>
- traverse(lo); traverse(hi)
+ traverse(lo)
+ traverse(hi)
case ExistentialTypeTree(tpt, whereClauses) =>
- traverse(tpt); traverseTrees(whereClauses)
- case _ => xtraverse(traverser, tree)
+ traverse(tpt)
+ traverseTrees(whereClauses)
+ case _ =>
+ xtraverse(traverser, tree)
+ }
+
+ if (tree.canHaveAttrs) {
+ tree match {
+ case PackageDef(pid, stats) => traverse(pid) ; traverseStats(stats, mclass(tree.symbol))
+ case md: ModuleDef => traverseMemberDef(md, mclass(tree.symbol))
+ case md: MemberDef => traverseMemberDef(md, tree.symbol)
+ case Function(vparams, body) => atOwner(tree.symbol) { traverseParams(vparams) ; traverse(body) }
+ case _ => traverseComponents()
+ }
}
}
@@ -1563,7 +1612,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
}
class FilterTreeTraverser(p: Tree => Boolean) extends Traverser {
- val hits = new ListBuffer[Tree]
+ val hits = mutable.ListBuffer[Tree]()
override def traverse(t: Tree) {
if (p(t)) hits += t
super.traverse(t)
@@ -1571,7 +1620,7 @@ trait Trees extends api.Trees { self: SymbolTable =>
}
class CollectTreeTraverser[T](pf: PartialFunction[Tree, T]) extends Traverser {
- val results = new ListBuffer[T]
+ val results = mutable.ListBuffer[T]()
override def traverse(t: Tree) {
if (pf.isDefinedAt(t)) results += pf(t)
super.traverse(t)
diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala
index 8f19e89dd4..e483fa6ba8 100644
--- a/src/reflect/scala/reflect/internal/Types.scala
+++ b/src/reflect/scala/reflect/internal/Types.scala
@@ -115,14 +115,17 @@ trait Types
/** The current skolemization level, needed for the algorithms
* in isSameType, isSubType that do constraint solving under a prefix.
*/
- var skolemizationLevel = 0
+ private var _skolemizationLevel = 0
+ def skolemizationLevel = _skolemizationLevel
+ def skolemizationLevel_=(value: Int) = _skolemizationLevel = value
/** A map from lists to compound types that have the given list as parents.
* This is used to avoid duplication in the computation of base type sequences and baseClasses.
* It makes use of the fact that these two operations depend only on the parents,
* not on the refinement.
*/
- val intersectionWitness = perRunCaches.newWeakMap[List[Type], WeakReference[Type]]()
+ private val _intersectionWitness = perRunCaches.newWeakMap[List[Type], WeakReference[Type]]()
+ def intersectionWitness = _intersectionWitness
/** A proxy for a type (identified by field `underlying`) that forwards most
* operations to it (for exceptions, see WrappingProxy, which forwards even more operations).
@@ -1483,6 +1486,14 @@ trait Types
}
}
+ object CompoundType {
+ def unapply(tp: Type): Option[(List[Type], Scope, Symbol)] = tp match {
+ case ClassInfoType(parents, decls, clazz) => Some((parents, decls, clazz))
+ case RefinedType(parents, decls) => Some((parents, decls, tp.typeSymbol))
+ case _ => None
+ }
+ }
+
/** A common base class for intersection types and class types
*/
abstract class CompoundType extends Type {
@@ -1966,12 +1977,12 @@ trait Types
def apply(value: Constant) = unique(new UniqueConstantType(value))
}
- /* Syncnote: The `volatile` var and `pendingVolatiles` mutable set need not be protected
- * with synchronized, because they are accessed only from isVolatile, which is called only from
- * Typer.
- */
- private var volatileRecursions: Int = 0
- private val pendingVolatiles = new mutable.HashSet[Symbol]
+ private var _volatileRecursions: Int = 0
+ def volatileRecursions = _volatileRecursions
+ def volatileRecursions_=(value: Int) = _volatileRecursions = value
+
+ private val _pendingVolatiles = new mutable.HashSet[Symbol]
+ def pendingVolatiles = _pendingVolatiles
class ArgsTypeRef(pre0: Type, sym0: Symbol, args0: List[Type]) extends TypeRef(pre0, sym0, args0) {
require(args0.nonEmpty, this)
@@ -3305,6 +3316,13 @@ trait Types
object AnnotatedType extends AnnotatedTypeExtractor
+ object StaticallyAnnotatedType {
+ def unapply(tp: Type): Option[(List[AnnotationInfo], Type)] = tp.staticAnnotations match {
+ case Nil => None
+ case annots => Some((annots, tp.withoutAnnotations))
+ }
+ }
+
/** A class representing types with a name. When an application uses
* named arguments, the named argument types for calling isApplicable
* are represented as NamedType.
@@ -3323,18 +3341,25 @@ trait Types
/** A temporary type representing the erasure of a user-defined value type.
* Created during phase erasure, eliminated again in posterasure.
*
- * @param original The underlying type before erasure
+ * SI-6385 Erasure's creation of bridges considers method signatures `exitingErasure`,
+ * which contain `ErasedValueType`-s. In order to correctly consider the overriding
+ * and overridden signatures as equivalent in `run/t6385.scala`, it is critical that
+ * this type contains the erasure of the wrapped type, rather than the unerased type
+ * of the value class itself, as was originally done.
+ *
+ * @param valueClazz The value class symbol
+ * @param erasedUnderlying The erased type of the unboxed value
*/
- abstract case class ErasedValueType(original: TypeRef) extends UniqueType {
- override def safeToString = "ErasedValueType("+original+")"
+ abstract case class ErasedValueType(valueClazz: Symbol, erasedUnderlying: Type) extends UniqueType {
+ override def safeToString = s"ErasedValueType($valueClazz, $erasedUnderlying)"
}
- final class UniqueErasedValueType(original: TypeRef) extends ErasedValueType(original)
+ final class UniqueErasedValueType(valueClazz: Symbol, erasedUnderlying: Type) extends ErasedValueType(valueClazz, erasedUnderlying)
object ErasedValueType {
- def apply(original: TypeRef): Type = {
- assert(original.sym ne NoSymbol, "ErasedValueType over NoSymbol")
- unique(new UniqueErasedValueType(original))
+ def apply(valueClazz: Symbol, erasedUnderlying: Type): Type = {
+ assert(valueClazz ne NoSymbol, "ErasedValueType over NoSymbol")
+ unique(new UniqueErasedValueType(valueClazz, erasedUnderlying))
}
}
@@ -3367,7 +3392,11 @@ trait Types
/** Rebind symbol `sym` to an overriding member in type `pre`. */
private def rebind(pre: Type, sym: Symbol): Symbol = {
if (!sym.isOverridableMember || sym.owner == pre.typeSymbol) sym
- else pre.nonPrivateMember(sym.name).suchThat(sym => sym.isType || (sym.isStable && !sym.hasVolatileType)) orElse sym
+ else pre.nonPrivateMember(sym.name).suchThat(sym =>
+ // SI-7928 `isModuleNotMethod` is here to avoid crashing with overloaded module accessor and module symbols
+ // after refchecks eliminates a ModuleDef that implements an interface.
+ sym.isType || (!sym.isModuleNotMethod && sym.isStable && !sym.hasVolatileType)
+ ) orElse sym
}
/** Convert a `super` prefix to a this-type if `sym` is abstract or final. */
@@ -3932,9 +3961,12 @@ trait Types
*/
final def hasLength(xs: List[_], len: Int) = xs.lengthCompare(len) == 0
- private var basetypeRecursions: Int = 0
- private val pendingBaseTypes = new mutable.HashSet[Type]
+ private var _basetypeRecursions: Int = 0
+ def basetypeRecursions = _basetypeRecursions
+ def basetypeRecursions_=(value: Int) = _basetypeRecursions = value
+ private val _pendingBaseTypes = new mutable.HashSet[Type]
+ def pendingBaseTypes = _pendingBaseTypes
/** Does this type have a prefix that begins with a type variable,
* or is it a refinement type? For type prefixes that fulfil this condition,
@@ -4434,7 +4466,9 @@ trait Types
}
/** The current indentation string for traces */
- protected[internal] var indent: String = ""
+ private var _indent: String = ""
+ protected def indent = _indent
+ protected def indent_=(value: String) = _indent = value
/** Perform operation `p` on arguments `tp1`, `arg2` and print trace of computation. */
protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
@@ -4513,7 +4547,7 @@ trait Types
private[scala] val typeIsNonClassType = (tp: Type) => tp.typeSymbolDirect.isNonClassType
private[scala] val typeIsExistentiallyBound = (tp: Type) => tp.typeSymbol.isExistentiallyBound
private[scala] val typeIsErroneous = (tp: Type) => tp.isErroneous
- private[scala] val typeIsError = (tp: Type) => tp.isError
+ private[scala] val symTypeIsError = (sym: Symbol) => sym.tpe.isError
private[scala] val typeHasAnnotations = (tp: Type) => tp.annotations.nonEmpty
private[scala] val boundsContainType = (bounds: TypeBounds, tp: Type) => bounds containsType tp
private[scala] val typeListIsEmpty = (ts: List[Type]) => ts.isEmpty
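
The hunks above repeatedly trade a bare `var`/`val` for a private backing field plus explicit accessor methods (skolemizationLevel, intersectionWitness, volatileRecursions, pendingVolatiles, basetypeRecursions, indent), and the same pattern recurs in GlbLubs, TypeComparers, TypeConstraints and TypeToStrings further down. A minimal standalone sketch of the idiom, on the assumption that the point is to let a thread-aware subclass intercept the state (illustrative names, not the compiler's):

    trait RecursionState {
      // private backing field; clients and subclasses only ever see the accessors
      private var _recursions: Int = 0
      def recursions: Int = _recursions
      def recursions_=(value: Int): Unit = _recursions = value
    }

    // A subclass can now reroute the counter, e.g. through a ThreadLocal,
    // without touching any call site that reads or writes `recursions`.
    trait ThreadLocalRecursionState extends RecursionState {
      private val tl = new ThreadLocal[Int] { override def initialValue = 0 }
      override def recursions: Int = tl.get
      override def recursions_=(value: Int): Unit = tl.set(value)
    }
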
diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala
index bf00a7ac87..5280467055 100644
--- a/src/reflect/scala/reflect/internal/Variances.scala
+++ b/src/reflect/scala/reflect/internal/Variances.scala
@@ -142,7 +142,8 @@ trait Variances {
// No variance check for object-private/protected methods/values.
// Or constructors, or case class factory or extractor.
def skip = (
- sym.hasLocalFlag
+ sym == NoSymbol
+ || sym.hasLocalFlag
|| sym.owner.isConstructor
|| sym.owner.isCaseApplyOrUnapply
)
diff --git a/src/reflect/scala/reflect/internal/pickling/Translations.scala b/src/reflect/scala/reflect/internal/pickling/Translations.scala
new file mode 100644
index 0000000000..e56cf796cb
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/pickling/Translations.scala
@@ -0,0 +1,128 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Paul Phillips
+ */
+
+package scala
+package reflect
+package internal
+package pickling
+
+import PickleFormat._
+import util.shortClassOfInstance
+
+trait Translations {
+ self: SymbolTable =>
+
+ def isTreeSymbolPickled(code: Int): Boolean = (code: @annotation.switch) match {
+ case PACKAGEtree | CLASStree | MODULEtree | VALDEFtree | DEFDEFtree | TYPEDEFtree | LABELtree => true
+ case IMPORTtree | TEMPLATEtree | BINDtree | FUNCTIONtree | RETURNtree => true
+ case APPLYDYNAMICtree | SUPERtree | THIStree | SELECTtree | IDENTtree => true
+ case _ => false
+ }
+ /** This method should be equivalent to tree.hasSymbolField, but that method
+ * doesn't do us any good when we're unpickling because we need to know based
+ * on the Int tag - the tree doesn't exist yet. Thus, this method is documentation only.
+ */
+ def isTreeSymbolPickled(tree: Tree): Boolean = isTreeSymbolPickled(picklerSubTag(tree))
+
+ // The ad hoc pattern matching of tuples out of AnyRefs is a
+ // truly terrible idea. It reaches the height of its powers in
+ // combination with scala's insistence on helpfully tupling
+ // multiple arguments passed to a single-arg AnyRef.
+ def picklerTag(ref: AnyRef): Int = ref match {
+ case tp: Type => picklerTag(tp)
+ case sym: Symbol => picklerTag(sym)
+ case const: Constant => LITERAL + const.tag
+ case _: Tree => TREE // its sub tag more precisely identifies it
+ case _: TermName => TERMname
+ case _: TypeName => TYPEname
+ case _: ArrayAnnotArg => ANNOTARGARRAY // an array of annotation arguments
+ case _: AnnotationInfo => ANNOTINFO // annotations on types (not linked to a symbol)
+ case (_: Symbol, _: AnnotationInfo) => SYMANNOT // symbol annotations, i.e. on terms
+ case (_: Symbol, _: List[_]) => CHILDREN // the direct subclasses of a sealed symbol
+ case _: Modifiers => MODIFIERS
+ case _ => sys.error(s"unpicklable entry ${shortClassOfInstance(ref)} $ref")
+ }
+
+ /** Local symbols only. The assessment of locality depends
+ * on convoluted conditions which depend in part on the root
+ * symbol being pickled, so it cannot be reproduced here.
+ * The pickler tags at stake are EXTMODCLASSref and EXTref.
+ * Those tags are never produced here - such symbols must be
+ * excluded prior to calling this method.
+ */
+ def picklerTag(sym: Symbol): Int = sym match {
+ case NoSymbol => NONEsym
+ case _: ClassSymbol => CLASSsym
+ case _: TypeSymbol if sym.isAbstractType => TYPEsym
+ case _: TypeSymbol => ALIASsym
+ case _: TermSymbol if sym.isModule => MODULEsym
+ case _: TermSymbol => VALsym
+ }
+
+ def picklerTag(tpe: Type): Int = tpe match {
+ case NoType => NOtpe
+ case NoPrefix => NOPREFIXtpe
+ case _: ThisType => THIStpe
+ case _: SingleType => SINGLEtpe
+ case _: SuperType => SUPERtpe
+ case _: ConstantType => CONSTANTtpe
+ case _: TypeBounds => TYPEBOUNDStpe
+ case _: TypeRef => TYPEREFtpe
+ case _: RefinedType => REFINEDtpe
+ case _: ClassInfoType => CLASSINFOtpe
+ case _: MethodType => METHODtpe
+ case _: PolyType => POLYtpe
+ case _: NullaryMethodType => POLYtpe // bad juju, distinct ints are not at a premium!
+ case _: ExistentialType => EXISTENTIALtpe
+ case _: AnnotatedType => ANNOTATEDtpe
+ }
+
+ def picklerSubTag(tree: Tree): Int = tree match {
+ case EmptyTree => EMPTYtree
+ case _: PackageDef => PACKAGEtree
+ case _: ClassDef => CLASStree
+ case _: ModuleDef => MODULEtree
+ case _: ValDef => VALDEFtree
+ case _: DefDef => DEFDEFtree
+ case _: TypeDef => TYPEDEFtree
+ case _: LabelDef => LABELtree
+ case _: Import => IMPORTtree
+ // case _: DocDef => DOCDEFtree
+ case _: Template => TEMPLATEtree
+ case _: Block => BLOCKtree
+ case _: CaseDef => CASEtree
+ case _: Alternative => ALTERNATIVEtree
+ case _: Star => STARtree
+ case _: Bind => BINDtree
+ case _: UnApply => UNAPPLYtree
+ case _: ArrayValue => ARRAYVALUEtree
+ case _: Function => FUNCTIONtree
+ case _: Assign => ASSIGNtree
+ case _: If => IFtree
+ case _: Match => MATCHtree
+ case _: Return => RETURNtree
+ case _: Try => TREtree // TREtree?
+ case _: Throw => THROWtree
+ case _: New => NEWtree
+ case _: Typed => TYPEDtree
+ case _: TypeApply => TYPEAPPLYtree
+ case _: Apply => APPLYtree
+ case _: ApplyDynamic => APPLYDYNAMICtree
+ case _: Super => SUPERtree
+ case _: This => THIStree
+ case _: Select => SELECTtree
+ case _: Ident => IDENTtree
+ case _: Literal => LITERALtree
+ case _: TypeTree => TYPEtree
+ case _: Annotated => ANNOTATEDtree
+ case _: SingletonTypeTree => SINGLETONTYPEtree
+ case _: SelectFromTypeTree => SELECTFROMTYPEtree
+ case _: CompoundTypeTree => COMPOUNDTYPEtree
+ case _: AppliedTypeTree => APPLIEDTYPEtree
+ case _: TypeBoundsTree => TYPEBOUNDStree
+ case _: ExistentialTypeTree => EXISTENTIALTYPEtree
+ }
+}
+
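
The tag methods above are total mappings from reflection artifacts to PickleFormat constants, and the Int-based isTreeSymbolPickled lets the unpickler decide whether a symbol reference follows before the tree object exists. A standalone sketch of that shape, with made-up tags and a toy tree hierarchy (not the real PickleFormat):

    object MiniPickler {
      final val IDENTtag = 1; final val SELECTtag = 2; final val LITERALtag = 3

      sealed trait MiniTree
      case class MiniIdent(name: String)                  extends MiniTree
      case class MiniSelect(qual: MiniTree, name: String) extends MiniTree
      case class MiniLiteral(value: Any)                  extends MiniTree

      def subTag(tree: MiniTree): Int = tree match {
        case _: MiniIdent   => IDENTtag
        case _: MiniSelect  => SELECTtag
        case _: MiniLiteral => LITERALtag
      }

      // Mirrors isTreeSymbolPickled: answered from the Int tag alone, so a
      // reader can consult it before the corresponding tree is reconstructed.
      def carriesSymbol(tag: Int): Boolean = tag match {
        case IDENTtag | SELECTtag => true
        case _                    => false
      }
    }
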
diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
index f42dbf56e1..a6c34935ad 100644
--- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
+++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala
@@ -65,36 +65,38 @@ abstract class UnPickler {
/** A map from symbols to their associated `decls` scopes */
private val symScopes = mutable.HashMap[Symbol, Scope]()
+ private def expect(expected: Int, msg: => String) {
+ val tag = readByte()
+ if (tag != expected)
+ errorBadSignature(s"$msg ($tag)")
+ }
+
//println("unpickled " + classRoot + ":" + classRoot.rawInfo + ", " + moduleRoot + ":" + moduleRoot.rawInfo);//debug
+ @inline private def runAtIndex[T](i: Int)(body: => T): T = {
+ val saved = readIndex
+ readIndex = index(i)
+ try body finally readIndex = saved
+ }
+
// Laboriously unrolled for performance.
def run() {
var i = 0
while (i < index.length) {
- if (entries(i) == null && isSymbolEntry(i)) {
- val savedIndex = readIndex
- readIndex = index(i)
- entries(i) = readSymbol()
- readIndex = savedIndex
- }
+ if (entries(i) == null && isSymbolEntry(i))
+ runAtIndex(i)(entries(i) = readSymbol())
+
i += 1
}
+
// read children last, fix for #3951
i = 0
while (i < index.length) {
if (entries(i) == null) {
- if (isSymbolAnnotationEntry(i)) {
- val savedIndex = readIndex
- readIndex = index(i)
- readSymbolAnnotation()
- readIndex = savedIndex
- }
- else if (isChildrenEntry(i)) {
- val savedIndex = readIndex
- readIndex = index(i)
- readChildren()
- readIndex = savedIndex
- }
+ if (isSymbolAnnotationEntry(i))
+ runAtIndex(i)(readSymbolAnnotation())
+ else if (isChildrenEntry(i))
+ runAtIndex(i)(readChildren())
}
i += 1
}
@@ -145,6 +147,11 @@ abstract class UnPickler {
tag == CHILDREN
}
+ private def maybeReadSymbol(): Either[Int, Symbol] = readNat() match {
+ case index if isSymbolRef(index) => Right(at(index, readSymbol))
+ case index => Left(index)
+ }
+
/** Does entry represent a refinement symbol?
* pre: Entry is a class symbol
*/
@@ -256,14 +263,11 @@ abstract class UnPickler {
val name = at(nameref, readName)
val owner = readSymbolRef()
val flags = pickledToRawFlags(readLongNat())
- var inforef = readNat()
- val privateWithin =
- if (!isSymbolRef(inforef)) NoSymbol
- else {
- val pw = at(inforef, readSymbol)
- inforef = readNat()
- pw
- }
+
+ val (privateWithin, inforef) = maybeReadSymbol() match {
+ case Left(index) => NoSymbol -> index
+ case Right(sym) => sym -> readNat()
+ }
def isModuleFlag = (flags & MODULE) != 0L
def isClassRoot = (name == classRoot.name) && (owner == classRoot.owner)
@@ -305,7 +309,7 @@ abstract class UnPickler {
sym
case MODULEsym =>
- val clazz = at(inforef, () => readType()).typeSymbol // after the NMT_TRANSITION period, we can leave off the () => ... ()
+ val clazz = at(inforef, () => readType()).typeSymbol // after NMT_TRANSITION, we can leave off the () => ... ()
if (isModuleRoot) moduleRoot setFlag pflags
else owner.newLinkedModule(clazz, pflags)
case VALsym =>
@@ -317,84 +321,48 @@ abstract class UnPickler {
})
}
- /** Read a type
- *
- * @param forceProperType is used to ease the transition to NullaryMethodTypes (commentmarker: NMT_TRANSITION)
- * the flag say that a type of kind * is expected, so that PolyType(tps, restpe) can be disambiguated to PolyType(tps, NullaryMethodType(restpe))
- * (if restpe is not a ClassInfoType, a MethodType or a NullaryMethodType, which leaves TypeRef/SingletonType -- the latter would make the polytype a type constructor)
- */
protected def readType(forceProperType: Boolean = false): Type = {
val tag = readByte()
val end = readEnd()
+ @inline def all[T](body: => T): List[T] = until(end, () => body)
+
+ def readTypes() = all(readTypeRef)
+ def readSymbols() = all(readSymbolRef)
+ def readAnnots() = all(readAnnotationRef)
+
+ // if the method is overloaded, the params cannot be determined (see readSymbol) => return NoType.
+ // This only happens for trees; "case Apply" in readTree() takes care of selecting the correct
+ // alternative after parsing the arguments.
+ def MethodTypeRef(restpe: Type, params: List[Symbol]): Type = (
+ if (restpe == NoType || (params contains NoSymbol)) NoType
+ else MethodType(params, restpe)
+ )
+ def PolyOrNullaryType(restpe: Type, tparams: List[Symbol]): Type = tparams match {
+ case Nil => NullaryMethodType(restpe)
+ case _ => PolyType(tparams, restpe)
+ }
+ def CompoundType(clazz: Symbol, parents: List[Type]): Type = tag match {
+ case REFINEDtpe => RefinedType(parents, symScope(clazz), clazz)
+ case CLASSINFOtpe => ClassInfoType(parents, symScope(clazz), clazz)
+ }
+
+ // We're stuck with the order types are pickled in, but with judicious use
+ // of named parameters we can recapture a declarative flavor in a few cases.
+ // But it's still a rat's nest of adhockery.
(tag: @switch) match {
- case NOtpe =>
- NoType
- case NOPREFIXtpe =>
- NoPrefix
- case THIStpe =>
- ThisType(readSymbolRef())
- case SINGLEtpe =>
- SingleType(readTypeRef(), readSymbolRef()) // !!! was singleType
- case SUPERtpe =>
- val thistpe = readTypeRef()
- val supertpe = readTypeRef()
- SuperType(thistpe, supertpe)
- case CONSTANTtpe =>
- ConstantType(readConstantRef())
- case TYPEREFtpe =>
- val pre = readTypeRef()
- val sym = readSymbolRef()
- val args = until(end, readTypeRef)
- TypeRef(pre, sym, args)
- case TYPEBOUNDStpe =>
- TypeBounds(readTypeRef(), readTypeRef())
- case REFINEDtpe =>
- val clazz = readSymbolRef()
- RefinedType(until(end, readTypeRef), symScope(clazz), clazz)
- case CLASSINFOtpe =>
- val clazz = readSymbolRef()
- ClassInfoType(until(end, readTypeRef), symScope(clazz), clazz)
- case METHODtpe | IMPLICITMETHODtpe =>
- val restpe = readTypeRef()
- val params = until(end, readSymbolRef)
- // if the method is overloaded, the params cannot be determined (see readSymbol) => return NoType.
- // Only happen for trees, "case Apply" in readTree() takes care of selecting the correct
- // alternative after parsing the arguments.
- if (params.contains(NoSymbol) || restpe == NoType) NoType
- else MethodType(params, restpe)
- case POLYtpe =>
- val restpe = readTypeRef()
- val typeParams = until(end, readSymbolRef)
- if (typeParams.nonEmpty) {
- // NMT_TRANSITION: old class files denoted a polymorphic nullary method as PolyType(tps, restpe), we now require PolyType(tps, NullaryMethodType(restpe))
- // when a type of kind * is expected (forceProperType is true), we know restpe should be wrapped in a NullaryMethodType (if it wasn't suitably wrapped yet)
- def transitionNMT(restpe: Type) = {
- val resTpeCls = restpe.getClass.toString // what's uglier than isInstanceOf? right! -- isInstanceOf does not work since the concrete types are defined in the compiler (not in scope here)
- if(forceProperType /*&& pickleformat < 2.9 */ && !(resTpeCls.endsWith("MethodType"))) { assert(!resTpeCls.contains("ClassInfoType"))
- NullaryMethodType(restpe) }
- else restpe
- }
- PolyType(typeParams, transitionNMT(restpe))
- }
- else
- NullaryMethodType(restpe)
- case EXISTENTIALtpe =>
- val restpe = readTypeRef()
- newExistentialType(until(end, readSymbolRef), restpe)
-
- case ANNOTATEDtpe =>
- var typeRef = readNat()
- val selfsym = if (isSymbolRef(typeRef)) {
- val s = at(typeRef, readSymbol)
- typeRef = readNat()
- s
- } else NoSymbol // selfsym can go.
- val tp = at(typeRef, () => readType(forceProperType)) // NMT_TRANSITION
- val annots = until(end, readAnnotationRef)
- if (selfsym == NoSymbol) AnnotatedType(annots, tp, selfsym)
- else tp
- case _ =>
- noSuchTypeTag(tag, end)
+ case NOtpe => NoType
+ case NOPREFIXtpe => NoPrefix
+ case THIStpe => ThisType(readSymbolRef())
+ case SINGLEtpe => SingleType(readTypeRef(), readSymbolRef())
+ case SUPERtpe => SuperType(readTypeRef(), readTypeRef())
+ case CONSTANTtpe => ConstantType(readConstantRef())
+ case TYPEREFtpe => TypeRef(readTypeRef(), readSymbolRef(), readTypes())
+ case TYPEBOUNDStpe => TypeBounds(readTypeRef(), readTypeRef())
+ case REFINEDtpe | CLASSINFOtpe => CompoundType(readSymbolRef(), readTypes())
+ case METHODtpe => MethodTypeRef(readTypeRef(), readSymbols())
+ case POLYtpe => PolyOrNullaryType(readTypeRef(), readSymbols())
+ case EXISTENTIALtpe => ExistentialType(underlying = readTypeRef(), quantified = readSymbols())
+ case ANNOTATEDtpe => AnnotatedType(underlying = readTypeRef(), annotations = readAnnots(), selfsym = NoSymbol)
}
}
@@ -483,9 +451,7 @@ abstract class UnPickler {
* the symbol it requests. Called at top-level, for all
* (symbol, annotInfo) entries. */
protected def readSymbolAnnotation() {
- val tag = readByte()
- if (tag != SYMANNOT)
- errorBadSignature("symbol annotation expected ("+ tag +")")
+ expect(SYMANNOT, "symbol annotation expected")
val end = readEnd()
val target = readSymbolRef()
target.addAnnotation(readAnnotationInfo(end))
@@ -501,260 +467,100 @@ abstract class UnPickler {
readAnnotationInfo(end)
}
- /* Read an abstract syntax tree */
- protected def readTree(): Tree = {
- val outerTag = readByte()
- if (outerTag != TREE)
- errorBadSignature("tree expected (" + outerTag + ")")
- val end = readEnd()
- val tag = readByte()
- val tpe = if (tag == EMPTYtree) NoType else readTypeRef()
-
- // Set by the three functions to follow. If symbol is non-null
- // after the new tree 't' has been created, t has its Symbol
- // set to symbol; and it always has its Type set to tpe.
- var symbol: Symbol = null
- var mods: Modifiers = null
- var name: Name = null
-
- /* Read a Symbol, Modifiers, and a Name */
- def setSymModsName() {
- symbol = readSymbolRef()
- mods = readModifiersRef()
- name = readNameRef()
+ private def readNonEmptyTree(tag: Int, end: Int): Tree = {
+ @inline def all[T](body: => T): List[T] = until(end, () => body)
+ @inline def rep[T](body: => T): List[T] = times(readNat(), () => body)
+
+ // !!! What is this doing here?
+ def fixApply(tree: Apply, tpe: Type): Apply = {
+ val Apply(fun, args) = tree
+ if (fun.symbol.isOverloaded) {
+ fun setType fun.symbol.info
+ inferMethodAlternative(fun, args map (_.tpe), tpe)
+ }
+ tree
}
- /* Read a Symbol and a Name */
- def setSymName() {
- symbol = readSymbolRef()
- name = readNameRef()
+ def ref() = readTreeRef()
+ def caseRef() = readCaseDefRef()
+ def modsRef() = readModifiersRef()
+ def implRef() = readTemplateRef()
+ def nameRef() = readNameRef()
+ def tparamRef() = readTypeDefRef()
+ def vparamRef() = readValDefRef()
+ def constRef() = readConstantRef()
+ def idRef() = readIdentRef()
+ def termNameRef() = readNameRef().toTermName
+ def typeNameRef() = readNameRef().toTypeName
+ def refTreeRef() = ref() match {
+ case t: RefTree => t
+ case t => errorBadSignature("RefTree expected, found " + t.shortClass)
}
- /* Read a Symbol */
- def setSym() {
- symbol = readSymbolRef()
+ def selectorsRef() = all(ImportSelector(nameRef(), -1, nameRef(), -1))
+
+ /** A few of the most popular trees have been pulled to the top for
+ * switch efficiency purposes.
+ */
+ def readTree(tpe: Type): Tree = (tag: @switch) match {
+ case IDENTtree => Ident(nameRef)
+ case SELECTtree => Select(ref, nameRef)
+ case APPLYtree => fixApply(Apply(ref, all(ref)), tpe) // !!!
+ case BINDtree => Bind(nameRef, ref)
+ case BLOCKtree => all(ref) match { case stats :+ expr => Block(stats, expr) }
+ case IFtree => If(ref, ref, ref)
+ case LITERALtree => Literal(constRef)
+ case TYPEAPPLYtree => TypeApply(ref, all(ref))
+ case TYPEDtree => Typed(ref, ref)
+ case ALTERNATIVEtree => Alternative(all(ref))
+ case ANNOTATEDtree => Annotated(ref, ref)
+ case APPLIEDTYPEtree => AppliedTypeTree(ref, all(ref))
+ case APPLYDYNAMICtree => ApplyDynamic(ref, all(ref))
+ case ARRAYVALUEtree => ArrayValue(ref, all(ref))
+ case ASSIGNtree => Assign(ref, ref)
+ case CASEtree => CaseDef(ref, ref, ref)
+ case CLASStree => ClassDef(modsRef, typeNameRef, rep(tparamRef), implRef)
+ case COMPOUNDTYPEtree => CompoundTypeTree(implRef)
+ case DEFDEFtree => DefDef(modsRef, termNameRef, rep(tparamRef), rep(rep(vparamRef)), ref, ref)
+ case EXISTENTIALTYPEtree => ExistentialTypeTree(ref, all(ref))
+ case FUNCTIONtree => Function(rep(vparamRef), ref)
+ case IMPORTtree => Import(ref, selectorsRef)
+ case LABELtree => LabelDef(termNameRef, rep(idRef), ref)
+ case MATCHtree => Match(ref, all(caseRef))
+ case MODULEtree => ModuleDef(modsRef, termNameRef, implRef)
+ case NEWtree => New(ref)
+ case PACKAGEtree => PackageDef(refTreeRef, all(ref))
+ case RETURNtree => Return(ref)
+ case SELECTFROMTYPEtree => SelectFromTypeTree(ref, typeNameRef)
+ case SINGLETONTYPEtree => SingletonTypeTree(ref)
+ case STARtree => Star(ref)
+ case SUPERtree => Super(ref, typeNameRef)
+ case TEMPLATEtree => Template(rep(ref), vparamRef, all(ref))
+ case THIStree => This(typeNameRef)
+ case THROWtree => Throw(ref)
+ case TREtree => Try(ref, rep(caseRef), ref)
+ case TYPEBOUNDStree => TypeBoundsTree(ref, ref)
+ case TYPEDEFtree => TypeDef(modsRef, typeNameRef, rep(tparamRef), ref)
+ case TYPEtree => TypeTree()
+ case UNAPPLYtree => UnApply(ref, all(ref))
+ case VALDEFtree => ValDef(modsRef, termNameRef, ref, ref)
+ case _ => noSuchTreeTag(tag, end)
}
- val t = tag match {
- case EMPTYtree =>
- EmptyTree
-
- case PACKAGEtree =>
- setSym()
- val pid = readTreeRef().asInstanceOf[RefTree]
- val stats = until(end, readTreeRef)
- PackageDef(pid, stats)
-
- case CLASStree =>
- setSymModsName()
- val impl = readTemplateRef()
- val tparams = until(end, readTypeDefRef)
- ClassDef(mods, name.toTypeName, tparams, impl)
-
- case MODULEtree =>
- setSymModsName()
- ModuleDef(mods, name.toTermName, readTemplateRef())
-
- case VALDEFtree =>
- setSymModsName()
- val tpt = readTreeRef()
- val rhs = readTreeRef()
- ValDef(mods, name.toTermName, tpt, rhs)
-
- case DEFDEFtree =>
- setSymModsName()
- val tparams = times(readNat(), readTypeDefRef)
- val vparamss = times(readNat(), () => times(readNat(), readValDefRef))
- val tpt = readTreeRef()
- val rhs = readTreeRef()
- DefDef(mods, name.toTermName, tparams, vparamss, tpt, rhs)
-
- case TYPEDEFtree =>
- setSymModsName()
- val rhs = readTreeRef()
- val tparams = until(end, readTypeDefRef)
- TypeDef(mods, name.toTypeName, tparams, rhs)
-
- case LABELtree =>
- setSymName()
- val rhs = readTreeRef()
- val params = until(end, readIdentRef)
- LabelDef(name.toTermName, params, rhs)
-
- case IMPORTtree =>
- setSym()
- val expr = readTreeRef()
- val selectors = until(end, () => {
- val from = readNameRef()
- val to = readNameRef()
- ImportSelector(from, -1, to, -1)
- })
-
- Import(expr, selectors)
-
- case TEMPLATEtree =>
- setSym()
- val parents = times(readNat(), readTreeRef)
- val self = readValDefRef()
- val body = until(end, readTreeRef)
-
- Template(parents, self, body)
-
- case BLOCKtree =>
- val expr = readTreeRef()
- val stats = until(end, readTreeRef)
- Block(stats, expr)
-
- case CASEtree =>
- val pat = readTreeRef()
- val guard = readTreeRef()
- val body = readTreeRef()
- CaseDef(pat, guard, body)
-
- case ALTERNATIVEtree =>
- Alternative(until(end, readTreeRef))
-
- case STARtree =>
- Star(readTreeRef())
-
- case BINDtree =>
- setSymName()
- Bind(name, readTreeRef())
-
- case UNAPPLYtree =>
- val fun = readTreeRef()
- val args = until(end, readTreeRef)
- UnApply(fun, args)
-
- case ARRAYVALUEtree =>
- val elemtpt = readTreeRef()
- val trees = until(end, readTreeRef)
- ArrayValue(elemtpt, trees)
-
- case FUNCTIONtree =>
- setSym()
- val body = readTreeRef()
- val vparams = until(end, readValDefRef)
- Function(vparams, body)
-
- case ASSIGNtree =>
- val lhs = readTreeRef()
- val rhs = readTreeRef()
- Assign(lhs, rhs)
-
- case IFtree =>
- val cond = readTreeRef()
- val thenp = readTreeRef()
- val elsep = readTreeRef()
- If(cond, thenp, elsep)
-
- case MATCHtree =>
- val selector = readTreeRef()
- val cases = until(end, readCaseDefRef)
- Match(selector, cases)
-
- case RETURNtree =>
- setSym()
- Return(readTreeRef())
-
- case TREtree =>
- val block = readTreeRef()
- val finalizer = readTreeRef()
- val catches = until(end, readCaseDefRef)
- Try(block, catches, finalizer)
-
- case THROWtree =>
- Throw(readTreeRef())
-
- case NEWtree =>
- New(readTreeRef())
-
- case TYPEDtree =>
- val expr = readTreeRef()
- val tpt = readTreeRef()
- Typed(expr, tpt)
-
- case TYPEAPPLYtree =>
- val fun = readTreeRef()
- val args = until(end, readTreeRef)
- TypeApply(fun, args)
-
- case APPLYtree =>
- val fun = readTreeRef()
- val args = until(end, readTreeRef)
- if (fun.symbol.isOverloaded) {
- fun.setType(fun.symbol.info)
- inferMethodAlternative(fun, args map (_.tpe), tpe)
- }
- Apply(fun, args)
-
- case APPLYDYNAMICtree =>
- setSym()
- val qual = readTreeRef()
- val args = until(end, readTreeRef)
- ApplyDynamic(qual, args)
-
- case SUPERtree =>
- setSym()
- val qual = readTreeRef()
- val mix = readTypeNameRef()
- Super(qual, mix)
-
- case THIStree =>
- setSym()
- This(readTypeNameRef())
-
- case SELECTtree =>
- setSym()
- val qualifier = readTreeRef()
- val selector = readNameRef()
- Select(qualifier, selector)
-
- case IDENTtree =>
- setSymName()
- Ident(name)
-
- case LITERALtree =>
- Literal(readConstantRef())
-
- case TYPEtree =>
- TypeTree()
-
- case ANNOTATEDtree =>
- val annot = readTreeRef()
- val arg = readTreeRef()
- Annotated(annot, arg)
-
- case SINGLETONTYPEtree =>
- SingletonTypeTree(readTreeRef())
-
- case SELECTFROMTYPEtree =>
- val qualifier = readTreeRef()
- val selector = readTypeNameRef()
- SelectFromTypeTree(qualifier, selector)
-
- case COMPOUNDTYPEtree =>
- CompoundTypeTree(readTemplateRef())
-
- case APPLIEDTYPEtree =>
- val tpt = readTreeRef()
- val args = until(end, readTreeRef)
- AppliedTypeTree(tpt, args)
-
- case TYPEBOUNDStree =>
- val lo = readTreeRef()
- val hi = readTreeRef()
- TypeBoundsTree(lo, hi)
-
- case EXISTENTIALTYPEtree =>
- val tpt = readTreeRef()
- val whereClauses = until(end, readTreeRef)
- ExistentialTypeTree(tpt, whereClauses)
+ val tpe = readTypeRef()
+ val sym = if (isTreeSymbolPickled(tag)) readSymbolRef() else null
+ val result = readTree(tpe)
- case _ =>
- noSuchTreeTag(tag, end)
- }
+ if (sym ne null) result setSymbol sym
+ result setType tpe
+ }
- if (symbol == null) t setType tpe
- else t setSymbol symbol setType tpe
+ /* Read an abstract syntax tree */
+ protected def readTree(): Tree = {
+ expect(TREE, "tree expected")
+ val end = readEnd()
+ readByte() match {
+ case EMPTYtree => EmptyTree
+ case tag => readNonEmptyTree(tag, end)
+ }
}
def noSuchTreeTag(tag: Int, end: Int) =
@@ -853,7 +659,7 @@ abstract class UnPickler {
override def complete(sym: Symbol) : Unit = try {
val tp = at(i, () => readType(sym.isTerm)) // after NMT_TRANSITION, revert `() => readType(sym.isTerm)` to `readType`
if (p ne null)
- enteringPhase(p) (sym setInfo tp)
+ slowButSafeEnteringPhase(p) (sym setInfo tp)
if (currentRunId != definedAtRunId)
sym.setInfo(adaptToNewRunMap(tp))
}
@@ -871,7 +677,7 @@ abstract class UnPickler {
super.complete(sym)
var alias = at(j, readSymbol)
if (alias.isOverloaded)
- alias = enteringPhase(picklerPhase)((alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt))))
+ alias = slowButSafeEnteringPhase(picklerPhase)((alias suchThat (alt => sym.tpe =:= sym.owner.thisType.memberType(alt))))
sym.asInstanceOf[TermSymbol].setAlias(alias)
}
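
The rewritten run() loop factors the repeated save/seek/restore of readIndex into runAtIndex, expect replaces the inline tag checks, and readNonEmptyTree collapses the per-tag tree boilerplate into a single switch. A standalone sketch of the save/restore helper over a toy cursor (not the compiler's PickleBuffer):

    final class Cursor(data: Array[Int]) {
      private var pos = 0
      def read(): Int = { val v = data(pos); pos += 1; v }

      // Run `body` with the cursor temporarily moved to `at`, restoring the
      // previous position afterwards, even if `body` throws.
      def runAt[T](at: Int)(body: => T): T = {
        val saved = pos
        pos = at
        try body finally pos = saved
      }
    }

    object CursorDemo extends App {
      val c = new Cursor(Array(10, 20, 30))
      println(c.runAt(1)(c.read()))  // prints 20; the cursor position is restored to 0
    }
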
diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
index e21e95903b..28afd18fe0 100644
--- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
+++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala
@@ -33,23 +33,26 @@ abstract class MutableSettings extends AbsSettings {
}
}
- def overrideObjects: BooleanSetting
- def printtypes: BooleanSetting
+ def Xexperimental: BooleanSetting
+ def XfullLubs: BooleanSetting
+ def XnoPatmatAnalysis: BooleanSetting
+ def Xprintpos: BooleanSetting
+ def Yposdebug: BooleanSetting
+ def Yrangepos: BooleanSetting
+ def Yshowsymkinds: BooleanSetting
+ def breakCycles: BooleanSetting
def debug: BooleanSetting
+ def developer: BooleanSetting
def explaintypes: BooleanSetting
- def verbose: BooleanSetting
+ def overrideObjects: BooleanSetting
+ def printtypes: BooleanSetting
def uniqid: BooleanSetting
- def Yshowsymkinds: BooleanSetting
- def Yposdebug: BooleanSetting
- def Yrangepos: BooleanSetting
- def Xprintpos: BooleanSetting
+ def verbose: BooleanSetting
+
def Yrecursion: IntSetting
def maxClassfileName: IntSetting
- def Xexperimental: BooleanSetting
- def XnoPatmatAnalysis: BooleanSetting
- def XfullLubs: BooleanSetting
- def breakCycles: BooleanSetting
}
+
object MutableSettings {
import scala.language.implicitConversions
/** Support the common use case, `if (settings.debug) println("Hello, martin.")` */
diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
index 6fa536d84c..6b33aca025 100644
--- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
+++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala
@@ -251,8 +251,11 @@ private[internal] trait GlbLubs {
else if (isNumericSubType(t2, t1)) t1
else IntTpe)
- private val lubResults = new mutable.HashMap[(Depth, List[Type]), Type]
- private val glbResults = new mutable.HashMap[(Depth, List[Type]), Type]
+ private val _lubResults = new mutable.HashMap[(Depth, List[Type]), Type]
+ def lubResults = _lubResults
+
+ private val _glbResults = new mutable.HashMap[(Depth, List[Type]), Type]
+ def glbResults = _glbResults
/** Given a list of types, finds all the base classes they have in
* common, then returns a list of type constructors derived directly
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
index 6532bce9f0..b60fecd66e 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala
@@ -14,7 +14,8 @@ trait TypeComparers {
private final val LogPendingSubTypesThreshold = TypeConstants.DefaultLogThreshhold
- private val pendingSubTypes = new mutable.HashSet[SubTypePair]
+ private val _pendingSubTypes = new mutable.HashSet[SubTypePair]
+ def pendingSubTypes = _pendingSubTypes
class SubTypePair(val tp1: Type, val tp2: Type) {
override def hashCode = tp1.hashCode * 41 + tp2.hashCode
@@ -33,7 +34,9 @@ trait TypeComparers {
override def toString = tp1+" <:<? "+tp2
}
- private var subsametypeRecursions: Int = 0
+ private var _subsametypeRecursions: Int = 0
+ def subsametypeRecursions = _subsametypeRecursions
+ def subsametypeRecursions_=(value: Int) = _subsametypeRecursions = value
private def isUnifiable(pre1: Type, pre2: Type) = (
(isEligibleForPrefixUnification(pre1) || isEligibleForPrefixUnification(pre2))
@@ -100,17 +103,13 @@ trait TypeComparers {
// isSameType1(tp1, tp2)
// }
- undoLog.lock()
+ val before = undoLog.log
+ var result = false
try {
- val before = undoLog.log
- var result = false
- try {
- result = isSameType1(tp1, tp2)
- }
- finally if (!result) undoLog.undoTo(before)
- result
+ result = isSameType1(tp1, tp2)
}
- finally undoLog.unlock()
+ finally if (!result) undoLog.undoTo(before)
+ result
}
finally {
subsametypeRecursions -= 1
@@ -256,30 +255,27 @@ trait TypeComparers {
// }
// }
- undoLog.lock()
- try {
- val before = undoLog.log
- var result = false
-
- try result = { // if subtype test fails, it should not affect constraints on typevars
- if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
- val p = new SubTypePair(tp1, tp2)
- if (pendingSubTypes(p))
- false
- else
- try {
- pendingSubTypes += p
- isSubType1(tp1, tp2, depth)
- } finally {
- pendingSubTypes -= p
- }
- } else {
- isSubType1(tp1, tp2, depth)
- }
- } finally if (!result) undoLog.undoTo(before)
+ val before = undoLog.log
+ var result = false
+
+ try result = { // if subtype test fails, it should not affect constraints on typevars
+ if (subsametypeRecursions >= LogPendingSubTypesThreshold) {
+ val p = new SubTypePair(tp1, tp2)
+ if (pendingSubTypes(p))
+ false
+ else
+ try {
+ pendingSubTypes += p
+ isSubType1(tp1, tp2, depth)
+ } finally {
+ pendingSubTypes -= p
+ }
+ } else {
+ isSubType1(tp1, tp2, depth)
+ }
+ } finally if (!result) undoLog.undoTo(before)
- result
- } finally undoLog.unlock()
+ result
} finally {
subsametypeRecursions -= 1
// XXX AM TODO: figure out when it is safe and needed to clear the log -- the commented approach below is too eager (it breaks #3281, #3866)
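
The hunks above only drop the explicit undoLog lock()/unlock() bracketing; the pendingSubTypes guard that breaks subtype-check cycles once subsametypeRecursions passes LogPendingSubTypesThreshold is untouched. That guard amounts to tracking in-flight pairs, roughly as in this standalone sketch (toy key type, no recursion threshold):

    object CycleGuard {
      import scala.collection.mutable

      final case class Pair(a: String, b: String)
      private val pending = mutable.HashSet[Pair]()

      // Answers `check` unless the same pair is already being checked further
      // up the call stack, in which case the cycle is broken conservatively.
      def guarded(a: String, b: String)(check: => Boolean): Boolean = {
        val p = Pair(a, b)
        if (pending(p)) false
        else {
          pending += p
          try check finally pending -= p
        }
      }
    }
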
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
index fdfe376c18..e2159d30f5 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala
@@ -13,34 +13,14 @@ private[internal] trait TypeConstraints {
/** A log of type variable with their original constraints. Used in order
* to undo constraints in the case of isSubType/isSameType failure.
*/
- lazy val undoLog = newUndoLog
-
- protected def newUndoLog = new UndoLog
+ private lazy val _undoLog = new UndoLog
+ def undoLog = _undoLog
class UndoLog extends Clearable {
private type UndoPairs = List[(TypeVar, TypeConstraint)]
//OPT this method is public so we can do `manual inlining`
var log: UndoPairs = List()
- /*
- * These two methods provide explicit locking mechanism that is overridden in SynchronizedUndoLog.
- *
- * The idea behind explicit locking mechanism is that all public methods that access mutable state
- * will have to obtain the lock for their entire execution so both reads and writes can be kept in
- * right order. Originally, that was achieved by overriding those public methods in
- * `SynchronizedUndoLog` which was fine but expensive. The reason is that those public methods take
- * thunk as argument and if we keep them non-final there's no way to make them inlined so thunks
- * can go away.
- *
- * By using explicit locking we can achieve inlining.
- *
- * NOTE: They are made public for now so we can apply 'manual inlining' (copy&pasting into hot
- * places implementation of `undo` or `undoUnless`). This should be changed back to protected
- * once inliner is fixed.
- */
- def lock(): Unit = ()
- def unlock(): Unit = ()
-
// register with the auto-clearing cache manager
perRunCaches.recordCache(this)
@@ -64,23 +44,16 @@ private[internal] trait TypeConstraints {
}
def clear() {
- lock()
- try {
- if (settings.debug)
- self.log("Clearing " + log.size + " entries from the undoLog.")
- log = Nil
- } finally unlock()
+ if (settings.debug)
+ self.log("Clearing " + log.size + " entries from the undoLog.")
+ log = Nil
}
// `block` should not affect constraints on typevars
def undo[T](block: => T): T = {
- lock()
- try {
- val before = log
-
- try block
- finally undoTo(before)
- } finally unlock()
+ val before = log
+ try block
+ finally undoTo(before)
}
}
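
With the explicit locking removed, UndoLog.undo reduces to the snapshot-and-rollback idiom: remember the current log, run the block, then revert whatever the block appended. A standalone sketch over a list of revert actions (illustrative entry type, not TypeVar/TypeConstraint pairs):

    object MiniUndoLog {
      private var log: List[() => Unit] = Nil   // each entry knows how to revert itself

      def record(revert: () => Unit): Unit = log ::= revert

      private def undoTo(limit: List[() => Unit]): Unit =
        while (log ne limit) { log.head(); log = log.tail }

      // Anything recorded inside `block` is reverted once it completes.
      def undo[T](block: => T): T = {
        val before = log
        try block finally undoTo(before)
      }
    }
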
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
index 7e98ac03d5..9a54ad8217 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala
@@ -581,6 +581,7 @@ private[internal] trait TypeMaps {
else if (!matchesPrefixAndClass(pre, clazz)(tparam.owner))
loop(nextBase.prefix, clazz.owner)
else nextBase match {
+ case NoType => loop(NoType, clazz.owner) // backstop for SI-2797, must remove `SingletonType#isHigherKinded` and run pos/t2797.scala to get here.
case applied @ TypeRef(_, _, _) => correspondingTypeArgument(classParam, applied)
case ExistentialType(eparams, qtpe) => captureSkolems(eparams) ; loop(qtpe, clazz)
case t => abort(s"$tparam in ${tparam.owner} cannot be instantiated from ${seenFromPrefix.widen}")
@@ -593,10 +594,8 @@ private[internal] trait TypeMaps {
// Since pre may be something like ThisType(A) where trait A { self: B => },
// we have to test the typeSymbol of the widened type, not pre.typeSymbol, or
// B will not be considered.
- private def matchesPrefixAndClass(pre: Type, clazz: Symbol)(candidate: Symbol) = pre.widen match {
- case _: TypeVar => false
- case wide => (clazz == candidate) && (wide.typeSymbol isSubClass clazz)
- }
+ private def matchesPrefixAndClass(pre: Type, clazz: Symbol)(candidate: Symbol) =
+ (clazz == candidate) && (pre.widen.typeSymbol isSubClass clazz)
// Whether the annotation tree currently being mapped over has had a This(_) node rewritten.
private[this] var wroteAnnotation = false
diff --git a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala
index 16929cca0f..ebc4394d25 100644
--- a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala
+++ b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala
@@ -10,7 +10,9 @@ private[internal] trait TypeToStrings {
*/
final val maxTostringRecursions = 50
- private var tostringRecursions = 0
+ private var _tostringRecursions = 0
+ def tostringRecursions = _tostringRecursions
+ def tostringRecursions_=(value: Int) = _tostringRecursions = value
protected def typeToString(tpe: Type): String =
if (tostringRecursions >= maxTostringRecursions) {
diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala
index 185e2c3d1e..3eb3a4cdf4 100644
--- a/src/reflect/scala/reflect/internal/transform/Erasure.scala
+++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala
@@ -77,9 +77,6 @@ trait Erasure {
if (cls.owner.isClass) cls.owner.tpe_* else pre // why not cls.isNestedClass?
}
- def underlyingOfValueClass(clazz: Symbol): Type =
- clazz.derivedValueClassUnbox.tpe.resultType
-
/** The type of the argument of a value class reference after erasure
* This method needs to be called at a phase no later than erasurephase
*/
@@ -257,11 +254,11 @@ trait Erasure {
/** This is used as the Scala erasure during the erasure phase itself
* It differs from normal erasure in that value classes are erased to ErasedValueTypes which
- * are then later converted to the underlying parameter type in phase posterasure.
+ * are then later unwrapped to the underlying parameter type in phase posterasure.
*/
object specialScalaErasure extends ScalaErasureMap {
override def eraseDerivedValueClassRef(tref: TypeRef): Type =
- ErasedValueType(tref)
+ ErasedValueType(tref.sym, erasedValueClassArg(tref))
}
object javaErasure extends JavaErasureMap
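
specialScalaErasure now stores the erased underlying type directly in ErasedValueType, in line with the SI-6385 note added to Types.scala above. The kind of user code that exercises the bridge matching it enables looks roughly like this (plain Scala for illustration; the erasure bookkeeping itself stays internal):

    class Meter(val underlying: Double) extends AnyVal

    trait Distance[T] { def zero: T }

    // During erasure the signature of `zero` is seen through
    // ErasedValueType(Meter, Double), so the bridge generated for
    // Distance[Meter] lines up with the erased override below.
    object MeterDistance extends Distance[Meter] {
      def zero: Meter = new Meter(0.0)
    }
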
diff --git a/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala b/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala
new file mode 100644
index 0000000000..3e54de8e1e
--- /dev/null
+++ b/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala
@@ -0,0 +1,27 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2013 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala.reflect.internal
+package util
+
+import java.util.concurrent.ConcurrentHashMap
+import java.util.concurrent.atomic.AtomicLong
+import scala.collection.mutable
+import scala.reflect.NameTransformer
+
+class FreshNameCreator {
+ protected val counters = new ConcurrentHashMap[String, AtomicLong]()
+
+ /**
+ * Create a fresh name with the given prefix. It is guaranteed
+ * that the returned name has never been returned by a previous
+ * call to this function (provided the prefix does not end in a digit).
+ */
+ def newName(prefix: String): String = {
+ val safePrefix = NameTransformer.encode(prefix)
+ counters.putIfAbsent(safePrefix, new AtomicLong(0));
+ safePrefix + counters.get(safePrefix).incrementAndGet();
+ }
+}
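
A quick usage sketch of the new creator; the third call shows why the doc comment excludes prefixes that end in a digit:

    object FreshNameDemo extends App {
      val fresh = new scala.reflect.internal.util.FreshNameCreator
      println(fresh.newName("x$"))   // x$1
      println(fresh.newName("x$"))   // x$2
      println(fresh.newName("x$1"))  // x$11 -- indistinguishable from the 11th "x$" name
    }
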
diff --git a/src/reflect/scala/reflect/macros/ExprUtils.scala b/src/reflect/scala/reflect/macros/ExprUtils.scala
index af11bd6efc..76a8392b9c 100644
--- a/src/reflect/scala/reflect/macros/ExprUtils.scala
+++ b/src/reflect/scala/reflect/macros/ExprUtils.scala
@@ -12,41 +12,54 @@ trait ExprUtils {
self: Context =>
/** Shorthand for `Literal(Constant(null))` in the underlying `universe`. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def literalNull: Expr[Null]
/** Shorthand for `Literal(Constant(()))` in the underlying `universe`. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def literalUnit: Expr[Unit]
/** Shorthand for `Literal(Constant(true))` in the underlying `universe`. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def literalTrue: Expr[Boolean]
/** Shorthand for `Literal(Constant(false))` in the underlying `universe`. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def literalFalse: Expr[Boolean]
/** Shorthand for `Literal(Constant(x: Boolean))` in the underlying `universe`. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def literal(x: Boolean): Expr[Boolean]
/** Shorthand for `Literal(Constant(x: Byte))` in the underlying `universe`. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def literal(x: Byte): Expr[Byte]
/** Shorthand for `Literal(Constant(x: Short))` in the underlying `universe`. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def literal(x: Short): Expr[Short]
/** Shorthand for `Literal(Constant(x: Int))` in the underlying `universe`. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def literal(x: Int): Expr[Int]
/** Shorthand for `Literal(Constant(x: Long))` in the underlying `universe`. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def literal(x: Long): Expr[Long]
/** Shorthand for `Literal(Constant(x: Float))` in the underlying `universe`. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def literal(x: Float): Expr[Float]
/** Shorthand for `Literal(Constant(x: Double))` in the underlying `universe`. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def literal(x: Double): Expr[Double]
/** Shorthand for `Literal(Constant(x: String))` in the underlying `universe`. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def literal(x: String): Expr[String]
/** Shorthand for `Literal(Constant(x: Char))` in the underlying `universe`. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def literal(x: Char): Expr[Char]
}
diff --git a/src/reflect/scala/reflect/macros/Parsers.scala b/src/reflect/scala/reflect/macros/Parsers.scala
index 3b25309614..4232b05f8c 100644
--- a/src/reflect/scala/reflect/macros/Parsers.scala
+++ b/src/reflect/scala/reflect/macros/Parsers.scala
@@ -8,6 +8,7 @@ package macros
* A slice of [[scala.reflect.macros.Context the Scala macros context]] that
* exposes functions to parse strings with Scala code into trees.
*/
+@deprecated("Use quasiquotes instead", "2.11.0")
trait Parsers {
self: Context =>
@@ -15,9 +16,11 @@ trait Parsers {
* Only works for expressions, i.e. parsing a package declaration will fail.
* @throws [[scala.reflect.macros.ParseException]]
*/
+ @deprecated("Use quasiquotes instead", "2.11.0")
def parse(code: String): Tree
}
/** Indicates an error during [[scala.reflect.macros.Parsers#parse]].
*/
+ @deprecated("Use quasiquotes instead", "2.11.0")
case class ParseException(pos: scala.reflect.api.Position, msg: String) extends Exception(msg)
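
ExprUtils and Parsers are both deprecated in favour of quasiquotes. The replacements look roughly like this (shown against the runtime universe so the snippet stands alone; inside a macro the same interpolators come from c.universe):

    object QuasiquoteDemo extends App {
      import scala.reflect.runtime.universe._

      val fortyTwo: Tree = q"42"              // was c.literal(42)
      val nullTree: Tree = q"null"            // was c.literalNull
      val listTree: Tree = q"List(1, 2, 3)"   // was c.parse("List(1, 2, 3)")

      println(showRaw(listTree))
    }
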
diff --git a/src/reflect/scala/reflect/macros/TreeBuilder.scala b/src/reflect/scala/reflect/macros/TreeBuilder.scala
index 427b4f70d1..7f57274347 100644
--- a/src/reflect/scala/reflect/macros/TreeBuilder.scala
+++ b/src/reflect/scala/reflect/macros/TreeBuilder.scala
@@ -8,6 +8,7 @@ package macros
* A helper available in [[scala.reflect.macros.Universe]] that defines shorthands for the
* most common tree-creating functions.
*/
+@deprecated("Use quasiquotes instead", "2.11.0")
abstract class TreeBuilder {
val global: Universe
@@ -17,6 +18,7 @@ abstract class TreeBuilder {
* The type must be suitable for this. For example, it
* must not be a TypeRef pointing to an abstract type variable.
*/
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkAttributedQualifier(tpe: Type): Tree
/** Builds a reference to value whose type is given stable prefix.
@@ -25,27 +27,35 @@ abstract class TreeBuilder {
* termSym as the Ident's symbol. In that case, termSym must
* not be NoSymbol.
*/
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkAttributedQualifier(tpe: Type, termSym: Symbol): Tree
/** Builds a typed reference to given symbol with given stable prefix. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkAttributedRef(pre: Type, sym: Symbol): RefTree
/** Builds a typed reference to given symbol. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkAttributedRef(sym: Symbol): RefTree
/** Builds an untyped reference to given symbol. Requires the symbol to be static. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkUnattributedRef(sym: Symbol): RefTree
/** Builds an untyped reference to symbol with given name. Requires the symbol to be static. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkUnattributedRef(fullName: Name): RefTree
/** Builds a typed This reference to given symbol. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkAttributedThis(sym: Symbol): This
/** Builds a typed Ident with an underlying symbol. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkAttributedIdent(sym: Symbol): RefTree
/** Builds a typed Select with an underlying symbol. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkAttributedSelect(qual: Tree, sym: Symbol): RefTree
/** A creator for method calls, e.g. fn[T1, T2, ...](v1, v2, ...)
@@ -57,22 +67,31 @@ abstract class TreeBuilder {
* @param args value arguments
* @return the newly created trees.
*/
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkMethodCall(receiver: Symbol, methodName: Name, targs: List[Type], args: List[Tree]): Tree
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkMethodCall(method: Symbol, targs: List[Type], args: List[Tree]): Tree
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkMethodCall(method: Symbol, args: List[Tree]): Tree
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkMethodCall(target: Tree, args: List[Tree]): Tree
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkMethodCall(receiver: Symbol, methodName: Name, args: List[Tree]): Tree
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkMethodCall(receiver: Tree, method: Symbol, targs: List[Type], args: List[Tree]): Tree
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkMethodCall(target: Tree, targs: List[Type], args: List[Tree]): Tree
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkNullaryCall(method: Symbol, targs: List[Type]): Tree
/** A tree that refers to the runtime reflexive universe, `scala.reflect.runtime.universe`. */
+ @deprecated("Use quasiquotes instead", "2.11.0")
def mkRuntimeUniverseRef: Tree
}
diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala
index d1d90f53c9..297bac2999 100644
--- a/src/reflect/scala/reflect/macros/Universe.scala
+++ b/src/reflect/scala/reflect/macros/Universe.scala
@@ -20,6 +20,7 @@ abstract class Universe extends scala.reflect.api.Universe {
/** A factory that encapsulates common tree-building functions.
* @group Macros
*/
+ @deprecated("Use quasiquotes instead", "2.11.0")
val treeBuild: TreeBuilder { val global: Universe.this.type }
/** The API of reflection artifacts that support [[scala.reflect.macros.Attachments]].
diff --git a/src/reflect/scala/reflect/runtime/Gil.scala b/src/reflect/scala/reflect/runtime/Gil.scala
new file mode 100644
index 0000000000..0edb1e5748
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/Gil.scala
@@ -0,0 +1,25 @@
+package scala.reflect
+package runtime
+
+private[reflect] trait Gil {
+ self: SymbolTable =>
+
+ // fixme... please...
+ // there are the following avenues of optimization we discussed with Roland:
+ // 1) replace PackageScope locks with ConcurrentHashMap, because PackageScope materializers seem to be idempotent
+ // 2) unlock unpickling completers by verifying that they are idempotent or moving non-idempotent parts
+ // 3) remove the necessity in global state for isSubType
+ private lazy val gil = new java.util.concurrent.locks.ReentrantLock
+
+ @inline final def gilSynchronized[T](body: => T): T = {
+ if (isCompilerUniverse) body
+ else {
+ try {
+ gil.lock()
+ body
+ } finally {
+ gil.unlock()
+ }
+ }
+ }
+}
diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
index 1a15454500..8e822ca4f0 100644
--- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala
+++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala
@@ -18,11 +18,11 @@ import internal.pickling.ByteCodecs
import internal.pickling.UnPickler
import scala.collection.mutable.{ HashMap, ListBuffer }
import internal.Flags._
-import ReflectionUtils.{staticSingletonInstance, innerSingletonInstance, scalacShouldntLoadClass}
+import ReflectionUtils._
import scala.language.existentials
import scala.runtime.{ScalaRunTime, BoxesRunTime}
-private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse { thisUniverse: SymbolTable =>
+private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse with TwoWayCaches { thisUniverse: SymbolTable =>
private lazy val mirrors = new WeakHashMap[ClassLoader, WeakReference[JavaMirror]]()
@@ -44,19 +44,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
trait JavaClassCompleter extends FlagAssigningCompleter
- def init() = {
- definitions.AnyValClass // force it.
-
- // establish root association to avoid cyclic dependency errors later
- rootMirror.classToScala(classOf[java.lang.Object]).initialize
-
- // println("initializing definitions")
- definitions.init()
- }
-
- def runtimeMirror(cl: ClassLoader): Mirror = mirrors get cl match {
- case Some(WeakReference(m)) => m
- case _ => createMirror(rootMirror.RootClass, cl)
+ def runtimeMirror(cl: ClassLoader): Mirror = gilSynchronized {
+ mirrors get cl match {
+ case Some(WeakReference(m)) => m
+ case _ => createMirror(rootMirror.RootClass, cl)
+ }
}
/** The API of a mirror for a reflective universe */
@@ -69,6 +61,11 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
import definitions._
+ override lazy val RootPackage = new RootPackage with SynchronizedTermSymbol
+ override lazy val RootClass = new RootClass with SynchronizedModuleClassSymbol
+ override lazy val EmptyPackage = new EmptyPackage with SynchronizedTermSymbol
+ override lazy val EmptyPackageClass = new EmptyPackageClass with SynchronizedModuleClassSymbol
+
/** The lazy type for root.
*/
override lazy val rootLoader = new LazyType with FlagAgnosticCompleter {
@@ -689,7 +686,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
completeRest()
}
- def completeRest(): Unit = thisUniverse.synchronized {
+ def completeRest(): Unit = gilSynchronized {
val tparams = clazz.rawInfo.typeParams
val parents = try {
@@ -780,33 +777,19 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
/**
* The Scala owner of the Scala class corresponding to the Java class `jclazz`
*/
- private def sOwner(jclazz: jClass[_]): Symbol =
- if (jclazz.isMemberClass) {
- val jEnclosingClass = jclazz.getEnclosingClass
- val sEnclosingClass = classToScala(jEnclosingClass)
- followStatic(sEnclosingClass, jclazz.javaFlags)
- } else if (jclazz.isLocalClass0) {
- val jEnclosingMethod = jclazz.getEnclosingMethod
- if (jEnclosingMethod != null) {
- methodToScala(jEnclosingMethod)
- } else {
- val jEnclosingConstructor = jclazz.getEnclosingConstructor
- constructorToScala(jEnclosingConstructor)
- }
- } else if (jclazz.isPrimitive || jclazz.isArray) {
- ScalaPackageClass
- } else if (jclazz.getPackage != null) {
- val jPackage = jclazz.getPackage
- packageToScala(jPackage).moduleClass
- } else {
- // @eb: a weird classloader might return a null package for something with a non-empty package name
- // for example, http://groups.google.com/group/scala-internals/browse_thread/thread/7be09ff8f67a1e5c
- // in that case we could invoke packageNameToScala(jPackageName) and, probably, be okay
- // however, I think, it's better to blow up, since weirdness of the class loader might bite us elsewhere
- // [martin] I think it's better to be forgiving here. Restoring packageNameToScala.
- val jPackageName = jclazz.getName take jclazz.getName.lastIndexOf('.')
- packageNameToScala(jPackageName).moduleClass
- }
+ // @eb: a weird classloader might return a null package for something with a non-empty package name
+ // for example, http://groups.google.com/group/scala-internals/browse_thread/thread/7be09ff8f67a1e5c
+ // in that case we could invoke packageNameToScala(jPackageName) and, probably, be okay
+ // however, I think, it's better to blow up, since weirdness of the class loader might bite us elsewhere
+ // [martin] I think it's better to be forgiving here. Restoring packageNameToScala.
+ private def sOwner(jclazz: jClass[_]): Symbol = jclazz match {
+ case PrimitiveOrArray() => ScalaPackageClass
+ case EnclosedInMethod(jowner) => methodToScala(jowner)
+ case EnclosedInConstructor(jowner) => constructorToScala(jowner)
+ case EnclosedInClass(jowner) => followStatic(classToScala(jowner), jclazz.javaFlags)
+ case EnclosedInPackage(jowner) => packageToScala(jowner).moduleClass
+ case _ => packageNameToScala(jclazz.getName take jclazz.getName.lastIndexOf('.')).moduleClass
+ }
/**
* The Scala owner of the Scala symbol corresponding to the Java member `jmember`
@@ -894,7 +877,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
* The Scala package with given fully qualified name. Unlike `packageNameToScala`,
* this one bypasses the cache.
*/
- private[JavaMirrors] def makeScalaPackage(fullname: String): ModuleSymbol = {
+ private[JavaMirrors] def makeScalaPackage(fullname: String): ModuleSymbol = gilSynchronized {
val split = fullname lastIndexOf '.'
val ownerModule: ModuleSymbol =
if (split > 0) packageNameToScala(fullname take split) else this.RootPackage
@@ -1275,11 +1258,6 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
case _ => abort(s"${sym}.enclosingRootClass = ${sym.enclosingRootClass}, which is not a RootSymbol")
}
- private lazy val syntheticCoreClasses: Map[(String, Name), Symbol] = {
- def mapEntry(sym: Symbol): ((String, Name), Symbol) = (sym.owner.fullName, sym.name) -> sym
- Map() ++ (definitions.syntheticCoreClasses map mapEntry)
- }
-
/** 1. If `owner` is a package class (but not the empty package) and `name` is a term name, make a new package
* <owner>.<name>, otherwise return NoSymbol.
* Exception: If owner is root and a java class with given name exists, create symbol in empty package instead
@@ -1289,20 +1267,20 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni
override def missingHook(owner: Symbol, name: Name): Symbol = {
if (owner.hasPackageFlag) {
val mirror = mirrorThatLoaded(owner)
- // todo. this makes toolbox tests pass, but it's a mere workaround for SI-5865
-// assert((owner.info decl name) == NoSymbol, s"already exists: $owner . $name")
if (owner.isRootSymbol && mirror.tryJavaClass(name.toString).isDefined)
return mirror.EmptyPackageClass.info decl name
if (name.isTermName && !owner.isEmptyPackageClass)
return mirror.makeScalaPackage(
if (owner.isRootSymbol) name.toString else owner.fullName+"."+name)
- syntheticCoreClasses get ((owner.fullName, name)) foreach { tsym =>
- // synthetic core classes are only present in root mirrors
- // because Definitions.scala, which initializes and enters them, only affects rootMirror
- // therefore we need to enter them manually for non-root mirrors
- if (mirror ne thisUniverse.rootMirror) owner.info.decls enter tsym
- return tsym
- }
+ if (name == tpnme.AnyRef && owner.owner.isRoot && owner.name == tpnme.scala_)
+ // when we synthesize the scala.AnyRef symbol, we need to add it to the scope of the scala package
+ // the problem is that adding to the scope implies doing something like `owner.info.decls enter anyRef`
+ // which entails running a completer for the scala package
+ // which will try to unpickle the stuff in scala/package.class
+ // which will transitively load scala.AnyRef
+ // which doesn't exist yet, because it hasn't been added to the scope yet
+ // this missing hook ties the knot without introducing synchronization problems like before
+ return definitions.AnyRefClass
}
info("*** missing: "+name+"/"+name.isTermName+"/"+owner+"/"+owner.hasPackageFlag+"/"+owner.info.decls.getClass)
super.missingHook(owner, name)
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
index 06a7db6289..54b75b8e5b 100644
--- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala
+++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala
@@ -8,7 +8,7 @@ package runtime
*
* @contentDiagram hideNodes "*Api" "*Extractor"
*/
-class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.SymbolTable { self =>
+class JavaUniverse extends internal.SymbolTable with JavaUniverseForce with ReflectSetup with runtime.SymbolTable { self =>
override def inform(msg: String): Unit = log(msg)
def picklerPhase = internal.SomePhase
@@ -21,10 +21,82 @@ class JavaUniverse extends internal.SymbolTable with ReflectSetup with runtime.S
def newStrictTreeCopier: TreeCopier = new StrictTreeCopier
def newLazyTreeCopier: TreeCopier = new LazyTreeCopier
+ val currentFreshNameCreator = new reflect.internal.util.FreshNameCreator
+
// can't put this in runtime.Trees since that's mixed with Global in ReflectGlobal, which has the definition from internal.Trees
object treeInfo extends {
val global: JavaUniverse.this.type = JavaUniverse.this
} with internal.TreeInfo
init()
+
+ // ======= Initialization of runtime reflection =======
+ //
+ // This doc describes the carefully laid out sequence of actions used to initialize reflective universes.
+ //
+ // Before reading the text below, read up the section Mirrors in the reflection pre-SIP
+ // https://docs.google.com/document/d/1nAwSw4TmMplsIlzh2shYLUJ5mVh3wndDa1Zm1H6an9A/edit.
+ // Take an especially good look at Figure 2, because it illustrates fundamental principles underlying runtime reflection:
+ // 1) For each universe we have one mirror per classloader
+ // 2) Package symbols are per-mirror
+ // 3) Other symbols are per-universe, which means that a symbol (e.g. Seq on the picture) might be shared between multiple owners
+ //
+ // Main challenges that runtime reflection presents wrt initialization are:
+ // 1) Extravagant completion scheme that enters package members on-demand rather than a result of scanning a directory with class files.
+  //    (That's a direct consequence of the fact that in the general case we can't enumerate all classes in a classloader.
+  //    As Paul rightfully mentioned, we could special-case classloaders that point to filesystems, but that is left for future work).
+ // 2) Presence of synthetic symbols that aren't loaded by normal means (from classfiles) but are synthesized on-the-fly,
+ // and the necessity to propagate these synthetic symbols from rootMirror to other mirrors,
+ // complicated by the fact that such symbols depend on normal symbols (e.g. AnyRef depends on Object).
+ // 3) Necessity to remain thread-safe, which limits our options related to lazy initialization
+ // (E.g. we cannot use missingHook to enter synthetic symbols, because that's thread-unsafe).
+ //
+ // Directly addressing the challenge #3, we create all synthetic symbols fully in advance during init().
+  // However, it's not as simple as just calling definitions.symbolsNotPresentInBytecode.
+ // Before doing that, we need to first initialize ObjectClass, then ScalaPackageClass, and only then deal with synthetics.
+ // Below you can find a detailed explanation for that.
+ //
+ // ### Why ScalaPackageClass? ###
+ //
+ // Forcing ScalaPackageClass first thing during startup is important, because syntheticCoreClasses such as AnyRefClass
+ // need to be entered into ScalaPackageClass, which entails calling ScalaPackageClass.info.decls.enter.
+ // If ScalaPackageClass isn't initialized by that moment, the following will happen for runtime reflection:
+ // 1) Initialization of ScalaPackageClass will trigger unpickling.
+ // 2) Unpickling will need to load some auxiliary types such as, for example, String.
+ // 3) To load String, runtime reflection will call mirrorDefining(classOf[String]).
+ // 4) This, in turn, will call runtimeMirror(classOf[String].getClassLoader).
+ // 5) For some classloader configurations, the resulting mirror will be different from rootMirror.
+ // 6) In that case, initialization of the resulting mirror will try to import definitions.syntheticCoreClasses into the mirror.
+ // 7) This will force all the lazy vals corresponding to syntheticCoreClasses.
+ // 8) By that time, the completer of ScalaPackageClass will have already called setInfo on ScalaPackageClass, so there won't be any stack overflow.
+ //
+ // So far so good, no crashes, no problems, right? Not quite.
+  // If forcing of ScalaPackageClass is triggered by a syntheticCoreClasses lazy val,
+  // then that lazy val will be entered twice: once during step 7 and once when the original call returns.
+ // To avoid this we need to initialize ScalaPackageClass prior to other synthetics.
+ //
+ // ### Why ObjectClass? ###
+ //
+ // 1) As explained in JavaMirrors.missingHook, initialization of ScalaPackageClass critically depends on AnyRefClass.
+ // 2) AnyRefClass is defined as "lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectTpe)",
+ // which means that initialization of AnyRefClass depends on ObjectClass.
+ // 3) ObjectClass is defined as "lazy val ObjectClass = getRequiredClass(sn.Object.toString)",
+ // which means that under some classloader configurations (see JavaMirrors.missingHook for more details)
+ // dereferencing ObjectClass might trigger an avalanche of initializations calling back into AnyRefClass
+ // while another AnyRefClass initializer is still on stack.
+ // 4) That will lead to AnyRefClass being entered two times (once when the recursive call returns and once when the original one returns)
+ // 5) That will crash PackageScope.enter that helpfully detects double-enters.
+ //
+ // Therefore, before initializing ScalaPackageClass, we must pre-initialize ObjectClass
+ def init() {
+ definitions.init()
+
+ // workaround for http://groups.google.com/group/scala-internals/browse_thread/thread/97840ba4fd37b52e
+    // constructors are by definition single-threaded, so we initialize all lazy vals (and local objects) in advance
+ // in order to avoid deadlocks later (e.g. one thread holds a global reflection lock and waits for definitions.Something to initialize,
+ // whereas another thread holds a definitions.Something initialization lock and needs a global reflection lock to complete the initialization)
+
+ // TODO Convert this into a macro
+ force()
+ }
}
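
A note on the deadlock that the eager force() above guards against: the scenario from the comment (one thread holds the global reflection lock and waits for a lazy val in definitions, while another thread holds that lazy val's initialization monitor and needs the global lock) is easier to see in isolation. The following stand-alone Scala sketch uses made-up names (globalLock for the reflection GIL, lazyInitLock for a lazy val's monitor) and merely reproduces the lock-ordering hazard; it is not code from this patch.

    // Stand-alone sketch of the lock-ordering deadlock described in the comment above.
    // Both locks and both method names are illustrative assumptions.
    object DeadlockSketch {
      private val globalLock   = new Object   // stands in for the reflection GIL
      private val lazyInitLock = new Object   // stands in for a lazy val's init monitor

      def reflectiveOperation(): Unit = globalLock.synchronized {   // takes the GIL first...
        Thread.sleep(10)
        lazyInitLock.synchronized { () }                            // ...then needs the lazy val's lock
      }

      def lazyValInitializer(): Unit = lazyInitLock.synchronized {  // takes the lazy val's lock first...
        Thread.sleep(10)
        globalLock.synchronized { () }                              // ...then needs the GIL: deadlock
      }

      def main(args: Array[String]): Unit = {
        new Thread { override def run() = reflectiveOperation() }.start()
        new Thread { override def run() = lazyValInitializer() }.start()
      }
    }

Initializing every lazy val from the single-threaded constructor means no thread can later block on a definitions initializer while holding the reflection lock, which removes one leg of the cycle.
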
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
new file mode 100644
index 0000000000..8fd58c42be
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala
@@ -0,0 +1,496 @@
+// Generated Code, validated by run/t6240-universe-code-gen.scala
+package scala.reflect
+package runtime
+
+trait JavaUniverseForce { self: runtime.JavaUniverse =>
+ def force() {
+ Literal(Constant(42)).duplicate
+ nme.flattenedName()
+ nme.raw
+ WeakTypeTag
+ TypeTag
+ TypeTag.Byte.tpe
+ TypeTag.Short.tpe
+ TypeTag.Char.tpe
+ TypeTag.Int.tpe
+ TypeTag.Long.tpe
+ TypeTag.Float.tpe
+ TypeTag.Double.tpe
+ TypeTag.Boolean.tpe
+ TypeTag.Unit.tpe
+ TypeTag.Any.tpe
+ TypeTag.AnyVal.tpe
+ TypeTag.AnyRef.tpe
+ TypeTag.Object.tpe
+ TypeTag.Nothing.tpe
+ TypeTag.Null.tpe
+
+ this.settings
+ this.treeInfo
+ // inaccessible: this.scala$reflect$runtime$Gil$$gil
+ // inaccessible: this.uniqueLock
+ // inaccessible: this._skolemizationLevel
+ // inaccessible: this._undoLog
+ // inaccessible: this._intersectionWitness
+ // inaccessible: this._volatileRecursions
+ // inaccessible: this._pendingVolatiles
+ // inaccessible: this._subsametypeRecursions
+ // inaccessible: this._pendingSubTypes
+ // inaccessible: this._basetypeRecursions
+ // inaccessible: this._pendingBaseTypes
+ // inaccessible: this._lubResults
+ // inaccessible: this._glbResults
+ // inaccessible: this._indent
+ // inaccessible: this._tostringRecursions
+ // inaccessible: this.atomicIds
+ // inaccessible: this.atomicExistentialIds
+ // inaccessible: this._recursionTable
+ // inaccessible: this.mirrors
+ this.rootMirror
+ this.treeBuild
+ // inaccessible: this.SimpleNameOrdering
+ this.traceSymbols
+ this.perRunCaches
+ this.FixedMirrorTreeCreator
+ this.FixedMirrorTypeCreator
+ this.BackquotedIdentifierAttachment
+ this.CompoundTypeTreeOriginalAttachment
+ this.noPrint
+ this.typeDebug
+ // inaccessible: this.maxFree
+ this.Range
+ // inaccessible: this.posAssigner
+ this.ConsoleWriter
+ this.RefTree
+ this.PackageDef
+ this.ClassDef
+ this.ModuleDef
+ this.ValOrDefDef
+ this.ValDef
+ this.DefDef
+ this.TypeDef
+ this.LabelDef
+ this.ImportSelector
+ this.Import
+ this.Template
+ this.Block
+ this.CaseDef
+ this.Alternative
+ this.Star
+ this.Bind
+ this.UnApply
+ this.ArrayValue
+ this.Function
+ this.Assign
+ this.AssignOrNamedArg
+ this.If
+ this.Match
+ this.Return
+ this.Try
+ this.Throw
+ this.New
+ this.Typed
+ this.TypeApply
+ this.Apply
+ this.ApplyDynamic
+ this.Super
+ this.This
+ this.Select
+ this.Ident
+ this.ReferenceToBoxed
+ this.Literal
+ this.Annotated
+ this.SingletonTypeTree
+ this.SelectFromTypeTree
+ this.CompoundTypeTree
+ this.AppliedTypeTree
+ this.TypeBoundsTree
+ this.ExistentialTypeTree
+ this.TypeTree
+ this.Modifiers
+ this.EmptyTree
+ this.noSelfType
+ this.pendingSuperCall
+ this.emptyValDef
+ this.EmptyTreeTypeSubstituter
+ // inaccessible: this.duplicator
+ this.UnmappableAnnotArg
+ this.LiteralAnnotArg
+ this.ArrayAnnotArg
+ this.NestedAnnotArg
+ this.ScalaSigBytes
+ this.AnnotationInfo
+ this.Annotation
+ this.UnmappableAnnotation
+ this.ErroneousAnnotation
+ this.ThrownException
+ // inaccessible: this.compactify
+ this.tpnme
+ this.fulltpnme
+ this.binarynme
+ this.nme
+ this.sn
+ this.Constant
+ this.definitions
+ this.LookupSucceeded
+ this.LookupAmbiguous
+ this.LookupInaccessible
+ this.LookupNotFound
+ this.Scope
+ this.EmptyScope
+ this.Flag
+ this.KindErrors
+ this.Kind
+ this.ProperTypeKind
+ this.TypeConKind
+ this.inferKind
+ // inaccessible: this.substTypeMapCache
+ this.UnmappableTree
+ this.ErrorType
+ this.WildcardType
+ this.BoundedWildcardType
+ this.NoType
+ this.NoPrefix
+ this.ThisType
+ this.SingleType
+ this.SuperType
+ this.TypeBounds
+ this.CompoundType
+ this.baseClassesCycleMonitor
+ this.RefinedType
+ this.ClassInfoType
+ this.ConstantType
+ this.TypeRef
+ this.MethodType
+ this.NullaryMethodType
+ this.PolyType
+ this.ExistentialType
+ this.OverloadedType
+ this.AntiPolyType
+ this.HasTypeMember
+ this.ArrayTypeRef
+ this.TypeVar
+ this.AnnotatedType
+ this.StaticallyAnnotatedType
+ this.NamedType
+ this.RepeatedType
+ this.ErasedValueType
+ this.GenPolyType
+ this.unwrapToClass
+ this.unwrapToStableClass
+ this.unwrapWrapperTypes
+ this.RecoverableCyclicReference
+ // inaccessible: this._undoLog
+ // inaccessible: this.numericLoBound
+ // inaccessible: this.numericHiBound
+ this.TypeConstraint
+ this.normalizeAliases
+ this.dropSingletonType
+ this.abstractTypesToBounds
+ this.dropIllegalStarTypes
+ this.IsDependentCollector
+ this.ApproximateDependentMap
+ this.wildcardToTypeVarMap
+ this.typeVarToOriginMap
+ this.ErroneousCollector
+ this.adaptToNewRunMap
+ // inaccessible: this.commonOwnerMapObj
+ this.SymbolKind
+ this.NoSymbol
+ this.CyclicReference
+ // inaccessible: this.TypeHistory
+ this.TermName
+ this.TypeName
+ this.BooleanFlag
+ this.WeakTypeTag
+ this.TypeTag
+ this.Expr
+ this.NoMods
+ definitions.JavaLangPackage
+ definitions.JavaLangPackageClass
+ definitions.ScalaPackage
+ definitions.ScalaPackageClass
+ definitions.RuntimePackage
+ definitions.RuntimePackageClass
+ definitions.AnyClass
+ definitions.AnyRefClass
+ definitions.ObjectClass
+ definitions.AnyRefTpe
+ definitions.AnyTpe
+ definitions.AnyValTpe
+ definitions.BoxedUnitTpe
+ definitions.NothingTpe
+ definitions.NullTpe
+ definitions.ObjectTpe
+ definitions.SerializableTpe
+ definitions.StringTpe
+ definitions.ThrowableTpe
+ definitions.ConstantTrue
+ definitions.ConstantFalse
+ definitions.ConstantNull
+ definitions.AnyValClass
+ definitions.RuntimeNothingClass
+ definitions.RuntimeNullClass
+ definitions.NothingClass
+ definitions.NullClass
+ definitions.ClassCastExceptionClass
+ definitions.IndexOutOfBoundsExceptionClass
+ definitions.InvocationTargetExceptionClass
+ definitions.MatchErrorClass
+ definitions.NonLocalReturnControlClass
+ definitions.NullPointerExceptionClass
+ definitions.ThrowableClass
+ definitions.UninitializedErrorClass
+ definitions.UninitializedFieldConstructor
+ definitions.PartialFunctionClass
+ definitions.AbstractPartialFunctionClass
+ definitions.SymbolClass
+ definitions.StringClass
+ definitions.StringModule
+ definitions.ClassClass
+ definitions.DynamicClass
+ definitions.SysPackage
+ definitions.UnqualifiedModules
+ definitions.UnqualifiedOwners
+ definitions.PredefModule
+ definitions.SpecializableModule
+ definitions.GroupOfSpecializable
+ definitions.ScalaRunTimeModule
+ definitions.SymbolModule
+ definitions.Symbol_apply
+ definitions.StringAddClass
+ definitions.ArrowAssocClass
+ definitions.StringAdd_$plus
+ definitions.ScalaNumberClass
+ definitions.TraitSetterAnnotationClass
+ definitions.DelayedInitClass
+ definitions.TypeConstraintClass
+ definitions.SingletonClass
+ definitions.SerializableClass
+ definitions.JavaSerializableClass
+ definitions.ComparableClass
+ definitions.JavaCloneableClass
+ definitions.JavaNumberClass
+ definitions.RemoteInterfaceClass
+ definitions.RemoteExceptionClass
+ definitions.ByNameParamClass
+ definitions.JavaRepeatedParamClass
+ definitions.RepeatedParamClass
+ definitions.ExprClassOf
+ definitions.ConsClass
+ definitions.IteratorClass
+ definitions.IterableClass
+ definitions.ListClass
+ definitions.SeqClass
+ definitions.StringBuilderClass
+ definitions.TraversableClass
+ definitions.ListModule
+ definitions.List_apply
+ definitions.NilModule
+ definitions.SeqModule
+ definitions.ArrayModule
+ definitions.ArrayModule_overloadedApply
+ definitions.ArrayClass
+ definitions.Array_apply
+ definitions.Array_update
+ definitions.Array_length
+ definitions.Array_clone
+ definitions.SoftReferenceClass
+ definitions.MethodClass
+ definitions.EmptyMethodCacheClass
+ definitions.MethodCacheClass
+ definitions.ScalaXmlTopScope
+ definitions.ScalaXmlPackage
+ definitions.ReflectPackage
+ definitions.ReflectApiPackage
+ definitions.ReflectRuntimePackage
+ definitions.PartialManifestClass
+ definitions.PartialManifestModule
+ definitions.FullManifestClass
+ definitions.FullManifestModule
+ definitions.OptManifestClass
+ definitions.NoManifest
+ definitions.TreesClass
+ definitions.TreesTreeType
+ definitions.TreeType
+ definitions.SubtreeType
+ definitions.ExprsClass
+ definitions.ExprClass
+ definitions.ClassTagModule
+ definitions.ClassTagClass
+ definitions.TypeTagsClass
+ definitions.WeakTypeTagClass
+ definitions.WeakTypeTagModule
+ definitions.TypeTagClass
+ definitions.TypeTagModule
+ definitions.ApiUniverseClass
+ definitions.JavaUniverseClass
+ definitions.MirrorClass
+ definitions.TypeCreatorClass
+ definitions.TreeCreatorClass
+ definitions.LiftableClass
+ definitions.MacroClass
+ definitions.MacroContextClass
+ definitions.MacroImplAnnotation
+ definitions.StringContextClass
+ definitions.QuasiquoteClass
+ definitions.QuasiquoteClass_api
+ definitions.QuasiquoteClass_api_apply
+ definitions.QuasiquoteClass_api_unapply
+ definitions.ScalaSignatureAnnotation
+ definitions.ScalaLongSignatureAnnotation
+ definitions.OptionClass
+ definitions.OptionModule
+ definitions.Option_apply
+ definitions.SomeClass
+ definitions.NoneModule
+ definitions.SomeModule
+ definitions.VarArityClass
+ definitions.ProductClass
+ definitions.TupleClass
+ definitions.FunctionClass
+ definitions.AbstractFunctionClass
+ definitions.ProductRootClass
+ definitions.Any_$eq$eq
+ definitions.Any_$bang$eq
+ definitions.Any_equals
+ definitions.Any_hashCode
+ definitions.Any_toString
+ definitions.Any_$hash$hash
+ definitions.Any_getClass
+ definitions.Any_isInstanceOf
+ definitions.Any_asInstanceOf
+ definitions.primitiveGetClassMethods
+ definitions.getClassMethods
+ definitions.Object_$hash$hash
+ definitions.Object_$eq$eq
+ definitions.Object_$bang$eq
+ definitions.Object_eq
+ definitions.Object_ne
+ definitions.Object_isInstanceOf
+ definitions.Object_asInstanceOf
+ definitions.Object_synchronized
+ definitions.String_$plus
+ definitions.ObjectRefClass
+ definitions.VolatileObjectRefClass
+ definitions.RuntimeStaticsModule
+ definitions.BoxesRunTimeModule
+ definitions.BoxesRunTimeClass
+ definitions.BoxedNumberClass
+ definitions.BoxedCharacterClass
+ definitions.BoxedBooleanClass
+ definitions.BoxedByteClass
+ definitions.BoxedShortClass
+ definitions.BoxedIntClass
+ definitions.BoxedLongClass
+ definitions.BoxedFloatClass
+ definitions.BoxedDoubleClass
+ definitions.Boxes_isNumberOrBool
+ definitions.Boxes_isNumber
+ definitions.BoxedUnitClass
+ definitions.BoxedUnitModule
+ definitions.AnnotationClass
+ definitions.ClassfileAnnotationClass
+ definitions.StaticAnnotationClass
+ definitions.BridgeClass
+ definitions.ElidableMethodClass
+ definitions.ImplicitNotFoundClass
+ definitions.MigrationAnnotationClass
+ definitions.ScalaStrictFPAttr
+ definitions.SwitchClass
+ definitions.TailrecClass
+ definitions.VarargsClass
+ definitions.uncheckedStableClass
+ definitions.uncheckedVarianceClass
+ definitions.BeanPropertyAttr
+ definitions.BooleanBeanPropertyAttr
+ definitions.CompileTimeOnlyAttr
+ definitions.DeprecatedAttr
+ definitions.DeprecatedNameAttr
+ definitions.DeprecatedInheritanceAttr
+ definitions.DeprecatedOverridingAttr
+ definitions.NativeAttr
+ definitions.RemoteAttr
+ definitions.ScalaInlineClass
+ definitions.ScalaNoInlineClass
+ definitions.SerialVersionUIDAttr
+ definitions.SerialVersionUIDAnnotation
+ definitions.SpecializedClass
+ definitions.ThrowsClass
+ definitions.TransientAttr
+ definitions.UncheckedClass
+ definitions.UncheckedBoundsClass
+ definitions.UnspecializedClass
+ definitions.VolatileAttr
+ definitions.BeanGetterTargetClass
+ definitions.BeanSetterTargetClass
+ definitions.FieldTargetClass
+ definitions.GetterTargetClass
+ definitions.ParamTargetClass
+ definitions.SetterTargetClass
+ definitions.ObjectTargetClass
+ definitions.ClassTargetClass
+ definitions.MethodTargetClass
+ definitions.LanguageFeatureAnnot
+ definitions.languageFeatureModule
+ definitions.experimentalModule
+ definitions.MacrosFeature
+ definitions.DynamicsFeature
+ definitions.PostfixOpsFeature
+ definitions.ReflectiveCallsFeature
+ definitions.ImplicitConversionsFeature
+ definitions.HigherKindsFeature
+ definitions.ExistentialsFeature
+ definitions.metaAnnotations
+ definitions.AnnotationDefaultAttr
+ definitions.isUnbox
+ definitions.isBox
+ definitions.isPhantomClass
+ definitions.syntheticCoreClasses
+ definitions.syntheticCoreMethods
+ definitions.hijackedCoreClasses
+ definitions.symbolsNotPresentInBytecode
+ definitions.isPossibleSyntheticParent
+ // inaccessible: definitions.boxedValueClassesSet
+ definitions.abbrvTag
+ definitions.numericWeight
+ definitions.boxedModule
+ definitions.boxedClass
+ definitions.refClass
+ definitions.volatileRefClass
+ definitions.boxMethod
+ definitions.unboxMethod
+ definitions.UnitClass
+ definitions.ByteClass
+ definitions.ShortClass
+ definitions.CharClass
+ definitions.IntClass
+ definitions.LongClass
+ definitions.FloatClass
+ definitions.DoubleClass
+ definitions.BooleanClass
+ definitions.Boolean_and
+ definitions.Boolean_or
+ definitions.Boolean_not
+ definitions.UnitTpe
+ definitions.ByteTpe
+ definitions.ShortTpe
+ definitions.CharTpe
+ definitions.IntTpe
+ definitions.LongTpe
+ definitions.FloatTpe
+ definitions.DoubleTpe
+ definitions.BooleanTpe
+ definitions.ScalaNumericValueClasses
+ definitions.ScalaValueClassesNoUnit
+ definitions.ScalaValueClasses
+
+
+ erasure.GenericArray
+ erasure.scalaErasure
+ erasure.specialScalaErasure
+ erasure.javaErasure
+ erasure.verifiedJavaErasure
+ erasure.boxingErasure
+ }
+} \ No newline at end of file
diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
index 710ec02acd..813c0e1386 100644
--- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
+++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala
@@ -84,4 +84,18 @@ private[scala] object ReflectionUtils {
def scalacShouldntLoadClassfile(fileName: String) = isTraitImplementation(fileName)
def scalacShouldntLoadClass(name: scala.reflect.internal.SymbolTable#Name) = scalacShouldntLoadClassfile(name + ".class")
+
+ object PrimitiveOrArray {
+ def unapply(jclazz: jClass[_]) = jclazz.isPrimitive || jclazz.isArray
+ }
+
+ class EnclosedIn[T](enclosure: jClass[_] => T) {
+ def unapply(jclazz: jClass[_]): Option[T] = if (enclosure(jclazz) != null) Some(enclosure(jclazz)) else None
+ }
+
+ object EnclosedInMethod extends EnclosedIn(_.getEnclosingMethod)
+ object EnclosedInConstructor extends EnclosedIn(_.getEnclosingConstructor)
+ object EnclosedInClass extends EnclosedIn(_.getEnclosingClass)
+ object EnclosedInPackage extends EnclosedIn(_.getPackage)
}
+
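
For readers unfamiliar with the extractor idiom behind the EnclosedIn helpers above: each object supplies an unapply that wraps a possibly-null accessor of java.lang.Class in an Option, which is what lets sOwner in JavaMirrors be written as a flat pattern match. The following stand-alone sketch uses hypothetical names (the real code works on the jClass alias) and is only an illustration of the pattern, not the ReflectionUtils code itself.

    // Illustrative re-creation of the EnclosedIn extractor pattern; names are made up.
    class NullableAccessor[T](accessor: Class[_] => T) {
      def unapply(c: Class[_]): Option[T] = Option(accessor(c))   // null becomes None
    }

    object EnclosingMethod extends NullableAccessor(_.getEnclosingMethod)
    object EnclosingClass  extends NullableAccessor(_.getEnclosingClass)

    class Outer { class Inner }

    object ExtractorDemo extends App {
      def describe(c: Class[_]): String = c match {
        case EnclosingMethod(m) => s"defined inside method ${m.getName}"
        case EnclosingClass(o)  => s"nested in ${o.getName}"
        case _                  => "top-level (or primitive/array)"
      }
      println(describe(classOf[Outer#Inner]))   // nested in Outer
      println(describe(classOf[String]))        // top-level (or primitive/array)
    }
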
diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala
index d65e9329ed..11db83d7d5 100644
--- a/src/reflect/scala/reflect/runtime/Settings.scala
+++ b/src/reflect/scala/reflect/runtime/Settings.scala
@@ -34,16 +34,17 @@ private[reflect] class Settings extends MutableSettings {
val XfullLubs = new BooleanSetting(false)
val XnoPatmatAnalysis = new BooleanSetting(false)
val Xprintpos = new BooleanSetting(false)
- val Yshowsymkinds = new BooleanSetting(false)
val Yposdebug = new BooleanSetting(false)
val Yrangepos = new BooleanSetting(false)
+ val Yshowsymkinds = new BooleanSetting(false)
+ val breakCycles = new BooleanSetting(false)
val debug = new BooleanSetting(false)
+ val developer = new BooleanSetting(false)
val explaintypes = new BooleanSetting(false)
val overrideObjects = new BooleanSetting(false)
val printtypes = new BooleanSetting(false)
val uniqid = new BooleanSetting(false)
val verbose = new BooleanSetting(false)
- val breakCycles = new BooleanSetting(false)
val Yrecursion = new IntSetting(0)
val maxClassfileName = new IntSetting(255)
diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
index 3e01a6df02..30a3855d70 100644
--- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala
@@ -17,37 +17,13 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
* is found, a package is created instead.
*/
class TopClassCompleter(clazz: Symbol, module: Symbol) extends SymLoader with FlagAssigningCompleter {
-// def makePackage() {
-// println("wrong guess; making package "+clazz)
-// val ptpe = newPackageType(module.moduleClass)
-// for (sym <- List(clazz, module, module.moduleClass)) {
-// sym setFlag Flags.PACKAGE
-// sym setInfo ptpe
-// }
-// }
-
override def complete(sym: Symbol) = {
debugInfo("completing "+sym+"/"+clazz.fullName)
assert(sym == clazz || sym == module || sym == module.moduleClass)
-// try {
- enteringPhaseNotLaterThan(picklerPhase) {
+ slowButSafeEnteringPhaseNotLaterThan(picklerPhase) {
val loadingMirror = mirrorThatLoaded(sym)
val javaClass = loadingMirror.javaClass(clazz.javaClassName)
loadingMirror.unpickleClass(clazz, module, javaClass)
-// } catch {
-// case ex: ClassNotFoundException => makePackage()
-// case ex: NoClassDefFoundError => makePackage()
- // Note: We catch NoClassDefFoundError because there are situations
- // where a package and a class have the same name except for capitalization.
- // It seems in this case the class is loaded even if capitalization differs
- // but then a NoClassDefFound error is issued with a ("wrong name: ...")
- // reason. (I guess this is a concession to Windows).
- // The present behavior is a bit too forgiving, in that it masks
- // all class load errors, not just wrong name errors. We should try
- // to be more discriminating. To get on the right track simply delete
- // the clause above and load a collection class such as collection.Iterable.
- // You'll see an error that class `parallel` has the wrong name.
-// }
}
}
override def load(sym: Symbol) = complete(sym)
@@ -91,12 +67,52 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
}
}
+
+  // Since runtime reflection doesn't have the luxury of enumerating all classes
+ // on the classpath, it has to materialize symbols for top-level definitions
+ // (packages, classes, objects) on demand.
+ //
+ // Someone asks us for a class named `foo.Bar`? Easy. Let's speculatively create
+ // a package named `foo` and then look up `newTypeName("bar")` in its decls.
+  // This lookup, implemented in `SymbolLoaders.PackageScope`, tests the waters by
+  // trying `Class.forName("foo.Bar")` and then creates a ClassSymbol upon
+ // success (the whole story is a bit longer, but the rest is irrelevant here).
+ //
+ // That's all neat, but these non-deterministic mutations of the global symbol
+  // table give a lot of trouble in a multi-threaded setting. One of the popular
+  // reflection crashes happens when multiple threads trigger materialization
+  // of the same symbol more than once, making subsequent
+ // reflective operations stumble upon outrageous stuff like overloaded packages.
+ //
+ // Short of significantly changing SymbolLoaders I see no other way than just
+ // to slap a global lock on materialization in runtime reflection.
class PackageScope(pkgClass: Symbol) extends Scope(initFingerPrints = -1L) // disable fingerprinting as we do not know entries beforehand
with SynchronizedScope {
assert(pkgClass.isType)
- // disable fingerprinting as we do not know entries beforehand
- private val negatives = mutable.Set[Name]() // Syncnote: Performance only, so need not be protected.
- override def lookupEntry(name: Name): ScopeEntry = {
+
+ // materializing multiple copies of the same symbol in PackageScope is a very popular bug
+ // this override does its best to guard against it
+ override def enter[T <: Symbol](sym: T): T = {
+ // workaround for SI-7728
+ if (isCompilerUniverse) super.enter(sym)
+ else {
+ val existing = super.lookupEntry(sym.name)
+ assert(existing == null || existing.sym.isMethod, s"pkgClass = $pkgClass, sym = $sym, existing = $existing")
+ super.enter(sym)
+ }
+ }
+
+ override def enterIfNew[T <: Symbol](sym: T): T = {
+ val existing = super.lookupEntry(sym.name)
+ if (existing == null) enter(sym)
+ else existing.sym.asInstanceOf[T]
+ }
+
+ // package scopes need to synchronize on the GIL
+ // because lookupEntry might cause changes to the global symbol table
+ override def syncLockSynchronized[T](body: => T): T = gilSynchronized(body)
+ private val negatives = new mutable.HashSet[Name]
+ override def lookupEntry(name: Name): ScopeEntry = syncLockSynchronized {
val e = super.lookupEntry(name)
if (e != null)
e
@@ -119,8 +135,21 @@ private[reflect] trait SymbolLoaders { self: SymbolTable =>
val module = origOwner.info decl name.toTermName
assert(clazz != NoSymbol)
assert(module != NoSymbol)
- pkgClass.info.decls enter clazz
- pkgClass.info.decls enter module
+ // currentMirror.mirrorDefining(cls) might side effect by entering symbols into pkgClass.info.decls
+      // therefore, even though super.lookupEntry(name) returned null at the beginning of this method,
+ // entering clazz/module now will result in a double-enter assertion in PackageScope.enter
+ // here's how it might happen
+ // 1) we are the rootMirror
+ // 2) cls.getClassLoader is different from our classloader
+ // 3) mirrorDefining(cls) looks up a mirror corresponding to that classloader and cannot find it
+ // 4) mirrorDefining creates a new mirror
+ // 5) that triggers Mirror.init() of the new mirror
+ // 6) that triggers definitions.syntheticCoreClasses
+ // 7) that might materialize symbols and enter them into our scope (because syntheticCoreClasses live in rootMirror)
+ // 8) now we come back here and try to enter one of the now entered symbols => BAM!
+ // therefore we use enterIfNew rather than just enter
+ enterIfNew(clazz)
+ enterIfNew(module)
(clazz, module)
}
debugInfo(s"created $module/${module.moduleClass} in $pkgClass")
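
The double-enter scenario spelled out in the comment above is easier to follow with a toy model. The sketch below is illustrative only (hypothetical names, a plain mutable Map instead of the real Scope machinery); it shows why an idempotent enterIfNew is the safe primitive once a lookup can re-enter the same scope through a side effect.

    import scala.collection.mutable

    // Toy model of the idempotent-enter discipline: when a lookup can side-effect
    // the scope, a plain enter may find its own symbol already present, so
    // enterIfNew returns the existing entry instead of tripping an assertion.
    class ToyScope[S] {
      private val entries = mutable.Map.empty[String, S]

      def enter(name: String, sym: S): S = {
        require(!entries.contains(name), s"double enter of $name")
        entries(name) = sym
        sym
      }

      def enterIfNew(name: String, sym: S): S =
        entries.getOrElseUpdate(name, sym)
    }

    object ToyScopeDemo extends App {
      val scope = new ToyScope[String]
      scope.enterIfNew("Foo", "Foo symbol, materialized first")
      // a second materialization of the same definition is now harmless:
      println(scope.enterIfNew("Foo", "Foo symbol, materialized again"))
      // prints the first symbol; a plain enter here would have failed the require
    }
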
diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala
index bcd4d16cde..ddbf3bd629 100644
--- a/src/reflect/scala/reflect/runtime/SymbolTable.scala
+++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala
@@ -9,7 +9,7 @@ import scala.reflect.internal.Flags._
* It can be used either from a reflexive universe (class scala.reflect.runtime.JavaUniverse), or else from
* a runtime compiler that uses reflection to get a class information (class scala.tools.reflect.ReflectGlobal)
*/
-private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors with SymbolLoaders with SynchronizedOps {
+private[scala] trait SymbolTable extends internal.SymbolTable with JavaMirrors with SymbolLoaders with SynchronizedOps with Gil with ThreadLocalStorage {
def info(msg: => String) =
if (settings.verbose) println("[reflect-compiler] "+msg)
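
Gil.scala itself is not among the files shown in this diff (ThreadLocalStorage appears further down). As orientation only, a gilSynchronized helper can be as small as the following sketch; this is a guess at the general shape, not the actual implementation.

    import java.util.concurrent.locks.ReentrantLock

    // Hedged sketch of a global interpreter-style lock for runtime reflection:
    // one process-wide reentrant lock that coarse-grained operations funnel through.
    trait GilSketch {
      private lazy val gil = new ReentrantLock

      @inline final def gilSynchronized[T](body: => T): T = {
        gil.lock()
        try body
        finally gil.unlock()
      }
    }
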
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
index 6aa47a0405..c90901410a 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala
@@ -14,20 +14,25 @@ private[reflect] trait SynchronizedOps extends internal.SymbolTable
// BaseTypeSeqs
override protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) =
- new BaseTypeSeq(parents, elems) with SynchronizedBaseTypeSeq
+ // only need to synchronize BaseTypeSeqs if they contain refined types
+ if (elems.filter(_.isInstanceOf[RefinedType]).nonEmpty) new BaseTypeSeq(parents, elems) with SynchronizedBaseTypeSeq
+ else new BaseTypeSeq(parents, elems)
trait SynchronizedBaseTypeSeq extends BaseTypeSeq {
- override def apply(i: Int): Type = synchronized { super.apply(i) }
- override def rawElem(i: Int) = synchronized { super.rawElem(i) }
- override def typeSymbol(i: Int): Symbol = synchronized { super.typeSymbol(i) }
- override def toList: List[Type] = synchronized { super.toList }
- override def copy(head: Type, offset: Int): BaseTypeSeq = synchronized { super.copy(head, offset) }
- override def map(f: Type => Type): BaseTypeSeq = synchronized { super.map(f) }
- override def exists(p: Type => Boolean): Boolean = synchronized { super.exists(p) }
- override lazy val maxDepth = synchronized { maxDepthOfElems }
- override def toString = synchronized { super.toString }
-
- override def lateMap(f: Type => Type): BaseTypeSeq = new MappedBaseTypeSeq(this, f) with SynchronizedBaseTypeSeq
+ override def apply(i: Int): Type = gilSynchronized { super.apply(i) }
+ override def rawElem(i: Int) = gilSynchronized { super.rawElem(i) }
+ override def typeSymbol(i: Int): Symbol = gilSynchronized { super.typeSymbol(i) }
+ override def toList: List[Type] = gilSynchronized { super.toList }
+ override def copy(head: Type, offset: Int): BaseTypeSeq = gilSynchronized { super.copy(head, offset) }
+ override def map(f: Type => Type): BaseTypeSeq = gilSynchronized { super.map(f) }
+ override def exists(p: Type => Boolean): Boolean = gilSynchronized { super.exists(p) }
+ override lazy val maxDepth = gilSynchronized { maxDepthOfElems }
+ override def toString = gilSynchronized { super.toString }
+
+ override def lateMap(f: Type => Type): BaseTypeSeq =
+ // only need to synchronize BaseTypeSeqs if they contain refined types
+ if (map(f).toList.filter(_.isInstanceOf[RefinedType]).nonEmpty) new MappedBaseTypeSeq(this, f) with SynchronizedBaseTypeSeq
+ else new MappedBaseTypeSeq(this, f)
}
// Scopes
@@ -36,15 +41,19 @@ private[reflect] trait SynchronizedOps extends internal.SymbolTable
override def newNestedScope(outer: Scope): Scope = new Scope(outer) with SynchronizedScope
trait SynchronizedScope extends Scope {
- override def isEmpty: Boolean = synchronized { super.isEmpty }
- override def size: Int = synchronized { super.size }
- override def enter[T <: Symbol](sym: T): T = synchronized { super.enter(sym) }
- override def rehash(sym: Symbol, newname: Name) = synchronized { super.rehash(sym, newname) }
- override def unlink(e: ScopeEntry) = synchronized { super.unlink(e) }
- override def unlink(sym: Symbol) = synchronized { super.unlink(sym) }
- override def lookupAll(name: Name) = synchronized { super.lookupAll(name) }
- override def lookupEntry(name: Name) = synchronized { super.lookupEntry(name) }
- override def lookupNextEntry(entry: ScopeEntry) = synchronized { super.lookupNextEntry(entry) }
- override def toList: List[Symbol] = synchronized { super.toList }
+ // we can keep this lock fine-grained, because methods of Scope don't do anything extraordinary, which makes deadlocks impossible
+ // fancy subclasses of internal.Scopes#Scope should do synchronization themselves (e.g. see PackageScope for an example)
+ private lazy val syncLock = new Object
+ def syncLockSynchronized[T](body: => T): T = if (isCompilerUniverse) body else syncLock.synchronized { body }
+ override def isEmpty: Boolean = syncLockSynchronized { super.isEmpty }
+ override def size: Int = syncLockSynchronized { super.size }
+ override def enter[T <: Symbol](sym: T): T = syncLockSynchronized { super.enter(sym) }
+ override def rehash(sym: Symbol, newname: Name) = syncLockSynchronized { super.rehash(sym, newname) }
+ override def unlink(e: ScopeEntry) = syncLockSynchronized { super.unlink(e) }
+ override def unlink(sym: Symbol) = syncLockSynchronized { super.unlink(sym) }
+ override def lookupAll(name: Name) = syncLockSynchronized { super.lookupAll(name) }
+ override def lookupEntry(name: Name) = syncLockSynchronized { super.lookupEntry(name) }
+ override def lookupNextEntry(entry: ScopeEntry) = syncLockSynchronized { super.lookupNextEntry(entry) }
+ override def toList: List[Symbol] = syncLockSynchronized { super.toList }
}
}
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
index 98cad45db1..298d0ffebd 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala
@@ -3,17 +3,23 @@ package reflect
package runtime
import scala.reflect.io.AbstractFile
+import scala.collection.{ immutable, mutable }
private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable =>
- override protected def nextId() = synchronized { super.nextId() }
+ private lazy val atomicIds = new java.util.concurrent.atomic.AtomicInteger(0)
+ override protected def nextId() = atomicIds.incrementAndGet()
- override protected def freshExistentialName(suffix: String) =
- synchronized { super.freshExistentialName(suffix) }
+ private lazy val atomicExistentialIds = new java.util.concurrent.atomic.AtomicInteger(0)
+ override protected def nextExistentialId() = atomicExistentialIds.incrementAndGet()
+
+ private lazy val _recursionTable = mkThreadLocalStorage(immutable.Map.empty[Symbol, Int])
+ override def recursionTable = _recursionTable.get
+ override def recursionTable_=(value: immutable.Map[Symbol, Int]) = _recursionTable.set(value)
// Set the fields which point companions at one another. Returns the module.
override def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol =
- synchronized { super.connectModuleToClass(m, moduleClass) }
+ gilSynchronized { super.connectModuleToClass(m, moduleClass) }
override def newFreeTermSymbol(name: TermName, value: => Any, flags: Long = 0L, origin: String = null): FreeTermSymbol =
new FreeTermSymbol(name, value, origin) with SynchronizedTermSymbol initFlags flags
@@ -25,35 +31,40 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
trait SynchronizedSymbol extends Symbol {
- override def rawflags = synchronized { super.rawflags }
- override def rawflags_=(x: Long) = synchronized { super.rawflags_=(x) }
-
- override def rawowner = synchronized { super.rawowner }
- override def owner_=(owner: Symbol) = synchronized { super.owner_=(owner) }
-
- override def validTo = synchronized { super.validTo }
- override def validTo_=(x: Period) = synchronized { super.validTo_=(x) }
-
- override def pos = synchronized { super.pos }
- override def setPos(pos: Position): this.type = { synchronized { super.setPos(pos) }; this }
-
- override def privateWithin = synchronized { super.privateWithin }
- override def privateWithin_=(sym: Symbol) = synchronized { super.privateWithin_=(sym) }
-
- override def info = synchronized { super.info }
- override def info_=(info: Type) = synchronized { super.info_=(info) }
- override def updateInfo(info: Type): Symbol = synchronized { super.updateInfo(info) }
- override def rawInfo: Type = synchronized { super.rawInfo }
-
- override def typeParams: List[Symbol] = synchronized { super.typeParams }
-
- override def reset(completer: Type): this.type = synchronized { super.reset(completer) }
-
- override def infosString: String = synchronized { super.infosString }
-
- override def annotations: List[AnnotationInfo] = synchronized { super.annotations }
- override def setAnnotations(annots: List[AnnotationInfo]): this.type = { synchronized { super.setAnnotations(annots) }; this }
-
+ def gilSynchronizedIfNotInited[T](body: => T): T = {
+ if (isFullyInitialized) body
+ else gilSynchronized { body }
+ }
+
+ override def validTo = gilSynchronizedIfNotInited { super.validTo }
+ override def info = gilSynchronizedIfNotInited { super.info }
+ override def rawInfo: Type = gilSynchronizedIfNotInited { super.rawInfo }
+
+ override def typeParams: List[Symbol] = gilSynchronizedIfNotInited {
+ if (isCompilerUniverse) super.typeParams
+ else {
+ if (isMonomorphicType) Nil
+ else {
+ // analogously to the "info" getter, here we allow for two completions:
+ // one: sourceCompleter to LazyType, two: LazyType to completed type
+ if (validTo == NoPeriod)
+ rawInfo load this
+ if (validTo == NoPeriod)
+ rawInfo load this
+
+ rawInfo.typeParams
+ }
+ }
+ }
+ override def unsafeTypeParams: List[Symbol] = gilSynchronizedIfNotInited {
+ if (isCompilerUniverse) super.unsafeTypeParams
+ else {
+ if (isMonomorphicType) Nil
+ else rawInfo.typeParams
+ }
+ }
+
+ override def isStable: Boolean = gilSynchronized { super.isStable }
// ------ creators -------------------------------------------------------------------
@@ -90,50 +101,38 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb
override protected def createModuleSymbol(name: TermName, pos: Position, newFlags: Long): ModuleSymbol =
new ModuleSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
- override protected def createPackageSymbol(name: TermName, pos: Position, newFlags: Long): ModuleSymbol = createModuleSymbol(name, pos, newFlags)
+ override protected def createPackageSymbol(name: TermName, pos: Position, newFlags: Long): ModuleSymbol =
+ createModuleSymbol(name, pos, newFlags)
+
+ override protected def createValueParameterSymbol(name: TermName, pos: Position, newFlags: Long) =
+ new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
- // TODO
- // override protected def createValueParameterSymbol(name: TermName, pos: Position, newFlags: Long)
- // override protected def createValueMemberSymbol(name: TermName, pos: Position, newFlags: Long)
+ override protected def createValueMemberSymbol(name: TermName, pos: Position, newFlags: Long) =
+ new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags
}
// ------- subclasses ---------------------------------------------------------------------
- trait SynchronizedTermSymbol extends TermSymbol with SynchronizedSymbol {
- override def name_=(x: Name) = synchronized { super.name_=(x) }
- override def rawname = synchronized { super.rawname }
- override def referenced: Symbol = synchronized { super.referenced }
- override def referenced_=(x: Symbol) = synchronized { super.referenced_=(x) }
- }
+ trait SynchronizedTermSymbol extends SynchronizedSymbol
trait SynchronizedMethodSymbol extends MethodSymbol with SynchronizedTermSymbol {
- override def typeAsMemberOf(pre: Type): Type = synchronized { super.typeAsMemberOf(pre) }
- override def paramss: List[List[Symbol]] = synchronized { super.paramss }
- override def returnType: Type = synchronized { super.returnType }
+ // we can keep this lock fine-grained, because it's just a cache over asSeenFrom, which makes deadlocks impossible
+ // unfortunately we cannot elide this lock, because the cache depends on `pre`
+ private lazy val typeAsMemberOfLock = new Object
+ override def typeAsMemberOf(pre: Type): Type = gilSynchronizedIfNotInited { typeAsMemberOfLock.synchronized { super.typeAsMemberOf(pre) } }
}
+ trait SynchronizedModuleSymbol extends ModuleSymbol with SynchronizedTermSymbol
+
trait SynchronizedTypeSymbol extends TypeSymbol with SynchronizedSymbol {
- override def name_=(x: Name) = synchronized { super.name_=(x) }
- override def rawname = synchronized { super.rawname }
- override def typeConstructor: Type = synchronized { super.typeConstructor }
- override def tpe_* : Type = synchronized { super.tpe_* }
- override def tpeHK : Type = synchronized { super.tpeHK }
+ // unlike with typeConstructor, a lock is necessary here, because tpe calculation relies on
+ // temporarily assigning NoType to tpeCache to detect cyclic reference errors
+ private lazy val tpeLock = new Object
+ override def tpe_* : Type = gilSynchronizedIfNotInited { tpeLock.synchronized { super.tpe_* } }
}
- trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol {
- override def associatedFile = synchronized { super.associatedFile }
- override def associatedFile_=(f: AbstractFile) = synchronized { super.associatedFile_=(f) }
- override def thisSym: Symbol = synchronized { super.thisSym }
- override def thisType: Type = synchronized { super.thisType }
- override def typeOfThis: Type = synchronized { super.typeOfThis }
- override def typeOfThis_=(tp: Type) = synchronized { super.typeOfThis_=(tp) }
- override def children = synchronized { super.children }
- override def addChild(sym: Symbol) = synchronized { super.addChild(sym) }
- }
+ trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol
- trait SynchronizedModuleClassSymbol extends ModuleClassSymbol with SynchronizedClassSymbol {
- override def sourceModule = synchronized { super.sourceModule }
- override def implicitMembers: Scope = synchronized { super.implicitMembers }
- }
+ trait SynchronizedModuleClassSymbol extends ModuleClassSymbol with SynchronizedClassSymbol
}
diff --git a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
index c0146167df..de78e527a7 100644
--- a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
+++ b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala
@@ -2,8 +2,9 @@ package scala
package reflect
package runtime
-import scala.collection.mutable.WeakHashMap
-import java.lang.ref.WeakReference
+import scala.collection.mutable
+import java.lang.ref.{WeakReference => jWeakRef}
+import scala.ref.{WeakReference => sWeakRef}
import scala.reflect.internal.Depth
/** This trait overrides methods in reflect.internal, bracketing
@@ -14,9 +15,10 @@ private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTa
// No sharing of map objects:
override protected def commonOwnerMap = new CommonOwnerMap
- private object uniqueLock
-
- private val uniques = WeakHashMap[Type, WeakReference[Type]]()
+ // we can keep this lock fine-grained, because super.unique just updates the cache
+ // and, in particular, doesn't call any reflection APIs which makes deadlocks impossible
+ private lazy val uniqueLock = new Object
+ private val uniques = mutable.WeakHashMap[Type, jWeakRef[Type]]()
override def unique[T <: Type](tp: T): T = uniqueLock.synchronized {
// we need to have weak uniques for runtime reflection
// because unlike the normal compiler universe, reflective universe isn't organized in runs
@@ -30,7 +32,7 @@ private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTa
val result = if (inCache.isDefined) inCache.get.get else null
if (result ne null) result.asInstanceOf[T]
else {
- uniques(tp) = new WeakReference(tp)
+ uniques(tp) = new jWeakRef(tp)
tp
}
} else {
@@ -38,47 +40,50 @@ private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTa
}
}
- class SynchronizedUndoLog extends UndoLog {
- private val actualLock = new java.util.concurrent.locks.ReentrantLock
-
- final override def lock(): Unit = actualLock.lock()
- final override def unlock(): Unit = actualLock.unlock()
- }
+ private lazy val _skolemizationLevel = mkThreadLocalStorage(0)
+ override def skolemizationLevel = _skolemizationLevel.get
+ override def skolemizationLevel_=(value: Int) = _skolemizationLevel.set(value)
- override protected def newUndoLog = new SynchronizedUndoLog
+ private lazy val _undoLog = mkThreadLocalStorage(new UndoLog)
+ override def undoLog = _undoLog.get
- override protected def baseTypeOfNonClassTypeRef(tpe: NonClassTypeRef, clazz: Symbol) =
- synchronized { super.baseTypeOfNonClassTypeRef(tpe, clazz) }
+ private lazy val _intersectionWitness = mkThreadLocalStorage(perRunCaches.newWeakMap[List[Type], sWeakRef[Type]]())
+ override def intersectionWitness = _intersectionWitness.get
- private object subsametypeLock
+ private lazy val _volatileRecursions = mkThreadLocalStorage(0)
+ override def volatileRecursions = _volatileRecursions.get
+ override def volatileRecursions_=(value: Int) = _volatileRecursions.set(value)
- override def isSameType(tp1: Type, tp2: Type): Boolean =
- subsametypeLock.synchronized { super.isSameType(tp1, tp2) }
+ private lazy val _pendingVolatiles = mkThreadLocalStorage(new mutable.HashSet[Symbol])
+ override def pendingVolatiles = _pendingVolatiles.get
- override def isDifferentType(tp1: Type, tp2: Type): Boolean =
- subsametypeLock.synchronized { super.isDifferentType(tp1, tp2) }
+ private lazy val _subsametypeRecursions = mkThreadLocalStorage(0)
+ override def subsametypeRecursions = _subsametypeRecursions.get
+ override def subsametypeRecursions_=(value: Int) = _subsametypeRecursions.set(value)
- override def isSubType(tp1: Type, tp2: Type, depth: Depth): Boolean =
- subsametypeLock.synchronized { super.isSubType(tp1, tp2, depth) }
+ private lazy val _pendingSubTypes = mkThreadLocalStorage(new mutable.HashSet[SubTypePair])
+ override def pendingSubTypes = _pendingSubTypes.get
- private object lubglbLock
+ private lazy val _basetypeRecursions = mkThreadLocalStorage(0)
+ override def basetypeRecursions = _basetypeRecursions.get
+ override def basetypeRecursions_=(value: Int) = _basetypeRecursions.set(value)
- override def glb(ts: List[Type]): Type =
- lubglbLock.synchronized { super.glb(ts) }
+ private lazy val _pendingBaseTypes = mkThreadLocalStorage(new mutable.HashSet[Type])
+ override def pendingBaseTypes = _pendingBaseTypes.get
- override def lub(ts: List[Type]): Type =
- lubglbLock.synchronized { super.lub(ts) }
+ private lazy val _lubResults = mkThreadLocalStorage(new mutable.HashMap[(Depth, List[Type]), Type])
+ override def lubResults = _lubResults.get
- private object indentLock
-
- override protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = {
- indentLock.synchronized { super.explain(op, p, tp1, arg2) }
- }
+ private lazy val _glbResults = mkThreadLocalStorage(new mutable.HashMap[(Depth, List[Type]), Type])
+ override def glbResults = _glbResults.get
- private object toStringLock
+ private lazy val _indent = mkThreadLocalStorage("")
+ override def indent = _indent.get
+ override def indent_=(value: String) = _indent.set(value)
- override protected def typeToString(tpe: Type): String =
- toStringLock.synchronized(super.typeToString(tpe))
+ private lazy val _tostringRecursions = mkThreadLocalStorage(0)
+ override def tostringRecursions = _tostringRecursions.get
+ override def tostringRecursions_=(value: Int) = _tostringRecursions.set(value)
/* The idea of caches is as follows.
* When in reflexive mode, a cache is either null, or one sentinal
@@ -91,18 +96,18 @@ private[reflect] trait SynchronizedTypes extends internal.Types { self: SymbolTa
*/
override protected def defineUnderlyingOfSingleType(tpe: SingleType) =
- tpe.synchronized { super.defineUnderlyingOfSingleType(tpe) }
+ gilSynchronized { super.defineUnderlyingOfSingleType(tpe) }
override protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) =
- tpe.synchronized { super.defineBaseTypeSeqOfCompoundType(tpe) }
+ gilSynchronized { super.defineBaseTypeSeqOfCompoundType(tpe) }
override protected def defineBaseClassesOfCompoundType(tpe: CompoundType) =
- tpe.synchronized { super.defineBaseClassesOfCompoundType(tpe) }
+ gilSynchronized { super.defineBaseClassesOfCompoundType(tpe) }
override protected def defineParentsOfTypeRef(tpe: TypeRef) =
- tpe.synchronized { super.defineParentsOfTypeRef(tpe) }
+ gilSynchronized { super.defineParentsOfTypeRef(tpe) }
override protected def defineBaseTypeSeqOfTypeRef(tpe: TypeRef) =
- tpe.synchronized { super.defineBaseTypeSeqOfTypeRef(tpe) }
+ gilSynchronized { super.defineBaseTypeSeqOfTypeRef(tpe) }
}
diff --git a/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
new file mode 100644
index 0000000000..5edc051461
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala
@@ -0,0 +1,28 @@
+package scala.reflect
+package runtime
+
+import java.lang.Thread._
+
+private[reflect] trait ThreadLocalStorage {
+ self: SymbolTable =>
+
+ // see a discussion at scala-internals for more information:
+ // http://groups.google.com/group/scala-internals/browse_thread/thread/337ce68aa5e51f79
+ trait ThreadLocalStorage[T] { def get: T; def set(newValue: T): Unit }
+ private class MyThreadLocalStorage[T](initialValue: => T) extends ThreadLocalStorage[T] {
+ // TODO: how do we use org.cliffc.high_scale_lib.NonBlockingHashMap here?
+ val values = new java.util.concurrent.ConcurrentHashMap[Thread, T]()
+ def get: T = {
+ if (values containsKey currentThread) values.get(currentThread)
+ else {
+ val value = initialValue
+ values.putIfAbsent(currentThread, value)
+ value
+ }
+ }
+ def set(newValue: T): Unit = {
+ values.put(currentThread, newValue)
+ }
+ }
+ @inline final def mkThreadLocalStorage[T](x: => T): ThreadLocalStorage[T] = new MyThreadLocalStorage(x)
+}
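
The same per-thread-storage idea, reduced to a self-contained example with made-up names (this is not the reflection API): a ConcurrentHashMap keyed by the current thread behaves like a ThreadLocal, with each thread reading and writing only its own slot.

    import java.lang.Thread.currentThread
    import java.util.concurrent.ConcurrentHashMap

    // Each thread sees only the entry keyed by itself, mirroring MyThreadLocalStorage above.
    object PerThreadCounterDemo extends App {
      private val depth = new ConcurrentHashMap[Thread, Int]()

      def get: Int = if (depth.containsKey(currentThread)) depth.get(currentThread) else 0
      def set(n: Int): Unit = depth.put(currentThread, n)

      val threads = (1 to 4).map { i =>
        new Thread {
          override def run(): Unit = {
            set(i)               // written into this thread's slot only
            Thread.sleep(20)
            assert(get == i)     // other threads cannot have overwritten it
          }
        }
      }
      threads.foreach(_.start())
      threads.foreach(_.join())
      println("each thread observed only its own value")
    }
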
diff --git a/src/reflect/scala/reflect/runtime/TwoWayCaches.scala b/src/reflect/scala/reflect/runtime/TwoWayCaches.scala
new file mode 100644
index 0000000000..6e2890e536
--- /dev/null
+++ b/src/reflect/scala/reflect/runtime/TwoWayCaches.scala
@@ -0,0 +1,68 @@
+package scala.reflect
+package runtime
+
+import scala.collection.mutable.WeakHashMap
+import java.lang.ref.WeakReference
+
+/** A cache that maintains a bijection between Java reflection type `J`
+ * and Scala reflection type `S`.
+ *
+ * The cache is two-way weak (i.e. it is powered by weak references),
+ * so that Java artifacts do not prevent Scala artifacts from being garbage collected,
+ * nor the other way around.
+ */
+private[runtime] trait TwoWayCaches { self: SymbolTable =>
+ class TwoWayCache[J, S] {
+
+ private val toScalaMap = new WeakHashMap[J, WeakReference[S]]
+ private val toJavaMap = new WeakHashMap[S, WeakReference[J]]
+
+ def enter(j: J, s: S) = gilSynchronized {
+ // debugInfo("cached: "+j+"/"+s)
+ toScalaMap(j) = new WeakReference(s)
+ toJavaMap(s) = new WeakReference(j)
+ }
+
+ private object SomeRef {
+ def unapply[T](optRef: Option[WeakReference[T]]): Option[T] =
+ if (optRef.nonEmpty) {
+ val result = optRef.get.get
+ if (result != null) Some(result) else None
+ } else None
+ }
+
+ def toScala(key: J)(body: => S): S = gilSynchronized {
+ toScalaMap get key match {
+ case SomeRef(v) =>
+ v
+ case _ =>
+ val result = body
+ enter(key, result)
+ result
+ }
+ }
+
+ def toJava(key: S)(body: => J): J = gilSynchronized {
+ toJavaMap get key match {
+ case SomeRef(v) =>
+ v
+ case _ =>
+ val result = body
+ enter(result, key)
+ result
+ }
+ }
+
+ def toJavaOption(key: S)(body: => Option[J]): Option[J] = gilSynchronized {
+ toJavaMap get key match {
+ case SomeRef(v) =>
+ Some(v)
+ case _ =>
+ val result = body
+ for (value <- result) enter(value, key)
+ result
+ }
+ }
+ }
+}
+
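
The toScala/toJava shape above (look up a weak reference, compute only on a miss, record the pair) can be exercised with a small single-threaded stand-in. Names are illustrative, only one direction is kept, and the GIL is deliberately left out; it is not the TwoWayCache code itself.

    import java.lang.ref.WeakReference
    import scala.collection.mutable.WeakHashMap

    // Single-direction, single-threaded stand-in for TwoWayCache.toScala.
    object WeakMemoDemo extends App {
      private val toScalaMap   = new WeakHashMap[Class[_], WeakReference[String]]
      private var computations = 0

      def toScala(key: Class[_])(body: => String): String =
        toScalaMap.get(key).flatMap(ref => Option(ref.get)) match {
          case Some(cached) => cached                       // cache hit
          case None =>
            val result = body                               // compute on a miss
            toScalaMap(key) = new WeakReference(result)     // and remember it weakly
            result
        }

      def describe(c: Class[_]): String =
        toScala(c) { computations += 1; s"symbol for ${c.getName}" }

      describe(classOf[String])
      describe(classOf[String])                   // served from the cache
      println(s"computed $computations time(s)")  // 1, unless the GC cleared the weak ref
    }
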
diff --git a/src/repl/scala/tools/nsc/interpreter/Phased.scala b/src/repl/scala/tools/nsc/interpreter/Phased.scala
index f625124e70..1cdbd65949 100644
--- a/src/repl/scala/tools/nsc/interpreter/Phased.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Phased.scala
@@ -91,7 +91,7 @@ trait Phased {
Parser, Namer, Packageobjects, Typer, Superaccessors, Pickler, Refchecks,
Selectiveanf, Liftcode, Selectivecps, Uncurry, Tailcalls, Specialize,
Explicitouter, Erasure, Lazyvals, Lambdalift, Constructors, Flatten, Mixin,
- Cleanup, Icode, Inliner, Closelim, Dce, Jvm, Terminal
+ Cleanup, Delambdafy, Icode, Inliner, Closelim, Dce, Jvm, Terminal
)
lazy val nameMap = all.map(x => x.name -> x).toMap withDefaultValue NoPhaseName
multi = all
@@ -127,6 +127,7 @@ trait Phased {
case object Flatten extends PhaseName
case object Mixin extends PhaseName
case object Cleanup extends PhaseName
+ case object Delambdafy extends PhaseName
case object Icode extends PhaseName
case object Inliner extends PhaseName
case object Closelim extends PhaseName
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
index 96689ae701..c201b324e7 100644
--- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
+++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js
@@ -383,51 +383,56 @@ function compilePattern(query) {
// Filters all focused templates and packages. This function should be made less-blocking.
// @param query The string of the query
function textFilter() {
- scheduler.clear("filter");
-
- $('#tpl').html('');
-
var query = $("#textfilter input").attr("value") || '';
var queryRegExp = compilePattern(query);
- var index = 0;
+ if ((typeof textFilter.lastQuery === "undefined") || (textFilter.lastQuery !== query)) {
+
+ textFilter.lastQuery = query;
- var searchLoop = function () {
- var packages = Index.keys(Index.PACKAGES).sort();
+ scheduler.clear("filter");
- while (packages[index]) {
- var pack = packages[index];
- var children = Index.PACKAGES[pack];
- index++;
+ $('#tpl').html('');
- if (focusFilterState) {
- if (pack == focusFilterState ||
- pack.indexOf(focusFilterState + '.') == 0) {
- ;
- } else {
- continue;
+ var index = 0;
+
+ var searchLoop = function () {
+ var packages = Index.keys(Index.PACKAGES).sort();
+
+ while (packages[index]) {
+ var pack = packages[index];
+ var children = Index.PACKAGES[pack];
+ index++;
+
+ if (focusFilterState) {
+ if (pack == focusFilterState ||
+ pack.indexOf(focusFilterState + '.') == 0) {
+ ;
+ } else {
+ continue;
+ }
}
- }
- var matched = $.grep(children, function (child, i) {
- return queryRegExp.test(child.name);
- });
+ var matched = $.grep(children, function (child, i) {
+ return queryRegExp.test(child.name);
+ });
- if (matched.length > 0) {
- $('#tpl').append(Index.createPackageTree(pack, matched,
- focusFilterState));
- scheduler.add('filter', searchLoop);
- return;
+ if (matched.length > 0) {
+ $('#tpl').append(Index.createPackageTree(pack, matched,
+ focusFilterState));
+ scheduler.add('filter', searchLoop);
+ return;
+ }
}
- }
- $('#tpl a.packfocus').click(function () {
- focusFilter($(this).parent().parent());
- });
- configureHideFilter();
- };
+ $('#tpl a.packfocus').click(function () {
+ focusFilter($(this).parent().parent());
+ });
+ configureHideFilter();
+ };
- scheduler.add('filter', searchLoop);
+ scheduler.add('filter', searchLoop);
+ }
}
/* Configures the hide tool by adding the hide link to all packages. */
diff --git a/test/files/presentation/doc.check b/test/disabled/presentation/doc.check
index 5a3ff13151..5a3ff13151 100644
--- a/test/files/presentation/doc.check
+++ b/test/disabled/presentation/doc.check
diff --git a/test/files/presentation/doc/doc.scala b/test/disabled/presentation/doc/doc.scala
index f2233f1828..f2233f1828 100755
--- a/test/files/presentation/doc/doc.scala
+++ b/test/disabled/presentation/doc/doc.scala
diff --git a/test/files/presentation/doc/src/Class.scala b/test/disabled/presentation/doc/src/Class.scala
index a974bd6f5c..a974bd6f5c 100755
--- a/test/files/presentation/doc/src/Class.scala
+++ b/test/disabled/presentation/doc/src/Class.scala
diff --git a/test/files/presentation/doc/src/p/Base.scala b/test/disabled/presentation/doc/src/p/Base.scala
index d91632b6f6..d91632b6f6 100755
--- a/test/files/presentation/doc/src/p/Base.scala
+++ b/test/disabled/presentation/doc/src/p/Base.scala
diff --git a/test/files/presentation/doc/src/p/Derived.scala b/test/disabled/presentation/doc/src/p/Derived.scala
index 1a9c9a26d1..1a9c9a26d1 100755
--- a/test/files/presentation/doc/src/p/Derived.scala
+++ b/test/disabled/presentation/doc/src/p/Derived.scala
diff --git a/test/disabled/presentation/ide-bug-1000450.check b/test/disabled/presentation/ide-bug-1000450.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/disabled/presentation/ide-bug-1000450.check
+++ /dev/null
diff --git a/test/disabled/presentation/ide-bug-1000545.check b/test/disabled/presentation/ide-bug-1000545.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/disabled/presentation/ide-bug-1000545.check
+++ /dev/null
diff --git a/test/disabled/run/reflection-sync-subtypes.scala b/test/disabled/run/reflection-sync-subtypes.scala
new file mode 100644
index 0000000000..7f75a464ac
--- /dev/null
+++ b/test/disabled/run/reflection-sync-subtypes.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ val n = 1000
+ val rng = new scala.util.Random()
+ val tasks = List(
+ () => typeOf[List[Int]] <:< typeOf[List[T] forSome { type T }],
+ () => typeOf[List[T] forSome { type T }] <:< typeOf[List[Any]],
+ () => typeOf[Map[Int, Object]] <:< typeOf[Iterable[(Int, String)]],
+ () => typeOf[Expr[Any] { val mirror: rootMirror.type }] <:< typeOf[Expr[List[List[List[Int]]]]{ val mirror: rootMirror.type }])
+ val perms = tasks.permutations.toList
+ val diceRolls = List.fill(n)(rng.nextInt(perms.length))
+ val threads = (1 to n) map (i => new Thread(s"Reflector-$i") {
+ override def run(): Unit = {
+ val result = perms(diceRolls(i - 1)).map(_())
+ assert(result.sorted == List(false, false, true, true))
+ }
+ })
+ threads foreach (_.start)
+}
\ No newline at end of file
diff --git a/test/files/disabled/run/t4602.scala b/test/disabled/run/t4602.scala
index 655c350497..655c350497 100644
--- a/test/files/disabled/run/t4602.scala
+++ b/test/disabled/run/t4602.scala
diff --git a/test/files/continuations-run/z1673.check b/test/files/continuations-run/z1673.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/continuations-run/z1673.check
+++ /dev/null
diff --git a/test/files/filters b/test/files/filters
index 9a9b439784..70d7dcff70 100644
--- a/test/files/filters
+++ b/test/files/filters
@@ -1,3 +1,6 @@
#
#Java HotSpot(TM) 64-Bit Server VM warning: Failed to reserve shared memory (errno = 28).
Java HotSpot\(TM\) .* warning:
+# Hotspot receiving VM options through the $_JAVA_OPTIONS
+# env variable outputs them on stderr
+Picked up _JAVA_OPTIONS:
diff --git a/test/files/instrumented/inline-in-constructors.flags b/test/files/instrumented/inline-in-constructors.flags
index c9b68d70dc..068318e8ac 100644
--- a/test/files/instrumented/inline-in-constructors.flags
+++ b/test/files/instrumented/inline-in-constructors.flags
@@ -1 +1 @@
--optimise
+-optimise -Ydelambdafy:inline
diff --git a/test/files/jvm/deprecation.check b/test/files/jvm/deprecation.check
index e263e4909d..d116778d3f 100644
--- a/test/files/jvm/deprecation.check
+++ b/test/files/jvm/deprecation.check
@@ -1,3 +1,3 @@
-warning: there were 5 deprecation warning(s); re-run with -deprecation for details
+warning: there were 4 deprecation warning(s); re-run with -deprecation for details
Note: deprecation/Use_2.java uses or overrides a deprecated API.
Note: Recompile with -Xlint:deprecation for details.
diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala
index 5d213691df..1595b2c862 100644
--- a/test/files/jvm/future-spec/FutureTests.scala
+++ b/test/files/jvm/future-spec/FutureTests.scala
@@ -10,7 +10,7 @@ import scala.util.{Try,Success,Failure}
-object FutureTests extends MinimalScalaTest {
+class FutureTests extends MinimalScalaTest {
/* some utils */
diff --git a/test/files/jvm/future-spec/PromiseTests.scala b/test/files/jvm/future-spec/PromiseTests.scala
index 6e613bf3ec..49350586b8 100644
--- a/test/files/jvm/future-spec/PromiseTests.scala
+++ b/test/files/jvm/future-spec/PromiseTests.scala
@@ -9,7 +9,7 @@ import scala.runtime.NonLocalReturnControl
import scala.util.{Try,Success,Failure}
-object PromiseTests extends MinimalScalaTest {
+class PromiseTests extends MinimalScalaTest {
import ExecutionContext.Implicits._
val defaultTimeout = Inf
diff --git a/test/files/jvm/future-spec/TryTests.scala b/test/files/jvm/future-spec/TryTests.scala
index 5d1b9b84b4..01bb3c9d36 100644
--- a/test/files/jvm/future-spec/TryTests.scala
+++ b/test/files/jvm/future-spec/TryTests.scala
@@ -5,7 +5,7 @@
import scala.util.{Try,Success,Failure}
-object TryTests extends MinimalScalaTest {
+class TryTests extends MinimalScalaTest {
class MyException extends Exception
val e = new Exception("this is an exception")
diff --git a/test/files/jvm/future-spec/main.scala b/test/files/jvm/future-spec/main.scala
index 132263e2e8..697d0fe91f 100644
--- a/test/files/jvm/future-spec/main.scala
+++ b/test/files/jvm/future-spec/main.scala
@@ -10,9 +10,9 @@ import java.util.concurrent.{ TimeoutException, CountDownLatch, TimeUnit }
object Test {
def main(args: Array[String]) {
- FutureTests.check()
- PromiseTests.check()
- TryTests.check()
+ (new FutureTests).check()
+ (new PromiseTests).check()
+ (new TryTests).check()
}
}
diff --git a/test/files/jvm/t1116.check b/test/files/jvm/t1116.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/jvm/t1116.check
+++ /dev/null
diff --git a/test/files/jvm/t1143.check b/test/files/jvm/t1143.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/jvm/t1143.check
+++ /dev/null
diff --git a/test/files/jvm/t1948.check b/test/files/jvm/t1948.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/jvm/t1948.check
+++ /dev/null
diff --git a/test/files/jvm/t2104.check b/test/files/jvm/t2104.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/jvm/t2104.check
+++ /dev/null
diff --git a/test/files/jvm/t2570.check b/test/files/jvm/t2570.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/jvm/t2570.check
+++ /dev/null
diff --git a/test/files/jvm/t2585.check b/test/files/jvm/t2585.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/jvm/t2585.check
+++ /dev/null
diff --git a/test/files/jvm/t680.check b/test/files/jvm/t680.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/jvm/t680.check
+++ /dev/null
diff --git a/test/files/jvm/t7006.check b/test/files/jvm/t7006.check
index 7c99eba30c..6294b14d62 100644
--- a/test/files/jvm/t7006.check
+++ b/test/files/jvm/t7006.check
@@ -19,6 +19,7 @@
[running phase flatten on Foo_1.scala]
[running phase mixin on Foo_1.scala]
[running phase cleanup on Foo_1.scala]
+[running phase delambdafy on Foo_1.scala]
[running phase icode on Foo_1.scala]
[running phase inliner on Foo_1.scala]
[running phase inlinehandlers on Foo_1.scala]
diff --git a/test/files/neg/delambdafy_t6260_method.check b/test/files/neg/delambdafy_t6260_method.check
new file mode 100644
index 0000000000..f5cd6947d1
--- /dev/null
+++ b/test/files/neg/delambdafy_t6260_method.check
@@ -0,0 +1,13 @@
+delambdafy_t6260_method.scala:3: error: bridge generated for member method apply: (bx: Object)Object in class map$extension1
+which overrides method apply: (v1: Object)Object in trait Function1
+clashes with definition of the member itself;
+both have erased type (bx: Object)Object
+ ((bx: Box[X]) => new Box(f(bx.x)))(this)
+ ^
+delambdafy_t6260_method.scala:8: error: bridge generated for member method apply: (bx: Object)Object in class map21
+which overrides method apply: (v1: Object)Object in trait Function1
+clashes with definition of the member itself;
+both have erased type (bx: Object)Object
+ ((bx: Box[X]) => new Box(f(bx.x)))(self)
+ ^
+two errors found
diff --git a/test/files/neg/delambdafy_t6260_method.flags b/test/files/neg/delambdafy_t6260_method.flags
new file mode 100644
index 0000000000..48b438ddf8
--- /dev/null
+++ b/test/files/neg/delambdafy_t6260_method.flags
@@ -0,0 +1 @@
+-Ydelambdafy:method
diff --git a/test/files/neg/delambdafy_t6260_method.scala b/test/files/neg/delambdafy_t6260_method.scala
new file mode 100644
index 0000000000..93b5448227
--- /dev/null
+++ b/test/files/neg/delambdafy_t6260_method.scala
@@ -0,0 +1,17 @@
+class Box[X](val x: X) extends AnyVal {
+ def map[Y](f: X => Y): Box[Y] =
+ ((bx: Box[X]) => new Box(f(bx.x)))(this)
+}
+
+object Test {
+ def map2[X, Y](self: Box[X], f: X => Y): Box[Y] =
+ ((bx: Box[X]) => new Box(f(bx.x)))(self)
+
+ def main(args: Array[String]) {
+ val f = (x: Int) => x + 1
+ val g = (x: String) => x + x
+
+ map2(new Box(42), f)
+ new Box("abc") map g
+ }
+}
diff --git a/test/files/neg/divergent-implicit.check b/test/files/neg/divergent-implicit.check
index 5f20df1b91..60d876409f 100644
--- a/test/files/neg/divergent-implicit.check
+++ b/test/files/neg/divergent-implicit.check
@@ -3,16 +3,14 @@ divergent-implicit.scala:4: error: type mismatch;
required: String
val x1: String = 1
^
-divergent-implicit.scala:5: error: diverging implicit expansion for type Int => String
-starting with method cast in object Test1
- val x2: String = cast[Int, String](1)
- ^
-divergent-implicit.scala:14: error: diverging implicit expansion for type Test2.Baz => Test2.Bar
-starting with method baz2bar in object Test2
+divergent-implicit.scala:14: error: type mismatch;
+ found : Test2.Foo
+ required: Test2.Bar
val x: Bar = new Foo
^
-divergent-implicit.scala:15: error: diverging implicit expansion for type Test2.Foo => Test2.Bar
-starting with method foo2bar in object Test2
+divergent-implicit.scala:15: error: type mismatch;
+ found : Test2.Baz
+ required: Test2.Bar
val y: Bar = new Baz
^
-four errors found
+three errors found
diff --git a/test/files/neg/macro-basic-mamdmi.check b/test/files/neg/macro-basic-mamdmi.check
index 9328fbd51c..61df5131cc 100644
--- a/test/files/neg/macro-basic-mamdmi.check
+++ b/test/files/neg/macro-basic-mamdmi.check
@@ -1,4 +1,4 @@
-Impls_Macros_Test_1.scala:36: error: macro implementation not found: quux
+Impls_Macros_Test_1.scala:33: error: macro implementation not found: quux
(the most common reason for that is that you cannot use macro implementations in the same compilation run that defines them)
println(foo(2) + Macros.bar(2) * new Macros().quux(4))
^
diff --git a/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala b/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala
index f9e0ca5077..97780ef503 100644
--- a/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala
+++ b/test/files/neg/macro-basic-mamdmi/Impls_Macros_Test_1.scala
@@ -3,20 +3,17 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1))))
- c.Expr[Int](body)
+ c.Expr[Int](q"$x + 1")
}
def bar(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(2))))
- c.Expr[Int](body)
+ c.Expr[Int](q"$x + 2")
}
def quux(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- val body = Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(3))))
- c.Expr[Int](body)
+ c.Expr[Int](q"$x + 3")
}
}
diff --git a/test/files/neg/macro-bundle-object.check b/test/files/neg/macro-bundle-object.check
index e122001427..8c19271b51 100644
--- a/test/files/neg/macro-bundle-object.check
+++ b/test/files/neg/macro-bundle-object.check
@@ -1,5 +1,6 @@
macro-bundle-object.scala:11: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ or : (c: scala.reflect.macros.Context): c.Tree
found : : Nothing
number of parameter sections differ
def foo = macro Bundle.impl
diff --git a/test/files/neg/macro-divergence-controlled/Impls_Macros_1.scala b/test/files/neg/macro-divergence-controlled/Impls_Macros_1.scala
index 59acaede65..3983f590dc 100644
--- a/test/files/neg/macro-divergence-controlled/Impls_Macros_1.scala
+++ b/test/files/neg/macro-divergence-controlled/Impls_Macros_1.scala
@@ -15,7 +15,7 @@ object Complex {
val recur = c.inferImplicitValue(trecur, silent = true)
if (recur == EmptyTree) c.abort(c.enclosingPosition, s"couldn't synthesize $trecur")
}
- c.literalNull
+ c.Expr[Null](q"null")
}
implicit object ComplexString extends Complex[String]
diff --git a/test/files/neg/macro-invalidimpl.check b/test/files/neg/macro-invalidimpl.check
index aaf4f88fc2..e39cc8105b 100644
--- a/test/files/neg/macro-invalidimpl.check
+++ b/test/files/neg/macro-invalidimpl.check
@@ -32,12 +32,14 @@ match expected type ?
^
Macros_Test_2.scala:31: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context): c.Expr[Unit]
+ or : (c: scala.reflect.macros.Context): c.Tree
found : (c: scala.reflect.macros.Context)(): c.Expr[Unit]
number of parameter sections differ
def foo1 = macro Impls6.fooEmpty
^
Macros_Test_2.scala:32: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context)(): c.Expr[Unit]
+ or : (c: scala.reflect.macros.Context)(): c.Tree
found : (c: scala.reflect.macros.Context): c.Expr[Unit]
number of parameter sections differ
def bar1() = macro Impls6.fooNullary
diff --git a/test/files/neg/macro-invalidimpl/Impls_1.scala b/test/files/neg/macro-invalidimpl/Impls_1.scala
index cf78ecc65a..9f48ab7ad9 100644
--- a/test/files/neg/macro-invalidimpl/Impls_1.scala
+++ b/test/files/neg/macro-invalidimpl/Impls_1.scala
@@ -22,8 +22,7 @@ object Impls5 {
object Impls6 {
def fooNullary(c: Context) = {
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works"))))
- c.Expr[Unit](body)
+ c.Expr[Unit](q"""Predef.println("it works")""")
}
def fooEmpty(c: Context)() = fooNullary(c)
diff --git a/test/files/neg/macro-invalidret.check b/test/files/neg/macro-invalidret.check
index 8c6ed4eb45..6cf62c292b 100644
--- a/test/files/neg/macro-invalidret.check
+++ b/test/files/neg/macro-invalidret.check
@@ -1,11 +1,13 @@
Macros_Test_2.scala:2: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ or : (c: scala.reflect.macros.Context): c.Tree
found : (c: scala.reflect.macros.Context): Int
type mismatch for return type: Int does not conform to c.Expr[Any]
def foo1 = macro Impls.foo1
^
Macros_Test_2.scala:3: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ or : (c: scala.reflect.macros.Context): c.Tree
found : (c: scala.reflect.macros.Context): reflect.runtime.universe.Literal
type mismatch for return type: reflect.runtime.universe.Literal does not conform to c.Expr[Any]
def foo2 = macro Impls.foo2
diff --git a/test/files/neg/macro-invalidshape/Macros_Test_2.scala b/test/files/neg/macro-invalidshape/Macros_Test_2.scala
index cf37e14d8e..819844b9f1 100644
--- a/test/files/neg/macro-invalidshape/Macros_Test_2.scala
+++ b/test/files/neg/macro-invalidshape/Macros_Test_2.scala
@@ -3,7 +3,7 @@ object Macros {
def foo2(x: Any) = macro Impls.foo(null)(null)
def foo3(x: Any) = macro {2; Impls.foo}
{
- def impl(c: scala.reflect.macros.Context) = c.literalUnit
+ def impl(c: scala.reflect.macros.Context) = { import c.universe._; c.Expr[Unit](q"()") }
def foo = macro impl
foo
}
diff --git a/test/files/neg/macro-invalidsig-params-badtype.check b/test/files/neg/macro-invalidsig-params-badtype.check
index 3cc1c9abf1..86aa08291f 100644
--- a/test/files/neg/macro-invalidsig-params-badtype.check
+++ b/test/files/neg/macro-invalidsig-params-badtype.check
@@ -1,5 +1,6 @@
Impls_Macros_1.scala:8: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Any]
+ or : (c: scala.reflect.macros.Context)(x: c.Tree): c.Tree
found : (c: scala.reflect.macros.Context)(x: Int): Nothing
type mismatch for parameter x: c.Expr[Int] does not conform to Int
def foo(x: Int) = macro Impls.foo
diff --git a/test/files/neg/macro-invalidsig.check b/test/files/neg/macro-invalidsig.check
index cbdaf51081..732380d4b3 100644
--- a/test/files/neg/macro-invalidsig.check
+++ b/test/files/neg/macro-invalidsig.check
@@ -1,65 +1,76 @@
Macros_Test_2.scala:2: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ or : (c: scala.reflect.macros.Context): c.Tree
found : (c: scala.reflect.macros.Context)(implicit evidence$2: Numeric[U]): c.universe.Literal
macro implementations cannot have implicit parameters other than WeakTypeTag evidences
def foo[U] = macro Impls1.foo[U]
^
Macros_Test_2.scala:6: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ or : (c: scala.reflect.macros.Context): c.Tree
found : : Nothing
number of parameter sections differ
def foo = macro Impls2.foo
^
Macros_Test_2.scala:10: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ or : (c: scala.reflect.macros.Context): c.Tree
found : (c: scala.reflect.api.Universe): Nothing
type mismatch for parameter c: scala.reflect.macros.Context does not conform to scala.reflect.api.Universe
def foo = macro Impls3.foo
^
Macros_Test_2.scala:14: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ or : (c: scala.reflect.macros.Context): c.Tree
found : (cs: scala.reflect.macros.Context*): Nothing
types incompatible for parameter cs: corresponding is not a vararg parameter
def foo = macro Impls4.foo
^
Macros_Test_2.scala:18: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context)(x: c.Expr[Any]): c.Expr[Any]
+ or : (c: scala.reflect.macros.Context)(x: c.Tree): c.Tree
found : (c: scala.reflect.macros.Context): Nothing
number of parameter sections differ
def foo(x: Any) = macro Impls5.foo
^
Macros_Test_2.scala:22: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Unit]
+ or : (c: scala.reflect.macros.Context)(x: c.Tree): c.Tree
found : (c: scala.reflect.macros.Context)(implicit x: c.Expr[Int]): c.Expr[Unit]
macro implementations cannot have implicit parameters other than WeakTypeTag evidences
def foo[U](x: Int) = macro Impls6.foo[T, U]
^
Macros_Test_2.scala:26: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Any]
+ or : (c: scala.reflect.macros.Context)(x: c.Tree): c.Tree
found : (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): Nothing
parameter lists have different length, found extra parameter y: c.Expr[Int]
def foo(x: Int) = macro Impls7.foo
^
Macros_Test_2.scala:30: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context)(x: c.Expr[Int]): c.Expr[Any]
+ or : (c: scala.reflect.macros.Context)(x: c.Tree): c.Tree
found : (c: scala.reflect.macros.Context)(x: c.universe.Symbol): Nothing
type mismatch for parameter x: c.Expr[Int] does not conform to c.universe.Symbol
def foo(x: Int) = macro Impls8.foo
^
Macros_Test_2.scala:34: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Any]
+ or : (c: scala.reflect.macros.Context)(x: c.Tree, y: c.Tree): c.Tree
found : (c: scala.reflect.macros.Context)(xs: c.Expr[Int]*): Nothing
parameter lists have different length, required extra parameter y: c.Expr[Int]
def foo(x: Int, y: Int) = macro Impls9.foo
^
Macros_Test_2.scala:38: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context)(x: c.Expr[Int], y: c.Expr[Int]): c.Expr[Any]
+ or : (c: scala.reflect.macros.Context)(x: c.Tree, y: c.Tree): c.Tree
found : (c: scala.reflect.macros.Context)(y: c.Expr[Int], x: c.Expr[Int]): Nothing
parameter names differ: x != y
def foo(x: Int, y: Int) = macro Impls10.foo
^
Macros_Test_2.scala:42: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context): c.Expr[Any]
+ or : (c: scala.reflect.macros.Context): c.Tree
found : (c: scala.reflect.macros.Context)(U: c.universe.Type): Nothing
number of parameter sections differ
def foo[U] = macro Impls11.foo[U]
diff --git a/test/files/neg/macro-invalidsig/Impls_1.scala b/test/files/neg/macro-invalidsig/Impls_1.scala
index e7d6c18f8d..d16ed26386 100644
--- a/test/files/neg/macro-invalidsig/Impls_1.scala
+++ b/test/files/neg/macro-invalidsig/Impls_1.scala
@@ -2,10 +2,7 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.Context
object Impls1 {
- def foo[U: c.WeakTypeTag: Numeric](c: Context) = {
- import c.universe._
- Literal(Constant(42))
- }
+ def foo[U: c.WeakTypeTag: Numeric](c: Context) = { import c.universe._; q"42" }
}
object Impls2 {
@@ -28,12 +25,11 @@ object Impls6 {
def foo[T, U: c.WeakTypeTag](c: Context)(implicit x: c.Expr[Int]) = {
import c.{prefix => prefix}
import c.universe._
- val body = Block(List(
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("invoking foo_targs...")))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("type of prefix is: " + prefix.staticType)))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("U is: " + implicitly[c.WeakTypeTag[U]].tpe))))),
- Literal(Constant(())))
- c.Expr[Unit](body)
+ c.Expr[Unit](q"""
+ println("invoking foo_targs...")
+ println("type of prefix is: " + ${prefix.staticType.toString})
+ println("U is: " + ${implicitly[c.WeakTypeTag[U]].tpe.toString})
+ """)
}
}
@@ -71,18 +67,20 @@ object Impls14 {
object Impls15 {
def foo[T: c.WeakTypeTag, U: c.WeakTypeTag, V](c: Context)(implicit V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+ import c.universe._
println(implicitly[c.WeakTypeTag[T]])
println(implicitly[c.WeakTypeTag[U]])
println(V)
- c.literalUnit
+ c.Expr[Unit](q"()")
}
}
object Impls16 {
def foo[T: c.WeakTypeTag, U: c.WeakTypeTag, V](c: Context)(implicit V: c.WeakTypeTag[V]): c.Expr[Unit] = {
+ import c.universe._
println(implicitly[c.WeakTypeTag[T]])
println(implicitly[c.WeakTypeTag[U]])
println(V)
- c.literalUnit
+ c.Expr[Unit](q"()")
}
}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-badbounds/Impls_1.scala b/test/files/neg/macro-invalidusage-badbounds/Impls_1.scala
index 6ee71a3628..74c163596a 100644
--- a/test/files/neg/macro-invalidusage-badbounds/Impls_1.scala
+++ b/test/files/neg/macro-invalidusage-badbounds/Impls_1.scala
@@ -1,5 +1,5 @@
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo[U <: String](c: Ctx) = c.literalUnit
+ def foo[U <: String](c: Ctx) = { import c.universe._; c.Expr[Unit](q"()") }
}
diff --git a/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala b/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala
index 498bd4f18d..11b6a8c3b0 100644
--- a/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala
+++ b/test/files/neg/macro-invalidusage-methodvaluesyntax/Impls_1.scala
@@ -3,7 +3,6 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx) = {
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("it works"))))
- c.Expr[Unit](body)
+ c.Expr[Unit](q"""println("it works")""")
}
}
\ No newline at end of file
diff --git a/test/files/neg/macro-invalidusage-presuper/Impls_1.scala b/test/files/neg/macro-invalidusage-presuper/Impls_1.scala
index b39a037c47..c4b57233c9 100644
--- a/test/files/neg/macro-invalidusage-presuper/Impls_1.scala
+++ b/test/files/neg/macro-invalidusage-presuper/Impls_1.scala
@@ -1,5 +1,5 @@
import scala.reflect.macros.Context
object Impls {
- def impl(c: Context) = c.literalUnit
+ def impl(c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
}
\ No newline at end of file
diff --git a/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala b/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala
index 69ef57d18d..64a9299ee6 100644
--- a/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala
+++ b/test/files/neg/macro-override-method-overrides-macro/Impls_1.scala
@@ -4,8 +4,7 @@ object Impls {
def impl(c: Ctx)(tag: String, x: c.Expr[_]) = {
import c.{prefix => prefix}
import c.universe._
- val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(tag)), Literal(Constant(prefix.toString)), x.tree))
- c.Expr[Unit](body)
+ c.Expr[Unit](q"println($tag, ${prefix.toString}, $x)")
}
def fooBString(c: Ctx)(x: c.Expr[_]) = impl(c)("fooBString", x)
diff --git a/test/files/neg/macro-quasiquotes.check b/test/files/neg/macro-quasiquotes.check
index 96ef75dd32..d000bb5316 100644
--- a/test/files/neg/macro-quasiquotes.check
+++ b/test/files/neg/macro-quasiquotes.check
@@ -1,6 +1,7 @@
Macros_1.scala:14: error: macro implementation has wrong shape:
required: (x: Impls.this.c.Expr[Int]): Impls.this.c.Expr[Any]
- found : (x: Impls.this.c.universe.Block): Impls.this.c.universe.Tree
+ or : (x: Impls.this.c.Tree): Impls.this.c.Tree
+ found : (x: Impls.this.c.universe.Block): Impls.this.c.Tree
type mismatch for parameter x: Impls.this.c.Expr[Int] does not conform to Impls.this.c.universe.Block
def m3(x: Int) = macro Impls.impl3
^
diff --git a/test/files/neg/macro-without-xmacros-a/Impls_1.scala b/test/files/neg/macro-without-xmacros-a/Impls_1.scala
index c6677c4fde..868616aace 100644
--- a/test/files/neg/macro-without-xmacros-a/Impls_1.scala
+++ b/test/files/neg/macro-without-xmacros-a/Impls_1.scala
@@ -3,16 +3,16 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- c.Expr(Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1)))))
+ c.Expr(q"$x + 1")
}
def bar_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- c.Expr(Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(2)))))
+ c.Expr(q"$x + 2")
}
def quux_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- c.Expr(Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(3)))))
+ c.Expr(q"$x + 3")
}
}
\ No newline at end of file
diff --git a/test/files/neg/macro-without-xmacros-b/Impls_1.scala b/test/files/neg/macro-without-xmacros-b/Impls_1.scala
index c6677c4fde..868616aace 100644
--- a/test/files/neg/macro-without-xmacros-b/Impls_1.scala
+++ b/test/files/neg/macro-without-xmacros-b/Impls_1.scala
@@ -3,16 +3,16 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- c.Expr(Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(1)))))
+ c.Expr(q"$x + 1")
}
def bar_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- c.Expr(Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(2)))))
+ c.Expr(q"$x + 2")
}
def quux_impl(c: Ctx)(x: c.Expr[Int]): c.Expr[Int] = {
import c.universe._
- c.Expr(Apply(Select(x.tree, TermName("$plus")), List(Literal(Constant(3)))))
+ c.Expr(q"$x + 3")
}
}
\ No newline at end of file
diff --git a/test/files/neg/quasiquotes-syntax-error-position.check b/test/files/neg/quasiquotes-syntax-error-position.check
index 3bd813b1bb..25e5b8d75a 100644
--- a/test/files/neg/quasiquotes-syntax-error-position.check
+++ b/test/files/neg/quasiquotes-syntax-error-position.check
@@ -10,9 +10,9 @@ quasiquotes-syntax-error-position.scala:7: error: '}' expected but end of quote
quasiquotes-syntax-error-position.scala:8: error: '.' expected but splicee found.
q"import $t $t"
^
-quasiquotes-syntax-error-position.scala:9: error: illegal start of definition
+quasiquotes-syntax-error-position.scala:9: error: '{' expected but end of quote found.
q"package p"
- ^
+ ^
quasiquotes-syntax-error-position.scala:10: error: ';' expected but '@' found.
q"foo@$a"
^
diff --git a/test/files/neg/sammy_restrictions.check b/test/files/neg/sammy_restrictions.check
new file mode 100644
index 0000000000..8cc49f9aa9
--- /dev/null
+++ b/test/files/neg/sammy_restrictions.check
@@ -0,0 +1,49 @@
+sammy_restrictions.scala:31: error: type mismatch;
+ found : () => Int
+ required: NoAbstract
+ (() => 0) : NoAbstract
+ ^
+sammy_restrictions.scala:32: error: type mismatch;
+ found : Int => Int
+ required: TwoAbstract
+ ((x: Int) => 0): TwoAbstract
+ ^
+sammy_restrictions.scala:34: error: class type required but DerivedOneAbstract with OneAbstract found
+ ((x: Int) => 0): NonClassType // "class type required". I think we should avoid SAM translation here.
+ ^
+sammy_restrictions.scala:35: error: type mismatch;
+ found : Int => Int
+ required: NoEmptyConstructor
+ ((x: Int) => 0): NoEmptyConstructor
+ ^
+sammy_restrictions.scala:37: error: type mismatch;
+ found : Int => Int
+ required: OneEmptySecondaryConstructor
+ ((x: Int) => 0): OneEmptySecondaryConstructor // derived class must have an empty *primary* to call.
+ ^
+sammy_restrictions.scala:38: error: type mismatch;
+ found : Int => Int
+ required: MultipleConstructorLists
+ ((x: Int) => 0): MultipleConstructorLists
+ ^
+sammy_restrictions.scala:39: error: type mismatch;
+ found : Int => Int
+ required: MultipleMethodLists
+ ((x: Int) => 0): MultipleMethodLists
+ ^
+sammy_restrictions.scala:40: error: type mismatch;
+ found : Int => Int
+ required: ImplicitConstructorParam
+ ((x: Int) => 0): ImplicitConstructorParam
+ ^
+sammy_restrictions.scala:41: error: type mismatch;
+ found : Int => Int
+ required: ImplicitMethodParam
+ ((x: Int) => 0): ImplicitMethodParam
+ ^
+sammy_restrictions.scala:44: error: type mismatch;
+ found : Int => Int
+ required: PolyMethod
+ ((x: Int) => 0): PolyMethod
+ ^
+10 errors found
diff --git a/test/files/neg/sammy_restrictions.flags b/test/files/neg/sammy_restrictions.flags
new file mode 100644
index 0000000000..48fd867160
--- /dev/null
+++ b/test/files/neg/sammy_restrictions.flags
@@ -0,0 +1 @@
+-Xexperimental
diff --git a/test/files/neg/sammy_restrictions.scala b/test/files/neg/sammy_restrictions.scala
new file mode 100644
index 0000000000..5f1a04cd20
--- /dev/null
+++ b/test/files/neg/sammy_restrictions.scala
@@ -0,0 +1,45 @@
+class NoAbstract
+
+class TwoAbstract { def ap(a: Int): Int; def pa(a: Int): Int }
+
+class Base // check that the super class constructor isn't considered.
+class NoEmptyConstructor(a: Int) extends Base { def this(a: String) = this(0); def ap(a: Int): Int }
+
+class OneEmptyConstructor() { def this(a: Int) = this(); def ap(a: Int): Int }
+
+class OneEmptySecondaryConstructor(a: Int) { def this() = this(0); def ap(a: Int): Int }
+
+class MultipleConstructorLists()() { def ap(a: Int): Int }
+
+class MultipleMethodLists()() { def ap(a: Int)(): Int }
+
+class ImplicitConstructorParam()(implicit a: String) { def ap(a: Int): Int }
+
+class ImplicitMethodParam() { def ap(a: Int)(implicit b: String): Int }
+
+class PolyClass[T] { def ap(a: T): T }
+
+class PolyMethod { def ap[T](a: T): T }
+
+class OneAbstract { def ap(a: Any): Any }
+class DerivedOneAbstract extends OneAbstract
+
+object Test {
+ implicit val s: String = ""
+ type NonClassType = DerivedOneAbstract with OneAbstract
+
+ (() => 0) : NoAbstract
+ ((x: Int) => 0): TwoAbstract
+ ((x: Int) => 0): DerivedOneAbstract // okay
+ ((x: Int) => 0): NonClassType // "class type required". I think we should avoid SAM translation here.
+ ((x: Int) => 0): NoEmptyConstructor
+ ((x: Int) => 0): OneEmptyConstructor // okay
+ ((x: Int) => 0): OneEmptySecondaryConstructor // derived class must have an empty *primary* to call.
+ ((x: Int) => 0): MultipleConstructorLists
+ ((x: Int) => 0): MultipleMethodLists
+ ((x: Int) => 0): ImplicitConstructorParam
+ ((x: Int) => 0): ImplicitMethodParam
+
+ ((x: Int) => 0): PolyClass[Int] // okay
+ ((x: Int) => 0): PolyMethod
+}
diff --git a/test/files/neg/t3346b.check b/test/files/neg/t3346b.check
new file mode 100644
index 0000000000..bcde6d90e4
--- /dev/null
+++ b/test/files/neg/t3346b.check
@@ -0,0 +1,4 @@
+t3346b.scala:14: error: could not find implicit value for evidence parameter of type TC[Any]
+ val y = foo(1)
+ ^
+one error found
diff --git a/test/files/neg/t3346b.scala b/test/files/neg/t3346b.scala
new file mode 100644
index 0000000000..8ea8970298
--- /dev/null
+++ b/test/files/neg/t3346b.scala
@@ -0,0 +1,15 @@
+import scala.language.implicitConversions
+
+trait T[X]
+trait U[X]
+trait TC[M[_]]
+
+object Test extends App {
+ def foo[M[_]: TC, A](ma: M[A]) = ()
+ implicit val TCofT: TC[T] = new TC[T] {}
+ implicit def any2T[A](a: A): T[A] = new T[A] {}
+ implicit def any2U[A](a: A): U[A] = new U[A] {}
+
+ val x = foo[T, Int](1)
+ val y = foo(1)
+}
\ No newline at end of file
diff --git a/test/files/neg/t3346c.check b/test/files/neg/t3346c.check
new file mode 100644
index 0000000000..575379d009
--- /dev/null
+++ b/test/files/neg/t3346c.check
@@ -0,0 +1,4 @@
+t3346c.scala:60: error: value bar is not a member of Either[Int,String]
+ eii.bar
+ ^
+one error found
diff --git a/test/files/neg/t3346c.scala b/test/files/neg/t3346c.scala
new file mode 100644
index 0000000000..a5ac166b2d
--- /dev/null
+++ b/test/files/neg/t3346c.scala
@@ -0,0 +1,61 @@
+object Test extends App {
+ //
+ // An attempt to work around SI-2712, foiled by SI-3346
+ //
+ trait TC[M[_]]
+
+ type EitherInt[A] = Either[Int, A]
+
+ implicit object EitherTC extends TC[EitherInt]
+
+ def foo[M[_]: TC, A](ma: M[A]) = ()
+
+ val eii: Either[Int, String] = Right("")
+
+ foo[EitherInt, String](eii)
+
+ // This one needs SI-2712 Higher Order Unification
+ //foo(eii) // not inferred
+
+ // A workaround is to provide a set of implicit conversions that take values
+ // based on type constructors of various shapes, and search for the
+ // type class instances.
+ //
+ // This is the approach taken by scalaz7.
+
+ trait TCValue[M[_], A] {
+ implicit def self: M[A]
+ def M: TC[M]
+
+ // instead of `foo(eii)`, we'll try `eii.foo`
+ def foo[M[_], A] = ()
+ }
+
+
+ implicit def ToTCValue[M[_], A](ma: M[A])(implicit M0: TC[M]) = new TCValue[M, A] {
+ implicit val M = M0
+ val self = ma
+ }
+ implicit def ToTCValueBin1[M[_, _], A, B](ma: M[A, B])(implicit M0: TC[({type λ[α]=M[A, α]})#λ]): TCValue[({type λ[α] = M[A, α]})#λ, B] = new TCValue[({type λ[α]=M[A, α]})#λ, B] {
+ implicit val M = M0
+ val self = ma
+ }
+ implicit def ToTCValueBin2[M[_, _], A, B](ma: M[A, B])(implicit M0: TC[({type λ[α]=M[α, B]})#λ]): TCValue[({type λ[α]=M[α, B]})#λ, A] = new TCValue[({type λ[α]=M[α, B]})#λ, A] {
+ implicit val M = M0
+ val self = ma
+ }
+
+
+ ToTCValueBin1(eii).foo
+
+ // as expected, could not find implicit parameter
+ // ToTCValueBin2(eii).bar
+
+ // error: implicit conversions are not applicable because they are ambiguous, both method ToTCValueBin1 ... and method ToTCValueBin2
+ // annoying!!
+ // https://issues.scala-lang.org/browse/SI-3346
+ //
+ // Works if we remove ToTCValueBin2
+ //
+ eii.bar
+}
diff --git a/test/files/neg/t3346i.check b/test/files/neg/t3346i.check
new file mode 100644
index 0000000000..cc17ab7ce4
--- /dev/null
+++ b/test/files/neg/t3346i.check
@@ -0,0 +1,7 @@
+t3346i.scala:28: error: value a is not a member of Test.A[T]
+ (new A).a
+ ^
+t3346i.scala:29: error: value a is not a member of Test.A[Nothing]
+ (new A[Nothing]).a
+ ^
+two errors found
diff --git a/test/files/neg/t3346i.scala b/test/files/neg/t3346i.scala
new file mode 100644
index 0000000000..9ad2544537
--- /dev/null
+++ b/test/files/neg/t3346i.scala
@@ -0,0 +1,30 @@
+import scala.language.implicitConversions
+
+// the classes involved
+case class Z[U](a: U)
+case class Intermediate[T, U](t: T, u: U)
+class Implicit1[T](b: Implicit2[T])
+class Implicit2[T](c: Implicit3[T])
+class Implicit3[T](/* and so on */)
+
+object Test extends App {
+ // the base conversion
+ implicit def convertToZ[T](a: A[T])(implicit b: Implicit1[T]): Z[A[T]] = Z(a)
+
+ // and the implicit chaining, don't you just love it? :D
+ // implicit1, with one alternative
+ implicit def implicit1[T <: Intermediate[_, _]](implicit b: Implicit2[T]) = new Implicit1[T](b)
+ // implicit2, with two alternatives
+ implicit def implicit2alt1[T <: Intermediate[_ <: String, _]](implicit c: Implicit3[T]) = new Implicit2[T](c)
+ implicit def implicit2alt2[T <: Intermediate[_ <: Double, _]](implicit c: Implicit3[T]) = new Implicit2[T](c)
+ // implicit3, with two alternatives
+ implicit def implicit3alt1[T <: Intermediate[_, _ <: Int]] = new Implicit3[T]()
+ implicit def implicit3alt2[T <: Intermediate[_ <: Double, _ <: AnyRef],X] = new Implicit3[T]()
+
+ // and our targets
+ /** conversion here, with constraints */
+ class A[T]()
+
+ (new A).a
+ (new A[Nothing]).a
+}
diff --git a/test/files/neg/t3871.check b/test/files/neg/t3871.check
new file mode 100644
index 0000000000..b920357ee6
--- /dev/null
+++ b/test/files/neg/t3871.check
@@ -0,0 +1,7 @@
+t3871.scala:4: error: variable foo in class Sub2 cannot be accessed in Sub2
+ Access to protected method foo not permitted because
+ enclosing class Base is not a subclass of
+ class Sub2 where target is defined
+ s.foo = true
+ ^
+one error found
diff --git a/test/files/neg/t3871.scala b/test/files/neg/t3871.scala
new file mode 100644
index 0000000000..fc459867df
--- /dev/null
+++ b/test/files/neg/t3871.scala
@@ -0,0 +1,11 @@
+class Base {
+ def mkNew() = {
+ val s = new Sub2
+ s.foo = true
+ s
+ }
+}
+
+class Sub2 extends Base {
+ protected var foo = false
+}
diff --git a/test/files/neg/t3871b.check b/test/files/neg/t3871b.check
new file mode 100644
index 0000000000..6ab5ddfaf1
--- /dev/null
+++ b/test/files/neg/t3871b.check
@@ -0,0 +1,97 @@
+t3871b.scala:61: error: not found: value protOT
+ protOT // not allowed
+ ^
+t3871b.scala:77: error: method prot in class A cannot be accessed in E.this.A
+ Access to protected method prot not permitted because
+ prefix type E.this.A does not conform to
+ class B in class E where the access take place
+ a.prot // not allowed, prefix type `A` does not conform to `B`
+ ^
+t3871b.scala:79: error: value protT is not a member of E.this.B
+ b.protT // not allowed
+ ^
+t3871b.scala:80: error: value protT is not a member of E.this.C
+ c.protT // not allowed
+ ^
+t3871b.scala:81: error: value protT is not a member of E.this.A
+ a.protT // not allowed
+ ^
+t3871b.scala:91: error: method prot in class A cannot be accessed in E.this.A
+ Access to protected method prot not permitted because
+ prefix type E.this.A does not conform to
+ object B in class E where the access take place
+ a.prot // not allowed
+ ^
+t3871b.scala:93: error: value protT is not a member of E.this.B
+ b.protT // not allowed
+ ^
+t3871b.scala:94: error: value protT is not a member of E.this.C
+ c.protT // not allowed
+ ^
+t3871b.scala:95: error: value protT is not a member of E.this.A
+ a.protT // not allowed
+ ^
+t3871b.scala:102: error: method prot in class A cannot be accessed in E.this.B
+ Access to protected method prot not permitted because
+ enclosing class Z in class E is not a subclass of
+ class A in class E where target is defined
+ b.prot // not allowed
+ ^
+t3871b.scala:103: error: method prot in class A cannot be accessed in E.this.C
+ Access to protected method prot not permitted because
+ enclosing class Z in class E is not a subclass of
+ class A in class E where target is defined
+ c.prot // not allowed
+ ^
+t3871b.scala:104: error: method prot in class A cannot be accessed in E.this.A
+ Access to protected method prot not permitted because
+ enclosing class Z in class E is not a subclass of
+ class A in class E where target is defined
+ a.prot // not allowed
+ ^
+t3871b.scala:109: error: value protT is not a member of E.this.B
+ b.protT // not allowed
+ ^
+t3871b.scala:110: error: value protT is not a member of E.this.C
+ c.protT // not allowed
+ ^
+t3871b.scala:111: error: value protT is not a member of E.this.A
+ a.protT // not allowed
+ ^
+t3871b.scala:120: error: method prot in class A cannot be accessed in Other.this.e.B
+ Access to protected method prot not permitted because
+ enclosing class Other is not a subclass of
+ class A in class E where target is defined
+ b.prot // not allowed
+ ^
+t3871b.scala:121: error: method prot in class A cannot be accessed in Other.this.e.C
+ Access to protected method prot not permitted because
+ enclosing class Other is not a subclass of
+ class A in class E where target is defined
+ c.prot // not allowed
+ ^
+t3871b.scala:122: error: method prot in class A cannot be accessed in Other.this.e.A
+ Access to protected method prot not permitted because
+ enclosing class Other is not a subclass of
+ class A in class E where target is defined
+ a.prot // not allowed
+ ^
+t3871b.scala:123: error: method protE in class A cannot be accessed in Other.this.e.B
+ Access to protected method protE not permitted because
+ enclosing class Other is not a subclass of
+ class A in class E where target is defined
+ b.protE // not allowed
+ ^
+t3871b.scala:124: error: method protE in class A cannot be accessed in Other.this.e.A
+ Access to protected method protE not permitted because
+ enclosing class Other is not a subclass of
+ class A in class E where target is defined
+ a.protE // not allowed
+ ^
+t3871b.scala:125: error: method protE in class A cannot be accessed in Other.this.e.C
+ Access to protected method protE not permitted because
+ enclosing class Other is not a subclass of
+ class A in class E where target is defined
+ c.protE // not allowed
+ ^
+21 errors found
diff --git a/test/files/neg/t3871b.scala b/test/files/neg/t3871b.scala
new file mode 100644
index 0000000000..b490b7789a
--- /dev/null
+++ b/test/files/neg/t3871b.scala
@@ -0,0 +1,127 @@
+/**
+
+The protected modifier applies to class member definitions. Protected members of a class can be accessed from within
+
+ 0a. the companion module of any of those classes
+
+A protected identifier x may be used as a member name in a selection r.x only
+if one of the following applies:
+ 1a. The access is within the template defining the member, or,
+ if a qualification C is given,
+ 1b. inside the package C, or
+ 1c. the class C , or its companion module, or
+ 2. r is one of the reserved words this and super, or
+ 3. r’s type conforms to a type-instance of the class which contains the access.
+
+ 4. A different form of qualification is protected[this]. A member M marked with this
+ modifier is called object-protected; it can be accessed only from within the object
+ in which it is defined. That is, a selection p.M is only legal if the prefix is this
+ or O.this, for some class O enclosing the reference. In addition, the restrictions
+ for unqualified protected apply.
+*/
+
+object E {
+ val e = new E
+ import e._
+ def n(a: A, b: B, c: C) = {
+ b.protE // 1c
+ c.protE // 1c
+ a.protE // 1c
+ A.protOE // 1c
+ }
+}
+
+class E {
+ object A {
+ protected def protO = 2
+ protected[E] def protOE = 3
+ protected[this] def protOT = 3
+ }
+ class A {
+ protected def prot = 2
+ protected[E] def protE = 3
+ protected[this] def protT = 4
+
+ // 1a
+ prot; protE; protT
+ def foo = {prot; protE; protT}
+ new { prot; protE }
+ def this(a: Any) = {this(); prot; protE; protT}
+ object B extends A {
+ A.this.prot
+ A.this.protE
+ A.this.protT
+ }
+
+ import A._
+ // 0a
+ protO
+ // 3
+ protOE
+ protOT // not allowed
+ }
+
+ class B extends A {
+ // 1b
+ this.prot; this.protE;
+ super.prot; super.protE;
+
+ // 4
+ this.protT
+ // 4 !!! "or the super keyword"
+ super.protT
+
+ def n(a: A, b: B, c: C) = {
+ b.prot // 3
+ c.prot // 3
+ a.prot // not allowed, prefix type `A` does not conform to `B`
+
+ b.protT // not allowed
+ c.protT // not allowed
+ a.protT // not allowed
+ }
+ }
+ object B {
+ def n(a: A, b: B, c: C) = {
+ b.prot // 3 !!!
+ c.prot // 3 !!!
+ // Wording of 3 seems insufficient, missing:
+ // "... (if the access is from a class), or
+ // the type instance of companion class (if the access is from a module)"
+ a.prot // not allowed
+
+ b.protT // not allowed
+ c.protT // not allowed
+ a.protT // not allowed
+ }
+ }
+ class C extends B
+
+ class Z {
+ def n(a: A, b: B, c: C) = {
+ b.prot // not allowed
+ c.prot // not allowed
+ a.prot // not allowed
+ b.protE // 2
+ a.protE // 2
+ c.protE // 2
+
+ b.protT // not allowed
+ c.protT // not allowed
+ a.protT // not allowed
+ }
+ }
+}
+
+class Other {
+ val e = new E
+ import e._
+ def n(a: A, b: B, c: C) = {
+ b.prot // not allowed
+ c.prot // not allowed
+ a.prot // not allowed
+ b.protE // not allowed
+ a.protE // not allowed
+ c.protE // not allowed
+ }
+}
diff --git a/test/files/neg/t5578.check b/test/files/neg/t5578.check
index d803adb223..56123d2e0f 100644
--- a/test/files/neg/t5578.check
+++ b/test/files/neg/t5578.check
@@ -1,4 +1,7 @@
-t5578.scala:33: error: No Manifest available for T.
+t5578.scala:33: error: type mismatch;
+ found : NumericOpsExp.this.Plus[T]
+ required: NumericOpsExp.this.Rep[T]
+ (which expands to) NumericOpsExp.this.Exp[T]
def plus[T: Numeric](x: Rep[T], y: Rep[T]): Rep[T] = Plus[T](x,y)
^
one error found
diff --git a/test/files/neg/t5689.check b/test/files/neg/t5689.check
index ad9b79cdcb..8cf0534e77 100644
--- a/test/files/neg/t5689.check
+++ b/test/files/neg/t5689.check
@@ -1,5 +1,6 @@
t5689.scala:4: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context)(i: c.Expr[Double]): c.Expr[String]
+ or : (c: scala.reflect.macros.Context)(i: c.Tree): c.Tree
found : (c: scala.reflect.macros.Context)(i: c.Expr[Double]): c.Expr[Int]
type mismatch for return type: c.Expr[Int] does not conform to c.Expr[String]
def returnsString(i: Double): String = macro returnsIntImpl
diff --git a/test/files/neg/t5845.check b/test/files/neg/t5845.check
deleted file mode 100644
index 8c6100d6de..0000000000
--- a/test/files/neg/t5845.check
+++ /dev/null
@@ -1,7 +0,0 @@
-t5845.scala:9: error: value +++ is not a member of Int
- println(5 +++ 5)
- ^
-t5845.scala:15: error: value +++ is not a member of Int
- println(5 +++ 5)
- ^
-two errors found
diff --git a/test/files/neg/t6123-explaintypes-macros.check b/test/files/neg/t6123-explaintypes-macros.check
index ebcb8069d5..43f8371326 100644
--- a/test/files/neg/t6123-explaintypes-macros.check
+++ b/test/files/neg/t6123-explaintypes-macros.check
@@ -2,6 +2,7 @@ c.universe.Expr[Any]* <: c.universe.Expr[String]*?
false
BadMac_2.scala:6: error: macro implementation has wrong shape:
required: (c: scala.reflect.macros.Context)(format: c.Expr[String], params: c.Expr[Any]*): c.Expr[Unit]
+ or : (c: scala.reflect.macros.Context)(format: c.Tree, params: Tree*): c.Tree
found : (c: scala.reflect.macros.Context)(format: c.Expr[String], params: c.Expr[String]*): c.Expr[Unit]
type mismatch for parameter params: c.Expr[Any]* does not conform to c.Expr[String]*
def printf(format: String, params: Any*): Unit = macro printf_impl
diff --git a/test/files/neg/t6231.flags b/test/files/neg/t6231.flags
new file mode 100644
index 0000000000..ac96850b69
--- /dev/null
+++ b/test/files/neg/t6231.flags
@@ -0,0 +1 @@
+-Ydelambdafy:inline
\ No newline at end of file
diff --git a/test/files/neg/t6260.flags b/test/files/neg/t6260.flags
new file mode 100644
index 0000000000..2349d8294d
--- /dev/null
+++ b/test/files/neg/t6260.flags
@@ -0,0 +1 @@
+-Ydelambdafy:inline
diff --git a/test/files/neg/t6260b.check b/test/files/neg/t6260b.check
new file mode 100644
index 0000000000..3a7e8947aa
--- /dev/null
+++ b/test/files/neg/t6260b.check
@@ -0,0 +1,7 @@
+t6260b.scala:3: error: bridge generated for member method apply: ()X in <$anon: () => X>
+which overrides method apply: ()R in trait Function0
+clashes with definition of the member itself;
+both have erased type ()Object
+class Y { def f = new X("") or new X("") }
+ ^
+one error found
diff --git a/test/files/neg/t6260b.scala b/test/files/neg/t6260b.scala
new file mode 100644
index 0000000000..73e2e58f73
--- /dev/null
+++ b/test/files/neg/t6260b.scala
@@ -0,0 +1,3 @@
+
+class X(val value: Object) extends AnyVal { def or(alt: => X): X = this }
+class Y { def f = new X("") or new X("") }
diff --git a/test/files/neg/t6260c.check b/test/files/neg/t6260c.check
new file mode 100644
index 0000000000..cbbcfd1504
--- /dev/null
+++ b/test/files/neg/t6260c.check
@@ -0,0 +1,7 @@
+t6260c.scala:4: error: bridge generated for member method f: ()Option[A] in class Bar1
+which overrides method f: ()A in class Foo1
+clashes with definition of the member itself;
+both have erased type ()Object
+ class Bar1[A] extends Foo1[Option[A]] { def f(): Option[A] = ??? }
+ ^
+one error found
diff --git a/test/files/neg/t6260c.scala b/test/files/neg/t6260c.scala
new file mode 100644
index 0000000000..02bf152376
--- /dev/null
+++ b/test/files/neg/t6260c.scala
@@ -0,0 +1,4 @@
+final class Option[+A](val value: A) extends AnyVal
+
+abstract class Foo1[A] { def f(): A }
+ class Bar1[A] extends Foo1[Option[A]] { def f(): Option[A] = ??? }
diff --git a/test/files/neg/t6385.check b/test/files/neg/t6385.check
deleted file mode 100644
index 93e51e8927..0000000000
--- a/test/files/neg/t6385.check
+++ /dev/null
@@ -1,7 +0,0 @@
-t6385.scala:12: error: bridge generated for member method x: ()C[T] in class C
-which overrides method x: ()C[T] in trait AA
-clashes with definition of the member itself;
-both have erased type ()Object
- def x = this
- ^
-one error found
diff --git a/test/files/neg/t6446-additional.check b/test/files/neg/t6446-additional.check
index 6dfe072913..a87af2f1e5 100755
--- a/test/files/neg/t6446-additional.check
+++ b/test/files/neg/t6446-additional.check
@@ -12,7 +12,7 @@ superaccessors 6 add super accessors in traits and nested classes
uncurry 10 uncurry, translate function values to anonymous classes
tailcalls 11 replace tail calls by jumps
specialize 12 @specialized-driven class and method specialization
- explicitouter 13 this refs to outer pointers, translate patterns
+ explicitouter 13 this refs to outer pointers
erasure 14 erase types, add interfaces for traits
posterasure 15 clean up erased inline classes
lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs
@@ -21,18 +21,19 @@ superaccessors 6 add super accessors in traits and nested classes
flatten 19 eliminate inner classes
mixin 20 mixin composition
cleanup 21 platform-specific cleanups, generate reflective calls
- icode 22 generate portable intermediate code
+ delambdafy 22 remove lambdas
+ icode 23 generate portable intermediate code
#partest -optimise
- inliner 23 optimization: do inlining
-inlinehandlers 24 optimization: inline exception handlers
- closelim 25 optimization: eliminate uncalled closures
- constopt 26 optimization: optimize null and other constants
- dce 27 optimization: eliminate dead code
- jvm 28 generate JVM bytecode
- ploogin 29 A sample phase that does so many things it's kind of hard...
- terminal 30 the last phase during a compilation run
+ inliner 24 optimization: do inlining
+inlinehandlers 25 optimization: inline exception handlers
+ closelim 26 optimization: eliminate uncalled closures
+ constopt 27 optimization: optimize null and other constants
+ dce 28 optimization: eliminate dead code
+ jvm 29 generate JVM bytecode
+ ploogin 30 A sample phase that does so many things it's kind of hard...
+ terminal 31 the last phase during a compilation run
#partest !-optimise
- jvm 23 generate JVM bytecode
- ploogin 24 A sample phase that does so many things it's kind of hard...
- terminal 25 the last phase during a compilation run
+ jvm 24 generate JVM bytecode
+ ploogin 25 A sample phase that does so many things it's kind of hard...
+ terminal 26 the last phase during a compilation run
#partest
diff --git a/test/files/neg/t6446-missing.check b/test/files/neg/t6446-missing.check
index ba5e30dc05..cd867289c3 100755
--- a/test/files/neg/t6446-missing.check
+++ b/test/files/neg/t6446-missing.check
@@ -13,7 +13,7 @@ superaccessors 6 add super accessors in traits and nested classes
uncurry 10 uncurry, translate function values to anonymous classes
tailcalls 11 replace tail calls by jumps
specialize 12 @specialized-driven class and method specialization
- explicitouter 13 this refs to outer pointers, translate patterns
+ explicitouter 13 this refs to outer pointers
erasure 14 erase types, add interfaces for traits
posterasure 15 clean up erased inline classes
lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs
@@ -22,16 +22,17 @@ superaccessors 6 add super accessors in traits and nested classes
flatten 19 eliminate inner classes
mixin 20 mixin composition
cleanup 21 platform-specific cleanups, generate reflective calls
- icode 22 generate portable intermediate code
+ delambdafy 22 remove lambdas
+ icode 23 generate portable intermediate code
#partest !-optimise
- jvm 23 generate JVM bytecode
- terminal 24 the last phase during a compilation run
+ jvm 24 generate JVM bytecode
+ terminal 25 the last phase during a compilation run
#partest -optimise
- inliner 23 optimization: do inlining
-inlinehandlers 24 optimization: inline exception handlers
- closelim 25 optimization: eliminate uncalled closures
- constopt 26 optimization: optimize null and other constants
- dce 27 optimization: eliminate dead code
- jvm 28 generate JVM bytecode
- terminal 29 the last phase during a compilation run
+ inliner 24 optimization: do inlining
+inlinehandlers 25 optimization: inline exception handlers
+ closelim 26 optimization: eliminate uncalled closures
+ constopt 27 optimization: optimize null and other constants
+ dce 28 optimization: eliminate dead code
+ jvm 29 generate JVM bytecode
+ terminal 30 the last phase during a compilation run
#partest
diff --git a/test/files/neg/t6446-show-phases.check b/test/files/neg/t6446-show-phases.check
index 10a9e08b86..3ae3f96ef2 100644
--- a/test/files/neg/t6446-show-phases.check
+++ b/test/files/neg/t6446-show-phases.check
@@ -12,7 +12,7 @@ superaccessors 6 add super accessors in traits and nested classes
uncurry 10 uncurry, translate function values to anonymous classes
tailcalls 11 replace tail calls by jumps
specialize 12 @specialized-driven class and method specialization
- explicitouter 13 this refs to outer pointers, translate patterns
+ explicitouter 13 this refs to outer pointers
erasure 14 erase types, add interfaces for traits
posterasure 15 clean up erased inline classes
lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs
@@ -21,16 +21,17 @@ superaccessors 6 add super accessors in traits and nested classes
flatten 19 eliminate inner classes
mixin 20 mixin composition
cleanup 21 platform-specific cleanups, generate reflective calls
- icode 22 generate portable intermediate code
+ delambdafy 22 remove lambdas
+ icode 23 generate portable intermediate code
#partest !-optimise
- jvm 23 generate JVM bytecode
- terminal 24 the last phase during a compilation run
+ jvm 24 generate JVM bytecode
+ terminal 25 the last phase during a compilation run
#partest -optimise
- inliner 23 optimization: do inlining
-inlinehandlers 24 optimization: inline exception handlers
- closelim 25 optimization: eliminate uncalled closures
- constopt 26 optimization: optimize null and other constants
- dce 27 optimization: eliminate dead code
- jvm 28 generate JVM bytecode
- terminal 29 the last phase during a compilation run
+ inliner 24 optimization: do inlining
+inlinehandlers 25 optimization: inline exception handlers
+ closelim 26 optimization: eliminate uncalled closures
+ constopt 27 optimization: optimize null and other constants
+ dce 28 optimization: eliminate dead code
+ jvm 29 generate JVM bytecode
+ terminal 30 the last phase during a compilation run
#partest
diff --git a/test/files/neg/t6666.flags b/test/files/neg/t6666.flags
new file mode 100644
index 0000000000..2349d8294d
--- /dev/null
+++ b/test/files/neg/t6666.flags
@@ -0,0 +1 @@
+-Ydelambdafy:inline
diff --git a/test/files/neg/t6666c.flags b/test/files/neg/t6666c.flags
new file mode 100644
index 0000000000..2349d8294d
--- /dev/null
+++ b/test/files/neg/t6666c.flags
@@ -0,0 +1 @@
+-Ydelambdafy:inline
diff --git a/test/files/neg/t6680b.flags b/test/files/neg/t6680b.flags
deleted file mode 100644
index a02d83efad..0000000000
--- a/test/files/neg/t6680b.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xstrict-inference
diff --git a/test/files/neg/t6680c.check b/test/files/neg/t6680c.check
deleted file mode 100644
index 7a749de1f2..0000000000
--- a/test/files/neg/t6680c.check
+++ /dev/null
@@ -1,6 +0,0 @@
-t6680c.scala:15: error: type mismatch;
- found : String("a string!")
- required: S
- res match { case Unfold(s, f) => f("a string!") }
- ^
-one error found
diff --git a/test/files/neg/t6680c.flags b/test/files/neg/t6680c.flags
deleted file mode 100644
index a02d83efad..0000000000
--- a/test/files/neg/t6680c.flags
+++ /dev/null
@@ -1 +0,0 @@
--Xstrict-inference
diff --git a/test/files/neg/t7020.check b/test/files/neg/t7020.check
index f9600ca7fc..76390b243d 100644
--- a/test/files/neg/t7020.check
+++ b/test/files/neg/t7020.check
@@ -1,17 +1,17 @@
t7020.scala:3: warning: match may not be exhaustive.
-It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(??, _), List(_, _)
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(_, _)
List(5) match {
^
t7020.scala:10: warning: match may not be exhaustive.
-It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(??, _), List(_, _)
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(_, _)
List(5) match {
^
t7020.scala:17: warning: match may not be exhaustive.
-It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(??, _), List(_, _)
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(_, _)
List(5) match {
^
t7020.scala:24: warning: match may not be exhaustive.
-It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(??, _), List(_, _)
+It would fail on the following inputs: List((x: Int forSome x not in (1, 2, 4, 5, 6, 7))), List((x: Int forSome x not in (1, 2, 4, 5, 6, 7)), _), List(1, _), List(2, _), List(4, _), List(5, _), List(6, _), List(7, _), List(_, _)
List(5) match {
^
error: No warnings can be incurred under -Xfatal-warnings.
diff --git a/test/files/neg/t7157/Impls_Macros_1.scala b/test/files/neg/t7157/Impls_Macros_1.scala
index 09f423fbab..9069d26e6e 100644
--- a/test/files/neg/t7157/Impls_Macros_1.scala
+++ b/test/files/neg/t7157/Impls_Macros_1.scala
@@ -2,30 +2,30 @@ import scala.reflect.macros.Context
import language.experimental.macros
object Macros {
- def impl1_0_0(c: Context)() = c.literalUnit
- def impl1_1_1(c: Context)(x: c.Expr[Int]) = c.literalUnit
- def impl1_2_2(c: Context)(x: c.Expr[Int], y: c.Expr[Int]) = c.literalUnit
+ def impl1_0_0(c: Context)() = { import c.universe._; c.Expr[Unit](q"()") }
+ def impl1_1_1(c: Context)(x: c.Expr[Int]) = { import c.universe._; c.Expr[Unit](q"()") }
+ def impl1_2_2(c: Context)(x: c.Expr[Int], y: c.Expr[Int]) = { import c.universe._; c.Expr[Unit](q"()") }
def m1_0_0() = macro impl1_0_0
def m1_1_1(x: Int) = macro impl1_1_1
def m1_2_2(x: Int, y: Int) = macro impl1_2_2
- def impl1_0_inf(c: Context)(x: c.Expr[Int]*) = c.literalUnit
- def impl1_1_inf(c: Context)(x: c.Expr[Int], y: c.Expr[Int]*) = c.literalUnit
- def impl1_2_inf(c: Context)(x: c.Expr[Int], y: c.Expr[Int], z: c.Expr[Int]*) = c.literalUnit
+ def impl1_0_inf(c: Context)(x: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"()") }
+ def impl1_1_inf(c: Context)(x: c.Expr[Int], y: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"()") }
+ def impl1_2_inf(c: Context)(x: c.Expr[Int], y: c.Expr[Int], z: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"()") }
def m1_0_inf(x: Int*) = macro impl1_0_inf
def m1_1_inf(x: Int, y: Int*) = macro impl1_1_inf
def m1_2_inf(x: Int, y: Int, z: Int*) = macro impl1_2_inf
- def impl2_0_0(c: Context)()() = c.literalUnit
- def impl2_1_1(c: Context)()(x: c.Expr[Int]) = c.literalUnit
- def impl2_2_2(c: Context)()(x: c.Expr[Int], y: c.Expr[Int]) = c.literalUnit
+ def impl2_0_0(c: Context)()() = { import c.universe._; c.Expr[Unit](q"()") }
+ def impl2_1_1(c: Context)()(x: c.Expr[Int]) = { import c.universe._; c.Expr[Unit](q"()") }
+ def impl2_2_2(c: Context)()(x: c.Expr[Int], y: c.Expr[Int]) = { import c.universe._; c.Expr[Unit](q"()") }
def m2_0_0()() = macro impl2_0_0
def m2_1_1()(x: Int) = macro impl2_1_1
def m2_2_2()(x: Int, y: Int) = macro impl2_2_2
- def impl2_0_inf(c: Context)()(x: c.Expr[Int]*) = c.literalUnit
- def impl2_1_inf(c: Context)()(x: c.Expr[Int], y: c.Expr[Int]*) = c.literalUnit
- def impl2_2_inf(c: Context)()(x: c.Expr[Int], y: c.Expr[Int], z: c.Expr[Int]*) = c.literalUnit
+ def impl2_0_inf(c: Context)()(x: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"()") }
+ def impl2_1_inf(c: Context)()(x: c.Expr[Int], y: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"()") }
+ def impl2_2_inf(c: Context)()(x: c.Expr[Int], y: c.Expr[Int], z: c.Expr[Int]*) = { import c.universe._; c.Expr[Unit](q"()") }
def m2_0_inf()(x: Int*) = macro impl2_0_inf
def m2_1_inf()(x: Int, y: Int*) = macro impl2_1_inf
def m2_2_inf()(x: Int, y: Int, z: Int*) = macro impl2_2_inf
diff --git a/test/files/neg/t7494-no-options.check b/test/files/neg/t7494-no-options.check
index 0bde84c96c..e3316f590a 100644
--- a/test/files/neg/t7494-no-options.check
+++ b/test/files/neg/t7494-no-options.check
@@ -13,7 +13,7 @@ superaccessors 6 add super accessors in traits and nested classes
uncurry 10 uncurry, translate function values to anonymous classes
tailcalls 11 replace tail calls by jumps
specialize 12 @specialized-driven class and method specialization
- explicitouter 13 this refs to outer pointers, translate patterns
+ explicitouter 13 this refs to outer pointers
erasure 14 erase types, add interfaces for traits
posterasure 15 clean up erased inline classes
lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs
@@ -22,18 +22,19 @@ superaccessors 6 add super accessors in traits and nested classes
flatten 19 eliminate inner classes
mixin 20 mixin composition
cleanup 21 platform-specific cleanups, generate reflective calls
- icode 22 generate portable intermediate code
+ delambdafy 22 remove lambdas
+ icode 23 generate portable intermediate code
#partest !-optimise
- jvm 23 generate JVM bytecode
- ploogin 24 A sample phase that does so many things it's kind of hard...
- terminal 25 the last phase during a compilation run
+ jvm 24 generate JVM bytecode
+ ploogin 25 A sample phase that does so many things it's kind of hard...
+ terminal 26 the last phase during a compilation run
#partest -optimise
- inliner 23 optimization: do inlining
-inlinehandlers 24 optimization: inline exception handlers
- closelim 25 optimization: eliminate uncalled closures
- constopt 26 optimization: optimize null and other constants
- dce 27 optimization: eliminate dead code
- jvm 28 generate JVM bytecode
- ploogin 29 A sample phase that does so many things it's kind of hard...
- terminal 30 the last phase during a compilation run
+ inliner 24 optimization: do inlining
+inlinehandlers 25 optimization: inline exception handlers
+ closelim 26 optimization: eliminate uncalled closures
+ constopt 27 optimization: optimize null and other constants
+ dce 28 optimization: eliminate dead code
+ jvm 29 generate JVM bytecode
+ ploogin 30 A sample phase that does so many things it's kind of hard...
+ terminal 31 the last phase during a compilation run
#partest
diff --git a/test/files/neg/t7519-b.check b/test/files/neg/t7519-b.check
new file mode 100644
index 0000000000..08d819eeec
--- /dev/null
+++ b/test/files/neg/t7519-b.check
@@ -0,0 +1,6 @@
+Use_2.scala:6: error: type mismatch;
+ found : String
+ required: Q
+ val x: Q = ex.Mac.mac("asdf")
+ ^
+one error found
diff --git a/test/files/neg/t7519-b/Mac_1.scala b/test/files/neg/t7519-b/Mac_1.scala
new file mode 100644
index 0000000000..55b583d24b
--- /dev/null
+++ b/test/files/neg/t7519-b/Mac_1.scala
@@ -0,0 +1,14 @@
+// get expected error message without package declaration
+package ex
+
+import scala.language.experimental.macros
+import scala.reflect.macros._
+
+object IW {
+ def foo(a: String): String = ???
+}
+object Mac {
+ def mac(s: String): String = macro macImpl
+ def macImpl(c: Context)(s: c.Expr[String]): c.Expr[String] =
+ c.universe.reify(IW.foo(s.splice))
+}
diff --git a/test/files/neg/t7519-b/Use_2.scala b/test/files/neg/t7519-b/Use_2.scala
new file mode 100644
index 0000000000..413e40e25e
--- /dev/null
+++ b/test/files/neg/t7519-b/Use_2.scala
@@ -0,0 +1,8 @@
+trait Q
+trait K
+
+object Use {
+ implicit def cd[T](p: T)(implicit ev: T => K): Q = ???
+ val x: Q = ex.Mac.mac("asdf")
+}
+
diff --git a/test/files/neg/t7519.check b/test/files/neg/t7519.check
index 164d67f595..df54abaa3e 100644
--- a/test/files/neg/t7519.check
+++ b/test/files/neg/t7519.check
@@ -1,7 +1,11 @@
-t7519.scala:5: error: could not find implicit value for parameter nada: Nothing
+t7519.scala:5: error: type mismatch;
+ found : Int(0)
+ required: String
locally(0 : String) // was: "value conversion is not a member of C.this.C"
^
-t7519.scala:15: error: could not find implicit value for parameter nada: Nothing
+t7519.scala:15: error: type mismatch;
+ found : Int(0)
+ required: String
locally(0 : String) // was: "value conversion is not a member of U"
^
two errors found
diff --git a/test/files/neg/t7605-deprecation.check b/test/files/neg/t7605-deprecation.check
new file mode 100644
index 0000000000..9c466c058c
--- /dev/null
+++ b/test/files/neg/t7605-deprecation.check
@@ -0,0 +1,12 @@
+t7605-deprecation.scala:2: warning: Procedure syntax is deprecated. Convert procedure to method by adding `: Unit =`.
+ def this(i: Int) { this() }
+ ^
+t7605-deprecation.scala:3: warning: Procedure syntax is deprecated. Convert procedure to method by adding `: Unit =`.
+ def bar {}
+ ^
+t7605-deprecation.scala:4: warning: Procedure syntax is deprecated. Convert procedure to method by adding `: Unit`.
+ def baz
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/t7605-deprecation.flags b/test/files/neg/t7605-deprecation.flags
new file mode 100644
index 0000000000..0a7cb7d202
--- /dev/null
+++ b/test/files/neg/t7605-deprecation.flags
@@ -0,0 +1 @@
+-deprecation -Xfuture -Xfatal-warnings
diff --git a/test/files/neg/t7605-deprecation.scala b/test/files/neg/t7605-deprecation.scala
new file mode 100644
index 0000000000..4a7dcd26d6
--- /dev/null
+++ b/test/files/neg/t7605-deprecation.scala
@@ -0,0 +1,5 @@
+abstract class Foo {
+ def this(i: Int) { this() }
+ def bar {}
+ def baz
+} \ No newline at end of file
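For context only (not part of the patch), a minimal sketch of the rewrite these new deprecation warnings ask for, with the procedure syntax in the test class above converted to explicit result types; names mirror the test file and are illustrative:

  // hypothetical non-deprecated form of the Foo test class above
  abstract class Foo {
    def this(i: Int) = { this() } // auxiliary constructor gains `=`
    def bar: Unit = {}            // procedure body gains `: Unit =`
    def baz: Unit                 // abstract procedure gains `: Unit`
  }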
diff --git a/test/files/neg/t7694b.check b/test/files/neg/t7694b.check
deleted file mode 100644
index ea3d7736f8..0000000000
--- a/test/files/neg/t7694b.check
+++ /dev/null
@@ -1,7 +0,0 @@
-t7694b.scala:8: error: type arguments [_3,_4] do not conform to trait L's type parameter bounds [A2,B2 <: A2]
- def d = if (true) (null: L[A, A]) else (null: L[B, B])
- ^
-t7694b.scala:9: error: type arguments [_1,_2] do not conform to trait L's type parameter bounds [A2,B2 <: A2]
- val v = if (true) (null: L[A, A]) else (null: L[B, B])
- ^
-two errors found
diff --git a/test/files/neg/t7783.check b/test/files/neg/t7783.check
new file mode 100644
index 0000000000..647cfee121
--- /dev/null
+++ b/test/files/neg/t7783.check
@@ -0,0 +1,18 @@
+t7783.scala:1: warning: type D in object O is deprecated:
+object O { class C; @deprecated("", "") type D = C; def foo: Seq[D] = Nil }
+ ^
+t7783.scala:11: warning: type D in object O is deprecated:
+ type T = O.D
+ ^
+t7783.scala:12: warning: type D in object O is deprecated:
+ locally(null: O.D)
+ ^
+t7783.scala:13: warning: type D in object O is deprecated:
+ val x: O.D = null
+ ^
+t7783.scala:14: warning: type D in object O is deprecated:
+ locally(null.asInstanceOf[O.D])
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+5 warnings found
+one error found
diff --git a/test/files/neg/t7783.flags b/test/files/neg/t7783.flags
new file mode 100644
index 0000000000..d1b831ea87
--- /dev/null
+++ b/test/files/neg/t7783.flags
@@ -0,0 +1 @@
+-deprecation -Xfatal-warnings \ No newline at end of file
diff --git a/test/files/neg/t7783.scala b/test/files/neg/t7783.scala
new file mode 100644
index 0000000000..995b644a09
--- /dev/null
+++ b/test/files/neg/t7783.scala
@@ -0,0 +1,15 @@
+object O { class C; @deprecated("", "") type D = C; def foo: Seq[D] = Nil }
+
+object NoWarn {
+ O.foo // nowarn
+ O.foo +: Nil // nowarn
+ def bar(a: Any, b: Any) = () // nowarn
+ bar(b = O.foo, a = ()) // nowarn
+}
+
+object Warn {
+ type T = O.D
+ locally(null: O.D)
+ val x: O.D = null
+ locally(null.asInstanceOf[O.D])
+}
diff --git a/test/files/neg/xmltruncated6.check b/test/files/neg/xmltruncated6.check
index 6123114560..f638f2f090 100644
--- a/test/files/neg/xmltruncated6.check
+++ b/test/files/neg/xmltruncated6.check
@@ -1,4 +1,4 @@
-xmltruncated6.scala:2: error: ';' expected but eof found.
+xmltruncated6.scala:2: error: in XML literal: expected end of Scala block
val stuff = <a>{ "no closing brace"
^
one error found
diff --git a/test/files/pos/annotated-treecopy.check b/test/files/pos/annotated-treecopy.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/pos/annotated-treecopy.check
+++ /dev/null
diff --git a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
index ecf8916c46..9b7af0c3b8 100644
--- a/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
+++ b/test/files/pos/annotated-treecopy/Impls_Macros_1.scala
@@ -38,10 +38,10 @@ object Macros {
val reifiedExpr = c.Expr[scala.reflect.runtime.universe.Expr[T => U]](reifiedTree)
val template =
c.universe.reify(new (T => U) with TypedFunction {
- override def toString = c.literal(tp+" => "+ttag.tpe+" { "+b1.toString+" } ").splice // DEBUG
+ override def toString = c.Expr[String](q"""${tp+" => "+ttag.tpe+" { "+b1.toString+" } "}""").splice // DEBUG
def tree = reifiedExpr.splice.tree
- val typeIn = c.literal(tp.toString).splice
- val typeOut = c.literal(ttag.tpe.toString).splice
+ val typeIn = c.Expr[String](q"${tp.toString}").splice
+ val typeOut = c.Expr[String](q"${ttag.tpe.toString}").splice
def apply(_arg: T): U = c.Expr[U](b1)(ttag.asInstanceOf[c.WeakTypeTag[U]]).splice
})
val untyped = c.resetLocalAttrs(template.tree)
diff --git a/test/files/pos/attachments-typed-another-ident.check b/test/files/pos/attachments-typed-another-ident.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/pos/attachments-typed-another-ident.check
+++ /dev/null
diff --git a/test/files/pos/attachments-typed-ident.check b/test/files/pos/attachments-typed-ident.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/pos/attachments-typed-ident.check
+++ /dev/null
diff --git a/test/files/pos/delambdafy-lambdalift.scala b/test/files/pos/delambdafy-lambdalift.scala
new file mode 100644
index 0000000000..e9da24ef37
--- /dev/null
+++ b/test/files/pos/delambdafy-lambdalift.scala
@@ -0,0 +1,8 @@
+class LambdaLift {
+
+ def enclosingMethod(capturedArg: Int): Unit = {
+ def innerMethod(x: Int): Int = x + capturedArg
+ val f = (y: Int) => innerMethod(y)
+ }
+
+}
diff --git a/test/files/pos/delambdafy-patterns.scala b/test/files/pos/delambdafy-patterns.scala
new file mode 100644
index 0000000000..95d498629b
--- /dev/null
+++ b/test/files/pos/delambdafy-patterns.scala
@@ -0,0 +1,15 @@
+class DelambdafyPatterns {
+ def bar: Unit = ()
+ def wildcardPatternInTryCatch: Unit => Unit = (x: Unit) =>
+ // patterns in try..catch are preserved so we need to be
+ // careful when it comes to free variable detection
+ // in particular a is _not_ a free variable, also the
+ // `_` identifier has no symbol attached to it
+ try bar catch {
+ case a@(_:java.lang.reflect.InvocationTargetException) =>
+ // refer to a so we trigger a bug where a is considered
+ // to be a free variable for the enclosing lambda
+ val b = a
+ ()
+ }
+}
diff --git a/test/files/pos/macro-qmarkqmarkqmark.check b/test/files/pos/macro-qmarkqmarkqmark.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/pos/macro-qmarkqmarkqmark.check
+++ /dev/null
diff --git a/test/files/pos/t5692a/Macros_1.scala b/test/files/pos/t5692a/Macros_1.scala
index 06b5a3de36..e530713bb0 100644
--- a/test/files/pos/t5692a/Macros_1.scala
+++ b/test/files/pos/t5692a/Macros_1.scala
@@ -1,6 +1,6 @@
import scala.reflect.macros.Context
object Macros {
- def impl[T](c: Context) = c.literalUnit
+ def impl[T](c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
def foo[T] = macro impl[T]
} \ No newline at end of file
diff --git a/test/files/pos/t5692b/Macros_1.scala b/test/files/pos/t5692b/Macros_1.scala
index b28d19f903..45c672cfce 100644
--- a/test/files/pos/t5692b/Macros_1.scala
+++ b/test/files/pos/t5692b/Macros_1.scala
@@ -1,6 +1,6 @@
import scala.reflect.macros.Context
object Macros {
- def impl[T, U](c: Context) = c.literalUnit
+ def impl[T, U](c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
def foo[T, U] = macro impl[T, U]
} \ No newline at end of file
diff --git a/test/files/pos/t5692c.check b/test/files/pos/t5692c.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/pos/t5692c.check
+++ /dev/null
diff --git a/test/files/neg/t5845.scala b/test/files/pos/t5845.scala
index 823c722c14..823c722c14 100644
--- a/test/files/neg/t5845.scala
+++ b/test/files/pos/t5845.scala
diff --git a/test/files/pos/t6260a.scala b/test/files/pos/t6260a.scala
new file mode 100644
index 0000000000..194294e981
--- /dev/null
+++ b/test/files/pos/t6260a.scala
@@ -0,0 +1,15 @@
+final class Option[+A](val value: A) extends AnyVal
+
+// Was: sandbox/test.scala:21: error: bridge generated for member method f: ()Option[A] in class Bar
+// which overrides method f: ()Option[A] in class Foo"
+abstract class Foo[A] { def f(): Option[A] }
+ class Bar[A] extends Foo[A] { def f(): Option[A] = ??? }
+
+// User reported this as erroneous but I couldn't reproduce with 2.10.{0,1,2,3}
+// https://issues.scala-lang.org/browse/SI-6260?focusedCommentId=64764&page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel#comment-64764
+// I suspect he whittled down the example too far.
+class Wrapper(val value: Int) extends AnyVal
+abstract class Test { def check(the: Wrapper): Boolean }
+object T {
+ new Test { def check(the: Wrapper) = true }
+}
diff --git a/test/files/pos/t7461.check b/test/files/pos/t7461.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/pos/t7461.check
+++ /dev/null
diff --git a/test/files/pos/t7461/Macros_1.scala b/test/files/pos/t7461/Macros_1.scala
index 353dec66d7..8621650f77 100644
--- a/test/files/pos/t7461/Macros_1.scala
+++ b/test/files/pos/t7461/Macros_1.scala
@@ -6,7 +6,7 @@ object Macros {
import c.universe._
val wut = c.typeCheck(Select(Literal(Constant(10)), newTermName("$minus")), silent = true)
// println(showRaw(wut, printIds = true, printTypes = true))
- c.literalUnit
+ c.Expr[Unit](q"()")
}
def foo = macro impl
diff --git a/test/files/pos/t7649.scala b/test/files/pos/t7649.scala
index a1b02f63f1..ff3c626fca 100644
--- a/test/files/pos/t7649.scala
+++ b/test/files/pos/t7649.scala
@@ -4,7 +4,7 @@ object Test {
reify {
// The lookup of the implicit WeakTypeTag[Any]
// was triggering an unpositioned tree.
- c.Expr[Any](Literal(Constant(0))).splice
+ c.Expr[Any](q"0").splice
}
import scala.reflect.ClassTag
diff --git a/test/files/pos/t7688.scala b/test/files/pos/t7688.scala
new file mode 100644
index 0000000000..5a846b97e9
--- /dev/null
+++ b/test/files/pos/t7688.scala
@@ -0,0 +1,7 @@
+import scala.reflect.macros._
+
+class A[C <: Context with Singleton](position: C#Position)
+
+object A {
+ def apply(c: Context)(in: c.Tree): A[c.type] = new A(in.pos)
+}
diff --git a/test/files/pos/t7928.scala b/test/files/pos/t7928.scala
new file mode 100644
index 0000000000..d9e29935b3
--- /dev/null
+++ b/test/files/pos/t7928.scala
@@ -0,0 +1,16 @@
+trait OuterTrait {
+ trait InnerTrait {
+ type Element
+ type Collection <: Iterable[Inner.Element]
+ }
+
+ val Inner: InnerTrait
+
+}
+
+object OuterObject extends OuterTrait {
+ object Inner extends InnerTrait {
+ type Element = String
+ override type Collection = Seq[Inner.Element]
+ }
+}
diff --git a/test/files/pos/t7944.scala b/test/files/pos/t7944.scala
new file mode 100644
index 0000000000..2fe2c5866d
--- /dev/null
+++ b/test/files/pos/t7944.scala
@@ -0,0 +1,24 @@
+class M[+A, +B]
+
+object Test {
+ implicit class EitherOps[A, B](self: Either[A, B]) {
+ def disjunction: M[A, B] = null
+ }
+
+ def foo = {
+ val l: Either[Int, Nothing] = Left[Int, Nothing](1)
+
+ var ok = EitherOps(l).disjunction
+
+ val runawayTypeVar = l.disjunction
+
+ // reported bug:
+ // found : M[Int,B]; required: M[Int,Nothing]
+ val assign: M[Int, Nothing] = runawayTypeVar
+
+ // variations on the theme, all failed before similarly.
+ val assign1: M[Int, Nothing] = {val temp = runawayTypeVar; temp}
+ val assign2: M[Int, String] = runawayTypeVar
+ val assign3: M[Int, Nothing] = {val temp = Left(1).disjunction; temp}
+ }
+}
diff --git a/test/files/pos/virtpatmat_anonfun_for.flags b/test/files/pos/virtpatmat_anonfun_for.flags
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/pos/virtpatmat_anonfun_for.flags
+++ /dev/null
diff --git a/test/files/presentation/partial-fun/partial-fun.check b/test/files/presentation/partial-fun/partial-fun.check
deleted file mode 100644
index adceab8280..0000000000
--- a/test/files/presentation/partial-fun/partial-fun.check
+++ /dev/null
@@ -1 +0,0 @@
-reload: PartialFun.scala
diff --git a/test/files/run/dead-code-elimination.check b/test/files/run/dead-code-elimination.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/dead-code-elimination.check
+++ /dev/null
diff --git a/test/files/run/delambdafy-nested-by-name.check b/test/files/run/delambdafy-nested-by-name.check
new file mode 100644
index 0000000000..94954abda4
--- /dev/null
+++ b/test/files/run/delambdafy-nested-by-name.check
@@ -0,0 +1,2 @@
+hello
+world
diff --git a/test/files/run/delambdafy-nested-by-name.scala b/test/files/run/delambdafy-nested-by-name.scala
new file mode 100644
index 0000000000..4498b3308d
--- /dev/null
+++ b/test/files/run/delambdafy-nested-by-name.scala
@@ -0,0 +1,11 @@
+// during development of delayed delambdafication I created a bug where calling a by-name method with a by-name argument that
+// itself contained a by-name argument would cause a ClassCastException. That bug wasn't found in the existing test suite,
+// so this test covers that case.
+object Test {
+ def meth1(arg1: => String) = arg1
+ def meth2(arg2: => String) = meth1({println("hello"); arg2})
+
+ def main(args: Array[String]) {
+ println(meth2("world"))
+ }
+} \ No newline at end of file
diff --git a/test/files/run/delambdafy-two-lambdas.check b/test/files/run/delambdafy-two-lambdas.check
new file mode 100644
index 0000000000..ed9ea404dd
--- /dev/null
+++ b/test/files/run/delambdafy-two-lambdas.check
@@ -0,0 +1,2 @@
+13
+24
diff --git a/test/files/run/delambdafy-two-lambdas.scala b/test/files/run/delambdafy-two-lambdas.scala
new file mode 100644
index 0000000000..decede74a4
--- /dev/null
+++ b/test/files/run/delambdafy-two-lambdas.scala
@@ -0,0 +1,12 @@
+/*
+ * Tests that two lambdas defined in the same class do not lead to
+ * name clashes.
+ */
+object Test {
+ def takeLambda(f: Int => Int ): Int = f(12)
+
+ def main(args: Array[String]): Unit = {
+ println(takeLambda(x => x+1))
+ println(takeLambda(x => x*2))
+ }
+}
diff --git a/test/files/run/delambdafy_t6028.check b/test/files/run/delambdafy_t6028.check
new file mode 100644
index 0000000000..92cfbaefb6
--- /dev/null
+++ b/test/files/run/delambdafy_t6028.check
@@ -0,0 +1,57 @@
+[[syntax trees at end of lambdalift]] // newSource1.scala
+package <empty> {
+ class T extends Object {
+ <paramaccessor> private[this] val classParam: Int = _;
+ def <init>(classParam: Int): T = {
+ T.super.<init>();
+ ()
+ };
+ private[this] val field: Int = 0;
+ <stable> <accessor> def field(): Int = T.this.field;
+ def foo(methodParam: Int): Function0 = {
+ val methodLocal: Int = 0;
+ {
+ (() => T.this.$anonfun$1(methodParam, methodLocal)).$asInstanceOf[Function0]()
+ }
+ };
+ def bar(barParam: Int): Object = {
+ @volatile var MethodLocalObject$module: runtime.VolatileObjectRef = scala.runtime.VolatileObjectRef.zero();
+ T.this.MethodLocalObject$1(barParam, MethodLocalObject$module)
+ };
+ def tryy(tryyParam: Int): Function0 = {
+ var tryyLocal: runtime.IntRef = scala.runtime.IntRef.create(0);
+ {
+ (() => T.this.$anonfun$2(tryyParam, tryyLocal)).$asInstanceOf[Function0]()
+ }
+ };
+ final <artifact> private[this] def $anonfun$1(methodParam$1: Int, methodLocal$1: Int): Int = T.this.classParam.+(T.this.field()).+(methodParam$1).+(methodLocal$1);
+ abstract trait MethodLocalTrait$1 extends Object {
+ <synthetic> <stable> <artifact> def $outer(): T
+ };
+ object MethodLocalObject$2 extends Object with T#MethodLocalTrait$1 {
+ def <init>($outer: T, barParam$1: Int): T#MethodLocalObject$2.type = {
+ MethodLocalObject$2.super.<init>();
+ MethodLocalObject$2.this.$asInstanceOf[T#MethodLocalTrait$1$class]()./*MethodLocalTrait$1$class*/$init$(barParam$1);
+ ()
+ };
+ <synthetic> <paramaccessor> <artifact> private[this] val $outer: T = _;
+ <synthetic> <stable> <artifact> def $outer(): T = MethodLocalObject$2.this.$outer;
+ <synthetic> <stable> <artifact> def $outer(): T = MethodLocalObject$2.this.$outer
+ };
+ final <stable> private[this] def MethodLocalObject$1(barParam$1: Int, MethodLocalObject$module$1: runtime.VolatileObjectRef): T#MethodLocalObject$2.type = {
+ MethodLocalObject$module$1.elem = new T#MethodLocalObject$2.type(T.this, barParam$1);
+ MethodLocalObject$module$1.elem.$asInstanceOf[T#MethodLocalObject$2.type]()
+ };
+ abstract trait MethodLocalTrait$1$class extends Object with T#MethodLocalTrait$1 {
+ def /*MethodLocalTrait$1$class*/$init$(barParam$1: Int): Unit = {
+ ()
+ };
+ scala.this.Predef.print(scala.Int.box(barParam$1))
+ };
+ final <artifact> private[this] def $anonfun$2(tryyParam$1: Int, tryyLocal$1: runtime.IntRef): Unit = try {
+ tryyLocal$1.elem = tryyParam$1
+ } finally ()
+ }
+}
+
+warning: there were 1 feature warning(s); re-run with -feature for details
diff --git a/test/files/run/delambdafy_t6028.scala b/test/files/run/delambdafy_t6028.scala
new file mode 100644
index 0000000000..0b7ef48c3d
--- /dev/null
+++ b/test/files/run/delambdafy_t6028.scala
@@ -0,0 +1,21 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Ydelambdafy:method -Xprint:lambdalift -d " + testOutput.path
+
+ override def code = """class T(classParam: Int) {
+ | val field: Int = 0
+ | def foo(methodParam: Int) = {val methodLocal = 0 ; () => classParam + field + methodParam + methodLocal }
+ | def bar(barParam: Int) = { trait MethodLocalTrait { print(barParam) }; object MethodLocalObject extends MethodLocalTrait; MethodLocalObject }
+ | def tryy(tryyParam: Int) = { var tryyLocal = 0; () => try { tryyLocal = tryyParam } finally () }
+ |}
+ |""".stripMargin.trim
+
+ override def show(): Unit = {
+ Console.withErr(System.out) {
+ compile()
+ }
+ }
+}
diff --git a/test/files/run/delambdafy_t6555.check b/test/files/run/delambdafy_t6555.check
new file mode 100644
index 0000000000..6b174c0d2a
--- /dev/null
+++ b/test/files/run/delambdafy_t6555.check
@@ -0,0 +1,15 @@
+[[syntax trees at end of specialize]] // newSource1.scala
+package <empty> {
+ class Foo extends Object {
+ def <init>(): Foo = {
+ Foo.super.<init>();
+ ()
+ };
+ private[this] val f: Int => Int = {
+ final <artifact> def $anonfun(param: Int): Int = param;
+ ((param: Int) => $anonfun(param))
+ };
+ <stable> <accessor> def f(): Int => Int = Foo.this.f
+ }
+}
+
diff --git a/test/files/run/delambdafy_t6555.scala b/test/files/run/delambdafy_t6555.scala
new file mode 100644
index 0000000000..a1dcfe790c
--- /dev/null
+++ b/test/files/run/delambdafy_t6555.scala
@@ -0,0 +1,15 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Xprint:specialize -Ydelambdafy:method -d " + testOutput.path
+
+ override def code = "class Foo { val f = (param: Int) => param } "
+
+ override def show(): Unit = {
+ Console.withErr(System.out) {
+ compile()
+ }
+ }
+}
diff --git a/test/files/run/delambdafy_uncurry_byname_inline.check b/test/files/run/delambdafy_uncurry_byname_inline.check
new file mode 100644
index 0000000000..0dc69b379a
--- /dev/null
+++ b/test/files/run/delambdafy_uncurry_byname_inline.check
@@ -0,0 +1,21 @@
+[[syntax trees at end of uncurry]] // newSource1.scala
+package <empty> {
+ class Foo extends Object {
+ def <init>(): Foo = {
+ Foo.super.<init>();
+ ()
+ };
+ def bar(x: () => Int): Int = x.apply();
+ def foo(): Int = Foo.this.bar({
+ @SerialVersionUID(0) final <synthetic> class $anonfun extends scala.runtime.AbstractFunction0[Int] with Serializable {
+ def <init>(): <$anon: () => Int> = {
+ $anonfun.super.<init>();
+ ()
+ };
+ final def apply(): Int = 1
+ };
+ (new <$anon: () => Int>(): () => Int)
+ })
+ }
+}
+
diff --git a/test/files/run/delambdafy_uncurry_byname_inline.scala b/test/files/run/delambdafy_uncurry_byname_inline.scala
new file mode 100644
index 0000000000..8f480fa804
--- /dev/null
+++ b/test/files/run/delambdafy_uncurry_byname_inline.scala
@@ -0,0 +1,20 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Xprint:uncurry -Ydelambdafy:inline -d " + testOutput.path
+
+ override def code = """class Foo {
+ | def bar(x: => Int) = x
+ |
+ | def foo = bar(1)
+ |}
+ |""".stripMargin.trim
+
+ override def show(): Unit = {
+ Console.withErr(System.out) {
+ compile()
+ }
+ }
+}
diff --git a/test/files/run/delambdafy_uncurry_byname_method.check b/test/files/run/delambdafy_uncurry_byname_method.check
new file mode 100644
index 0000000000..cd3edc7d6f
--- /dev/null
+++ b/test/files/run/delambdafy_uncurry_byname_method.check
@@ -0,0 +1,15 @@
+[[syntax trees at end of uncurry]] // newSource1.scala
+package <empty> {
+ class Foo extends Object {
+ def <init>(): Foo = {
+ Foo.super.<init>();
+ ()
+ };
+ def bar(x: () => Int): Int = x.apply();
+ def foo(): Int = Foo.this.bar({
+ final <artifact> def $anonfun(): Int = 1;
+ (() => $anonfun())
+ })
+ }
+}
+
diff --git a/test/files/run/delambdafy_uncurry_byname_method.scala b/test/files/run/delambdafy_uncurry_byname_method.scala
new file mode 100644
index 0000000000..1adeec8433
--- /dev/null
+++ b/test/files/run/delambdafy_uncurry_byname_method.scala
@@ -0,0 +1,20 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Xprint:uncurry -Ydelambdafy:method -Ystop-after:uncurry -d " + testOutput.path
+
+ override def code = """class Foo {
+ | def bar(x: => Int) = x
+ |
+ | def foo = bar(1)
+ |}
+ |""".stripMargin.trim
+
+ override def show(): Unit = {
+ Console.withErr(System.out) {
+ compile()
+ }
+ }
+}
diff --git a/test/files/run/delambdafy_uncurry_inline.check b/test/files/run/delambdafy_uncurry_inline.check
new file mode 100644
index 0000000000..e2b024b462
--- /dev/null
+++ b/test/files/run/delambdafy_uncurry_inline.check
@@ -0,0 +1,23 @@
+[[syntax trees at end of uncurry]] // newSource1.scala
+package <empty> {
+ class Foo extends Object {
+ def <init>(): Foo = {
+ Foo.super.<init>();
+ ()
+ };
+ def bar(): Unit = {
+ val f: Int => Int = {
+ @SerialVersionUID(0) final <synthetic> class $anonfun extends scala.runtime.AbstractFunction1[Int,Int] with Serializable {
+ def <init>(): <$anon: Int => Int> = {
+ $anonfun.super.<init>();
+ ()
+ };
+ final def apply(x: Int): Int = x.+(1)
+ };
+ (new <$anon: Int => Int>(): Int => Int)
+ };
+ ()
+ }
+ }
+}
+
diff --git a/test/files/run/delambdafy_uncurry_inline.scala b/test/files/run/delambdafy_uncurry_inline.scala
new file mode 100644
index 0000000000..b42b65f5bb
--- /dev/null
+++ b/test/files/run/delambdafy_uncurry_inline.scala
@@ -0,0 +1,20 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Xprint:uncurry -Ydelambdafy:inline -d " + testOutput.path
+
+ override def code = """class Foo {
+ | def bar = {
+ | val f = {x: Int => x + 1}
+ | }
+ |}
+ |""".stripMargin.trim
+
+ override def show(): Unit = {
+ Console.withErr(System.out) {
+ compile()
+ }
+ }
+}
diff --git a/test/files/run/delambdafy_uncurry_method.check b/test/files/run/delambdafy_uncurry_method.check
new file mode 100644
index 0000000000..5ee3d174b3
--- /dev/null
+++ b/test/files/run/delambdafy_uncurry_method.check
@@ -0,0 +1,17 @@
+[[syntax trees at end of uncurry]] // newSource1.scala
+package <empty> {
+ class Foo extends Object {
+ def <init>(): Foo = {
+ Foo.super.<init>();
+ ()
+ };
+ def bar(): Unit = {
+ val f: Int => Int = {
+ final <artifact> def $anonfun(x: Int): Int = x.+(1);
+ ((x: Int) => $anonfun(x))
+ };
+ ()
+ }
+ }
+}
+
diff --git a/test/files/run/delambdafy_uncurry_method.scala b/test/files/run/delambdafy_uncurry_method.scala
new file mode 100644
index 0000000000..a988fb2ee7
--- /dev/null
+++ b/test/files/run/delambdafy_uncurry_method.scala
@@ -0,0 +1,20 @@
+import scala.tools.partest._
+import java.io.{Console => _, _}
+
+object Test extends DirectTest {
+
+ override def extraSettings: String = "-usejavacp -Xprint:uncurry -Ydelambdafy:method -Ystop-after:uncurry -d " + testOutput.path
+
+ override def code = """class Foo {
+ | def bar = {
+ | val f = {x: Int => x + 1}
+ | }
+ |}
+ |""".stripMargin.trim
+
+ override def show(): Unit = {
+ Console.withErr(System.out) {
+ compile()
+ }
+ }
+}
diff --git a/test/files/run/exoticnames.check b/test/files/run/exoticnames.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/exoticnames.check
+++ /dev/null
diff --git a/test/files/run/intmap.check b/test/files/run/intmap.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/intmap.check
+++ /dev/null
diff --git a/test/files/run/longmap.check b/test/files/run/longmap.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/longmap.check
+++ /dev/null
diff --git a/test/files/run/macro-abort-fresh.check b/test/files/run/macro-abort-fresh.check
index 87491f6e48..9fddee57d4 100644
--- a/test/files/run/macro-abort-fresh.check
+++ b/test/files/run/macro-abort-fresh.check
@@ -1,4 +1,4 @@
-$1$
+fresh$1
qwe1
qwe2
reflective compilation has failed:
diff --git a/test/files/run/macro-bundle-repl.check b/test/files/run/macro-bundle-repl.check
index b9c809f037..c11c48dc55 100644
--- a/test/files/run/macro-bundle-repl.check
+++ b/test/files/run/macro-bundle-repl.check
@@ -7,13 +7,13 @@ import scala.language.experimental.macros
scala> import scala.reflect.macros.Macro
import scala.reflect.macros.Macro
-scala> trait Bar extends Macro { def impl = c.literalUnit };def bar = macro Bar.impl
+scala> trait Bar extends Macro { def impl = { import c.universe._; c.Expr[Unit](q"()") } };def bar = macro Bar.impl
defined trait Bar
defined term macro bar: Unit
scala> bar
-scala> trait Foo extends Macro { def impl = c.literalUnit }
+scala> trait Foo extends Macro { def impl = { import c.universe._; c.Expr[Unit](q"()") } }
defined trait Foo
scala> def foo = macro Foo.impl
diff --git a/test/files/run/macro-bundle-repl.scala b/test/files/run/macro-bundle-repl.scala
index 50783c8cba..3171aaacc2 100644
--- a/test/files/run/macro-bundle-repl.scala
+++ b/test/files/run/macro-bundle-repl.scala
@@ -4,9 +4,9 @@ object Test extends ReplTest {
def code = """
import scala.language.experimental.macros
import scala.reflect.macros.Macro
-trait Bar extends Macro { def impl = c.literalUnit };def bar = macro Bar.impl
+trait Bar extends Macro { def impl = { import c.universe._; c.Expr[Unit](q"()") } };def bar = macro Bar.impl
bar
-trait Foo extends Macro { def impl = c.literalUnit }
+trait Foo extends Macro { def impl = { import c.universe._; c.Expr[Unit](q"()") } }
def foo = macro Foo.impl
foo
"""
diff --git a/test/files/run/macro-bundle-static/Impls_Macros_1.scala b/test/files/run/macro-bundle-static/Impls_Macros_1.scala
index 831dac6df5..e81fd0dbd6 100644
--- a/test/files/run/macro-bundle-static/Impls_Macros_1.scala
+++ b/test/files/run/macro-bundle-static/Impls_Macros_1.scala
@@ -4,8 +4,8 @@ import scala.language.experimental.macros
object Enclosing {
trait Impl extends Macro {
- def mono = c.literalUnit
- def poly[T: c.WeakTypeTag] = c.literal(c.weakTypeOf[T].toString)
+ def mono = { import c.universe._; c.Expr[Unit](q"()") }
+ def poly[T: c.WeakTypeTag] = { import c.universe._; c.Expr[String](q"${c.weakTypeOf[T].toString}") }
def weird = macro mono
}
}
@@ -18,8 +18,8 @@ object Macros {
package pkg {
object Enclosing {
trait Impl extends Macro {
- def mono = c.literalTrue
- def poly[T: c.WeakTypeTag] = c.literal(c.weakTypeOf[T].toString + c.weakTypeOf[T].toString)
+ def mono = { import c.universe._; c.Expr[Boolean](q"true") }
+ def poly[T: c.WeakTypeTag] = { import c.universe._; c.Expr[String](q"${c.weakTypeOf[T].toString + c.weakTypeOf[T].toString}") }
def weird = macro mono
}
}
diff --git a/test/files/run/macro-bundle-toplevel/Impls_Macros_1.scala b/test/files/run/macro-bundle-toplevel/Impls_Macros_1.scala
index 676935682e..8c7df2cdc5 100644
--- a/test/files/run/macro-bundle-toplevel/Impls_Macros_1.scala
+++ b/test/files/run/macro-bundle-toplevel/Impls_Macros_1.scala
@@ -2,8 +2,8 @@ import scala.reflect.macros.Context
import scala.reflect.macros.Macro
trait Impl extends Macro {
- def mono = c.literalUnit
- def poly[T: c.WeakTypeTag] = c.literal(c.weakTypeOf[T].toString)
+ def mono = { import c.universe._; c.Expr[Unit](q"()") }
+ def poly[T: c.WeakTypeTag] = { import c.universe._; c.Expr[String](q"${c.weakTypeOf[T].toString}") }
def weird = macro mono
}
@@ -14,8 +14,8 @@ object Macros {
package pkg {
trait Impl extends Macro {
- def mono = c.literalTrue
- def poly[T: c.WeakTypeTag] = c.literal(c.weakTypeOf[T].toString + c.weakTypeOf[T].toString)
+ def mono = { import c.universe._; c.Expr[Boolean](q"true") }
+ def poly[T: c.WeakTypeTag] = { import c.universe._; c.Expr[String](q"${c.weakTypeOf[T].toString + c.weakTypeOf[T].toString}") }
def weird = macro mono
}
diff --git a/test/files/run/macro-divergence-spurious/Impls_Macros_1.scala b/test/files/run/macro-divergence-spurious/Impls_Macros_1.scala
index bc4a9fded7..53511ebc72 100644
--- a/test/files/run/macro-divergence-spurious/Impls_Macros_1.scala
+++ b/test/files/run/macro-divergence-spurious/Impls_Macros_1.scala
@@ -15,7 +15,7 @@ object Complex {
val recur = c.inferImplicitValue(trecur, silent = true)
if (recur == EmptyTree) c.abort(c.enclosingPosition, s"couldn't synthesize $trecur")
}
- c.literalNull
+ c.Expr[Null](Literal(Constant(null)))
}
implicit object ComplexString extends Complex[String]
diff --git a/test/files/run/macro-enclosures/Impls_Macros_1.scala b/test/files/run/macro-enclosures/Impls_Macros_1.scala
index cd54028676..68f1920cdd 100644
--- a/test/files/run/macro-enclosures/Impls_Macros_1.scala
+++ b/test/files/run/macro-enclosures/Impls_Macros_1.scala
@@ -1,13 +1,16 @@
import scala.reflect.macros.Context
object Macros {
- def impl(c: Context) = c.universe.reify {
- println("enclosingPackage = " + c.literal(c.enclosingPackage.toString).splice)
- println("enclosingClass = " + c.literal(c.enclosingClass.toString).splice)
- println("enclosingImpl = " + c.literal(c.enclosingImpl.toString).splice)
- println("enclosingTemplate = " + c.literal(c.enclosingTemplate.toString).splice)
- println("enclosingMethod = " + c.literal(c.enclosingMethod.toString).splice)
- println("enclosingDef = " + c.literal(c.enclosingDef.toString).splice)
+ def impl(c: Context) = {
+ import c.universe._
+ reify {
+ println("enclosingPackage = " + c.Expr[String](Literal(Constant(c.enclosingPackage.toString))).splice)
+ println("enclosingClass = " + c.Expr[String](Literal(Constant(c.enclosingClass.toString))).splice)
+ println("enclosingImpl = " + c.Expr[String](Literal(Constant(c.enclosingImpl.toString))).splice)
+ println("enclosingTemplate = " + c.Expr[String](Literal(Constant(c.enclosingTemplate.toString))).splice)
+ println("enclosingMethod = " + c.Expr[String](Literal(Constant(c.enclosingMethod.toString))).splice)
+ println("enclosingDef = " + c.Expr[String](Literal(Constant(c.enclosingDef.toString))).splice)
+ }
}
def foo = macro impl
diff --git a/test/files/run/macro-expand-tparams-bounds.check b/test/files/run/macro-expand-tparams-bounds.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/macro-expand-tparams-bounds.check
+++ /dev/null
diff --git a/test/files/run/macro-expand-tparams-bounds/Impls_1.scala b/test/files/run/macro-expand-tparams-bounds/Impls_1.scala
index f9103aaf8f..d63f034e9b 100644
--- a/test/files/run/macro-expand-tparams-bounds/Impls_1.scala
+++ b/test/files/run/macro-expand-tparams-bounds/Impls_1.scala
@@ -1,12 +1,12 @@
import scala.reflect.macros.Context
object Impls1 {
- def foo[U <: String](c: Context): c.Expr[Unit] = c.literalUnit
+ def foo[U <: String](c: Context): c.Expr[Unit] = { import c.universe._; c.Expr[Unit](q"()") }
}
class C
class D extends C
object Impls2 {
- def foo[U <: C](c: Context): c.Expr[Unit] = c.literalUnit
+ def foo[U <: C](c: Context): c.Expr[Unit] = { import c.universe._; c.Expr[Unit](q"()") }
}
diff --git a/test/files/run/macro-expand-tparams-prefix/Impls_1.scala b/test/files/run/macro-expand-tparams-prefix/Impls_1.scala
index e92396d1b4..a98c4abe78 100644
--- a/test/files/run/macro-expand-tparams-prefix/Impls_1.scala
+++ b/test/files/run/macro-expand-tparams-prefix/Impls_1.scala
@@ -5,8 +5,7 @@ object Impls1 {
def foo[U: c.WeakTypeTag](c: Context)(x: c.Expr[U]) = {
import c.universe._
val U = implicitly[c.WeakTypeTag[U]]
- val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(U.toString))))
- c.Expr[Unit](body)
+ c.Expr[Unit](q"println(${U.toString})")
}
}
@@ -16,18 +15,18 @@ object Impls2 {
val T = implicitly[c.WeakTypeTag[T]]
val U = implicitly[c.WeakTypeTag[U]]
val body = Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString + " " + U.toString))))
- c.Expr[Unit](body)
+ c.Expr[Unit](q"""println(${T.toString} + " " + ${U.toString})""")
}
}
object Impls345 {
def foo[T, U: c.WeakTypeTag, V](c: Context)(implicit T: c.WeakTypeTag[T], V: c.WeakTypeTag[V]): c.Expr[Unit] = {
import c.universe._
- c.Expr(Block(List(
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(T.toString)))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(implicitly[c.WeakTypeTag[U]].toString)))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant(V.toString))))),
- Literal(Constant(()))))
+ c.Expr(q"""
+ println(${T.toString})
+ println(${implicitly[c.WeakTypeTag[U]].toString})
+ println(${V.toString})
+ """)
}
}
diff --git a/test/files/run/macro-impl-default-params/Impls_Macros_1.scala b/test/files/run/macro-impl-default-params/Impls_Macros_1.scala
index 95d746980e..043675ec00 100644
--- a/test/files/run/macro-impl-default-params/Impls_Macros_1.scala
+++ b/test/files/run/macro-impl-default-params/Impls_Macros_1.scala
@@ -6,13 +6,12 @@ object Impls {
import c.{prefix => prefix}
import c.universe._
val U = implicitly[c.WeakTypeTag[U]]
- val body = Block(List(
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("invoking foo_targs...")))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("type of prefix is: " + prefix.staticType)))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("type of prefix tree is: " + prefix.tree.tpe)))),
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("U is: " + U.tpe))))),
- Literal(Constant(())))
- c.Expr[Unit](body)
+ c.Expr[Unit](q"""
+ println("invoking foo_targs...")
+ println("type of prefix is: " + ${prefix.staticType.toString})
+ println("type of prefix tree is: " + ${prefix.tree.tpe.toString})
+ println("U is: " + ${U.tpe.toString})
+ """)
}
}
diff --git a/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala b/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala
index 738c88bbc8..5f3bbac719 100644
--- a/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala
+++ b/test/files/run/macro-impl-rename-context/Impls_Macros_1.scala
@@ -3,10 +3,7 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(unconventionalName: Ctx)(x: unconventionalName.Expr[Int]) = {
import unconventionalName.universe._
- val body = Block(List(
- Apply(Select(Ident(definitions.PredefModule), TermName("println")), List(Literal(Constant("invoking foo..."))))),
- Literal(Constant(())))
- unconventionalName.Expr[Unit](body)
+ unconventionalName.Expr[Unit](q"""println("invoking foo...")""")
}
}
diff --git a/test/files/run/macro-impl-tparam-only-in-impl/Impls_1.scala b/test/files/run/macro-impl-tparam-only-in-impl/Impls_1.scala
index 9b8dafaa97..24eacb36de 100644
--- a/test/files/run/macro-impl-tparam-only-in-impl/Impls_1.scala
+++ b/test/files/run/macro-impl-tparam-only-in-impl/Impls_1.scala
@@ -1,5 +1,5 @@
import scala.reflect.macros.{Context => Ctx}
object Impls {
- def foo[U <: String](c: Ctx): c.Expr[Unit] = c.literalUnit
+ def foo[U <: String](c: Ctx): c.Expr[Unit] = { import c.universe._; c.Expr[Unit](q"()") }
}
diff --git a/test/files/run/macro-openmacros/Impls_Macros_1.scala b/test/files/run/macro-openmacros/Impls_Macros_1.scala
index 50a1782431..884d7f8825 100644
--- a/test/files/run/macro-openmacros/Impls_Macros_1.scala
+++ b/test/files/run/macro-openmacros/Impls_Macros_1.scala
@@ -14,9 +14,9 @@ object Macros {
}
import c.universe._
- val next = if (c.enclosingMacros.length < 3) c.Expr[Unit](Select(Ident(c.mirror.staticModule("Macros")), TermName("foo"))) else c.literalUnit
+ val next = if (c.enclosingMacros.length < 3) c.Expr[Unit](Select(Ident(c.mirror.staticModule("Macros")), TermName("foo"))) else c.Expr[Unit](Literal(Constant(())))
c.universe.reify {
- println(c.literal(normalizePaths(c.enclosingMacros.toString)).splice)
+ println(c.Expr[String](Literal(Constant(normalizePaths(c.enclosingMacros.toString)))).splice)
next.splice
}
}
diff --git a/test/files/run/macro-reify-nested-a.check b/test/files/run/macro-reify-nested-a.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/macro-reify-nested-a.check
+++ /dev/null
diff --git a/test/files/run/macro-reify-nested-b.check b/test/files/run/macro-reify-nested-b.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/macro-reify-nested-b.check
+++ /dev/null
diff --git a/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala b/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala
index 5330d0e32b..f454fc430a 100644
--- a/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala
+++ b/test/files/run/macro-reify-splice-outside-reify/Impls_Macros_1.scala
@@ -2,9 +2,9 @@ import scala.reflect.macros.{Context => Ctx}
object Impls {
def foo(c: Ctx)(x: c.Expr[Int]) = {
+ import c.universe._
val x1 = c.Expr[Int](c.resetAllAttrs(x.tree))
-// was: c.literal(x1.splice)
- c.literal(c.eval(x1))
+ c.Expr[Int](Literal(Constant(c.eval(x1))))
}
}
diff --git a/test/files/run/macro-settings/Impls_Macros_1.scala b/test/files/run/macro-settings/Impls_Macros_1.scala
index 83d80a5bff..9257784cf2 100644
--- a/test/files/run/macro-settings/Impls_Macros_1.scala
+++ b/test/files/run/macro-settings/Impls_Macros_1.scala
@@ -1,8 +1,11 @@
import scala.reflect.macros.Context
object Impls {
- def impl(c: Context) = c.universe.reify {
- println(c.literal(c.settings.toString).splice)
+ def impl(c: Context) = {
+ import c.universe._
+ reify {
+ println(c.Expr[String](Literal(Constant(c.settings.toString))).splice)
+ }
}
}
diff --git a/test/files/run/macro-sip19-revised/Impls_Macros_1.scala b/test/files/run/macro-sip19-revised/Impls_Macros_1.scala
index 8d7d3b5d3d..1b914ac797 100644
--- a/test/files/run/macro-sip19-revised/Impls_Macros_1.scala
+++ b/test/files/run/macro-sip19-revised/Impls_Macros_1.scala
@@ -11,7 +11,8 @@ object Macros {
val fileName = fun.pos.source.file.file.getName
val line = fun.pos.line
val charOffset = fun.pos.point
- c.universe.reify { SourceLocation1(outer.splice, c.literal(fileName).splice, c.literal(line).splice, c.literal(charOffset).splice) }
+ def literal[T](x: T) = c.Expr[T](Literal(Constant(x)))
+ c.universe.reify { SourceLocation1(outer.splice, literal(fileName).splice, literal(line).splice, literal(charOffset).splice) }
}
implicit def sourceLocation: SourceLocation1 = macro impl
diff --git a/test/files/run/macro-sip19/Impls_Macros_1.scala b/test/files/run/macro-sip19/Impls_Macros_1.scala
index 4c165ed1b8..95e19c4fd1 100644
--- a/test/files/run/macro-sip19/Impls_Macros_1.scala
+++ b/test/files/run/macro-sip19/Impls_Macros_1.scala
@@ -7,7 +7,8 @@ object Macros {
val fileName = fun.pos.source.file.file.getName
val line = fun.pos.line
val charOffset = fun.pos.point
- c.universe.reify { SourceLocation(c.literal(fileName).splice, c.literal(line).splice, c.literal(charOffset).splice) }
+ def literal[T](x: T) = c.Expr[T](Literal(Constant(x)))
+ c.universe.reify { SourceLocation(literal(fileName).splice, literal(line).splice, literal(charOffset).splice) }
}
implicit def sourceLocation: SourceLocation = macro impl
diff --git a/test/files/run/macro-system-properties.check b/test/files/run/macro-system-properties.check
index ff96a34426..c61fe7f2cf 100644
--- a/test/files/run/macro-system-properties.check
+++ b/test/files/run/macro-system-properties.check
@@ -8,7 +8,7 @@ import reflect.macros.Context
scala> object GrabContext {
def lastContext = Option(System.getProperties.get("lastContext").asInstanceOf[reflect.macros.runtime.Context])
// System.properties lets you stash true globals (unlike statics which are classloader scoped)
- def impl(c: Context)() = { System.getProperties.put("lastContext", c); c.literalUnit }
+ def impl(c: Context)() = { import c.universe._; System.getProperties.put("lastContext", c); c.Expr[Unit](q"()") }
def grab() = macro impl
}
defined object GrabContext
diff --git a/test/files/run/macro-system-properties.scala b/test/files/run/macro-system-properties.scala
index e182defc81..9dcd044dbd 100644
--- a/test/files/run/macro-system-properties.scala
+++ b/test/files/run/macro-system-properties.scala
@@ -7,7 +7,7 @@ object Test extends ReplTest {
object GrabContext {
def lastContext = Option(System.getProperties.get("lastContext").asInstanceOf[reflect.macros.runtime.Context])
// System.properties lets you stash true globals (unlike statics which are classloader scoped)
- def impl(c: Context)() = { System.getProperties.put("lastContext", c); c.literalUnit }
+ def impl(c: Context)() = { import c.universe._; System.getProperties.put("lastContext", c); c.Expr[Unit](q"()") }
def grab() = macro impl
}
object Test { class C(implicit a: Any) { GrabContext.grab } }
diff --git a/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala b/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala
index dbeb7efbc0..cd37c269b5 100644
--- a/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala
+++ b/test/files/run/macro-typecheck-implicitsdisabled/Impls_Macros_1.scala
@@ -6,7 +6,7 @@ object Macros {
val tree1 = Apply(Select(Literal(Constant(1)), TermName("$minus$greater")), List(Literal(Constant(2))))
val ttree1 = c.typeCheck(tree1, withImplicitViewsDisabled = false)
- c.literal(ttree1.toString)
+ c.Expr[String](Literal(Constant(ttree1.toString)))
}
def foo_with_implicits_enabled = macro impl_with_implicits_enabled
@@ -17,10 +17,10 @@ object Macros {
try {
val tree2 = Apply(Select(Literal(Constant(1)), TermName("$minus$greater")), List(Literal(Constant(2))))
val ttree2 = c.typeCheck(tree2, withImplicitViewsDisabled = true)
- c.literal(ttree2.toString)
+ c.Expr[String](Literal(Constant(ttree2.toString)))
} catch {
case ex: Throwable =>
- c.literal(ex.toString)
+ c.Expr[String](Literal(Constant(ex.toString)))
}
}
diff --git a/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala b/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala
index ff535fea8d..2532cfd2b9 100644
--- a/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala
+++ b/test/files/run/macro-typecheck-macrosdisabled/Impls_Macros_1.scala
@@ -7,7 +7,7 @@ object Macros {
val ru = Select(Select(Select(Select(Ident(TermName("scala")), TermName("reflect")), TermName("runtime")), TermName("package")), TermName("universe"))
val tree1 = Apply(Select(ru, TermName("reify")), List(Literal(Constant(2))))
val ttree1 = c.typeCheck(tree1, withMacrosDisabled = false)
- c.literal(ttree1.toString)
+ c.Expr[String](Literal(Constant(ttree1.toString)))
}
def foo_with_macros_enabled = macro impl_with_macros_enabled
@@ -23,7 +23,7 @@ object Macros {
val tree2 = Apply(Select(Ident(ru), TermName("reify")), List(Literal(Constant(2))))
val ttree2 = c.typeCheck(tree2, withMacrosDisabled = true)
- c.literal(ttree2.toString)
+ c.Expr[String](Literal(Constant(ttree2.toString)))
}
def foo_with_macros_disabled = macro impl_with_macros_disabled
diff --git a/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala b/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala
index a96e0c53b6..7b22793df9 100644
--- a/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala
+++ b/test/files/run/macro-typecheck-macrosdisabled2/Impls_Macros_1.scala
@@ -7,7 +7,7 @@ object Macros {
val ru = Select(Select(Select(Select(Ident(TermName("scala")), TermName("reflect")), TermName("runtime")), TermName("package")), TermName("universe"))
val tree1 = Apply(Select(ru, TermName("reify")), List(Apply(Select(Ident(TermName("scala")), TermName("Array")), List(Literal(Constant(2))))))
val ttree1 = c.typeCheck(tree1, withMacrosDisabled = false)
- c.literal(ttree1.toString)
+ c.Expr[String](Literal(Constant(ttree1.toString)))
}
def foo_with_macros_enabled = macro impl_with_macros_enabled
@@ -23,7 +23,7 @@ object Macros {
val tree2 = Apply(Select(Ident(ru), TermName("reify")), List(Apply(Select(Ident(TermName("scala")), TermName("Array")), List(Literal(Constant(2))))))
val ttree2 = c.typeCheck(tree2, withMacrosDisabled = true)
- c.literal(ttree2.toString)
+ c.Expr[String](Literal(Constant(ttree2.toString)))
}
def foo_with_macros_disabled = macro impl_with_macros_disabled
diff --git a/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala b/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala
index bcbd12817b..6695a297ea 100644
--- a/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala
+++ b/test/files/run/macro-undetparams-consfromsls/Impls_Macros_1.scala
@@ -2,14 +2,20 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.Context
object Macros {
- def cons_impl[A: c.WeakTypeTag](c: Context)(x: c.Expr[A], xs: c.Expr[List[A]]): c.Expr[List[A]] = c.universe.reify {
- println("A = " + c.literal(implicitly[c.WeakTypeTag[A]].toString).splice)
- x.splice :: xs.splice
+ def cons_impl[A: c.WeakTypeTag](c: Context)(x: c.Expr[A], xs: c.Expr[List[A]]): c.Expr[List[A]] = {
+ import c.universe._
+ reify {
+ println("A = " + c.Expr[String](Literal(Constant(implicitly[c.WeakTypeTag[A]].toString))).splice)
+ x.splice :: xs.splice
+ }
}
- def nil_impl[B: c.WeakTypeTag](c: Context): c.Expr[List[B]] = c.universe.reify {
- println("B = " + c.literal(implicitly[c.WeakTypeTag[B]].toString).splice)
- Nil
+ def nil_impl[B: c.WeakTypeTag](c: Context): c.Expr[List[B]] = {
+ import c.universe._
+ reify {
+ println("B = " + c.Expr[String](Literal(Constant(implicitly[c.WeakTypeTag[B]].toString))).splice)
+ Nil
+ }
}
def cons[A](x: A, xs: List[A]): List[A] = macro cons_impl[A]
diff --git a/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala b/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala
index 0244273b6f..85877b3f13 100644
--- a/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala
+++ b/test/files/run/macro-undetparams-macroitself/Impls_Macros_1.scala
@@ -2,7 +2,10 @@ import scala.reflect.runtime.universe._
import scala.reflect.macros.Context
object Macros {
- def impl[T: c.WeakTypeTag](c: Context)(foo: c.Expr[T]): c.Expr[Unit] = c.universe.reify { println(c.literal(implicitly[c.WeakTypeTag[T]].toString).splice) }
+ def impl[T: c.WeakTypeTag](c: Context)(foo: c.Expr[T]): c.Expr[Unit] = {
+ import c.universe._
+ reify { println(c.Expr[String](Literal(Constant(implicitly[c.WeakTypeTag[T]].toString))).splice) }
+ }
def foo[T](foo: T) = macro impl[T]
} \ No newline at end of file
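
// A minimal sketch of the rewrite applied throughout the macro tests above: uses of the
// c.literal(...) convenience helper are replaced by an explicitly built constant tree
// wrapped in c.Expr. Object and method names below are illustrative only; just the
// 2.10-style scala.reflect.macros.Context API used elsewhere in this patch is assumed.
import scala.language.experimental.macros
import scala.reflect.macros.Context

object LiteralSketch {
  def impl(c: Context): c.Expr[String] = {
    import c.universe._
    val s = "hello from a macro"
    // old encoding:  c.literal(s)
    // new encoding:  build the Literal(Constant(...)) node by hand and wrap it in an Expr
    c.Expr[String](Literal(Constant(s)))
  }
  def hello: String = macro impl
}
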
diff --git a/test/files/run/origins.flags b/test/files/run/origins.flags
index a7e64e4f0c..690753d807 100644
--- a/test/files/run/origins.flags
+++ b/test/files/run/origins.flags
@@ -1 +1 @@
--no-specialization \ No newline at end of file
+-no-specialization -Ydelambdafy:inline \ No newline at end of file
diff --git a/test/files/run/primitive-sigs-2-new.flags b/test/files/run/primitive-sigs-2-new.flags
new file mode 100644
index 0000000000..2349d8294d
--- /dev/null
+++ b/test/files/run/primitive-sigs-2-new.flags
@@ -0,0 +1 @@
+-Ydelambdafy:inline
diff --git a/test/files/run/primitive-sigs-2-old.flags b/test/files/run/primitive-sigs-2-old.flags
new file mode 100644
index 0000000000..ac96850b69
--- /dev/null
+++ b/test/files/run/primitive-sigs-2-old.flags
@@ -0,0 +1 @@
+-Ydelambdafy:inline \ No newline at end of file
diff --git a/test/files/run/programmatic-main.check b/test/files/run/programmatic-main.check
index 10d2ed5af7..1cd94ccb45 100644
--- a/test/files/run/programmatic-main.check
+++ b/test/files/run/programmatic-main.check
@@ -12,7 +12,7 @@ superaccessors 6 add super accessors in traits and nested classes
uncurry 10 uncurry, translate function values to anonymous classes
tailcalls 11 replace tail calls by jumps
specialize 12 @specialized-driven class and method specialization
- explicitouter 13 this refs to outer pointers, translate patterns
+ explicitouter 13 this refs to outer pointers
erasure 14 erase types, add interfaces for traits
posterasure 15 clean up erased inline classes
lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs
@@ -21,6 +21,7 @@ superaccessors 6 add super accessors in traits and nested classes
flatten 19 eliminate inner classes
mixin 20 mixin composition
cleanup 21 platform-specific cleanups, generate reflective calls
- icode 22 generate portable intermediate code
- jvm 23 generate JVM bytecode
- terminal 24 the last phase during a compilation run
+ delambdafy 22 remove lambdas
+ icode 23 generate portable intermediate code
+ jvm 24 generate JVM bytecode
+ terminal 25 the last phase during a compilation run
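
// A minimal sketch of what the new delambdafy phase (number 22 above) operates on and
// what the -Ydelambdafy flag added to the .flags files above selects: with
// -Ydelambdafy:inline a function value keeps the traditional anonymous-class encoding,
// with -Ydelambdafy:method it is lifted into a method plus a small closure class.
// The class shapes are an assumption for illustration; the flag values are the ones in the patch.
object DelambdafySketch {
  val inc: Int => Int = x => x + 1          // the kind of tree delambdafy rewrites
  def main(args: Array[String]): Unit =
    println(inc(41))                        // 42 under either encoding
}
// compiled e.g. with: scalac -Ydelambdafy:inline DelambdafySketch.scala
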
diff --git a/test/files/run/range.check b/test/files/run/range.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/range.check
+++ /dev/null
diff --git a/test/files/run/reflection-fancy-java-classes.check b/test/files/run/reflection-fancy-java-classes.check
new file mode 100644
index 0000000000..258208dd99
--- /dev/null
+++ b/test/files/run/reflection-fancy-java-classes.check
@@ -0,0 +1,12 @@
+===== JAVA POV =====
+class Foo_1$1
+getEnclosingClass = class Foo_1
+getEnclosingMethod = null
+getEnclosingConstructor = null
+isMemberClass = false
+isLocalClass = false
+isAnonymousClass = true
+
+===== SCALA POV =====
+class 1
+object Foo_1
diff --git a/test/files/run/reflection-fancy-java-classes/Foo_1.java b/test/files/run/reflection-fancy-java-classes/Foo_1.java
new file mode 100644
index 0000000000..f6fd76124b
--- /dev/null
+++ b/test/files/run/reflection-fancy-java-classes/Foo_1.java
@@ -0,0 +1,5 @@
+public class Foo_1 {
+ public static Bar bar = new Bar();
+ private static class Bar {
+ }
+} \ No newline at end of file
diff --git a/test/files/run/reflection-fancy-java-classes/Test_2.scala b/test/files/run/reflection-fancy-java-classes/Test_2.scala
new file mode 100644
index 0000000000..271960ee79
--- /dev/null
+++ b/test/files/run/reflection-fancy-java-classes/Test_2.scala
@@ -0,0 +1,20 @@
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+
+object Test extends App {
+ println("===== JAVA POV =====")
+ val jfancy = Class.forName("Foo_1$1")
+ println(jfancy)
+ println("getEnclosingClass = " + jfancy.getEnclosingClass)
+ println("getEnclosingMethod = " + jfancy.getEnclosingMethod)
+ println("getEnclosingConstructor = " + jfancy.getEnclosingConstructor)
+ println("isMemberClass = " + jfancy.isMemberClass)
+ println("isLocalClass = " + jfancy.isLocalClass)
+ println("isAnonymousClass = " + jfancy.isAnonymousClass)
+
+ println("")
+ println("===== SCALA POV =====")
+ val sfancy = cm.classSymbol(jfancy)
+ println(sfancy)
+ println(sfancy.owner)
+} \ No newline at end of file
diff --git a/test/files/run/reflection-sync-potpourri.scala b/test/files/run/reflection-sync-potpourri.scala
new file mode 100644
index 0000000000..0ad5f2ab66
--- /dev/null
+++ b/test/files/run/reflection-sync-potpourri.scala
@@ -0,0 +1,32 @@
+import scala.reflect.runtime.universe._
+
+// this test checks that under heavily multithreaded conditions:
+// 1) scala.reflect.runtime.universe, its rootMirror and definitions are initialized correctly
+// 2) symbols are correctly materialized into PackageScopes (no dupes)
+// 3) unpickling works okay even if we unpickle the same symbol a lot of times

+
+object Test extends App {
+ def foo[T: TypeTag](x: T) = typeOf[T].toString
+ val n = 1000
+ val rng = new scala.util.Random()
+ val types = List(
+ () => typeOf[java.lang.reflect.Method],
+ () => typeOf[java.lang.annotation.Annotation],
+ () => typeOf[scala.io.BufferedSource],
+ () => typeOf[scala.io.Codec])
+ val perms = types.permutations.toList
+ def force(lazytpe: () => Type): String = {
+ lazytpe().typeSymbol.typeSignature
+ lazytpe().toString
+ }
+ val diceRolls = List.fill(n)(rng.nextInt(perms.length))
+ val threads = (1 to n) map (i => new Thread(s"Reflector-$i") {
+ override def run(): Unit = {
+ val s1 = foo("42")
+ val s2 = perms(diceRolls(i - 1)).map(x => force(x)).sorted.mkString(", ")
+ assert(s1 == "java.lang.String")
+ assert(s2 == "java.lang.annotation.Annotation, java.lang.reflect.Method, scala.io.BufferedSource, scala.io.Codec")
+ }
+ })
+ threads foreach (_.start)
+} \ No newline at end of file
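
// A minimal standalone sketch of the pattern the test above stresses: force a type's
// symbol before printing it, from several threads at once, to exercise thread-safe
// initialization of the runtime universe. The thread count and sample type are
// arbitrary choices made here for illustration.
import scala.reflect.runtime.universe._

object ConcurrentReflectionSketch extends App {
  def render[T: TypeTag]: String = {
    val tpe = typeOf[T]
    tpe.typeSymbol.typeSignature   // force symbol completion before rendering
    tpe.toString
  }
  val threads = (1 to 8).map(i => new Thread(s"probe-$i") {
    override def run(): Unit = assert(render[java.lang.reflect.Method] == "java.lang.reflect.Method")
  })
  threads.foreach(_.start())
  threads.foreach(_.join())
}
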
diff --git a/test/files/run/reify_for1.check b/test/files/run/reify_for1.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/reify_for1.check
+++ /dev/null
diff --git a/test/files/run/reify_fors_oldpatmat.flags b/test/files/run/reify_fors_oldpatmat.flags
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/reify_fors_oldpatmat.flags
+++ /dev/null
diff --git a/test/files/run/reify_maps_oldpatmat.flags b/test/files/run/reify_maps_oldpatmat.flags
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/reify_maps_oldpatmat.flags
+++ /dev/null
diff --git a/test/files/run/repl-term-macros.check b/test/files/run/repl-term-macros.check
index 2a143a1777..63bafe401b 100644
--- a/test/files/run/repl-term-macros.check
+++ b/test/files/run/repl-term-macros.check
@@ -9,7 +9,7 @@ import language.experimental.macros
scala>
-scala> def impl1(c: Context) = c.literalUnit
+scala> def impl1(c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
impl1: (c: scala.reflect.macros.Context)c.Expr[Unit]
scala> def foo1 = macro impl1
@@ -19,7 +19,7 @@ scala> foo1
scala>
-scala> def impl2(c: Context)() = c.literalUnit
+scala> def impl2(c: Context)() = { import c.universe._; c.Expr[Unit](q"()") }
impl2: (c: scala.reflect.macros.Context)()c.Expr[Unit]
scala> def foo2() = macro impl2
@@ -29,7 +29,7 @@ scala> foo2()
scala>
-scala> def impl3(c: Context)(x: c.Expr[Int])(y: c.Expr[Int]) = c.literalUnit
+scala> def impl3(c: Context)(x: c.Expr[Int])(y: c.Expr[Int]) = { import c.universe._; c.Expr[Unit](q"()") }
impl3: (c: scala.reflect.macros.Context)(x: c.Expr[Int])(y: c.Expr[Int])c.Expr[Unit]
scala> def foo3(x: Int)(y: Int) = macro impl3
diff --git a/test/files/run/repl-term-macros.scala b/test/files/run/repl-term-macros.scala
index f826259be9..125e397b22 100644
--- a/test/files/run/repl-term-macros.scala
+++ b/test/files/run/repl-term-macros.scala
@@ -5,15 +5,15 @@ object Test extends ReplTest {
import scala.reflect.macros.Context
import language.experimental.macros
-def impl1(c: Context) = c.literalUnit
+def impl1(c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
def foo1 = macro impl1
foo1
-def impl2(c: Context)() = c.literalUnit
+def impl2(c: Context)() = { import c.universe._; c.Expr[Unit](q"()") }
def foo2() = macro impl2
foo2()
-def impl3(c: Context)(x: c.Expr[Int])(y: c.Expr[Int]) = c.literalUnit
+def impl3(c: Context)(x: c.Expr[Int])(y: c.Expr[Int]) = { import c.universe._; c.Expr[Unit](q"()") }
def foo3(x: Int)(y: Int) = macro impl3
foo3(2)(3)
"""
diff --git a/test/files/run/static-module-method.check b/test/files/run/static-module-method.check
new file mode 100644
index 0000000000..ce01362503
--- /dev/null
+++ b/test/files/run/static-module-method.check
@@ -0,0 +1 @@
+hello
diff --git a/test/files/run/static-module-method.scala b/test/files/run/static-module-method.scala
new file mode 100644
index 0000000000..a8691300de
--- /dev/null
+++ b/test/files/run/static-module-method.scala
@@ -0,0 +1,14 @@
+// During development of the delayed delambdafy transform there was a problem where
+// GenASM would eliminate the module load for all methods defined within that module,
+// even if those methods were static. This test would thus fail
+// with a VerifyError under -Ydelambdafy:method.
+
+object Test {
+ def moduleMethod(x: String) = x
+
+ def map(x: String, f: String => String) = f(x)
+
+ def main(args: Array[String]) {
+ println(map("hello", Test.moduleMethod))
+ }
+} \ No newline at end of file
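
// A minimal sketch of the shape that used to break under -Ydelambdafy:method: passing a
// module (object) method where a function type is expected triggers an eta-expansion
// roughly like the explicit lambda below, and the expanded closure must still load the
// module even though moduleMethod also gets a static forwarder. The expansion shown is
// an illustration, not actual compiler output.
object EtaExpansionSketch {
  def moduleMethod(x: String): String = x
  val expanded: String => String = x => EtaExpansionSketch.moduleMethod(x)
  def main(args: Array[String]): Unit =
    println(expanded("hello"))   // "hello", mirroring static-module-method.check
}
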
diff --git a/test/files/run/t0668.check b/test/files/run/t0668.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t0668.check
+++ /dev/null
diff --git a/test/files/run/t1167.flags b/test/files/run/t1167.flags
new file mode 100644
index 0000000000..ac96850b69
--- /dev/null
+++ b/test/files/run/t1167.flags
@@ -0,0 +1 @@
+-Ydelambdafy:inline \ No newline at end of file
diff --git a/test/files/run/t1829.check b/test/files/run/t1829.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t1829.check
+++ /dev/null
diff --git a/test/files/run/t2594_tcpoly.check b/test/files/run/t2594_tcpoly.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t2594_tcpoly.check
+++ /dev/null
diff --git a/test/files/run/t3346a.check b/test/files/run/t3346a.check
new file mode 100644
index 0000000000..d00491fd7e
--- /dev/null
+++ b/test/files/run/t3346a.check
@@ -0,0 +1 @@
+1
diff --git a/test/files/run/t3346a.scala b/test/files/run/t3346a.scala
new file mode 100644
index 0000000000..c0a90b011b
--- /dev/null
+++ b/test/files/run/t3346a.scala
@@ -0,0 +1,11 @@
+import scala.language.implicitConversions
+
+object Test extends App {
+ class Rep[T](x : T)
+
+ class SomeOps[T](x : Rep[T]) { def foo = 1 }
+ implicit def mkOps[X, T](x : X)(implicit conv: X => Rep[T]) : SomeOps[T] = new SomeOps(conv(x))
+
+ val a: Rep[Int] = new Rep(42)
+ println(a.foo)
+} \ No newline at end of file
diff --git a/test/files/run/t3346d.scala b/test/files/run/t3346d.scala
new file mode 100644
index 0000000000..3f79896210
--- /dev/null
+++ b/test/files/run/t3346d.scala
@@ -0,0 +1,21 @@
+import scala.language.implicitConversions
+
+object Test extends App {
+ trait TARInt
+
+ trait Basket[A,B] {
+ def iAmABasket = {}
+ }
+
+ trait BasketFactory[A,B] {
+ def create(v: A): Basket[A,B]
+ }
+
+ implicit val bf = new BasketFactory[Int,TARInt] {
+ def create(v: Int): Basket[Int,TARInt] = new Basket[Int, TARInt]{}
+ }
+
+ implicit def i2[A,B](a: A)(implicit bf: BasketFactory[A,B]): Basket[A,B] = bf.create(a)
+
+ 1.iAmABasket // <-- i2 conversion not applicable
+} \ No newline at end of file
diff --git a/test/files/run/t3346e.check b/test/files/run/t3346e.check
new file mode 100644
index 0000000000..71a57ffa70
--- /dev/null
+++ b/test/files/run/t3346e.check
@@ -0,0 +1,12 @@
+eqw
+List(0, 2)
+List(0, 2)
+BitSet(0, 2)
+Vector(113, 119, 101)
+qwe
+List(2, 0)
+List(0!)
+BitSet(0, 2)
+qwe
+List(2, 0)
+qwe
diff --git a/test/files/run/t3346e.scala b/test/files/run/t3346e.scala
new file mode 100644
index 0000000000..ac0de564d4
--- /dev/null
+++ b/test/files/run/t3346e.scala
@@ -0,0 +1,81 @@
+import scala.language.implicitConversions
+import scala.collection.generic.CanBuildFrom
+import scala.math.Ordering
+import collection.{TraversableLike, SeqLike}
+import collection.immutable.BitSet
+
+class QuickSort[Coll](a: Coll) {
+ //should be able to sort only something with a defined order (something like a Seq)
+ def quickSort[T](implicit ev0: Coll => SeqLike[T, Coll],
+ cbf: CanBuildFrom[Coll, T, Coll],
+ n: Ordering[T]): Coll = {
+ quickSortAnything(ev0, cbf, n)
+ }
+
+ //we can even sort a Set, if we really want to
+ def quickSortAnything[T](implicit ev0: Coll => TraversableLike[T, Coll],
+ cbf: CanBuildFrom[Coll, T, Coll],
+ n: Ordering[T]): Coll = {
+ import n._
+ if (a.size < 2) {
+ a
+ } else {
+ // We pick the first value for the pivot.
+ val pivot = a.head
+ val (lower, tmp) = a.partition(_ < pivot)
+ val (upper, same) = tmp.partition(_ > pivot)
+ val b = cbf()
+ b.sizeHint(a.size)
+ b ++= new QuickSort(lower).quickSortAnything
+ b ++= same
+ b ++= new QuickSort(upper).quickSortAnything
+ b.result
+ }
+ }
+}
+
+class FilterMap[Repr](a: Repr) {
+ def filterMap[A, B, That](f: A => Option[B])(implicit ev0: Repr => TraversableLike[A, Repr],
+ cbf: CanBuildFrom[Repr, B, That]): That = {
+ a.flatMap(e => f(e).toSeq)
+ }
+}
+
+class FilterMapFixed[A, Repr <% TraversableLike[A, Repr]](a: Repr) {
+ def filterMap2[B, That](f: A => Option[B])(implicit cbf: CanBuildFrom[Repr, B, That]): That = {
+ a.flatMap(e => f(e).toSeq)
+ }
+}
+
+object MyEnhancements {
+ implicit def toQS[Coll](a: Coll) = new QuickSort(a)
+ implicit def toFM[Coll](a: Coll) = new FilterMap(a)
+ implicit def toFM2[A, Repr <% TraversableLike[A, Repr]](a: Repr) = new FilterMapFixed(a)
+}
+
+object Test extends App {
+
+ import MyEnhancements._
+
+ println("qwe".quickSort)
+ println(Array(2, 0).quickSort.toList)
+ println(Seq(2, 0).quickSort)
+ //not very useful to sort a set, but just as a demonstration
+ println(BitSet(2, 0).quickSortAnything)
+
+ //need to hint the type inferencer;
+ //probably this will be avoidable once https://issues.scala-lang.org/browse/SI-4699 and
+ // related issues are fixed (by moving the ev0 parameter from filterMap to toFM), see toFM2
+ println("qwe".filterMap((c: Char) => Some(c.toInt)))
+ println("qwe".filterMap((c: Char) => Some(c)))
+ println(Array(2, 0).filterMap((c: Int) => Some(c.toInt)).toList)
+ println(Seq(2, 0).filterMap((c: Int) => if (c < 2) Some(c + "!") else None))
+ def test(i:Int) = Option(i)
+ println(BitSet(2,0).filterMap(test))
+
+ println(toFM2("qwe").filterMap2(c => Some(c)))
+ println(toFM2(Array(2, 0)).filterMap2(c => Some(c.toInt)).toList)
+ //No implicit view available from java.lang.String => scala.collection.TraversableLike[A,java.lang.String]. :(
+ //Not anymore :)
+ println("qwe".filterMap2(c => Some(c)))
+}
diff --git a/test/files/run/t3346f.check b/test/files/run/t3346f.check
new file mode 100644
index 0000000000..fd3c81a4d7
--- /dev/null
+++ b/test/files/run/t3346f.check
@@ -0,0 +1,2 @@
+5
+5
diff --git a/test/files/run/t3346f.scala b/test/files/run/t3346f.scala
new file mode 100644
index 0000000000..4799ca2ca9
--- /dev/null
+++ b/test/files/run/t3346f.scala
@@ -0,0 +1,15 @@
+import scala.language.implicitConversions
+import scala.language.reflectiveCalls
+
+object Test extends App {
+ trait Foo[A]
+ implicit def fooString: Foo[String] = null
+ implicit def value[A](implicit foo: Foo[A]) = 5
+
+ println(implicitly[Int])
+
+ implicit def conversion[A](x: Int)(implicit foo: Foo[A]) = new {
+ def aMethod = 5
+ }
+ println(1.aMethod)
+}
diff --git a/test/files/run/t3346g.check b/test/files/run/t3346g.check
new file mode 100644
index 0000000000..ce894825e0
--- /dev/null
+++ b/test/files/run/t3346g.check
@@ -0,0 +1 @@
+A(3,asdf)
diff --git a/test/files/run/t3346g.scala b/test/files/run/t3346g.scala
new file mode 100644
index 0000000000..d7c9d79c7f
--- /dev/null
+++ b/test/files/run/t3346g.scala
@@ -0,0 +1,9 @@
+import scala.language.implicitConversions
+
+case class A(b: Int, c: String)
+
+object Test extends App {
+ implicit def s2i(s: String): Int = s.length
+ implicit def toA[T](t: T)(implicit f: T => Int): A = A(f(t), t.toString)
+ println("asdf".copy(b = 3))
+} \ No newline at end of file
diff --git a/test/files/run/t3346h.check b/test/files/run/t3346h.check
new file mode 100644
index 0000000000..587be6b4c3
--- /dev/null
+++ b/test/files/run/t3346h.check
@@ -0,0 +1 @@
+x
diff --git a/test/files/run/t3346h.scala b/test/files/run/t3346h.scala
new file mode 100644
index 0000000000..97ebc9380c
--- /dev/null
+++ b/test/files/run/t3346h.scala
@@ -0,0 +1,9 @@
+import scala.language.implicitConversions
+
+object Test extends App {
+ trait Fundep[T, U] { def u(t: T): U }
+ class C { def y = "x" }
+ implicit val FundepStringC = new Fundep[String, C]{ def u(t: String) = new C }
+ implicit def foo[T, U](x: T)(implicit y: Fundep[T, U]): U = y.u(x)
+ println("x".y)
+} \ No newline at end of file
diff --git a/test/files/run/t3346j.check b/test/files/run/t3346j.check
new file mode 100644
index 0000000000..59e8626fc5
--- /dev/null
+++ b/test/files/run/t3346j.check
@@ -0,0 +1 @@
+Int
diff --git a/test/files/run/t3346j.scala b/test/files/run/t3346j.scala
new file mode 100644
index 0000000000..98b5a870a7
--- /dev/null
+++ b/test/files/run/t3346j.scala
@@ -0,0 +1,11 @@
+import scala.language.implicitConversions
+import scala.language.reflectiveCalls
+import scala.reflect.runtime.universe._
+
+object Test extends App {
+ class A[T]
+ class B[T]
+ implicit def foo[T: TypeTag](a: A[T])(implicit b: B[T]) = new { def baz = typeOf[T] }
+ implicit def bar[T <: Int]: B[T] = new B[T]()
+ println(new A[Int]().baz)
+} \ No newline at end of file
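
// A minimal sketch of the capability the new t3346* tests above pin down: an implicit
// view whose own applicability depends on a further implicit parameter, so extension-style
// methods only become available when the chained evidence is in scope. All names below
// are illustrative and not taken from the patch.
import scala.language.implicitConversions

object ChainedImplicitsSketch extends App {
  trait Codec[T] { def encode(t: T): String }
  implicit val intCodec: Codec[Int] = new Codec[Int] { def encode(t: Int) = t.toString }
  class EncodedOps(s: String) { def shout: String = s + "!" }
  // the conversion applies to any T for which a Codec[T] can be found
  implicit def toEncodedOps[T](t: T)(implicit codec: Codec[T]): EncodedOps =
    new EncodedOps(codec.encode(t))
  println(42.shout)  // prints "42!"
}
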
diff --git a/test/files/run/t3897.flags b/test/files/run/t3897.flags
new file mode 100644
index 0000000000..ac96850b69
--- /dev/null
+++ b/test/files/run/t3897.flags
@@ -0,0 +1 @@
+-Ydelambdafy:inline \ No newline at end of file
diff --git a/test/files/run/t4542.check b/test/files/run/t4542.check
index 5a8108dcbc..a53f31a3c7 100644
--- a/test/files/run/t4542.check
+++ b/test/files/run/t4542.check
@@ -9,9 +9,6 @@ defined class Foo
scala> val f = new Foo
<console>:8: warning: class Foo is deprecated: foooo
val f = new Foo
- ^
-<console>:8: warning: class Foo is deprecated: foooo
- val f = new Foo
^
f: Foo = Bippy
diff --git a/test/files/run/t5229_1.check b/test/files/run/t5229_1.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t5229_1.check
+++ /dev/null
diff --git a/test/files/run/t5271_4.check b/test/files/run/t5271_4.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t5271_4.check
+++ /dev/null
diff --git a/test/files/run/t5272_1_oldpatmat.flags b/test/files/run/t5272_1_oldpatmat.flags
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t5272_1_oldpatmat.flags
+++ /dev/null
diff --git a/test/files/run/t5272_2_oldpatmat.flags b/test/files/run/t5272_2_oldpatmat.flags
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t5272_2_oldpatmat.flags
+++ /dev/null
diff --git a/test/files/run/t5273_1_oldpatmat.flags b/test/files/run/t5273_1_oldpatmat.flags
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t5273_1_oldpatmat.flags
+++ /dev/null
diff --git a/test/files/run/t5273_2a_oldpatmat.flags b/test/files/run/t5273_2a_oldpatmat.flags
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t5273_2a_oldpatmat.flags
+++ /dev/null
diff --git a/test/files/run/t5273_2b_oldpatmat.flags b/test/files/run/t5273_2b_oldpatmat.flags
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t5273_2b_oldpatmat.flags
+++ /dev/null
diff --git a/test/files/run/t5415.check b/test/files/run/t5415.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t5415.check
+++ /dev/null
diff --git a/test/files/run/t5418.check b/test/files/run/t5418.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t5418.check
+++ /dev/null
diff --git a/test/files/run/t5545.check b/test/files/run/t5545.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t5545.check
+++ /dev/null
diff --git a/test/files/run/t5894.scala b/test/files/run/t5894.scala
index abeec32365..5deda34489 100644
--- a/test/files/run/t5894.scala
+++ b/test/files/run/t5894.scala
@@ -4,7 +4,7 @@ class Test
object Test {
def foo = macro fooImpl
- def fooImpl(c: reflect.macros.Context) = c.literalUnit
+ def fooImpl(c: reflect.macros.Context) = { import c.universe._; c.Expr[Unit](q"()") }
def main(args: Array[String]) {
try {
diff --git a/test/files/run/t5923a/Macros_1.scala b/test/files/run/t5923a/Macros_1.scala
index 97076eb102..741379cf34 100644
--- a/test/files/run/t5923a/Macros_1.scala
+++ b/test/files/run/t5923a/Macros_1.scala
@@ -46,7 +46,7 @@ object Macros {
if (sym.isParameter && !sym.isSkolem) TypeTag.Nothing.asInstanceOf[TypeTag[T]]
else ttag0
}
- reify(C[T](c.literal(weakTypeOf[T].toString).splice))
+ reify(C[T](c.Expr[String](Literal(Constant(weakTypeOf[T].toString))).splice))
}
}
} \ No newline at end of file
diff --git a/test/files/run/t5923d.check b/test/files/run/t5923d.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t5923d.check
+++ /dev/null
diff --git a/test/files/run/t5940.scala b/test/files/run/t5940.scala
index 147ff38256..9c8f702c68 100644
--- a/test/files/run/t5940.scala
+++ b/test/files/run/t5940.scala
@@ -7,12 +7,12 @@ object Test extends DirectTest {
import scala.reflect.macros.Context
object Impls {
- def impl(c: Context) = c.literalUnit
+ def impl(c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
}
object Macros {
//import Impls._
- def impl(c: Context) = c.literalUnit
+ def impl(c: Context) = { import c.universe._; c.Expr[Unit](q"()") }
def foo = macro impl
}
"""
diff --git a/test/files/run/t5942.check b/test/files/run/t5942.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t5942.check
+++ /dev/null
diff --git a/test/files/run/t6028.scala b/test/files/run/t6028.scala
index cab17535fc..a6f920c5bb 100644
--- a/test/files/run/t6028.scala
+++ b/test/files/run/t6028.scala
@@ -3,7 +3,7 @@ import java.io.{Console => _, _}
object Test extends DirectTest {
- override def extraSettings: String = "-usejavacp -Xprint:lambdalift -d " + testOutput.path
+ override def extraSettings: String = "-usejavacp -Ydelambdafy:inline -Xprint:lambdalift -d " + testOutput.path
override def code = """class T(classParam: Int) {
| val field: Int = 0
diff --git a/test/files/run/t6102.check b/test/files/run/t6102.check
index 4e8efa7b6d..aa3e6cc9e2 100644
--- a/test/files/run/t6102.check
+++ b/test/files/run/t6102.check
@@ -19,6 +19,7 @@
[running phase flatten on t6102.scala]
[running phase mixin on t6102.scala]
[running phase cleanup on t6102.scala]
+[running phase delambdafy on t6102.scala]
[running phase icode on t6102.scala]
#partest -optimise
[running phase inliner on t6102.scala]
diff --git a/test/files/run/t6197.check b/test/files/run/t6197.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t6197.check
+++ /dev/null
diff --git a/test/files/run/t6198.check b/test/files/run/t6198.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t6198.check
+++ /dev/null
diff --git a/test/files/run/t6199-toolbox.scala b/test/files/run/t6199-toolbox.scala
index 89015f5878..6ba5e50f66 100644
--- a/test/files/run/t6199-toolbox.scala
+++ b/test/files/run/t6199-toolbox.scala
@@ -4,5 +4,5 @@ import scala.tools.reflect.ToolBox
object Test extends App {
val tb = cm.mkToolBox()
- println(tb.eval(Literal(Constant(()))))
+ println(tb.eval(q"()"))
} \ No newline at end of file
diff --git a/test/files/run/t6240-universe-code-gen.scala b/test/files/run/t6240-universe-code-gen.scala
new file mode 100644
index 0000000000..84691639bd
--- /dev/null
+++ b/test/files/run/t6240-universe-code-gen.scala
@@ -0,0 +1,82 @@
+import scala.tools.partest.nest.FileManager._
+
+object Test extends App {
+ val cm = reflect.runtime.currentMirror
+ val u = cm.universe
+ import u._
+
+ val JavaUniverseTpe = typeOf[reflect.runtime.JavaUniverse]
+ val DefinitionsModule = JavaUniverseTpe.member(TermName("definitions"))
+
+ def forceCode(prefix: String, tp: Type): String = {
+ def isLazyAccessorOrObject(sym: Symbol) = (
+ (sym.isMethod && sym.asMethod.isLazy)
+ || sym.isModule
+ )
+ val forcables = tp.members.sorted.filter(isLazyAccessorOrObject)
+ forcables.map {
+ sym =>
+ val path = s"$prefix.${sym.name}"
+ " " + (
+ if (sym.isPrivate || sym.isProtected) s"// inaccessible: $path"
+ else path
+ )
+ }.mkString("\n")
+ }
+
+ val code =
+ s"""|// Generated Code, validated by run/t6240-universe-code-gen.scala
+ |package scala.reflect
+ |package runtime
+ |
+ |trait JavaUniverseForce { self: runtime.JavaUniverse =>
+ | def force() {
+ | Literal(Constant(42)).duplicate
+ | nme.flattenedName()
+ | nme.raw
+ | WeakTypeTag
+ | TypeTag
+ | TypeTag.Byte.tpe
+ | TypeTag.Short.tpe
+ | TypeTag.Char.tpe
+ | TypeTag.Int.tpe
+ | TypeTag.Long.tpe
+ | TypeTag.Float.tpe
+ | TypeTag.Double.tpe
+ | TypeTag.Boolean.tpe
+ | TypeTag.Unit.tpe
+ | TypeTag.Any.tpe
+ | TypeTag.AnyVal.tpe
+ | TypeTag.AnyRef.tpe
+ | TypeTag.Object.tpe
+ | TypeTag.Nothing.tpe
+ | TypeTag.Null.tpe
+ |
+ |${forceCode("this", JavaUniverseTpe)}
+ |${forceCode("definitions", DefinitionsModule.typeSignature)}
+ |${forceCode("refChecks", typeOf[scala.reflect.internal.transform.RefChecks])}
+ |${forceCode("uncurry", typeOf[scala.reflect.internal.transform.UnCurry])}
+ |${forceCode("erasure", typeOf[scala.reflect.internal.transform.Erasure])}
+ | }
+ |}""".stripMargin
+
+ import java.io.File
+ val testFile = new File(sys.props("partest.test-path"))
+ val actualFile = new java.io.File(testFile.getParent + "/../../../src/reflect/scala/reflect/runtime/JavaUniverseForce.scala").getCanonicalFile
+ val actual = scala.io.Source.fromFile(actualFile)
+ val actualLines = actual.getLines.toList
+ val generatedLines = code.lines.toList
+ if (actualLines != generatedLines) {
+ val msg = s"""|${actualFile} must be updated.
+ |===========================================================
+ | DIFF:
+ |===========================================================
+ |${compareContents(actualLines, generatedLines)}
+ |===========================================================
+ | NEW CONTENTS:
+ |===========================================================
+ |${code}""".stripMargin
+
+ assert(false, msg)
+ }
+}
diff --git a/test/files/run/t6240a.check b/test/files/run/t6240a.check
new file mode 100644
index 0000000000..29f695b6f4
--- /dev/null
+++ b/test/files/run/t6240a.check
@@ -0,0 +1 @@
+StepTwo.type
diff --git a/test/files/run/t6240a/StepOne.java b/test/files/run/t6240a/StepOne.java
new file mode 100644
index 0000000000..342d617c79
--- /dev/null
+++ b/test/files/run/t6240a/StepOne.java
@@ -0,0 +1,41 @@
+import java.io.File;
+import java.io.IOException;
+import java.lang.ClassNotFoundException;
+import java.lang.NoSuchMethodException;
+import java.lang.IllegalAccessException;
+import java.lang.reflect.Method;
+import java.lang.reflect.InvocationTargetException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.net.MalformedURLException;
+
+public class StepOne {
+ public static void main(String[] args)
+ throws MalformedURLException, ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, IOException {
+ String[] launchPaths = System.getProperty("launch.classpath").split(File.pathSeparator);
+
+ // move away StepThree
+ File tempDir = File.createTempFile("temp", Long.toString(System.nanoTime()));
+ System.setProperty("launch.step.three", tempDir.getAbsolutePath());
+ tempDir.delete();
+ tempDir.mkdir();
+ File[] testClasses = new File(launchPaths[0]).listFiles();
+ for (int i = 0; i < testClasses.length; i++) {
+ File testClass = testClasses[i];
+ if (testClass.getPath().contains("StepThree")) {
+ File testClassMoved = new File(tempDir.getAbsolutePath() + "/" + testClass.getName());
+ testClass.renameTo(testClassMoved);
+ }
+ }
+
+ // launch StepTwo
+ URL[] launchURLs = new URL[launchPaths.length];
+ for (int i = 0; i < launchPaths.length; i++) {
+ launchURLs[i] = new File(launchPaths[i]).toURL();
+ }
+ URLClassLoader classLoader = new URLClassLoader(launchURLs, Object.class.getClassLoader());
+ Class<?> stepTwo = classLoader.loadClass("StepTwo");
+ Method main = stepTwo.getDeclaredMethod("main", String[].class);
+ main.invoke(null, (Object)(new String[]{}));
+ }
+}
diff --git a/test/files/run/t6240a/StepTwo.scala b/test/files/run/t6240a/StepTwo.scala
new file mode 100644
index 0000000000..fc3221921d
--- /dev/null
+++ b/test/files/run/t6240a/StepTwo.scala
@@ -0,0 +1,7 @@
+import java.io.File
+import java.net.URLClassLoader
+
+object StepTwo extends App {
+ import scala.reflect.runtime.universe._
+ println(typeOf[StepTwo.type])
+} \ No newline at end of file
diff --git a/test/files/run/t6240a/Test.scala b/test/files/run/t6240a/Test.scala
new file mode 100644
index 0000000000..05c3678cbe
--- /dev/null
+++ b/test/files/run/t6240a/Test.scala
@@ -0,0 +1,15 @@
+import java.io.File
+import scala.sys.process._
+
+object Test extends App {
+ def prop(key: String) = {
+ val value = System.getProperties.getProperty(key)
+ assert(value != null, key)
+ value
+ }
+ val testClassesDir = prop("partest.output")
+ assert(new File(testClassesDir).exists, testClassesDir)
+ val fullTestClassesClasspath = testClassesDir + prop("path.separator") + prop("java.class.path")
+ val javaBinary = if (new File(prop("javacmd")).isAbsolute) prop("javacmd") else prop("java.home") + "/bin/" + prop("javacmd")
+ List(javaBinary, "-cp", testClassesDir, "-Dlaunch.classpath=" + fullTestClassesClasspath, "StepOne").!
+} \ No newline at end of file
diff --git a/test/files/run/t6240b.check b/test/files/run/t6240b.check
new file mode 100644
index 0000000000..255836105a
--- /dev/null
+++ b/test/files/run/t6240b.check
@@ -0,0 +1 @@
+StepThree.type
diff --git a/test/files/run/t6240b/StepOne.java b/test/files/run/t6240b/StepOne.java
new file mode 100644
index 0000000000..342d617c79
--- /dev/null
+++ b/test/files/run/t6240b/StepOne.java
@@ -0,0 +1,41 @@
+import java.io.File;
+import java.io.IOException;
+import java.lang.ClassNotFoundException;
+import java.lang.NoSuchMethodException;
+import java.lang.IllegalAccessException;
+import java.lang.reflect.Method;
+import java.lang.reflect.InvocationTargetException;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.net.MalformedURLException;
+
+public class StepOne {
+ public static void main(String[] args)
+ throws MalformedURLException, ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, IOException {
+ String[] launchPaths = System.getProperty("launch.classpath").split(File.pathSeparator);
+
+ // move away StepThree
+ File tempDir = File.createTempFile("temp", Long.toString(System.nanoTime()));
+ System.setProperty("launch.step.three", tempDir.getAbsolutePath());
+ tempDir.delete();
+ tempDir.mkdir();
+ File[] testClasses = new File(launchPaths[0]).listFiles();
+ for (int i = 0; i < testClasses.length; i++) {
+ File testClass = testClasses[i];
+ if (testClass.getPath().contains("StepThree")) {
+ File testClassMoved = new File(tempDir.getAbsolutePath() + "/" + testClass.getName());
+ testClass.renameTo(testClassMoved);
+ }
+ }
+
+ // launch StepTwo
+ URL[] launchURLs = new URL[launchPaths.length];
+ for (int i = 0; i < launchPaths.length; i++) {
+ launchURLs[i] = new File(launchPaths[i]).toURL();
+ }
+ URLClassLoader classLoader = new URLClassLoader(launchURLs, Object.class.getClassLoader());
+ Class<?> stepTwo = classLoader.loadClass("StepTwo");
+ Method main = stepTwo.getDeclaredMethod("main", String[].class);
+ main.invoke(null, (Object)(new String[]{}));
+ }
+}
diff --git a/test/files/run/t6240b/StepThree.scala b/test/files/run/t6240b/StepThree.scala
new file mode 100644
index 0000000000..210795d68f
--- /dev/null
+++ b/test/files/run/t6240b/StepThree.scala
@@ -0,0 +1,4 @@
+object StepThree extends App {
+ import scala.reflect.runtime.universe._
+ println(typeOf[StepThree.type])
+} \ No newline at end of file
diff --git a/test/files/run/t6240b/StepTwo.scala b/test/files/run/t6240b/StepTwo.scala
new file mode 100644
index 0000000000..88e46492e3
--- /dev/null
+++ b/test/files/run/t6240b/StepTwo.scala
@@ -0,0 +1,10 @@
+import java.io.File
+import java.net.URLClassLoader
+
+object StepTwo extends App {
+ val classes = new File(System.getProperty("launch.step.three"))
+ val cl = new URLClassLoader(Array(classes.toURI.toURL), getClass.getClassLoader)
+ val stepThree = cl.loadClass("StepThree")
+ val main = stepThree.getDeclaredMethod("main", classOf[Array[String]])
+ main.invoke(null, Array[String]())
+} \ No newline at end of file
diff --git a/test/files/run/t6240b/Test.scala b/test/files/run/t6240b/Test.scala
new file mode 100644
index 0000000000..05c3678cbe
--- /dev/null
+++ b/test/files/run/t6240b/Test.scala
@@ -0,0 +1,15 @@
+import java.io.File
+import scala.sys.process._
+
+object Test extends App {
+ def prop(key: String) = {
+ val value = System.getProperties.getProperty(key)
+ assert(value != null, key)
+ value
+ }
+ val testClassesDir = prop("partest.output")
+ assert(new File(testClassesDir).exists, testClassesDir)
+ val fullTestClassesClasspath = testClassesDir + prop("path.separator") + prop("java.class.path")
+ val javaBinary = if (new File(prop("javacmd")).isAbsolute) prop("javacmd") else prop("java.home") + "/bin/" + prop("javacmd")
+ List(javaBinary, "-cp", testClassesDir, "-Dlaunch.classpath=" + fullTestClassesClasspath, "StepOne").!
+} \ No newline at end of file
diff --git a/test/files/run/t6260b.scala b/test/files/run/t6260b.scala
new file mode 100644
index 0000000000..dd2cf4bc4e
--- /dev/null
+++ b/test/files/run/t6260b.scala
@@ -0,0 +1,13 @@
+class C[A](val a: A) extends AnyVal
+
+class DD {
+ def foo(c: C[String]) = ()
+ def bar[A <: String](c: C[A]) = ()
+ def baz[A](c: C[A]) = ()
+}
+
+object Test extends App {
+ classOf[DD].getMethod("foo", classOf[String])
+ classOf[DD].getMethod("bar", classOf[String])
+ classOf[DD].getMethod("baz", classOf[Object])
+}
diff --git a/test/files/run/t6381.check b/test/files/run/t6381.check
index 5070b67e46..c9d4713aa8 100644
--- a/test/files/run/t6381.check
+++ b/test/files/run/t6381.check
@@ -4,8 +4,10 @@ Type :help for more information.
scala> import language.experimental.macros
import language.experimental.macros
-scala> def pos_impl(c: reflect.macros.Context): c.Expr[String] =
- c.literal(c.enclosingPosition.getClass.toString)
+scala> def pos_impl(c: reflect.macros.Context): c.Expr[String] = {
+ import c.universe._
+ c.Expr[String](Literal(Constant(c.enclosingPosition.getClass.toString)))
+}
pos_impl: (c: scala.reflect.macros.Context)c.Expr[String]
scala> def pos = macro pos_impl
diff --git a/test/files/run/t6381.scala b/test/files/run/t6381.scala
index 859ec3cb30..4c2a40fe87 100644
--- a/test/files/run/t6381.scala
+++ b/test/files/run/t6381.scala
@@ -3,8 +3,10 @@ import scala.tools.partest.ReplTest
object Test extends ReplTest {
def code = """
|import language.experimental.macros
- |def pos_impl(c: reflect.macros.Context): c.Expr[String] =
- | c.literal(c.enclosingPosition.getClass.toString)
+ |def pos_impl(c: reflect.macros.Context): c.Expr[String] = {
+ | import c.universe._
+ | c.Expr[String](Literal(Constant(c.enclosingPosition.getClass.toString)))
+ |}
|def pos = macro pos_impl
|pos
|""".stripMargin.trim
diff --git a/test/files/neg/t6385.scala b/test/files/run/t6385.scala
index cec58eec9e..24fc3cd768 100644
--- a/test/files/neg/t6385.scala
+++ b/test/files/run/t6385.scala
@@ -1,8 +1,8 @@
-object N {
+object Test {
def main(args: Array[String]) {
val y: AA[Int] = C(2)
val c: Int = y.x.y
- println(c)
+ assert(c == 2)
}
}
trait AA[T] extends Any {
diff --git a/test/files/run/t6555.scala b/test/files/run/t6555.scala
index b1a6137786..cc0e4d1bfa 100644
--- a/test/files/run/t6555.scala
+++ b/test/files/run/t6555.scala
@@ -3,7 +3,7 @@ import java.io.{Console => _, _}
object Test extends DirectTest {
- override def extraSettings: String = "-usejavacp -Xprint:specialize -d " + testOutput.path
+ override def extraSettings: String = "-usejavacp -Xprint:specialize -Ydelambdafy:inline -d " + testOutput.path
override def code = "class Foo { val f = (param: Int) => param } "
diff --git a/test/files/run/t7008-scala-defined/Impls_Macros_2.scala b/test/files/run/t7008-scala-defined/Impls_Macros_2.scala
index 94fd99018e..477829f200 100644
--- a/test/files/run/t7008-scala-defined/Impls_Macros_2.scala
+++ b/test/files/run/t7008-scala-defined/Impls_Macros_2.scala
@@ -3,9 +3,10 @@ import scala.reflect.macros.Context
object Macros {
def impl(c: Context) = {
+ import c.universe._
val decls = c.typeOf[ScalaClassWithCheckedExceptions_1[_]].declarations.toList
val s = decls.sortBy(_.name.toString).map(decl => (s"${decl.name}: ${decl.annotations}")).mkString(scala.compat.Platform.EOL)
- c.universe.reify(println(c.literal(s).splice))
+ reify(println(c.Expr[String](Literal(Constant(s))).splice))
}
def foo = macro impl
diff --git a/test/files/run/t7008/Impls_Macros_2.scala b/test/files/run/t7008/Impls_Macros_2.scala
index 7a17314085..63c3f9d696 100644
--- a/test/files/run/t7008/Impls_Macros_2.scala
+++ b/test/files/run/t7008/Impls_Macros_2.scala
@@ -3,9 +3,10 @@ import scala.reflect.macros.Context
object Macros {
def impl(c: Context) = {
+ import c.universe._
val decls = c.typeOf[JavaClassWithCheckedExceptions_1[_]].declarations.toList
val s = decls.sortBy(_.name.toString).map(decl => (s"${decl.name}: ${decl.annotations}")).mkString(scala.compat.Platform.EOL)
- c.universe.reify(println(c.literal(s).splice))
+ reify(println(c.Expr[String](Literal(Constant(s))).splice))
}
def foo = macro impl
diff --git a/test/files/run/t7045.check b/test/files/run/t7045.check
new file mode 100644
index 0000000000..28134535c8
--- /dev/null
+++ b/test/files/run/t7045.check
@@ -0,0 +1,2 @@
+D with C
+D with C
diff --git a/test/files/run/t7045.scala b/test/files/run/t7045.scala
new file mode 100644
index 0000000000..f41baca05e
--- /dev/null
+++ b/test/files/run/t7045.scala
@@ -0,0 +1,12 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{currentMirror => cm}
+
+class C
+class D { self: C => }
+
+object Test extends App {
+ val d = cm.staticClass("D")
+ println(d.selfType)
+ d.typeSignature
+ println(d.selfType)
+} \ No newline at end of file
diff --git a/test/files/run/t7047/Impls_Macros_1.scala b/test/files/run/t7047/Impls_Macros_1.scala
index 2992e3efe4..a5d55c3a42 100644
--- a/test/files/run/t7047/Impls_Macros_1.scala
+++ b/test/files/run/t7047/Impls_Macros_1.scala
@@ -12,7 +12,7 @@ object Macros {
} catch {
case _: Exception =>
}
- c.literalNull
+ c.Expr[Null](Literal(Constant(null)))
}
def foo = macro impl
diff --git a/test/files/run/t7240.check b/test/files/run/t7240.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t7240.check
+++ /dev/null
diff --git a/test/files/run/t7341.check b/test/files/run/t7341.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t7341.check
+++ /dev/null
diff --git a/test/files/run/t7375b/Macros_1.scala b/test/files/run/t7375b/Macros_1.scala
index 70e79cc2b4..7a307805db 100644
--- a/test/files/run/t7375b/Macros_1.scala
+++ b/test/files/run/t7375b/Macros_1.scala
@@ -11,7 +11,7 @@ object Macros {
def foo = macro impl
def impl(c: Context) = {
import c.universe._
- def test[T: c.TypeTag] = reify(println(c.literal(c.reifyRuntimeClass(c.typeOf[T]).toString).splice)).tree
+ def test[T: c.TypeTag] = reify(println(c.Expr[String](Literal(Constant(c.reifyRuntimeClass(c.typeOf[T]).toString))).splice)).tree
def tests = Block(List(test[C1], test[C2], test[F1], test[F2]), Literal(Constant(())))
c.Expr[Unit](tests)
}
diff --git a/test/files/run/t7510.check b/test/files/run/t7510.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t7510.check
+++ /dev/null
diff --git a/test/files/run/t7852.check b/test/files/run/t7852.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/t7852.check
+++ /dev/null
diff --git a/test/files/run/toolbox_parse_package.check b/test/files/run/toolbox_parse_package.check
new file mode 100644
index 0000000000..46465980a0
--- /dev/null
+++ b/test/files/run/toolbox_parse_package.check
@@ -0,0 +1,8 @@
+package foo {
+ object bar extends scala.AnyRef {
+ def <init>() = {
+ super.<init>();
+ ()
+ }
+ }
+}
diff --git a/test/files/run/toolbox_parse_package.scala b/test/files/run/toolbox_parse_package.scala
new file mode 100644
index 0000000000..62412a50d7
--- /dev/null
+++ b/test/files/run/toolbox_parse_package.scala
@@ -0,0 +1,9 @@
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.{universe => ru}
+import scala.reflect.runtime.{currentMirror => cm}
+import scala.tools.reflect.ToolBox
+
+object Test extends App {
+ val toolbox = cm.mkToolBox()
+ println(toolbox.parse("package foo { object bar }"))
+} \ No newline at end of file
diff --git a/test/files/run/typetags_without_scala_reflect_manifest_lookup.check b/test/files/run/typetags_without_scala_reflect_manifest_lookup.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/files/run/typetags_without_scala_reflect_manifest_lookup.check
+++ /dev/null
diff --git a/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala b/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala
index 255c04498e..691a3e961e 100644
--- a/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala
@@ -24,6 +24,8 @@ abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("Pa
def hasStrictOrder = true
+ def tasksupport: TaskSupport
+
def ofSize(vals: Seq[Gen[T]], sz: Int) = {
val a = new mutable.ArrayBuffer[T](sz)
val gen = vals(rnd.nextInt(vals.size))
@@ -33,6 +35,7 @@ abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("Pa
def fromSeq(a: Seq[T]) = {
val pa = new ParArray[T](a.size)
+ pa.tasksupport = tasksupport
var i = 0
for (elem <- a.toList) {
pa(i) = elem
@@ -50,7 +53,7 @@ abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("Pa
}
-object IntParallelArrayCheck extends ParallelArrayCheck[Int]("Int") with IntSeqOperators with IntValues {
+class IntParallelArrayCheck(val tasksupport: TaskSupport) extends ParallelArrayCheck[Int]("Int") with IntSeqOperators with IntValues {
override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
(0 until sz).toArray.toSeq
}, sized { sz =>
diff --git a/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala b/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
index b952704af2..cf15afb3b9 100644
--- a/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala
@@ -25,6 +25,8 @@ abstract class ParallelConcurrentTrieMapCheck[K, V](tp: String) extends Parallel
def hasStrictOrder = false
+ def tasksupport: TaskSupport
+
def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
val ct = new concurrent.TrieMap[K, V]
val gen = vals(rnd.nextInt(vals.size))
@@ -34,6 +36,7 @@ abstract class ParallelConcurrentTrieMapCheck[K, V](tp: String) extends Parallel
def fromTraversable(t: Traversable[(K, V)]) = {
val pct = new ParTrieMap[K, V]
+ pct.tasksupport = tasksupport
var i = 0
for (kv <- t.toList) {
pct += kv
@@ -45,7 +48,7 @@ abstract class ParallelConcurrentTrieMapCheck[K, V](tp: String) extends Parallel
}
-object IntIntParallelConcurrentTrieMapCheck extends ParallelConcurrentTrieMapCheck[Int, Int]("Int, Int")
+class IntIntParallelConcurrentTrieMapCheck(val tasksupport: TaskSupport) extends ParallelConcurrentTrieMapCheck[Int, Int]("Int, Int")
with PairOperators[Int, Int]
with PairValues[Int, Int]
{
diff --git a/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala b/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala
index 9299a201a1..34b3f33de2 100644
--- a/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelHashMapCheck.scala
@@ -24,6 +24,8 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
def hasStrictOrder = false
+ def tasksupport: TaskSupport
+
def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
val hm = new mutable.HashMap[K, V]
val gen = vals(rnd.nextInt(vals.size))
@@ -33,6 +35,7 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
def fromTraversable(t: Traversable[(K, V)]) = {
val phm = new ParHashMap[K, V]
+ phm.tasksupport = tasksupport
var i = 0
for (kv <- t.toList) {
phm += kv
@@ -44,7 +47,7 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
}
-object IntIntParallelHashMapCheck extends ParallelHashMapCheck[Int, Int]("Int, Int")
+class IntIntParallelHashMapCheck(val tasksupport: TaskSupport) extends ParallelHashMapCheck[Int, Int]("Int, Int")
with PairOperators[Int, Int]
with PairValues[Int, Int]
{
diff --git a/test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala b/test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala
index 8b41908a26..91de2472a7 100644
--- a/test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelHashSetCheck.scala
@@ -24,6 +24,8 @@ abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("
def hasStrictOrder = false
+ def tasksupport: TaskSupport
+
def ofSize(vals: Seq[Gen[T]], sz: Int) = {
val hm = new mutable.HashSet[T]
val gen = vals(rnd.nextInt(vals.size))
@@ -32,19 +34,20 @@ abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("
}
def fromTraversable(t: Traversable[T]) = {
- val phm = new ParHashSet[T]
+ val phs = new ParHashSet[T]
+ phs.tasksupport = tasksupport
var i = 0
for (kv <- t.toList) {
- phm += kv
+ phs += kv
i += 1
}
- phm
+ phs
}
}
-object IntParallelHashSetCheck extends ParallelHashSetCheck[Int]("Int")
+class IntParallelHashSetCheck(val tasksupport: TaskSupport) extends ParallelHashSetCheck[Int]("Int")
with IntOperators
with IntValues
{
diff --git a/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala b/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala
index bbec52dc92..9e29be5429 100644
--- a/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelHashTrieCheck.scala
@@ -24,6 +24,8 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
def hasStrictOrder = false
+ def tasksupport: TaskSupport
+
def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = {
var hm = new immutable.HashMap[K, V]
val gen = vals(rnd.nextInt(vals.size))
@@ -33,6 +35,7 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
def fromTraversable(t: Traversable[(K, V)]) = {
var phm = new ParHashMap[K, V]
+ phm.tasksupport = tasksupport
var i = 0
for (kv <- t.toList) {
phm += kv
@@ -44,7 +47,7 @@ abstract class ParallelHashMapCheck[K, V](tp: String) extends ParallelMapCheck[K
}
-object IntIntParallelHashMapCheck extends ParallelHashMapCheck[Int, Int]("Int, Int")
+class IntIntParallelHashMapCheck(val tasksupport: TaskSupport) extends ParallelHashMapCheck[Int, Int]("Int, Int")
with PairOperators[Int, Int]
with PairValues[Int, Int]
{
@@ -76,6 +79,8 @@ abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("
def hasStrictOrder = false
+ def tasksupport: TaskSupport
+
def ofSize(vals: Seq[Gen[T]], sz: Int) = {
var hm = new immutable.HashSet[T]
val gen = vals(rnd.nextInt(vals.size))
@@ -84,13 +89,14 @@ abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("
}
def fromTraversable(t: Traversable[T]) = {
- var phm = new ParHashSet[T]
+ var phs = new ParHashSet[T]
+ phs.tasksupport = tasksupport
var i = 0
for (kv <- t.toList) {
- phm += kv
+ phs += kv
i += 1
}
- phm
+ phs
}
override def printDataStructureDebugInfo(ds: AnyRef) = ds match {
@@ -103,7 +109,7 @@ abstract class ParallelHashSetCheck[T](tp: String) extends ParallelSetCheck[T]("
}
-object IntParallelHashSetCheck extends ParallelHashSetCheck[Int]("Int")
+class IntParallelHashSetCheck(val tasksupport: TaskSupport) extends ParallelHashSetCheck[Int]("Int")
with IntOperators
with IntValues
{
diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
index 26fa71d72c..774d6f428b 100644
--- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala
@@ -358,30 +358,35 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col
}
property("++s must be equal") = forAll(collectionTriplets) { case (t, coll, colltoadd) =>
- val toadd = colltoadd
- val tr = t ++ toadd.iterator
- val cr = coll ++ toadd.iterator
- if (!areEqual(tr, cr)) {
- println("from: " + t)
- println("and: " + coll.iterator.toList)
- println("adding: " + toadd)
- println(tr.toList)
- println(cr.iterator.toList)
- }
- ("adding " |: areEqual(tr, cr)) &&
- (for ((trav, ind) <- (addAllTraversables).zipWithIndex) yield {
- val tadded = t ++ trav
- val cadded = coll ++ collection.parallel.mutable.ParArray(trav.toSeq: _*)
- if (!areEqual(tadded, cadded)) {
- println("----------------------")
+ try {
+ val toadd = colltoadd
+ val tr = t ++ toadd.iterator
+ val cr = coll ++ toadd.iterator
+ if (!areEqual(tr, cr)) {
println("from: " + t)
- println("and: " + coll)
- println("adding: " + trav)
- println(tadded)
- println(cadded)
+ println("and: " + coll.iterator.toList)
+ println("adding: " + toadd)
+ println(tr.toList)
+ println(cr.iterator.toList)
}
- ("traversable " + ind) |: areEqual(tadded, cadded)
- }).reduceLeft(_ && _)
+ ("adding " |: areEqual(tr, cr)) &&
+ (for ((trav, ind) <- (addAllTraversables).zipWithIndex) yield {
+ val tadded = t ++ trav
+ val cadded = coll ++ collection.parallel.mutable.ParArray(trav.toSeq: _*)
+ if (!areEqual(tadded, cadded)) {
+ println("----------------------")
+ println("from: " + t)
+ println("and: " + coll)
+ println("adding: " + trav)
+ println(tadded)
+ println(cadded)
+ }
+ ("traversable " + ind) |: areEqual(tadded, cadded)
+ }).reduceLeft(_ && _)
+ } catch {
+ case e: java.lang.Exception =>
+ throw e
+ }
}
if (hasStrictOrder) property("copies to array must be equal") = forAll(collectionPairs) { case (t, coll) =>
diff --git a/test/files/scalacheck/parallel-collections/ParallelRangeCheck.scala b/test/files/scalacheck/parallel-collections/ParallelRangeCheck.scala
index 372d6b9fbd..f490d9490a 100644
--- a/test/files/scalacheck/parallel-collections/ParallelRangeCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelRangeCheck.scala
@@ -17,7 +17,7 @@ import scala.collection.parallel.ops._
-object ParallelRangeCheck extends ParallelSeqCheck[Int]("ParallelRange[Int]") with ops.IntSeqOperators {
+class ParallelRangeCheck(val tasksupport: TaskSupport) extends ParallelSeqCheck[Int]("ParallelRange[Int]") with ops.IntSeqOperators {
// ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2)
// ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2)
@@ -38,9 +38,13 @@ object ParallelRangeCheck extends ParallelSeqCheck[Int]("ParallelRange[Int]") wi
}
def fromSeq(a: Seq[Int]) = a match {
- case r: Range => ParRange(r.start, r.end, r.step, false)
+ case r: Range =>
+ val pr = ParRange(r.start, r.end, r.step, false)
+ pr.tasksupport = tasksupport
+ pr
case _ =>
val pa = new parallel.mutable.ParArray[Int](a.length)
+ pa.tasksupport = tasksupport
for (i <- 0 until a.length) pa(i) = a(i)
pa
}
diff --git a/test/files/scalacheck/parallel-collections/ParallelVectorCheck.scala b/test/files/scalacheck/parallel-collections/ParallelVectorCheck.scala
index a2b6cef96d..bbebd51919 100644
--- a/test/files/scalacheck/parallel-collections/ParallelVectorCheck.scala
+++ b/test/files/scalacheck/parallel-collections/ParallelVectorCheck.scala
@@ -17,6 +17,8 @@ import scala.collection.parallel.ops._
import immutable.Vector
import immutable.VectorBuilder
+import scala.collection.parallel.TaskSupport
+
@@ -30,6 +32,8 @@ abstract class ParallelVectorCheck[T](tp: String) extends collection.parallel.Pa
def hasStrictOrder = true
+ def tasksupport: TaskSupport
+
def ofSize(vals: Seq[Gen[T]], sz: Int) = {
val vb = new immutable.VectorBuilder[T]()
val gen = vals(rnd.nextInt(vals.size))
@@ -38,16 +42,18 @@ abstract class ParallelVectorCheck[T](tp: String) extends collection.parallel.Pa
}
def fromSeq(a: Seq[T]) = {
- val pa = ParVector.newCombiner[T]
- for (elem <- a.toList) pa += elem
- pa.result
+ val pc = ParVector.newCombiner[T]
+ for (elem <- a.toList) pc += elem
+ val pv = pc.result
+ pv.tasksupport = tasksupport
+ pv
}
}
-object IntParallelVectorCheck extends ParallelVectorCheck[Int]("Int") with IntSeqOperators with IntValues {
+class IntParallelVectorCheck(val tasksupport: TaskSupport) extends ParallelVectorCheck[Int]("Int") with IntSeqOperators with IntValues {
override def instances(vals: Seq[Gen[Int]]) = oneOf(super.instances(vals), sized { sz =>
(0 until sz).toArray.toSeq
}, sized { sz =>
diff --git a/test/files/scalacheck/parallel-collections/pc.scala b/test/files/scalacheck/parallel-collections/pc.scala
index c588692fd2..a3c1df4054 100644
--- a/test/files/scalacheck/parallel-collections/pc.scala
+++ b/test/files/scalacheck/parallel-collections/pc.scala
@@ -6,35 +6,52 @@
import org.scalacheck._
import scala.collection.parallel._
-class ParCollProperties extends Properties("Parallel collections") {
- /* Collections */
+// package here to be able to access the package-private implementation and shut down the pool
+package scala {
- // parallel arrays
- include(mutable.IntParallelArrayCheck)
+ class ParCollProperties extends Properties("Parallel collections") {
+
+ def includeAllTestsWith(support: TaskSupport) {
+ // parallel arrays with default task support
+ include(new mutable.IntParallelArrayCheck(support))
+
+ // parallel ranges
+ include(new immutable.ParallelRangeCheck(support))
+
+ // parallel immutable hash maps (tries)
+ include(new immutable.IntIntParallelHashMapCheck(support))
+
+ // parallel immutable hash sets (tries)
+ include(new immutable.IntParallelHashSetCheck(support))
+
+ // parallel mutable hash maps (tables)
+ include(new mutable.IntIntParallelHashMapCheck(support))
+
+ // parallel ctrie
+ include(new mutable.IntIntParallelConcurrentTrieMapCheck(support))
+
+ // parallel mutable hash sets (tables)
+ include(new mutable.IntParallelHashSetCheck(support))
+
+ // parallel vectors
+ include(new immutable.IntParallelVectorCheck(support))
+ }
+
+ includeAllTestsWith(defaultTaskSupport)
+
+ val ec = scala.concurrent.ExecutionContext.fromExecutorService(java.util.concurrent.Executors.newFixedThreadPool(5))
+ val ectasks = new collection.parallel.ExecutionContextTaskSupport(ec)
+ includeAllTestsWith(ectasks)
- // parallel ranges
- include(immutable.ParallelRangeCheck)
-
- // parallel immutable hash maps (tries)
- include(immutable.IntIntParallelHashMapCheck)
-
- // parallel immutable hash sets (tries)
- include(immutable.IntParallelHashSetCheck)
-
- // parallel mutable hash maps (tables)
- include(mutable.IntIntParallelHashMapCheck)
-
- // parallel ctrie
- include(mutable.IntIntParallelConcurrentTrieMapCheck)
-
- // parallel mutable hash sets (tables)
- include(mutable.IntParallelHashSetCheck)
+ // no post-test hooks in ScalaCheck, so we cannot do:
+ // ec.shutdown()
+
+ }
- // parallel vectors
- include(immutable.IntParallelVectorCheck)
}
-object Test extends ParCollProperties {
+
+object Test extends scala.ParCollProperties {
/*
def main(args: Array[String]) {
val pc = new ParCollProperties
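
The pc.scala change above parameterizes every check over a TaskSupport so that the same properties run both on the default pool and on an ExecutionContextTaskSupport backed by a fixed thread pool. As a minimal, standalone sketch of that mechanism (the object name below is made up for illustration; the API calls are the standard parallel-collections ones):

import scala.collection.parallel.ExecutionContextTaskSupport
import scala.concurrent.ExecutionContext
import java.util.concurrent.Executors

object TaskSupportDemo {
  def main(args: Array[String]): Unit = {
    val ec = ExecutionContext.fromExecutorService(Executors.newFixedThreadPool(5))
    val pc = (1 to 100).toVector.par                       // a ParVector on the default pool
    pc.tasksupport = new ExecutionContextTaskSupport(ec)   // route its tasks to the custom pool
    println(pc.map(_ * 2).sum)                             // this now runs on the 5-thread pool
    ec.shutdown()                                          // unlike the test suite, we can shut down here
  }
}
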
diff --git a/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala b/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala
index 23b6a5fbdb..4118d92076 100644
--- a/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala
+++ b/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala
@@ -257,7 +257,7 @@ trait ArbitraryTreesAndNames {
/* These are marker types that allow to write tests that
* depend specificly on Trees that are terms or types.
- * They are transperantly tranformed to trees through
+ * They are transparently transformed to trees through
* implicit conversions and liftables for quasiquotes.
*/
diff --git a/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala b/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala
index 153e23d947..e8ddb4b72a 100644
--- a/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/DefinitionConstructionProps.scala
@@ -13,6 +13,7 @@ object DefinitionConstructionProps
with TraitConstruction
with TypeDefConstruction
with ValDefConstruction
+ with PackageConstruction
trait ClassConstruction { self: QuasiquoteProperties =>
val anyRef = ScalaDot(TypeName("AnyRef"))
@@ -39,7 +40,9 @@ trait ClassConstruction { self: QuasiquoteProperties =>
assertEqAst(q"class Foo extends ..$parents", "class Foo")
}
- property("splice term name into class") = forAll { (name: TypeName) =>
+ property("splice term name into class") = forAll { (rname: TypeName) =>
+ // add a prefix to avoid failure in case rname is a keyword
+ val name = TypeName("prefix$" + rname)
eqAst(q"class $name", "class " + name.toString)
}
@@ -290,4 +293,53 @@ trait MethodConstruction { self: QuasiquoteProperties =>
val a = q"new Foo(a)(b)"
assertEqAst(q"@$a def foo", "@Foo(a)(b) def foo")
}
+}
+
+trait PackageConstruction { self: QuasiquoteProperties =>
+ property("splice select into package name") = test {
+ val name = q"foo.bar"
+ assertEqAst(q"package $name { }", "package foo.bar { }")
+ }
+
+ property("splce name into package name") = test{
+ val name = TermName("bippy")
+ assertEqAst(q"package $name { }", "package bippy { }")
+ }
+
+ property("splice members into package body") = test {
+ val members = q"class C" :: q"object O" :: Nil
+ assertEqAst(q"package foo { ..$members }", "package foo { class C; object O }")
+ }
+
+ property("splice illegal members into package body") = test {
+ val f = q"def f"
+ assertThrows[IllegalArgumentException] { q"package foo { $f }" }
+ val v = q"val v = 0"
+ assertThrows[IllegalArgumentException] { q"package foo { $v }" }
+ val expr = q"x + 1"
+ assertThrows[IllegalArgumentException] { q"package foo { $expr }" }
+ }
+
+ property("splice name into package object") = test {
+ val foo = TermName("foo")
+ assertEqAst(q"package object $foo", "package object foo")
+ }
+
+ property("splice parents into package object") = test {
+ val parents = tq"a" :: tq"b" :: Nil
+ assertEqAst(q"package object foo extends ..$parents",
+ "package object foo extends a with b")
+ }
+
+ property("splice members into package object") = test {
+ val members = q"def foo" :: q"val x = 1" :: Nil
+ assertEqAst(q"package object foo { ..$members }",
+ "package object foo { def foo; val x = 1 }")
+ }
+
+ property("splice early def into package object") = test {
+ val edefs = q"val x = 1" :: q"type I = Int" :: Nil
+ assertEqAst(q"package object foo extends { ..$edefs } with Any",
+ "package object foo extends { val x = 1; type I = Int } with Any")
+ }
+}
\ No newline at end of file
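
The new PackageConstruction trait exercises the quasiquote support for packages and package objects introduced by this change. As a rough standalone illustration (the object name and spliced members below are invented for the example; it assumes scala-reflect on the classpath and the package-quasiquote support added here):

import scala.reflect.runtime.universe._

object PackageQuasiquoteDemo extends App {
  // splice a dotted name and a list of members into a package definition,
  // mirroring what the properties above assert
  val name    = q"foo.bar"
  val members = q"class C" :: q"object O" :: Nil
  println(show(q"package $name { ..$members }"))

  // package objects accept spliced parents the same way
  val parents = tq"a" :: tq"b" :: Nil
  println(show(q"package object baz extends ..$parents"))
}
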
diff --git a/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala
index fdfbfe871c..993ef899b0 100644
--- a/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/DefinitionDeconstructionProps.scala
@@ -13,6 +13,7 @@ object DefinitionDeconstructionProps
with ObjectDeconstruction
with ModsDeconstruction
with ValVarDeconstruction
+ with PackageDeconstruction
trait TraitDeconstruction { self: QuasiquoteProperties =>
property("exhaustive trait matcher") = test {
@@ -144,4 +145,27 @@ trait ValVarDeconstruction { self: QuasiquoteProperties =>
matches("var x = 1")
assertThrows[MatchError] { matches("val x = 1") }
}
+}
+
+trait PackageDeconstruction { self: QuasiquoteProperties =>
+ property("exhaustive package matcher") = test {
+ def matches(line: String) { val q"package $name { ..$body }" = parse(line) }
+ matches("package foo { }")
+ matches("package foo { class C }")
+ matches("package foo.bar { }")
+ matches("package bippy.bongo { object A; object B }")
+ matches("package bippy { package bongo { object O } }")
+ }
+
+ property("exhaustive package object matcher") = test {
+ def matches(line: String) {
+ val q"package object $name extends { ..$early } with ..$parents { $self => ..$body }" = parse(line)
+ }
+ matches("package object foo")
+ matches("package object foo { def baz }")
+ matches("package object foo { self => }")
+ matches("package object foo extends mammy with daddy { def baz }")
+ matches("package object foo extends { val early = 1 } with daddy")
+ assertThrows[MatchError] { matches("object foo") }
+ }
+}
\ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/ErrorProps.scala b/test/files/scalacheck/quasiquotes/ErrorProps.scala
index b9e69e0e88..b0a7641577 100644
--- a/test/files/scalacheck/quasiquotes/ErrorProps.scala
+++ b/test/files/scalacheck/quasiquotes/ErrorProps.scala
@@ -41,10 +41,7 @@ object ErrorProps extends QuasiquoteProperties("errors") {
property("@..$first @$rest def foo") = fails(
"Can't extract with .. here",
"""
- val a = annot("a")
- val b = annot("b")
- val c = annot("c")
- val q"@..$first @$rest def foo" = q"@$a @$b @$c def foo"
+ q"@a @b @c def foo" match { case q"@..$first @$rest def foo" => }
""")
property("only literal string arguments") = fails(
diff --git a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
index 753ad1aa59..f68656d0f7 100644
--- a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
@@ -7,7 +7,6 @@ import scala.reflect.runtime.universe._
import Flag._
object TermConstructionProps extends QuasiquoteProperties("term construction") {
-
property("splice single tree return tree itself") = forAll { (t: Tree) =>
q"$t" ≈ t
}
@@ -191,4 +190,22 @@ object TermConstructionProps extends QuasiquoteProperties("term construction") {
val assignx = q"x = 1"
assertEqAst(q"f($assignx)", "f(x = 1)")
}
+
+ property("fresh names are regenerated at each evaluation") = test {
+ def plusOne = q"{ _ + 1 }"
+ assert(!(plusOne ≈ plusOne))
+ def whileTrue = q"while(true) false"
+ assert(!(whileTrue ≈ whileTrue))
+ def withEvidence = q"def foo[T: X]"
+ assert(!(withEvidence ≈ withEvidence))
+ }
+
+ property("make sure inference doesn't infer any") = test {
+ val l1 = List(q"foo")
+ val l2 = List(q"bar")
+ val baz = q"baz"
+ assert(q"f(..${l1 ++ l2})" ≈ q"f(foo, bar)")
+ assert(q"f(..${l1 ++ l2}, $baz)" ≈ q"f(foo, bar, baz)")
+ assert(q"f(${if (true) q"a" else q"b"})" ≈ q"f(a)")
+ }
}
diff --git a/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
index 22d4b1ce4f..f37e4d9975 100644
--- a/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
+++ b/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
@@ -8,9 +8,8 @@ import Flag._
object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction") {
property("f(..x) = f") = test {
- assertThrows[MatchError] {
- val q"f(..$argss)" = q"f"
- }
+ val q"f(..$args)" = q"f"
+ assert(args ≈ Nil)
}
property("f(x)") = forAll { (x: Tree) =>
@@ -88,7 +87,7 @@ object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction
matches("new foo with bar")
matches("new foo with bar { body }")
matches("new { anonymous }")
- matches("new { val early = 1} with Parent[Int] { body }")
+ matches("new { val early = 1 } with Parent[Int] { body }")
matches("new Foo { selfie => }")
}
@@ -111,4 +110,26 @@ object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction
assert(left ≈ q"foo(bar)")
assert(value ≈ q"baz")
}
+
+ property("deconstruct while loop") = test {
+ val q"while($cond) $body" = parse("while(cond) body")
+ assert(cond ≈ q"cond")
+ assert(body ≈ q"body")
+ }
+
+ property("deconstruct do while loop") = test {
+ val q"do $body while($cond)" = parse("do body while(cond)")
+ assert(cond ≈ q"cond")
+ assert(body ≈ q"body")
+ }
+
+ property("deconstruct anonymous function with placeholders") = test {
+ val q"{ $f(_) }" = q"{ foo(_) }"
+ assert(f ≈ q"foo")
+ val q"{ _.$member }" = q"{ _.foo }"
+ assert(member ≈ TermName("foo"))
+ val q"{ _ + $x }" = q"{ _ + x }"
+ assert(x ≈ q"x")
+ val q"{ _ * _ }" = q"{ _ * _ }"
+ }
}
diff --git a/test/files/specialized/constant_lambda.check b/test/files/specialized/constant_lambda.check
new file mode 100644
index 0000000000..4b095fd0ff
--- /dev/null
+++ b/test/files/specialized/constant_lambda.check
@@ -0,0 +1,2 @@
+false
+false
diff --git a/test/files/specialized/constant_lambda.scala b/test/files/specialized/constant_lambda.scala
new file mode 100644
index 0000000000..bb9a97403e
--- /dev/null
+++ b/test/files/specialized/constant_lambda.scala
@@ -0,0 +1,16 @@
+// during development of late delambdafy there was a problem where
+// specialization would undo some of the work done in uncurry if the body of the
+// lambda had a constant type. That would result in a compiler crash
+// when the delambdafy phase got a tree shape it didn't understand
+class X[@specialized(Int) A] {
+ val f = { x: A => false }
+}
+
+object Test {
+ def main(args: Array[String]) {
+ val xInt = new X[Int]
+ println(xInt.f(42))
+ val xString = new X[String]
+ println(xString.f("hello"))
+ }
+}
\ No newline at end of file
diff --git a/test/junit/scala/collection/convert/MapWrapperTest.scala b/test/junit/scala/collection/convert/MapWrapperTest.scala
new file mode 100644
index 0000000000..060b6b5937
--- /dev/null
+++ b/test/junit/scala/collection/convert/MapWrapperTest.scala
@@ -0,0 +1,49 @@
+package scala.collection.convert
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+
+@RunWith(classOf[JUnit4])
+class MapWrapperTest {
+
+ /* Test for SI-7883 */
+ @Test
+ def testContains() {
+ import scala.collection.JavaConverters.mapAsJavaMapConverter
+ import scala.language.reflectiveCalls // for accessing containsCounter
+
+ // A HashMap which throws an exception when the iterator() method is called.
+ // Before the fix for SI-7883, calling MapWrapper.containsKey() used to
+ // iterate through every element of the wrapped Map, and thus would crash
+ // in this case.
+ val scalaMap = new scala.collection.mutable.HashMap[String, String] {
+ var containsCounter = 0 // keep track of how often contains() has been called.
+ override def iterator = throw new UnsupportedOperationException
+
+ override def contains(key: String): Boolean = {
+ containsCounter += 1
+ super.contains(key)
+ }
+ }
+
+ val javaMap = scalaMap.asJava
+
+ scalaMap("hello") = "world"
+ scalaMap(null) = "null's value"
+
+ assertEquals(0, scalaMap.containsCounter)
+ assertTrue(javaMap.containsKey("hello")) // positive test
+ assertTrue(javaMap.containsKey(null)) // positive test, null key
+
+ assertFalse(javaMap.containsKey("goodbye")) // negative test
+ // Note: this case does NOT make it to scalaMap's contains() method because the runtime
+ // cast fails in MapWrapper, so the containsCounter is not incremented in this case.
+ assertFalse(javaMap.containsKey(42)) // negative test, wrong key type
+
+ assertEquals(Some("null's value"), scalaMap.remove(null))
+ assertFalse(javaMap.containsKey(null)) // negative test, null key
+ assertEquals(4, scalaMap.containsCounter)
+ }
+}
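
The comments in the test above describe the SI-7883 fix: containsKey should delegate to the wrapped Scala map's own contains rather than walk its iterator. A minimal sketch of that delegation pattern, purely illustrative and not the actual MapWrapper source, looks like this:

// Illustrative only: delegate key lookups to the wrapped Scala map instead of
// iterating every entry, and treat a key of the wrong type as simply absent.
class SketchMapWrapper[K, V](underlying: scala.collection.Map[K, V]) {
  def containsKey(key: AnyRef): Boolean =
    try underlying.contains(key.asInstanceOf[K])   // typically O(1) for hash maps
    catch { case _: ClassCastException => false }  // wrong key type => not contained
}
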
diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
index 285e87e3b2..a3699a4eeb 100644
--- a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
+++ b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala
@@ -85,5 +85,7 @@ class SymbolTableForUnitTesting extends SymbolTable {
}
lazy val treeInfo: scala.reflect.internal.TreeInfo{val global: SymbolTableForUnitTesting.this.type} = ???
+ val currentFreshNameCreator = new reflect.internal.util.FreshNameCreator
+
phase = SomePhase
}
diff --git a/test/pending/run/macro-term-declared-in-anonymous-explicit-import.check b/test/pending/run/macro-term-declared-in-anonymous-explicit-import.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/pending/run/macro-term-declared-in-anonymous-explicit-import.check
+++ /dev/null
diff --git a/test/pending/run/t5866b.scala b/test/pending/run/t5866b.scala
new file mode 100644
index 0000000000..44d8b114b8
--- /dev/null
+++ b/test/pending/run/t5866b.scala
@@ -0,0 +1,17 @@
+class Foo(val d: Double) extends AnyVal {
+ override def toString = s"Foo($d)"
+}
+
+class Bar(val d: String) extends AnyVal {
+ override def toString = s"Foo($d)"
+}
+
+object Test {
+ def main(args: Array[String]): Unit = {
+ val f: Foo = {val n: Any = null; n.asInstanceOf[Foo]}
+ println(f)
+
+ val b: Bar = {val n: Any = null; n.asInstanceOf[Bar]}
+ println(b)
+ }
+}
diff --git a/test/pending/run/virtpatmat_anonfun_underscore.check b/test/pending/run/virtpatmat_anonfun_underscore.check
deleted file mode 100644
index e69de29bb2..0000000000
--- a/test/pending/run/virtpatmat_anonfun_underscore.check
+++ /dev/null
diff --git a/tools/buildcp b/tools/buildcp
deleted file mode 100755
index 3ae70e10a3..0000000000
--- a/tools/buildcp
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-#
-
-[[ $# -eq 1 ]] || { echo "Usage: $0 <locker|quick|...>"; exit 0; }
-
-dir=$(dirname $0)
-lib=$($dir/abspath $dir/../lib)
-build=$($dir/abspath $dir/../build)
-cp=$($dir/cpof $build/$1/classes):$build/asm/classes
-
-echo $cp:$lib/forkjoin.jar:$lib/jline.jar:$lib/extra/'*'
diff --git a/tools/diffPickled b/tools/diffPickled
deleted file mode 100755
index b4a345dc7d..0000000000
--- a/tools/diffPickled
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/bin/sh
-#
-# Shows the difference in pickler output between two variations on a class.
-#
-# If quick and strap are built normally you can run
-#
-# diffPickled foo.bar.Baz
-#
-# to see any differences between them in that class.
-
-USAGE="Usage: $0 classpath1 classpath2 class"
-TOOLSDIR=`dirname $0`
-BUILDDIR="${TOOLSDIR}/../build"
-QUICKDIR="${BUILDDIR}/quick"
-STRAPDIR="${BUILDDIR}/strap"
-
-CP1=""
-CP2=""
-CLASS=""
-
-if [ $# == 1 ] ; then
- if [ -e ${QUICKDIR} ] && [ -e ${STRAPDIR} ] ; then
- CP1=`${TOOLSDIR}/quickcp`
- CP2=`${TOOLSDIR}/strapcp`
- CLASS=$1
- else
- echo $USAGE
- echo "(If only one argument is given, $QUICKDIR and $STRAPDIR must exist.)"
- exit 1
- fi
-elif [ $# == 3 ] ; then
- CP1=$1
- CP2=$2
- CLASS=$3
-else
- echo $USAGE
- exit 1
-fi
-
-TMPDIR="/tmp/scala_pickle_diff${RANDOM}"
-
-if mkdir -m 0700 "$TMPDIR" 2>/dev/null ; then
- ${TOOLSDIR}/showPickled -cp $CP1 $CLASS > "${TMPDIR}/out1.txt"
- ${TOOLSDIR}/showPickled -cp $CP2 $CLASS > "${TMPDIR}/out2.txt"
- diff "${TMPDIR}/out1.txt" "${TMPDIR}/out2.txt"
- rm -rf ${TMPDIR}
-else
- echo "Failed to create temporary directory ${TMPDIR}."
- exit 1
-fi
-
diff --git a/tools/epfl-build b/tools/epfl-build
deleted file mode 100755
index dd66307de3..0000000000
--- a/tools/epfl-build
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env bash
-#
-# builds nightlies
-
-[[ $# -gt 0 ]] || {
- cat <<EOM
-Usage: $0 <version> [opt opt ...]
-
- Everything after the version is supplied to scalac and partest.
- Example: $0 -Xcheckinit -Ycheck:all
-
-Environment variables:
- extra_ant_targets Additional ant targets to run after nightly
-
-EOM
- exit 0
-}
-
-# version isn't actually used at present.
-scalaVersion="$1" && shift
-scalaArgs="-Dscalac.args=\"$@\" -Dpartest.scalac_opts=\"$@\""
-
-ant all.clean && ./pull-binary-libs.sh
-
-ant $scalaArgs build-opt &&
-ant $scalaArgs nightly &&
-for target in $extra_ant_targets; do ant $target ; done
-# [[ -n "$BUILD_DOCSCOMP" ]] && ant docscomp
diff --git a/tools/epfl-publish b/tools/epfl-publish
deleted file mode 100755
index cdf18823a5..0000000000
--- a/tools/epfl-publish
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/usr/bin/env bash
-#
-# publishes nightly build if $publish_to is set in environment.
-#
-
-[[ $# -eq 1 ]] || {
- cat <<EOM
-Usage: $0 <scala version>
-
-Environment variables:
- publish_to rsync destination
-EOM
- exit 0
-}
-version="$1"
-
-[[ -d dists/archives ]] || {
- echo "Can't find build, has it completed? No directory at dists/archives"
- exit 1
-}
-
-if [[ -z $publish_to ]]; then
- echo "Nothing to publish."
-else
- echo "Publishing nightly build to $publish_to"
- # Archive Scala nightly distribution
- rsync -az --exclude scala-latest-sources.tgz dists/archives/ "$publish_to/distributions"
- # only publish scaladoc nightly for trunk
- [[ $version == "master" ]] && rsync -az build/scaladoc/ "$publish_to/docs"
- # sbaz
- [[ -d dists/sbaz ]] && rsync -az dists/sbaz/ "$publish_to/sbaz"
-fi
diff --git a/tools/locker_scala b/tools/locker_scala
deleted file mode 100755
index 02d2efcdd8..0000000000
--- a/tools/locker_scala
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-#
-
-CP=$($(dirname $BASH_SOURCE)/lockercp)
-
-java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.MainGenericRunner -usejavacp "$@"
diff --git a/tools/locker_scalac b/tools/locker_scalac
deleted file mode 100755
index c4b28b7bc0..0000000000
--- a/tools/locker_scalac
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-#
-
-CP=$($(dirname $BASH_SOURCE)/lockercp)
-
-java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.Main -usejavacp "$@"
diff --git a/tools/lockercp b/tools/lockercp
deleted file mode 100755
index 43c72dd629..0000000000
--- a/tools/lockercp
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-#
-
-$(dirname $0)/buildcp locker
diff --git a/tools/make-release-notes.scala b/tools/make-release-notes.scala
deleted file mode 100644
index 3e5b60d223..0000000000
--- a/tools/make-release-notes.scala
+++ /dev/null
@@ -1,129 +0,0 @@
-#!/bin/sh
-exec scala -feature $0 $@
-!#
-
-import sys.process._
-
-val tag1 = "v2.10.0-M4"
-val tag2 = "v2.10.0-M5"
-
-// Git commit parsing magikz
-
-case class Commit(sha: String, author: String, header: String, body: String) {
- override def toString = " * " + sha + " (" + author + ") " + header + " - " + body.take(5) + " ..."
-}
-
-val gitFormat = "--format=format:*-*%h``%aN``%s``%b"
-
-def processGitCommits(input: String): IndexedSeq[Commit] =
- ((input split "[\\r\\n]*\\*\\-\\*").view map (_ split "``") collect {
- case Array(sha, author, hdr, msg) => Commit(sha, author, hdr, msg)
- }).toVector
-
-val commits =
- processGitCommits(Process(Seq("git", "log", tag1+".."+tag2,"--format=format:*-*%h``%aN``%s``%b","--no-merges")).!!)
-
-val authors: Seq[(String, Int)] = {
- val grouped: Vector[(String,Int)] = (commits groupBy (_.author)).map { case (a,c) => a -> c.length }{collection.breakOut}
- (grouped sortBy (_._2)).reverse
-}
-
-def hasFixins(msg: String): Boolean = (
- (msg contains "SI-") /*&& ((msg.toLowerCase contains "fix") || (msg.toLowerCase contains "close"))*/
-)
-
-val fixCommits =
- for {
- commit <- commits
- searchString = commit.body + commit.header
- if hasFixins(searchString)
- } yield commit
-
-
-val siPattern = java.util.regex.Pattern.compile("(SI-[0-9]+)")
-
-def fixLinks(commit: Commit): String = {
- val searchString = commit.body + commit.header
- val m = siPattern matcher searchString
- val issues = new collection.mutable.ArrayBuffer[String]
- while(m.find()) {
- issues += (m group 1)
- }
- issues map (si => """<a href="https://issues.scala-lang.org/browse/%s">%s</a>""" format (si, si)) mkString ", "
-}
-
-
-// HTML Generation for Toni
-
-def commitShaLink(sha: String) =
- """<a href="https://github.com/scala/scala/commit/%s">%s</a>""" format (sha,sha)
-
-def printBlankLine(): Unit = println("<p>&nbsp</p>")
-def printHeader4(msg: String): Unit = println("<h4>%s</h4>" format (msg))
-
-def printCommiterList(): Unit = {
- printBlankLine()
- printHeader4("Special thanks to all the contribtuors!")
- println("""<table border="0" cellspacing="0" cellpadding="1">
- <thead><tr><th>#</th><th align="left">Author</th></tr></thead>
- <tbody>""")
- for((author, count) <- authors)
- println("""<tr><td align="right">%d &nbsp;</td><td>%s</td></tr>""" format (count, author))
- println("""</tbody>
-</table>""")
-}
-
-def printCommitList(): Unit = {
- printBlankLine()
- printHeader4("Complete commit list!")
- println("""<table border="0" cellspacing="0" cellpadding="1">
- <thead><tr><th>sha</th><th align="left">Title</th></tr></thead>
- <tbody>""")
- for(commit <- commits) {
- println("<tr>")
- println("""<td align="right">%s&nbsp;</td><td>%s</td>""" format (commitShaLink(commit.sha), commit.header))
- /*print("<td>")
- (commit.body split "[\\r\\n]") foreach { line =>
- print(line)
- print("<br/>")
- }
- print("</td>")*/
- println("""</tr>""")
- }
- println("""</tbody>
-</table>""")
-}
-
-def issueFixPrinter(): Unit = {
- printBlankLine()
- printHeader4("Here's a list of isssues that have been fixed since %s" format (tag1))
- println("""<table border="0" cellspacing="0" cellpading="1">
- <thead><tr><th>Issue(s)</th><th>Commit</th><th>Message</th></tr></thead>
- <tbody>""")
- for(commit <- fixCommits) {
- println("""<tr><td>%s&nbsp;</td><td>%s&nbsp;</td><td>%s</td></tr>""" format(fixLinks(commit), commitShaLink(commit.sha), commit.header))
- }
- println("""</tbody>
-</table>""")
- printBlankLine()
-}
-
-def printHTML(): Unit = {
- println("""<html>
- <head>
- <title>%s - Release notes</title>
- </head>
- <body>
- <h3>A new release of Scala is available! Please point your build tools at %s</h3>
- <p>:: INSERT HAND GENERATED NOTES HERE ::</p>
-""" format(tag2, tag2 drop 1))
- issueFixPrinter()
- printCommiterList()
- printCommitList()
- println("""</body></html>""")
-}
-
-printHTML()
-
-
-
diff --git a/tools/packcp b/tools/packcp
deleted file mode 100755
index ecc7ee1b5d..0000000000
--- a/tools/packcp
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-#
-
-dir=$(dirname $0)
-$dir/cpof $dir/../build/pack/lib
diff --git a/tools/quick_scala b/tools/quick_scala
deleted file mode 100755
index 16938ddba4..0000000000
--- a/tools/quick_scala
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-#
-
-CP=$($(dirname $BASH_SOURCE)/quickcp)
-
-java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.MainGenericRunner -usejavacp "$@"
diff --git a/tools/quick_scalac b/tools/quick_scalac
deleted file mode 100755
index 1b9a036c18..0000000000
--- a/tools/quick_scalac
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-#
-
-CP=$($(dirname $BASH_SOURCE)/quickcp)
-
-java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.Main -usejavacp "$@"
diff --git a/tools/quickcp b/tools/quickcp
deleted file mode 100755
index 25d46e56d9..0000000000
--- a/tools/quickcp
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/sh
-#
-
-$(dirname $0)/buildcp quick
diff --git a/tools/remotetest b/tools/remotetest
deleted file mode 100755
index fb89794c30..0000000000
--- a/tools/remotetest
+++ /dev/null
@@ -1,230 +0,0 @@
-
-
-#
-# Remote build&test script.
-# Author: Aleksandar Prokopec
-#
-
-
-SCRIPTNAME="..:: RemoteTest ::.."
-DESC="This script pushes the current git repo to a remote bare repo. \
-It then checks out the source tree in a workspace repo and starts the\
- build and all the tests. It can also initialize the remote bare repo\
- and the workspace repo. It assumes that the current repo refspec has\
- been set for the remote bare repository - .git/config of the current\
- repo must have a remote called <server> and the corresponding master\
- branch. Git should, naturally, be installed on both systems.\
- "
-USAGE=" Usage: remotetest (--help|--init|--all|--incr|--clear) <user> <server> <bare-repo-path> <workspace-repo-path> [logfile]"
-
-
-
-function title()
-{
- echo
- echo $SCRIPTNAME
- echo
-}
-
-function usage()
-{
- title
- echo $DESC
- echo
- echo $USAGE
-}
-
-
-function error()
-{
- echo $1
- echo "Failed."
- exit 1
-}
-
-function success()
-{
- echo "Success!"
- exit 0
-}
-
-function instruct()
-{
- usage
- error
-}
-
-function help()
-{
- usage
- echo
- echo "Make sure you have git installed on both your computer and the server, as well as java and ant."
- echo "Add your ssh key to the list of authorized keys on the server (see .ssh dir in your home). This is not required, but makes life easier, as you will have to answer fewer passwords."
- echo "To initialize the remote repositories on a server 'server.url.com', see the following example:"
- echo
- echo "> tools/remotetest --init jack server.url.com ~jack/git-repos-dir/scala ~jack/tmp-build-dir/scala"
- echo
- echo "If you decide you no longer want this remote repository to be tracked (this also tries to delete remote repos on the server):"
- echo
- echo "> tools/remotetest --clear jack server.url.com ~jack/git-repos-dir/scala ~jack/tmp-build-dir/scala"
- echo
- echo "Once the initialization is successful, simply run: "
- echo
- echo "> tools/remotetest --all jack server.url.com ~jack/git-repos-dir/scala ~jack/tmp-build-dir/scala"
- echo
- echo "Optionally, build and test results will be saved into the logfile on the server (an additional, last argument). Be aware that problems arise should you push an ammended commit over a previously pushed commit - this has nothing to do with this script per se."
- echo
- echo " Example workflow:"
- echo
- echo " ------------------- "
- echo " | | "
- echo " V | "
- echo " init ---> [ all | incr ] ---> clear "
- echo
- echo "Complete argument list:"
- echo " --help prints this help"
- echo " --init initializes remote repos"
- echo " --clear deletes remote repos and removes the remote repo reference from local git repo"
- echo " --all pushes the newest version, checks it out at the server, cleans all, builds and tests"
- echo " --incr incremental does the same as --all, but does not clean the directory before testing"
-}
-
-
-if [ $# -lt 1 ]
-then
- instruct
-fi
-
-
-
-if [ $# -lt 5 ]
-then
- if [[ $1 = "--help" ]]
- then
- help
- success
- else
- instruct
- fi
-fi
-
-COMMAND=$1
-USER=$2
-LOCATION=$3
-BAREREPO=$4
-WORKREPO=$5
-LOGFILE=$6
-
-
-if [[ $COMMAND = "--help" ]]
-then
- help
- success
-fi
-
-
-
-
-#
-# Init
-#
-
-
-if [[ $COMMAND = "--init" ]]
-then
- echo "Initializing."
-
- # init bare repo
- ssh $USER@$LOCATION "mkdir $BAREREPO"
- ssh $USER@$LOCATION "cd $BAREREPO; git init; git config --bool core.bare true"
- if [ $? -ne 0 ]
- then
- error "Could not initialize bare repo."
- fi
-
- # add remote bare repo
- git remote add $LOCATION $USER@$LOCATION:$BAREREPO
-
- # push to bare repo
- git push $LOCATION master
- if [ $? -ne 0 ]
- then
- error "Could not push to bare repo."
- fi
-
- # init and checkout work repo
- ssh $USER@$LOCATION "git clone $BAREREPO $WORKREPO"
- if [ $? -ne 0 ]
- then
- error "Could not init working repo."
- fi
-
- success
-fi
-
-
-
-
-#
-# Clear.
-#
-
-
-if [[ $COMMAND = "--clear" ]]
-then
- echo "Clearing remote and deleting remote repos."
- git remote rm $LOCATION
- ssh $USER@$LOCATION "rm -rf $BAREREPO"
- ssh $USER@$LOCATION "cd $WORKREPO; ant all.clean; rm -rf $WORKREPO"
-
- echo "Removed remote repo $LOCATION."
- success
-fi
-
-
-
-
-#
-# Test.
-#
-
-
-if [[ $COMMAND = "--all" || $COMMAND = "--incr" ]]
-then
- # proceed
- echo "Starting remote build and testing."
-else
- error "Unrecognized command $COMMAND."
-fi
-
-# if it's not the init operation, proceed normally
-# push to remote bare repo
-git push $LOCATION master
-if [ $? -ne 0 ]
-then
- error "Could not push to bare repo - push from local machine failed."
-fi
-
-# remotely checkout the repo
-ssh $USER@$LOCATION "cd $WORKREPO; git pull origin master"
-if [ $? -ne 0 ]
-then
- error "Could not remotely pull from bare repo to work repo."
-fi
-
-# clean the build dir if not incremental
-if [[ $COMMAND = "--all" ]]
-then
- ssh $USER@$LOCATION "cd $WORKREPO; ant all.clean"
-fi
-
-# run the build and tests
-SET_ANT_OPTS='export ANT_OPTS="-XX:MaxPermSize=192M -Xmx1536m"; echo $ANT_OPTS'
-echo "Set ant options command: $SET_ANT_OPTS"
-ssh $USER@$LOCATION "cd $WORKREPO; $SET_ANT_OPTS; ant nightly | tee -a $LOGFILE"
-
-success
-
-
-
-
diff --git a/tools/showPickled b/tools/showPickled
deleted file mode 100755
index 27421c3ae5..0000000000
--- a/tools/showPickled
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/sh
-#
-# Shows the pickled scala data in a classfile.
-
-if [ $# == 0 ] ; then
- echo "Usage: $0 [--bare] [-cp classpath] <class*>"
- exit 1
-fi
-
-TOOLSDIR=`dirname $0`
-CPOF="$TOOLSDIR/cpof"
-
-PACK="$TOOLSDIR/../build/pack/lib"
-QUICK="$TOOLSDIR/../build/quick/classes"
-STARR="$TOOLSDIR/../lib"
-CP=""
-
-if [ -f "${PACK}/scala-library.jar" ] ; then
- CP=`${TOOLSDIR}/packcp`
-elif [ -d "${QUICK}/library" ] ; then
- CP=`${TOOLSDIR}/quickcp`
-else
- CP=`${TOOLSDIR}/starrcp`
-fi
-
-if [ "$1" == "-cp" ] ; then
- shift
- CP="${1}:${CP}"
- shift
-fi
-
-java -cp "$CP" scala.tools.nsc.util.ShowPickled $*
diff --git a/tools/starr_scala b/tools/starr_scala
deleted file mode 100755
index 9b0fb60cf7..0000000000
--- a/tools/starr_scala
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-#
-
-CP=$($(dirname $BASH_SOURCE)/starrcp)
-
-java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.MainGenericRunner -usejavacp "$@"
diff --git a/tools/starr_scalac b/tools/starr_scalac
deleted file mode 100755
index 972eeaff2b..0000000000
--- a/tools/starr_scalac
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/bin/sh
-#
-
-CP=$($(dirname $BASH_SOURCE)/starrcp)
-
-java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.Main -usejavacp "$@"
diff --git a/tools/starrcp b/tools/starrcp
deleted file mode 100755
index 76f40fde03..0000000000
--- a/tools/starrcp
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/sh
-#
-
-dir=$(dirname $0)
-$dir/cpof $dir/../lib
diff --git a/tools/strapcp b/tools/strapcp
deleted file mode 100755
index 6a4044ae24..0000000000
--- a/tools/strapcp
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-#
-
-dir=$(dirname $0)
-strap="$dir/../build/strap/classes"
-[[ -d $strap ]] || { echo "Error: no directory at $strap"; exit 1; }
-
-cp=$($dir/cpof $strap)
-asm=$($dir/abspath $dir/../build/asm/classes)
-
-echo $cp:$asm
diff --git a/tools/test-renamer b/tools/test-renamer
deleted file mode 100755
index 5a7fc3d78c..0000000000
--- a/tools/test-renamer
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/env bash
-#
-# Despite its shameful hackiness, checked in for posterity.
-# It'll live on forever in the git history; then I can remove it.
-
-# set -e
-shopt -s nullglob
-
-cd $(dirname $0)/../test
-
-rename_pattern='^.*/bug[0-9]+.*?(\.scala)?$'
-
-targets ()
-{
- ls -d pending/*/* disabled/*/* | egrep "$rename_pattern"
-}
-
-showRun ()
-{
- echo "$@"
- "$@"
-}
-
-for path in $(targets); do
- if [[ -f "$path" ]]; then
- # echo "$path"
- dir=$(dirname "$path")
- file=$(basename "$path")
- base=${file%%.scala}
- num=${base##bug}
-
- (cd "$dir" &&
- for file in ${base}.*; do
- ext=${file##*.}
- newname="t${num}.${ext}"
-
- if [[ -e "$newname" ]]; then
- echo "Hey, $newname already exists."
- else
- showRun perl -pi -e "'s/bug$num\b/t$num/g;'" "$file"
- showRun mv "$file" "$newname"
- fi
- done
- )
- fi
-
- if [[ -d "$path" ]]; then
- dir=$(dirname "$path")
- file=$(basename "$path")
- base="$file"
- num=${base##bug}
-
- (cd "$dir" &&
- for file in $file ${file}.*; do
- ext=${file##*.}
- if [[ "$ext" != "$file" ]]; then
- newname="t${num}.${ext}"
- else
- newname="t${num}"
- for file0 in ${file}/*; do
- showRun perl -pi -e "'s/bug$num\b/t$num/g;'" "$file0"
- done
- fi
-
- if [[ -e "$newname" ]]; then
- echo "Hey, $newname already exists."
- else
- if [[ -f "$file" ]]; then
- showRun perl -pi -e "'s/bug$num\b/t$num/g;'" "$file"
- fi
- showRun mv "$file" "$newname"
- fi
- done
- )
-
- fi
-
-done
-#
-# for d in files/*/*; do
-# [[ -d "$d" ]] && do_dir "$d"
-# done
diff --git a/tools/updatescalacheck b/tools/updatescalacheck
deleted file mode 100755
index c4b80dd963..0000000000
--- a/tools/updatescalacheck
+++ /dev/null
@@ -1,130 +0,0 @@
-#
-#
-# ScalaCheck update script.
-#
-#
-
-
-# vars
-TMPFILE=`mktemp`
-SCALACHECK_REL_DIR=src/scalacheck
-DESC="Updates ScalaCheck sources from ScalaCheck nightly branch."
-WARN="Make sure your repository checkout is clean. Will remove and delete existing ScalaCheck source in <path-to-scala-repo>/$SCALACHECK_REL_DIR!"
-USAGE=" Usage: updatescalacheck <path-to-scala-repo>"
-
-
-# functions
-function error() {
- rm $TMPFILE
- exit 1
-}
-
-function success() {
- rm $TMPFILE
- exit 0
-}
-
-
-
-# check num args
-if [ $# -ne 1 ]
-then
- echo $DESC
- echo $WARN
- echo "Must provide path to scala repo checkout dir."
- echo $USAGE
- error
-fi
-
-if [[ $1 = "--help" ]]
-then
- echo $DESC
- echo $WARN
- echo $USAGE
- error
-fi
-
-if [ ! -d $1 ]
-then
- echo "The folder $1 does not exist."
- error
-fi
-
-# go to scala dir
-SCALA_DIR=$1
-cd $SCALA_DIR
-
-#
-# check if checkout is svn and up to date
-# otherwise check if its git and up to date
-#
-if [ -d .svn ] || [ -d _svn ]
-then
- #
- # svn repo - check if clean
- #
- svn status > $TMPFILE
- if [ $? -ne 0 ]
- then
- echo "Detected .svn dir, but svn status returns an error. Check if this is really an .svn repo."
- error
- fi
- echo "svn status output: "
- cat $TMPFILE
- echo "grep found: "
- cat $TMPFILE | grep "^\(?\|A\|D\|M\|C\|!\|~\)"
- GREPRETCODE=$?
- echo "grep return code: $GREPRETCODE"
- if [ $GREPRETCODE -eq 0 ]
- then
- echo "Working directory does not seem to be clean. Do a clean checkout and try again."
- error
- fi
- echo "Checkout appears to be clean."
-elif [ -d .git ]
-then
- #
- # git repo - check if clean
- #
- git status --porcelain > $TMPFILE
- if [ $? -ne 0 ]
- then
- echo "Detected .git dir, but git status returns an error. Check if this is really a .git repo."
- error
- fi
- echo "git status output: "
- cat $TMPFILE
- echo "grep found: "
- cat $TMPFILE | grep "^\(A\|M\|D\|R\|C\|U\)"
- GREPRETCODE=$?
- echo "grep return code: $GREPRETCODE"
- if [ $GREPRETCODE -eq 0 ]
- then
- echo "Working directory does not seem to be clean. Do a clean checkout and try again."
- error
- fi
- echo "Checkout appears to be clean."
-else
- # no repo detected
- echo "The directory $SCALA_DIR does not seem to be a repository."
- error
-fi
-
-# check if ScalaCheck source dir exists
-if [ ! -d $SCALACHECK_REL_DIR ]
-then
- echo "ScalaCheck source dir does not seem to exist in: $SCALA_DIR/$SCALACHECK_REL_DIR"
- echo "Please create one and try again."
- error
-fi
-
-# go to ScalaCheck source dir
-cd $SCALACHECK_DIR
-
-# update sources
-svn export --force https://scalacheck.googlecode.com/svn/branches/scalanightly/src/main/scala .
-
-# remove unneeded class
-rm org/scalacheck/ScalaCheckFramework.scala
-
-success