-rw-r--r--  .gitignore | 1
-rwxr-xr-x  build.xml | 144
-rw-r--r--  dbuild-meta.json | 250
-rw-r--r--  lib/.gitignore | 15
-rw-r--r--  lib/jline.jar.desired.sha1 | 1
-rw-r--r--  src/asm/scala/tools/asm/tree/MethodNode.java | 2
-rw-r--r--  src/build/bnd/scala-compiler.bnd | 2
-rw-r--r--  src/build/bnd/scala-parser-combinators.bnd | 5
-rw-r--r--  src/build/bnd/scala-xml.bnd | 5
-rw-r--r--  src/build/dbuild-meta-json-gen.scala | 63
-rw-r--r--  src/build/maven/jline-pom.xml | 68
-rw-r--r--  src/build/maven/maven-deploy.xml | 320
-rw-r--r--  src/build/maven/plugins/continuations-pom.xml (renamed from src/build/maven/continuations-plugin-pom.xml) | 0
-rw-r--r--  src/build/maven/scala-compiler-pom.xml | 56
-rw-r--r--  src/build/maven/scala-library-pom.xml | 5
-rw-r--r--  src/build/maven/scala-parser-combinators-pom.xml | 59
-rw-r--r--  src/build/maven/scala-xml-pom.xml (renamed from src/build/maven/scala-dotnet-library-pom.xml) | 40
-rw-r--r--  src/build/pack.xml | 27
-rw-r--r--  src/compiler/scala/reflect/macros/compiler/Errors.scala | 4
-rw-r--r--  src/compiler/scala/reflect/reify/codegen/GenTrees.scala | 27
-rw-r--r--  src/compiler/scala/reflect/reify/codegen/GenUtils.scala | 3
-rw-r--r--  src/compiler/scala/reflect/reify/package.scala | 2
-rw-r--r--  src/compiler/scala/tools/ant/sabbus/Settings.scala | 14
-rw-r--r--  src/compiler/scala/tools/ant/templates/tool-unix.tmpl | 2
-rw-r--r--  src/compiler/scala/tools/cmd/Spec.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/Global.scala | 21
-rw-r--r--  src/compiler/scala/tools/nsc/ast/TreeGen.scala | 49
-rw-r--r--  src/compiler/scala/tools/nsc/ast/Trees.scala | 77
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 54
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 33
-rw-r--r--  src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala | 49
-rw-r--r--  src/compiler/scala/tools/nsc/backend/JavaPlatform.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Members.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala | 3
-rw-r--r--  src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala | 1256
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala | 881
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala | 1329
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala | 844
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala | 727
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala | 401
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala | 991
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala | 203
-rw-r--r--  src/compiler/scala/tools/nsc/backend/opt/Inliners.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/dependencies/Changes.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/settings/MutableSettings.scala | 71
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/settings/ScalaVersion.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/CleanUp.scala | 21
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Constructors.scala | 6
-rw-r--r--  src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala | 39
-rw-r--r--  src/compiler/scala/tools/nsc/transform/LambdaLift.scala | 34
-rw-r--r--  src/compiler/scala/tools/nsc/transform/Mixin.scala | 2
-rw-r--r--  src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 32
-rw-r--r--  src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala | 8
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 17
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 21
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Duplicators.scala | 4
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Macros.scala | 107
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala | 16
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Namers.scala | 15
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Typers.scala | 26
-rw-r--r--  src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 10
-rw-r--r--  src/compiler/scala/tools/nsc/util/CharArrayReader.scala | 31
-rw-r--r--  src/compiler/scala/tools/nsc/util/CommandLine.scala | 98
-rw-r--r--  src/compiler/scala/tools/nsc/util/TreeSet.scala | 64
-rw-r--r--  src/compiler/scala/tools/reflect/FastTrack.scala | 8
-rw-r--r--  src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 2
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Holes.scala | 187
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala | 134
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala | 123
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala | 51
-rw-r--r--  src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala | 290
-rw-r--r--  src/eclipse/continuations-library/.classpath | 2
-rw-r--r--  src/eclipse/continuations-library/.project | 2
-rw-r--r--  src/eclipse/interactive/.classpath | 5
-rw-r--r--  src/eclipse/partest/.classpath | 13
-rw-r--r--  src/eclipse/reflect/.classpath | 2
-rw-r--r--  src/eclipse/repl/.classpath | 15
-rw-r--r--  src/eclipse/repl/.project | 64
-rw-r--r--  src/eclipse/scala-compiler/.classpath | 7
-rw-r--r--  src/eclipse/scala-library/.classpath | 2
-rw-r--r--  src/eclipse/scala-parser-combinators/.classpath | 7
-rw-r--r--  src/eclipse/scala-parser-combinators/.project | 30
-rw-r--r--  src/eclipse/scala-xml/.classpath | 7
-rw-r--r--  src/eclipse/scala-xml/.project | 30
-rw-r--r--  src/eclipse/scaladoc/.classpath | 11
-rw-r--r--  src/eclipse/scalap/.classpath | 9
-rw-r--r--  src/eclipse/test-junit/.classpath | 7
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Global.scala | 2
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Lexer.scala (renamed from src/compiler/scala/tools/nsc/io/Lexer.scala) | 2
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Pickler.scala (renamed from src/compiler/scala/tools/nsc/io/Pickler.scala) | 2
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Picklers.scala | 4
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala (renamed from src/compiler/scala/tools/nsc/io/PrettyWriter.scala) | 2
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/Replayer.scala (renamed from src/compiler/scala/tools/nsc/io/Replayer.scala) | 2
-rw-r--r--  src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala | 2
-rw-r--r--  src/library/scala/Predef.scala | 9
-rw-r--r--  src/library/scala/collection/GenTraversableOnce.scala | 30
-rw-r--r--  src/library/scala/collection/TraversableOnce.scala | 28
-rw-r--r--  src/library/scala/concurrent/Future.scala | 177
-rw-r--r--  src/library/scala/math/BigDecimal.scala | 2
-rw-r--r--  src/library/scala/math/BigInt.scala | 2
-rw-r--r--  src/parser-combinators/scala/util/parsing/combinator/ImplicitConversions.scala (renamed from src/library/scala/util/parsing/combinator/ImplicitConversions.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/combinator/JavaTokenParsers.scala (renamed from src/library/scala/util/parsing/combinator/JavaTokenParsers.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/combinator/PackratParsers.scala (renamed from src/library/scala/util/parsing/combinator/PackratParsers.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/combinator/Parsers.scala (renamed from src/library/scala/util/parsing/combinator/Parsers.scala) | 4
-rw-r--r--  src/parser-combinators/scala/util/parsing/combinator/RegexParsers.scala (renamed from src/library/scala/util/parsing/combinator/RegexParsers.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/combinator/lexical/Lexical.scala (renamed from src/library/scala/util/parsing/combinator/lexical/Lexical.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/combinator/lexical/Scanners.scala (renamed from src/library/scala/util/parsing/combinator/lexical/Scanners.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/combinator/lexical/StdLexical.scala (renamed from src/library/scala/util/parsing/combinator/lexical/StdLexical.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala (renamed from src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala (renamed from src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/combinator/syntactical/TokenParsers.scala (renamed from src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/combinator/token/StdTokens.scala (renamed from src/library/scala/util/parsing/combinator/token/StdTokens.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/combinator/token/Tokens.scala (renamed from src/library/scala/util/parsing/combinator/token/Tokens.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/input/CharArrayReader.scala (renamed from src/library/scala/util/parsing/input/CharArrayReader.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/input/CharSequenceReader.scala (renamed from src/library/scala/util/parsing/input/CharSequenceReader.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/input/NoPosition.scala (renamed from src/library/scala/util/parsing/input/NoPosition.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/input/OffsetPosition.scala (renamed from src/library/scala/util/parsing/input/OffsetPosition.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/input/PagedSeqReader.scala (renamed from src/library/scala/util/parsing/input/PagedSeqReader.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/input/Position.scala (renamed from src/library/scala/util/parsing/input/Position.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/input/Positional.scala (renamed from src/library/scala/util/parsing/input/Positional.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/input/Reader.scala (renamed from src/library/scala/util/parsing/input/Reader.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/input/StreamReader.scala (renamed from src/library/scala/util/parsing/input/StreamReader.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/json/JSON.scala (renamed from src/library/scala/util/parsing/json/JSON.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/json/Lexer.scala (renamed from src/library/scala/util/parsing/json/Lexer.scala) | 0
-rw-r--r--  src/parser-combinators/scala/util/parsing/json/Parser.scala (renamed from src/library/scala/util/parsing/json/Parser.scala) | 0
-rw-r--r--  src/partest/scala/tools/partest/PartestTask.scala | 5
-rw-r--r--  src/partest/scala/tools/partest/ReplTest.scala | 13
-rw-r--r--  src/partest/scala/tools/partest/TestKinds.scala | 3
-rw-r--r--  src/partest/scala/tools/partest/nest/ConsoleRunner.scala | 68
-rw-r--r--  src/partest/scala/tools/partest/nest/ConsoleRunnerSpec.scala | 54
-rw-r--r--  src/partest/scala/tools/partest/nest/FileManager.scala | 4
-rw-r--r--  src/partest/scala/tools/partest/nest/NestUI.scala | 30
-rw-r--r--  src/partest/scala/tools/partest/nest/ReflectiveRunner.scala | 7
-rw-r--r--  src/partest/scala/tools/partest/nest/Runner.scala | 11
-rw-r--r--  src/reflect/scala/reflect/api/BuildUtils.scala | 42
-rw-r--r--  src/reflect/scala/reflect/api/Liftable.scala | 32
-rw-r--r--  src/reflect/scala/reflect/api/Quasiquotes.scala | 20
-rw-r--r--  src/reflect/scala/reflect/api/StandardLiftables.scala | 36
-rw-r--r--  src/reflect/scala/reflect/api/Universe.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/AnnotationInfos.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/BuildUtils.scala | 120
-rw-r--r--  src/reflect/scala/reflect/internal/Definitions.scala | 46
-rw-r--r--  src/reflect/scala/reflect/internal/Names.scala | 35
-rw-r--r--  src/reflect/scala/reflect/internal/StdNames.scala | 49
-rw-r--r--  src/reflect/scala/reflect/internal/Symbols.scala | 2
-rw-r--r--  src/reflect/scala/reflect/internal/TreeGen.scala | 76
-rw-r--r--  src/reflect/scala/reflect/internal/util/Statistics.scala | 12
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala | 2
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/Delimited.scala | 2
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/ILoop.scala | 150
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/JLineReader.scala | 6
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala | 5
-rw-r--r--  src/repl/scala/tools/nsc/interpreter/session/package.scala | 8
-rw-r--r--  src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala | 4
-rw-r--r--  src/xml/scala/xml/Atom.scala (renamed from src/library/scala/xml/Atom.scala) | 0
-rw-r--r--  src/xml/scala/xml/Attribute.scala (renamed from src/library/scala/xml/Attribute.scala) | 0
-rw-r--r--  src/xml/scala/xml/Comment.scala (renamed from src/library/scala/xml/Comment.scala) | 0
-rw-r--r--  src/xml/scala/xml/Document.scala (renamed from src/library/scala/xml/Document.scala) | 0
-rwxr-xr-x  src/xml/scala/xml/Elem.scala (renamed from src/library/scala/xml/Elem.scala) | 0
-rw-r--r--  src/xml/scala/xml/EntityRef.scala (renamed from src/library/scala/xml/EntityRef.scala) | 0
-rw-r--r--  src/xml/scala/xml/Equality.scala (renamed from src/library/scala/xml/Equality.scala) | 0
-rw-r--r--  src/xml/scala/xml/Group.scala (renamed from src/library/scala/xml/Group.scala) | 0
-rw-r--r--  src/xml/scala/xml/MalformedAttributeException.scala (renamed from src/library/scala/xml/MalformedAttributeException.scala) | 0
-rw-r--r--  src/xml/scala/xml/MetaData.scala (renamed from src/library/scala/xml/MetaData.scala) | 0
-rw-r--r--  src/xml/scala/xml/NamespaceBinding.scala (renamed from src/library/scala/xml/NamespaceBinding.scala) | 0
-rwxr-xr-x  src/xml/scala/xml/Node.scala (renamed from src/library/scala/xml/Node.scala) | 0
-rw-r--r--  src/xml/scala/xml/NodeBuffer.scala (renamed from src/library/scala/xml/NodeBuffer.scala) | 0
-rw-r--r--  src/xml/scala/xml/NodeSeq.scala (renamed from src/library/scala/xml/NodeSeq.scala) | 0
-rw-r--r--  src/xml/scala/xml/Null.scala (renamed from src/library/scala/xml/Null.scala) | 0
-rw-r--r--  src/xml/scala/xml/PCData.scala (renamed from src/library/scala/xml/PCData.scala) | 0
-rw-r--r--  src/xml/scala/xml/PrefixedAttribute.scala (renamed from src/library/scala/xml/PrefixedAttribute.scala) | 0
-rwxr-xr-x  src/xml/scala/xml/PrettyPrinter.scala (renamed from src/library/scala/xml/PrettyPrinter.scala) | 0
-rw-r--r--  src/xml/scala/xml/ProcInstr.scala (renamed from src/library/scala/xml/ProcInstr.scala) | 0
-rw-r--r--  src/xml/scala/xml/QNode.scala (renamed from src/library/scala/xml/QNode.scala) | 0
-rw-r--r--  src/xml/scala/xml/SpecialNode.scala (renamed from src/library/scala/xml/SpecialNode.scala) | 0
-rw-r--r--  src/xml/scala/xml/Text.scala (renamed from src/library/scala/xml/Text.scala) | 0
-rw-r--r--  src/xml/scala/xml/TextBuffer.scala (renamed from src/library/scala/xml/TextBuffer.scala) | 0
-rw-r--r--  src/xml/scala/xml/TopScope.scala (renamed from src/library/scala/xml/TopScope.scala) | 0
-rw-r--r--  src/xml/scala/xml/TypeSymbol.scala (renamed from src/library/scala/xml/TypeSymbol.scala) | 0
-rw-r--r--  src/xml/scala/xml/Unparsed.scala (renamed from src/library/scala/xml/Unparsed.scala) | 0
-rw-r--r--  src/xml/scala/xml/UnprefixedAttribute.scala (renamed from src/library/scala/xml/UnprefixedAttribute.scala) | 0
-rwxr-xr-x  src/xml/scala/xml/Utility.scala (renamed from src/library/scala/xml/Utility.scala) | 0
-rwxr-xr-x  src/xml/scala/xml/XML.scala (renamed from src/library/scala/xml/XML.scala) | 0
-rw-r--r--  src/xml/scala/xml/Xhtml.scala (renamed from src/library/scala/xml/Xhtml.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/ContentModel.scala (renamed from src/library/scala/xml/dtd/ContentModel.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/ContentModelParser.scala (renamed from src/library/scala/xml/dtd/ContentModelParser.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/DTD.scala (renamed from src/library/scala/xml/dtd/DTD.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/Decl.scala (renamed from src/library/scala/xml/dtd/Decl.scala) | 10
-rw-r--r--  src/xml/scala/xml/dtd/DocType.scala (renamed from src/library/scala/xml/dtd/DocType.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/ElementValidator.scala (renamed from src/library/scala/xml/dtd/ElementValidator.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/ExternalID.scala (renamed from src/library/scala/xml/dtd/ExternalID.scala) | 2
-rw-r--r--  src/xml/scala/xml/dtd/Scanner.scala (renamed from src/library/scala/xml/dtd/Scanner.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/Tokens.scala (renamed from src/library/scala/xml/dtd/Tokens.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/ValidationException.scala (renamed from src/library/scala/xml/dtd/ValidationException.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/impl/Base.scala (renamed from src/library/scala/xml/dtd/impl/Base.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/impl/BaseBerrySethi.scala (renamed from src/library/scala/xml/dtd/impl/BaseBerrySethi.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/impl/DetWordAutom.scala (renamed from src/library/scala/xml/dtd/impl/DetWordAutom.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/impl/Inclusion.scala (renamed from src/library/scala/xml/dtd/impl/Inclusion.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/impl/NondetWordAutom.scala (renamed from src/library/scala/xml/dtd/impl/NondetWordAutom.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/impl/PointedHedgeExp.scala (renamed from src/library/scala/xml/dtd/impl/PointedHedgeExp.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/impl/SubsetConstruction.scala (renamed from src/library/scala/xml/dtd/impl/SubsetConstruction.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/impl/SyntaxError.scala (renamed from src/library/scala/xml/dtd/impl/SyntaxError.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/impl/WordBerrySethi.scala (renamed from src/library/scala/xml/dtd/impl/WordBerrySethi.scala) | 0
-rw-r--r--  src/xml/scala/xml/dtd/impl/WordExp.scala (renamed from src/library/scala/xml/dtd/impl/WordExp.scala) | 0
-rwxr-xr-x  src/xml/scala/xml/factory/Binder.scala (renamed from src/library/scala/xml/factory/Binder.scala) | 0
-rw-r--r--  src/xml/scala/xml/factory/LoggedNodeFactory.scala (renamed from src/library/scala/xml/factory/LoggedNodeFactory.scala) | 0
-rw-r--r--  src/xml/scala/xml/factory/NodeFactory.scala (renamed from src/library/scala/xml/factory/NodeFactory.scala) | 0
-rw-r--r--  src/xml/scala/xml/factory/XMLLoader.scala (renamed from src/library/scala/xml/factory/XMLLoader.scala) | 0
-rw-r--r--  src/xml/scala/xml/include/CircularIncludeException.scala (renamed from src/library/scala/xml/include/CircularIncludeException.scala) | 0
-rw-r--r--  src/xml/scala/xml/include/UnavailableResourceException.scala (renamed from src/library/scala/xml/include/UnavailableResourceException.scala) | 0
-rw-r--r--  src/xml/scala/xml/include/XIncludeException.scala (renamed from src/library/scala/xml/include/XIncludeException.scala) | 0
-rw-r--r--  src/xml/scala/xml/include/sax/EncodingHeuristics.scala (renamed from src/library/scala/xml/include/sax/EncodingHeuristics.scala) | 0
-rw-r--r--  src/xml/scala/xml/include/sax/XIncludeFilter.scala (renamed from src/library/scala/xml/include/sax/XIncludeFilter.scala) | 0
-rw-r--r--  src/xml/scala/xml/include/sax/XIncluder.scala (renamed from src/library/scala/xml/include/sax/XIncluder.scala) | 0
-rw-r--r--  src/xml/scala/xml/package.scala (renamed from src/library/scala/xml/package.scala) | 0
-rwxr-xr-x  src/xml/scala/xml/parsing/ConstructingHandler.scala (renamed from src/library/scala/xml/parsing/ConstructingHandler.scala) | 0
-rw-r--r--  src/xml/scala/xml/parsing/ConstructingParser.scala (renamed from src/library/scala/xml/parsing/ConstructingParser.scala) | 0
-rwxr-xr-x  src/xml/scala/xml/parsing/DefaultMarkupHandler.scala (renamed from src/library/scala/xml/parsing/DefaultMarkupHandler.scala) | 0
-rw-r--r--  src/xml/scala/xml/parsing/ExternalSources.scala (renamed from src/library/scala/xml/parsing/ExternalSources.scala) | 0
-rw-r--r--  src/xml/scala/xml/parsing/FactoryAdapter.scala (renamed from src/library/scala/xml/parsing/FactoryAdapter.scala) | 0
-rw-r--r--  src/xml/scala/xml/parsing/FatalError.scala (renamed from src/library/scala/xml/parsing/FatalError.scala) | 0
-rwxr-xr-x  src/xml/scala/xml/parsing/MarkupHandler.scala (renamed from src/library/scala/xml/parsing/MarkupHandler.scala) | 0
-rwxr-xr-x  src/xml/scala/xml/parsing/MarkupParser.scala (renamed from src/library/scala/xml/parsing/MarkupParser.scala) | 0
-rw-r--r--  src/xml/scala/xml/parsing/MarkupParserCommon.scala (renamed from src/library/scala/xml/parsing/MarkupParserCommon.scala) | 0
-rw-r--r--  src/xml/scala/xml/parsing/NoBindingFactoryAdapter.scala (renamed from src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala) | 0
-rw-r--r--  src/xml/scala/xml/parsing/TokenTests.scala (renamed from src/library/scala/xml/parsing/TokenTests.scala) | 0
-rw-r--r--  src/xml/scala/xml/parsing/ValidatingMarkupHandler.scala (renamed from src/library/scala/xml/parsing/ValidatingMarkupHandler.scala) | 0
-rw-r--r--  src/xml/scala/xml/parsing/XhtmlEntities.scala (renamed from src/library/scala/xml/parsing/XhtmlEntities.scala) | 0
-rw-r--r--  src/xml/scala/xml/parsing/XhtmlParser.scala (renamed from src/library/scala/xml/parsing/XhtmlParser.scala) | 0
-rw-r--r--  src/xml/scala/xml/persistent/CachedFileStorage.scala (renamed from src/library/scala/xml/persistent/CachedFileStorage.scala) | 0
-rw-r--r--  src/xml/scala/xml/persistent/Index.scala (renamed from src/library/scala/xml/persistent/Index.scala) | 0
-rw-r--r--  src/xml/scala/xml/persistent/SetStorage.scala (renamed from src/library/scala/xml/persistent/SetStorage.scala) | 0
-rw-r--r--  src/xml/scala/xml/pull/XMLEvent.scala (renamed from src/library/scala/xml/pull/XMLEvent.scala) | 0
-rwxr-xr-x  src/xml/scala/xml/pull/XMLEventReader.scala (renamed from src/library/scala/xml/pull/XMLEventReader.scala) | 0
-rw-r--r--  src/xml/scala/xml/pull/package.scala (renamed from src/library/scala/xml/pull/package.scala) | 0
-rw-r--r--  src/xml/scala/xml/transform/BasicTransformer.scala (renamed from src/library/scala/xml/transform/BasicTransformer.scala) | 0
-rw-r--r--  src/xml/scala/xml/transform/RewriteRule.scala (renamed from src/library/scala/xml/transform/RewriteRule.scala) | 0
-rw-r--r--  src/xml/scala/xml/transform/RuleTransformer.scala (renamed from src/library/scala/xml/transform/RuleTransformer.scala) | 0
-rw-r--r--  test/files/codelib/.gitignore | 1
-rw-r--r--  test/files/jvm/bytecode-test-example/Foo_1.flags | 1
-rw-r--r--  test/files/jvm/nooptimise/Foo_1.flags | 2
-rw-r--r--  test/files/jvm/scala-concurrent-tck.check | 3
-rw-r--r--  test/files/jvm/scala-concurrent-tck.scala | 562
-rw-r--r--  test/files/lib/.gitignore | 8
-rw-r--r--  test/files/neg/case-collision.flags | 2
-rw-r--r--  test/files/neg/case-collision2.check | 12
-rw-r--r--  test/files/neg/case-collision2.flags | 1
-rw-r--r--  test/files/neg/case-collision2.scala | 12
-rw-r--r--  test/files/neg/macro-quasiquotes.check | 7
-rw-r--r--  test/files/neg/macro-quasiquotes/Macros_1.scala | 15
-rw-r--r--  test/files/neg/macro-quasiquotes/Test_2.scala | 5
-rw-r--r--  test/files/neg/names-defaults-neg.check | 9
-rw-r--r--  test/files/neg/t1181.check | 6
-rw-r--r--  test/files/neg/t556.check | 4
-rw-r--r--  test/files/neg/t6574.check | 7
-rw-r--r--  test/files/neg/t6574.scala | 10
-rw-r--r--  test/files/neg/valueclasses-impl-restrictions.check | 8
-rw-r--r--  test/files/neg/valueclasses-impl-restrictions.scala | 4
-rw-r--r--  test/files/pos/SI-7638.scala | 51
-rw-r--r--  test/files/pos/t6221.scala | 29
-rw-r--r--  test/files/pos/t6574.scala | 19
-rw-r--r--  test/files/pos/t7591/Demo.scala (renamed from src/compiler/scala/tools/cmd/Demo.scala) | 4
-rw-r--r--  test/files/run/macro-quasiquotes.check | 4
-rw-r--r--  test/files/run/macro-quasiquotes/Macros_1.scala | 15
-rw-r--r--  test/files/run/macro-quasiquotes/Test_2.scala | 5
-rw-r--r--  test/files/run/t4594-repl-settings.scala | 26
-rw-r--r--  test/files/run/t6221.check | 1
-rw-r--r--  test/files/run/t6221/Macros_1.scala | 22
-rw-r--r--  test/files/run/t6221/Test_2.scala | 10
-rw-r--r--  test/files/run/t6331.scala | 7
-rw-r--r--  test/files/run/t6331b.scala | 9
-rw-r--r--  test/files/run/t6574b.check | 1
-rw-r--r--  test/files/run/t6574b.scala | 7
-rw-r--r--  test/files/run/t7008-scala-defined.flags | 1
-rw-r--r--  test/files/run/t7271.scala | 1
-rw-r--r--  test/files/run/t7439/Test_2.scala | 3
-rw-r--r--  test/files/run/t7571.scala | 12
-rw-r--r--  test/files/run/t7582-private-within.check | 12
-rw-r--r--  test/files/run/t7582-private-within/JavaPackagePrivate.java | 8
-rw-r--r--  test/files/run/t7582-private-within/Test.scala | 22
-rw-r--r--  test/files/run/t7582.check | 2
-rw-r--r--  test/files/run/t7582.flags | 1
-rw-r--r--  test/files/run/t7582/InlineHolder.scala | 16
-rw-r--r--  test/files/run/t7582/PackageProtectedJava.java | 6
-rw-r--r--  test/files/run/t7582b.check | 2
-rw-r--r--  test/files/run/t7582b.flags | 1
-rw-r--r--  test/files/run/t7582b/InlineHolder.scala | 16
-rw-r--r--  test/files/run/t7582b/PackageProtectedJava.java | 6
-rw-r--r--  test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala | 307
-rw-r--r--  test/files/scalacheck/quasiquotes/ErrorProps.scala | 199
-rw-r--r--  test/files/scalacheck/quasiquotes/LiftableProps.scala | 84
-rw-r--r--  test/files/scalacheck/quasiquotes/PatternConstructionProps.scala | 37
-rw-r--r--  test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala | 35
-rw-r--r--  test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala | 89
-rw-r--r--  test/files/scalacheck/quasiquotes/TermConstructionProps.scala | 341
-rw-r--r--  test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala | 122
-rw-r--r--  test/files/scalacheck/quasiquotes/Test.scala | 12
-rw-r--r--  test/files/scalacheck/quasiquotes/TypeConstructionProps.scala | 25
-rw-r--r--  test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala | 29
-rw-r--r--  test/files/specialized/SI-7343.scala | 55
-rw-r--r--  test/files/specialized/spec-ame.check | 2
-rw-r--r--  test/files/specialized/spec-ame.scala | 3
-rw-r--r--  test/files/speclib/.gitignore | 1
-rw-r--r--  test/junit/scala/collection/TraversableOnceTest.scala | 70
-rwxr-xr-x  test/partest | 2
-rw-r--r--  tools/.gitignore | 1
311 files changed, 11552 insertions, 1714 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000000..84c048a73c
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+/build/
diff --git a/build.xml b/build.xml
index 2af335d6ab..1282c3b45b 100755
--- a/build.xml
+++ b/build.xml
@@ -123,7 +123,6 @@ TODO:
<property name="library.starr.jar" value="${lib.dir}/scala-library.jar"/>
<property name="reflect.starr.jar" value="${lib.dir}/scala-reflect.jar"/>
<property name="compiler.starr.jar" value="${lib.dir}/scala-compiler.jar"/>
- <property name="jline.jar" value="${lib.dir}/jline.jar"/>
<property name="ant.jar" value="${ant.home}/lib/ant.jar"/>
<property name="scalacheck.jar" value="${lib.dir}/scalacheck.jar"/>
@@ -153,6 +152,8 @@ TODO:
<property name="copyright.string" value="Copyright 2002-2013, LAMP/EPFL"/>
<property name="partest.version.number" value="0.9.3"/>
+ <property name="jline.version" value="2.11"/>
+
<!-- These are NOT the flags used to run SuperSabbus, but the ones written
into the script runners created with scala.tools.ant.ScalaTool -->
<property name="java.flags" value="-Xmx256M -Xms32M"/>
@@ -238,6 +239,10 @@ TODO:
<dependency groupId="org.scala-tools.testing" artifactId="test-interface" version="0.5" />
</artifact:dependencies>
+ <artifact:dependencies pathId="repl.deps.classpath" filesetId="repl.deps.fileset" versionsId="repl.deps.versions">
+ <dependency groupId="jline" artifactId="jline" version="${jline.version}"/>
+ </artifact:dependencies>
+
<!-- BND support -->
<typedef resource="aQute/bnd/ant/taskdef.properties" classpathref="extra.tasks.classpath" />
@@ -472,7 +477,7 @@ TODO:
There must be a variable of the shape @{stage}.@{project}.build.path
for all @{stage} in locker, quick, strap
and all @{project} in library, reflect, compiler
- when stage is quick, @{project} also includes: actors, repl, swing, plugins, scalacheck, interactive, scaladoc, partest, scalap
+ when stage is quick, @{project} also includes: actors, parser-combinators, xml, repl, swing, plugins, scalacheck, interactive, scaladoc, partest, scalap
-->
<!-- LOCKER -->
@@ -510,6 +515,11 @@ TODO:
<pathelement location="${build-quick.dir}/classes/actors"/>
</path>
+ <path id="quick.parser-combinators.build.path">
+ <path refid="quick.library.build.path"/>
+ <pathelement location="${build-quick.dir}/classes/parser-combinators"/>
+ </path>
+
<path id="quick.reflect.build.path">
<path refid="quick.library.build.path"/>
<pathelement location="${build-quick.dir}/classes/reflect"/>
@@ -524,12 +534,16 @@ TODO:
<path id="quick.repl.build.path">
<path refid="quick.compiler.build.path"/>
<pathelement location="${build-quick.dir}/classes/repl"/>
- <pathelement location="${jline.jar}"/>
+ <path refid="repl.deps.classpath"/>
+ </path>
+
+ <path id="quick.xml.build.path">
+ <path refid="quick.library.build.path"/>
+ <pathelement location="${build-quick.dir}/classes/xml"/>
</path>
<path id="quick.swing.build.path">
<path refid="quick.library.build.path"/>
- <path refid="quick.actors.build.path"/>
<pathelement location="${build-quick.dir}/classes/swing"/>
</path>
@@ -541,6 +555,7 @@ TODO:
<path id="quick.scalacheck.build.path">
<pathelement location="${build-quick.dir}/classes/library"/>
<pathelement location="${build-quick.dir}/classes/actors"/>
+ <pathelement location="${build-quick.dir}/classes/parser-combinators"/>
<pathelement location="${build-quick.dir}/classes/scalacheck"/>
<path refid="partest.extras.classpath"/>
</path>
@@ -548,18 +563,21 @@ TODO:
<path id="quick.scalap.build.path">
<path refid="quick.compiler.build.path"/>
<pathelement location="${build-quick.dir}/classes/scalap"/>
- <pathelement location="${build-quick.dir}/classes/partest"/>
</path>
<path id="quick.partest.build.path">
+ <path refid="quick.xml.build.path"/>
<path refid="quick.scalap.build.path"/>
<path refid="partest.extras.classpath"/>
<pathelement location="${build-quick.dir}/classes/repl"/>
<pathelement location="${scalacheck.jar}"/>
+ <pathelement location="${build-quick.dir}/classes/partest"/>
</path>
<path id="quick.scaladoc.build.path">
+ <path refid="quick.xml.build.path"/>
<path refid="quick.compiler.build.path"/>
+ <path refid="quick.parser-combinators.build.path"/>
<pathelement location="${build-quick.dir}/classes/partest"/>
<pathelement location="${build-quick.dir}/classes/scaladoc"/>
</path>
@@ -571,6 +589,8 @@ TODO:
</path>
<path id="quick.bin.tool.path">
+ <path refid="quick.parser-combinators.build.path"/>
+ <path refid="quick.xml.build.path"/>
<path refid="quick.repl.build.path"/>
<path refid="quick.actors.build.path"/>
<pathelement location="${build-quick.dir}/classes/scalap"/>
@@ -578,25 +598,24 @@ TODO:
</path>
<!-- PACK -->
- <!-- also used for docs.* targets TODO: use separate paths for those -->
<path id="pack.compiler.path">
<pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-partest.jar"/>
- <pathelement location="${build-pack.dir}/lib/scalap.jar"/>
- <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
<pathelement location="${ant.jar}"/>
- <path refid="partest.extras.classpath"/>
+ <path refid="forkjoin.classpath"/>
<path refid="aux.libs"/>
</path>
<path id="pack.bin.tool.path">
<pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-parser-combinators.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-xml.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
<pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
<pathelement location="${build-pack.dir}/lib/scalap.jar"/>
- <pathelement location="${build-pack.dir}/lib/jline.jar"/>
+ <path refid="repl.deps.classpath"/>
<path refid="aux.libs"/>
</path>
@@ -618,6 +637,8 @@ TODO:
<fileset dir="${asm-classes}"/>
</path>
+ <path id="pack.parser-combinators.files"> <fileset dir="${build-quick.dir}/classes/parser-combinators"/> </path>
+ <path id="pack.xml.files"> <fileset dir="${build-quick.dir}/classes/xml"/> </path>
<path id="pack.swing.files"> <fileset dir="${build-quick.dir}/classes/swing"/> </path>
<path id="pack.reflect.files"> <fileset dir="${build-quick.dir}/classes/reflect"/> </path>
<path id="pack.plugins.files"> <fileset dir="${build-quick.dir}/classes/continuations-plugin"/> </path>
@@ -656,6 +677,20 @@ TODO:
</path>
<!-- MISC -->
+ <path id="docs.compiler.path">
+ <pathelement location="${build-pack.dir}/lib/scala-library.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-parser-combinators.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-xml.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-reflect.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-compiler.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-partest.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scalap.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
+ <pathelement location="${ant.jar}"/>
+ <path refid="partest.extras.classpath"/>
+ <path refid="aux.libs"/>
+ </path>
+
<path id="sbt.compile.build.path">
<path refid="quick.compiler.build.path"/>
<pathelement location="${build-quick.dir}/classes/repl"/>
@@ -671,6 +706,11 @@ TODO:
<path id="partest.classpath">
<path refid="pack.compiler.path"/>
+ <pathelement location="${build-pack.dir}/lib/scala-parser-combinators.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-xml.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-partest.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scalap.jar"/>
+ <pathelement location="${build-pack.dir}/lib/scala-actors.jar"/>
<path refid="partest.extras.classpath"/>
</path>
@@ -696,6 +736,7 @@ TODO:
<pathelement location="${build-osgi.dir}/org.scala-lang.scala-reflect.jar"/>
<pathelement location="${build-osgi.dir}/org.scala-lang.scala-compiler.jar"/>
<pathelement location="${build-osgi.dir}/org.scala-lang.scala-actors.jar"/>
+ <pathelement location="${build-osgi.dir}/org.scala-lang.scala-parser-combinators.jar"/>
<path refid="pax.exam.classpath"/>
<path refid="forkjoin.classpath"/>
</path>
@@ -704,7 +745,7 @@ TODO:
<pathelement location="${build-palo.dir}/lib/scala-library.jar"/>
<pathelement location="${build-palo.dir}/lib/scala-reflect.jar"/>
<pathelement location="${build-palo.dir}/lib/scala-compiler.jar"/>
- <pathelement location="${build-palo.dir}/lib/jline.jar"/>
+ <path refid="repl.deps.classpath"/>
</path>
<path id="test.positions.sub.build.path" path="${build-quick.dir}/classes/library"/>
@@ -1070,7 +1111,7 @@ TODO:
doctitle="@{title}"
docversion="${version.number}"
sourcepath="${src.dir}"
- classpathref="pack.compiler.path"
+ classpathref="docs.compiler.path"
srcdir="${src.dir}/@{dir}"
addparams="${scalac.args.all}"
implicits="on"
@@ -1086,7 +1127,7 @@ TODO:
doctitle="@{title}"
docversion="${version.number}"
sourcepath="${src.dir}"
- classpathref="pack.compiler.path"
+ classpathref="docs.compiler.path"
srcdir="${src.dir}/@{dir}"
docRootContent="${src.dir}/@{project}/@{docroot}"
addparams="${scalac.args.all}"
@@ -1140,6 +1181,9 @@ TODO:
<target name="quick.actors" depends="quick.lib">
<staged-build with="locker" stage="quick" project="actors"/> </target>
+ <target name="quick.parser-combinators" depends="quick.lib">
+ <staged-build with="locker" stage="quick" project="parser-combinators"/> </target>
+
<target name="quick.reflect" depends="quick.lib">
<staged-build with="locker" stage="quick" project="reflect"/> </target>
@@ -1149,20 +1193,23 @@ TODO:
<target name="quick.repl" depends="quick.comp">
<staged-build with="locker" stage="quick" project="repl"/> </target>
- <target name="quick.scalacheck" depends="quick.actors, quick.lib">
+ <target name="quick.scalacheck" depends="quick.actors, quick.parser-combinators, quick.lib">
<staged-build with="locker" stage="quick" project="scalacheck" args="-nowarn"/> </target>
<target name="quick.scalap" depends="quick.repl">
<staged-build with="locker" stage="quick" project="scalap"/> </target>
- <target name="quick.partest" depends="quick.scalap, quick.repl, asm.done">
+ <target name="quick.partest" depends="quick.scalap, quick.xml, quick.repl, asm.done">
<staged-build with="locker" stage="quick" project="partest" version="partest"/> </target>
- <target name="quick.scaladoc" depends="quick.comp, quick.partest">
- <staged-build with="locker" stage="quick" project="scaladoc" version="scaladoc"/> </target>
+ <target name="quick.scaladoc" depends="quick.comp, quick.partest, quick.parser-combinators">
+ <staged-build with="locker" stage="quick" project="scaladoc" version="scaladoc"/> </target>
<target name="quick.interactive" depends="quick.comp, quick.scaladoc">
- <staged-build with="locker" stage="quick" project="interactive"/> </target>
+ <staged-build with="locker" stage="quick" project="interactive"/> </target>
+
+ <target name="quick.xml" depends="quick.lib">
+ <staged-build with="locker" stage="quick" project="xml"/> </target>
<target name="quick.swing" depends="quick.actors, quick.lib" if="has.java6">
<staged-build with="locker" stage="quick" project="swing"/> </target>
@@ -1200,7 +1247,7 @@ TODO:
</staged-uptodate>
</target>
- <target name="quick.bin" depends="quick.lib, quick.reflect, quick.comp, quick.repl, quick.scalacheck, quick.scalap, quick.interactive, quick.swing, quick.plugins, quick.partest, quick.scaladoc">
+ <target name="quick.bin" depends="quick.lib, quick.reflect, quick.comp, quick.repl, quick.scalacheck, quick.scalap, quick.interactive, quick.xml, quick.parser-combinators, quick.swing, quick.plugins, quick.partest, quick.scaladoc">
<staged-bin stage="quick" classpathref="quick.bin.tool.path"/>
</target>
@@ -1215,13 +1262,18 @@ TODO:
<staged-pack project="library"/></target>
<target name="pack.actors" depends="quick.lib"> <staged-pack project="actors"/> </target>
+ <target name="pack.xml" depends="quick.xml"> <staged-pack project="xml"/> </target>
+ <target name="pack.parser-combinators" depends="quick.parser-combinators"> <staged-pack project="parser-combinators"/> </target>
<target name="pack.swing" if="has.java6" depends="quick.swing"> <staged-pack project="swing"/> </target>
<target name="pack.reflect" depends="quick.reflect"> <staged-pack project="reflect"/> </target>
<target name="pack.comp" depends="quick.comp, quick.scaladoc, quick.interactive, quick.repl, asm.done">
<staged-pack project="compiler" manifest="${build-pack.dir}/META-INF/MANIFEST.MF">
<pre> <!-- TODO the files copied here do not influence actuality of this target (nor does the manifest) -->
- <copy file="${jline.jar}" toDir="${build-pack.dir}/lib"/>
+ <copy todir="${build-pack.dir}/lib">
+ <resources refid="repl.deps.fileset"/>
+ <mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper" from="${repl.deps.versions}" to="flatten"/>
+ </copy>
<copy todir="${build-pack.dir}/lib">
<fileset dir="${lib-extra.dir}">
<include name="**/*.jar"/>
@@ -1251,7 +1303,7 @@ TODO:
<target name="pack.scalap" depends="quick.scalap"> <staged-pack project="scalap" targetjar="scalap.jar"/> </target>
- <target name="pack.bin" depends="pack.comp, pack.lib, pack.actors, pack.partest, pack.plugins, pack.reflect, pack.scalacheck, pack.scalap, pack.swing">
+ <target name="pack.bin" depends="pack.comp, pack.lib, pack.actors, pack.partest, pack.plugins, pack.reflect, pack.scalacheck, pack.scalap, pack.xml, pack.swing, pack.parser-combinators">
<staged-bin stage="pack"/>
</target>
@@ -1264,7 +1316,7 @@ TODO:
from="${partest.extras.versions}" to="flatten"/>
</copy>
- <taskdef resource="scala/tools/ant/antlib.xml" classpathref="pack.compiler.path"/>
+ <taskdef resource="scala/tools/ant/antlib.xml" classpathref="docs.compiler.path"/>
<taskdef resource="scala/tools/partest/antlib.xml" classpathref="partest.classpath"/>
</target>
@@ -1296,7 +1348,10 @@ TODO:
<fileset dir="${build-locker.dir}/classes/compiler"/>
<fileset dir="${asm-classes}"/>
</jar>
- <copy file="${jline.jar}" toDir="${build-palo.dir}/lib"/>
+ <copy todir="${build-palo.dir}/lib">
+ <resources refid="repl.deps.fileset"/>
+ <mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper" from="${repl.deps.versions}" to="flatten"/>
+ </copy>
</target>
<target name="palo.bin" depends="palo.done"> <staged-bin stage="palo"/></target>
@@ -1355,16 +1410,18 @@ TODO:
<stopwatch name="osgi.bundle.timer"/>
<make-bundle name="scala-library" version="${osgi.version.number}" />
<make-bundle name="scala-actors" version="${osgi.version.number}" />
+ <make-bundle name="scala-parser-combinators" version="${osgi.version.number}" />
<make-bundle name="scala-reflect" version="${osgi.version.number}" />
<make-bundle name="scala-compiler" version="${osgi.version.number}" />
<make-plugin-bundle name="continuations" version="${osgi.version.number}" />
+ <make-bundle name="scala-xml" version="${osgi.version.number}"/>
<touch file="${build-osgi.dir}/bundles.complete" verbose="no"/>
<if><isset property="has.java6"/><then>
<make-bundle name="scala-swing" version="${osgi.version.number}"/></then>
</if>
- <stopwatch name="osgi.bundle.timer" action="total"/></then>
- </if>
+ <stopwatch name="osgi.bundle.timer" action="total"/>
+ </then></if>
</target>
@@ -1641,7 +1698,7 @@ TODO:
docversion="${version.number}"
docsourceurl="${scaladoc.url}€{FILE_PATH}.scala#L1"
sourcepath="${src.dir}"
- classpathref="pack.compiler.path"
+ classpathref="docs.compiler.path"
addparams="${scalac.args.all}"
docRootContent="${src.dir}/library/rootdoc.txt"
implicits="on"
@@ -1674,16 +1731,21 @@ TODO:
</staged-uptodate>
</target>
- <target name="docs.comp" depends="docs.start">
- <staged-docs project="compiler" title="Scala Compiler" docroot="rootdoc.txt">
+ <target name="docs.xml" depends="docs.start">
+ <staged-docs project="xml" title="Scala XML Library" docroot="rootdoc.txt">
+ <include name="**/*.scala"/>
+ </staged-docs>
+ </target>
+
+ <target name="docs.parser-combinators" depends="docs.start">
+ <staged-docs project="parser-combinators" title="Scala Parser Combinator Library" docroot="rootdoc.txt">
<include name="**/*.scala"/>
</staged-docs>
</target>
- <target name="docs.jline" depends="docs.start">
- <staged-docs project="jline" dir="jline/src/main/java" title="Scala JLine">
+ <target name="docs.comp" depends="docs.start">
+ <staged-docs project="compiler" title="Scala Compiler" docroot="rootdoc.txt">
<include name="**/*.scala"/>
- <include name="**/*.java"/>
</staged-docs>
</target>
@@ -1712,7 +1774,7 @@ TODO:
<mkdir dir="${build.dir}/manmaker/classes"/>
<scalac
destdir="${build.dir}/manmaker/classes"
- classpathref="pack.compiler.path"
+ classpathref="docs.compiler.path"
srcdir="${src.dir}/manual"
includes="**/*.scala"
addparams="${scalac.args.all}"/>
@@ -1742,8 +1804,7 @@ TODO:
</staged-uptodate>
</target>
- <target name="docs.done" depends="docs.jline, docs.comp, docs.man, docs.lib, docs.scalap, docs.partest, docs.continuations-plugin"/>
-
+ <target name="docs.done" depends="docs.comp, docs.man, docs.lib, docs.xml, docs.parser-combinators, docs.scalap, docs.partest, docs.continuations-plugin"/>
<!-- ===========================================================================
DISTRIBUTION
@@ -1771,7 +1832,6 @@ TODO:
<mkdir dir="${dist.dir}/lib"/>
<copy toDir="${dist.dir}/lib">
<fileset dir="${build-pack.dir}/lib">
- <include name="jline.jar"/>
<include name="scalacheck.jar"/>
<include name="scala-partest.jar"/>
<include name="scalap.jar"/>
@@ -1784,12 +1844,20 @@ TODO:
from="${partest.extras.versions}" to="flatten"/>
</copy>
+ <copy todir="${dist.dir}/lib">
+ <resources refid="repl.deps.fileset"/>
+ <mapper classpathref="maven-ant-tasks.classpath" classname="org.apache.maven.artifact.ant.VersionMapper"
+ from="${repl.deps.versions}" to="flatten"/>
+ </copy>
+
<mkdir dir="${dist.dir}/bin"/>
<!-- TODO - Stop being inefficient and don't copy OSGi bundles overtop other jars. -->
<copy-bundle name="scala-library"/>
<copy-bundle name="scala-reflect"/>
+ <copy-bundle name="scala-xml"/>
<copy-bundle name="scala-swing"/>
<copy-bundle name="scala-actors"/>
+ <copy-bundle name="scala-parser-combinators"/>
<copy-bundle name="scala-compiler"/>
<copy toDir="${dist.dir}/bin">
<fileset dir="${build-pack.dir}/bin"/>
@@ -1846,8 +1914,10 @@ TODO:
<fileset dir="${src.dir}/interactive"/>
<fileset dir="${src.dir}/continuations/plugin"/>
</jar>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-xml-src.jar" basedir="${src.dir}/xml"/>
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-swing-src.jar" basedir="${src.dir}/swing"/>
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-reflect-src.jar" basedir="${src.dir}/reflect"/>
+ <jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-parser-combinators-src.jar" basedir="${src.dir}/parser-combinators"/>
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scalap-src.jar" basedir="${src.dir}/scalap"/>
<jar whenmanifestonly="fail" destfile="${dist.dir}/src/scala-partest-src.jar" basedir="${src.dir}/partest"/>
</target>
@@ -1896,8 +1966,10 @@ TODO:
<target name="starr.src" depends="starr.jars">
<jar whenmanifestonly="fail" destfile="${lib.dir}/scala-library-src.jar">
<fileset dir="${src.dir}/library"/>
+ <fileset dir="${src.dir}/xml"/>
<fileset dir="${src.dir}/swing"/>
<fileset dir="${src.dir}/actors"/>
+ <fileset dir="${src.dir}/parser-combinators"/>
<fileset dir="${src.dir}/forkjoin"/>
</jar>
<jar whenmanifestonly="fail" destfile="${lib.dir}/scala-reflect-src.jar" basedir="${src.dir}/reflect"/>
diff --git a/dbuild-meta.json b/dbuild-meta.json
new file mode 100644
index 0000000000..705eeeb6b6
--- /dev/null
+++ b/dbuild-meta.json
@@ -0,0 +1,250 @@
+{
+ "version": "2.11.0",
+ "subproj": [],
+ "projects": [
+ {
+ "artifacts": [
+ {
+ "extension": "jar",
+ "name": "scala-library",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "dependencies": [],
+ "name": "scala-library",
+ "organization": "org.scala-lang"
+ },
+ {
+ "artifacts": [
+ {
+ "extension": "jar",
+ "name": "scala-reflect",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "dependencies": [
+ {
+ "extension": "jar",
+ "name": "scala-library",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "name": "scala-reflect",
+ "organization": "org.scala-lang"
+ },
+ {
+ "artifacts": [
+ {
+ "extension": "jar",
+ "name": "scala-compiler",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "dependencies": [
+ {
+ "extension": "jar",
+ "name": "scala-reflect",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "name": "scala-compiler",
+ "organization": "org.scala-lang"
+ },
+ {
+ "artifacts": [
+ {
+ "extension": "jar",
+ "name": "scala-swing",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "dependencies": [
+ {
+ "extension": "jar",
+ "name": "scala-library",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "name": "scala-swing",
+ "organization": "org.scala-lang"
+ },
+ {
+ "artifacts": [
+ {
+ "extension": "jar",
+ "name": "scala-actors",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "dependencies": [
+ {
+ "extension": "jar",
+ "name": "scala-library",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "name": "scala-actors",
+ "organization": "org.scala-lang"
+ },
+ {
+ "artifacts": [
+ {
+ "extension": "jar",
+ "name": "scala-xml",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "dependencies": [
+ {
+ "extension": "jar",
+ "name": "scala-library",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "name": "scala-xml",
+ "organization": "org.scala-lang"
+ },
+ {
+ "artifacts": [
+ {
+ "extension": "jar",
+ "name": "scala-parser-combinators",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "dependencies": [
+ {
+ "extension": "jar",
+ "name": "scala-library",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "name": "scala-parser-combinators",
+ "organization": "org.scala-lang"
+ },
+ {
+ "artifacts": [
+ {
+ "extension": "jar",
+ "name": "scalacheck",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "dependencies": [
+ {
+ "extension": "jar",
+ "name": "scala-library",
+ "organization": "org.scala-lang"
+ },
+ {
+ "extension": "jar",
+ "name": "scala-actors",
+ "organization": "org.scala-lang"
+ },
+ {
+ "extension": "jar",
+ "name": "scala-parser-combinators",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "name": "scalacheck",
+ "organization": "org.scala-lang"
+ },
+ {
+ "artifacts": [
+ {
+ "extension": "jar",
+ "name": "scala-partest",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "dependencies": [
+ {
+ "extension": "jar",
+ "name": "scala-compiler",
+ "organization": "org.scala-lang"
+ },
+ {
+ "extension": "jar",
+ "name": "scalap",
+ "organization": "org.scala-lang"
+ },
+ {
+ "extension": "jar",
+ "name": "scala-xml",
+ "organization": "org.scala-lang"
+ },
+ {
+ "extension": "jar",
+ "name": "scalacheck",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "name": "scala-partest",
+ "organization": "org.scala-lang"
+ },
+ {
+ "artifacts": [
+ {
+ "extension": "jar",
+ "name": "scaladoc",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "dependencies": [
+ {
+ "extension": "jar",
+ "name": "scala-compiler",
+ "organization": "org.scala-lang"
+ },
+ {
+ "extension": "jar",
+ "name": "scala-partest",
+ "organization": "org.scala-lang"
+ },
+ {
+ "extension": "jar",
+ "name": "scala-xml",
+ "organization": "org.scala-lang"
+ },
+ {
+ "extension": "jar",
+ "name": "scala-parser-combinators",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "name": "scaladoc",
+ "organization": "org.scala-lang"
+ },
+ {
+ "artifacts": [
+ {
+ "extension": "jar",
+ "name": "scalap",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "dependencies": [
+ {
+ "extension": "jar",
+ "name": "scala-compiler",
+ "organization": "org.scala-lang"
+ }
+ ],
+ "name": "scalap",
+ "organization": "org.scala-lang"
+ },
+ {
+ "artifacts": [
+ {
+ "extension": "jar",
+ "name": "continuations",
+ "organization": "org.scala-lang.plugins"
+ }
+ ],
+ "dependencies": [],
+ "name": "continuations",
+ "organization": "org.scala-lang.plugins"
+ }
+ ]
+}
diff --git a/lib/.gitignore b/lib/.gitignore
new file mode 100644
index 0000000000..0c507490be
--- /dev/null
+++ b/lib/.gitignore
@@ -0,0 +1,15 @@
+ant-contrib.jar
+ant-dotnet-1.0.jar
+ant.jar
+fjbg.jar
+forkjoin.jar
+jline.jar
+maven-ant-tasks-2.1.1.jar
+msil.jar
+scala-compiler.jar
+scala-compiler-src.jar
+scala-library.jar
+scala-library-src.jar
+scala-reflect.jar
+scala-reflect-src.jar
+vizant.jar
diff --git a/lib/jline.jar.desired.sha1 b/lib/jline.jar.desired.sha1
deleted file mode 100644
index 1eb994cf1b..0000000000
--- a/lib/jline.jar.desired.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e87ad04fdffb5cd9b7aa9293596d9fdde086eccd ?jline.jar
diff --git a/src/asm/scala/tools/asm/tree/MethodNode.java b/src/asm/scala/tools/asm/tree/MethodNode.java
index 5f9c778e0c..a161600edb 100644
--- a/src/asm/scala/tools/asm/tree/MethodNode.java
+++ b/src/asm/scala/tools/asm/tree/MethodNode.java
@@ -458,7 +458,7 @@ public class MethodNode extends MethodVisitor {
*/
protected LabelNode getLabelNode(final Label l) {
if (!(l.info instanceof LabelNode)) {
- l.info = new LabelNode();
+ l.info = new LabelNode(l);
}
return (LabelNode) l.info;
}
diff --git a/src/build/bnd/scala-compiler.bnd b/src/build/bnd/scala-compiler.bnd
index c289843447..dc30513db4 100644
--- a/src/build/bnd/scala-compiler.bnd
+++ b/src/build/bnd/scala-compiler.bnd
@@ -3,6 +3,6 @@ Bundle-SymbolicName: org.scala-lang.scala-compiler
ver: @VERSION@
Bundle-Version: ${ver}
Export-Package: *;version=${ver}
-Import-Package: scala.tools.jline.*;resolution:=optional, \
+Import-Package: jline.*;resolution:=optional, \
org.apache.tools.ant.*;resolution:=optional, \
*
diff --git a/src/build/bnd/scala-parser-combinators.bnd b/src/build/bnd/scala-parser-combinators.bnd
new file mode 100644
index 0000000000..d712a4ba2a
--- /dev/null
+++ b/src/build/bnd/scala-parser-combinators.bnd
@@ -0,0 +1,5 @@
+Bundle-Name: Scala Parser Combinators Library
+Bundle-SymbolicName: org.scala-lang.scala-parser-combinators
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
diff --git a/src/build/bnd/scala-xml.bnd b/src/build/bnd/scala-xml.bnd
new file mode 100644
index 0000000000..6203c57dfe
--- /dev/null
+++ b/src/build/bnd/scala-xml.bnd
@@ -0,0 +1,5 @@
+Bundle-Name: Scala XML Library
+Bundle-SymbolicName: org.scala-lang.scala-xml
+ver: @VERSION@
+Bundle-Version: ${ver}
+Export-Package: *;version=${ver}
diff --git a/src/build/dbuild-meta-json-gen.scala b/src/build/dbuild-meta-json-gen.scala
new file mode 100644
index 0000000000..42214dd191
--- /dev/null
+++ b/src/build/dbuild-meta-json-gen.scala
@@ -0,0 +1,63 @@
+// use this script to generate dbuild-meta.json
+// make sure the version is specified correctly,
+// update the dependency structure and
+// check out distributed-build and run `sbt console`:
+// TODO: also generate build.xml and eclipse config from a similar data-structure
+
+import distributed.project.model._
+
+val meta =
+ ExtractedBuildMeta("2.11.0", Seq(
+ Project("scala-library", "org.scala-lang",
+ Seq(ProjectRef("scala-library", "org.scala-lang")),
+ Seq.empty), // TODO: forkjoin
+ Project("scala-reflect", "org.scala-lang",
+ Seq(ProjectRef("scala-reflect", "org.scala-lang")),
+ Seq(ProjectRef("scala-library", "org.scala-lang"))),
+ Project("scala-compiler", "org.scala-lang",
+ Seq(ProjectRef("scala-compiler", "org.scala-lang")),
+ Seq(ProjectRef("scala-reflect", "org.scala-lang"))), // asm
+
+ // Project("scala-repl", "org.scala-lang",
+ // Seq(ProjectRef("scala-repl", "org.scala-lang")),
+ // Seq(ProjectRef("scala-compiler", "org.scala-lang"))), // jline
+
+ // Project("scala-interactive", "org.scala-lang",
+ // Seq(ProjectRef("scala-interactive", "org.scala-lang")),
+ // Seq(ProjectRef("scala-compiler", "org.scala-lang"), ProjectRef("scaladoc", "org.scala-lang"))),
+
+ Project("scala-swing", "org.scala-lang",
+ Seq(ProjectRef("scala-swing", "org.scala-lang")),
+ Seq(ProjectRef("scala-library", "org.scala-lang"))),
+
+ Project("scala-actors", "org.scala-lang",
+ Seq(ProjectRef("scala-actors", "org.scala-lang")),
+ Seq(ProjectRef("scala-library", "org.scala-lang"))),
+ Project("scala-xml", "org.scala-lang",
+ Seq(ProjectRef("scala-xml", "org.scala-lang")),
+ Seq(ProjectRef("scala-library", "org.scala-lang"))),
+ Project("scala-parser-combinators", "org.scala-lang",
+ Seq(ProjectRef("scala-parser-combinators", "org.scala-lang")),
+ Seq(ProjectRef("scala-library", "org.scala-lang"))),
+
+ Project("scalacheck", "org.scala-lang",
+ Seq(ProjectRef("scalacheck", "org.scala-lang")),
+ Seq(ProjectRef("scala-library", "org.scala-lang"), ProjectRef("scala-actors", "org.scala-lang"), ProjectRef("scala-parser-combinators", "org.scala-lang"))),
+
+ Project("scala-partest", "org.scala-lang",
+ Seq(ProjectRef("scala-partest", "org.scala-lang")),
+ Seq(ProjectRef("scala-compiler", "org.scala-lang"), // TODO: refine to scala-repl
+ ProjectRef("scalap", "org.scala-lang"), ProjectRef("scala-xml", "org.scala-lang"), ProjectRef("scalacheck", "org.scala-lang"))),
+
+ Project("scaladoc", "org.scala-lang",
+ Seq(ProjectRef("scaladoc", "org.scala-lang")),
+ Seq(ProjectRef("scala-compiler", "org.scala-lang"),ProjectRef("scala-partest", "org.scala-lang"), ProjectRef("scala-xml", "org.scala-lang"), ProjectRef("scala-parser-combinators", "org.scala-lang"))),
+
+ Project("scalap", "org.scala-lang",
+ Seq(ProjectRef("scalap", "org.scala-lang")),
+ Seq(ProjectRef("scala-compiler", "org.scala-lang"))),
+
+ Project("continuations", "org.scala-lang.plugins", Seq(ProjectRef("continuations", "org.scala-lang.plugins")), Seq.empty)
+ ))
+
+println(Utils.writeValue(meta))
diff --git a/src/build/maven/jline-pom.xml b/src/build/maven/jline-pom.xml
deleted file mode 100644
index 0d6e801551..0000000000
--- a/src/build/maven/jline-pom.xml
+++ /dev/null
@@ -1,68 +0,0 @@
-<project
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>jline</artifactId>
- <packaging>jar</packaging>
- <version>@VERSION@</version>
- <name>jline</name>
- <description>Like readline, but better</description>
- <url>http://www.scala-lang.org/</url>
- <inceptionYear>2011</inceptionYear>
- <organization>
- <name>LAMP/EPFL</name>
- <url>http://lamp.epfl.ch/</url>
- </organization>
- <licenses>
- <license>
- <name>BSD-like</name>
- <url>http://www.scala-lang.org/downloads/license.html
- </url>
- <distribution>repo</distribution>
- </license>
- <license>
- <name>The BSD License</name>
- <url>http://www.opensource.org/licenses/bsd-license.php</url>
- <distribution>repo</distribution>
- </license>
- </licenses>
- <scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
- </scm>
- <issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
- </issueManagement>
-
- <dependencies>
- <dependency>
- <groupId>org.fusesource.jansi</groupId>
- <artifactId>jansi</artifactId>
- <version>1.4</version>
- <!--<scope>provided</scope>-->
- </dependency>
- </dependencies>
- <distributionManagement>
- <repository>
- <id>scala-tools.org</id>
- <url>@RELEASE_REPOSITORY@</url>
- </repository>
- <snapshotRepository>
- <id>scala-tools.org</id>
- <url>@SNAPSHOT_REPOSITORY@</url>
- <uniqueVersion>false</uniqueVersion>
- </snapshotRepository>
- </distributionManagement>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Typesafe</id>
- <name>Typesafe, Inc.</name>
- </developer>
- </developers>
-</project>
diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml
index e70173319e..84a12066f5 100644
--- a/src/build/maven/maven-deploy.xml
+++ b/src/build/maven/maven-deploy.xml
@@ -5,271 +5,121 @@
<description>
SuperSabbus extension for deploying a distribution to Maven. THIS FILE IS MEANT TO BE RUN STANDALONE IN THE MAVEN "distpack" DIRECTORY
</description>
- <target name="init.properties">
+
+ <target name="boot">
<!-- Pull in properties from build -->
<property file="build.properties" />
- <!-- Initialize specific properties -->
+
<property name="remote.snapshot.repository" value="https://oss.sonatype.org/content/repositories/snapshots" />
- <property name="remote.release.repository" value="https://oss.sonatype.org/service/local/staging/deploy/maven2" />
+ <property name="remote.release.repository" value="https://oss.sonatype.org/service/local/staging/deploy/maven2" />
<property name="local.snapshot.repository" value="${user.home}/.m2/repository" />
- <property name="local.release.repository" value="${user.home}/.m2/repository" />
+ <property name="local.release.repository" value="${user.home}/.m2/repository" />
+
<property name="repository.credentials.id" value="sonatype-nexus" />
<property name="settings.file" value="${user.home}/.m2/settings.xml" />
- <condition property="version.is.snapshot">
- <contains string="${maven.version.number}" substring="-SNAPSHOT"/>
- </condition>
-
- <echo>Using server[${repository.credentials.id}] for maven repository credentials.
- Please make sure that your ~/.m2/settings.xml has the needed username/password for this server id
- </echo>
-
- </target>
-
- <target name="init.maven" depends="init.properties">
+ <!-- Set up Ant contrib tasks so we can use <if><then><else> instead of the clunky `unless` attribute -->
+ <taskdef resource="net/sf/antcontrib/antlib.xml" classpath="ant-contrib.jar"/>
<!-- Add our maven ant tasks -->
<path id="maven-ant-tasks.classpath" path="maven-ant-tasks-2.1.1.jar" />
<typedef resource="org/apache/maven/artifact/ant/antlib.xml" uri="urn:maven-artifact-ant" classpathref="maven-ant-tasks.classpath" />
+ </target>
- <!-- simplify fixing pom versions -->
- <macrodef name="make-pom">
- <attribute name="name" />
- <attribute name="version" />
- <sequential>
- <copy file="@{name}/@{name}-pom.xml" tofile="@{name}/@{name}-pom-fixed.xml" overwrite="true">
- <filterset>
- <filter token="VERSION" value="@{version}" />
- <filter token="RELEASE_REPOSITORY" value="${remote.release.repository}" />
- <filter token="SNAPSHOT_REPOSITORY" value="${remote.snapshot.repository}" />
- </filterset>
- </copy>
- <artifact:pom id="@{name}.pom" file="@{name}/@{name}-pom-fixed.xml" />
- </sequential>
- </macrodef>
+ <target name="init" depends="boot">
+ <if><contains string="${maven.version.number}" substring="-SNAPSHOT"/><then>
+ <property name="remote.repository" value="${remote.snapshot.repository}"/>
+ <property name="local.repository" value="${local.snapshot.repository}"/>
+ </then><else>
+ <property name="remote.repository" value="${remote.release.repository}"/>
+ <property name="local.repository" value="${local.release.repository}"/>
+ </else></if>
- <macrodef name="make-pom-plugin">
+ <echo>Using server[${repository.credentials.id}] for maven repository credentials.
+ Please make sure that your ~/.m2/settings.xml has the needed username/password for this server id
+ </echo>
+
+ <macrodef name="deploy-one">
+ <attribute name="dir" default=""/>
<attribute name="name" />
<attribute name="version" />
+ <attribute name="local" />
+ <attribute name="signed" />
+
<sequential>
- <copy file="plugins/@{name}/@{name}-plugin-pom.xml" tofile="plugins/@{name}/@{name}-pom-fixed.xml" overwrite="true">
+ <local name="path"/> <property name="path" value="@{dir}@{name}/@{name}"/>
+
+ <echo>Deploying ${path}-[pom.xml|src.jar|docs.jar].</echo>
+
+ <copy file="${path}-pom.xml" tofile="${path}-pom-filtered.xml" overwrite="true">
<filterset>
<filter token="VERSION" value="@{version}" />
<filter token="RELEASE_REPOSITORY" value="${remote.release.repository}" />
<filter token="SNAPSHOT_REPOSITORY" value="${remote.snapshot.repository}" />
+ <filter token="JLINE_VERSION" value="${jline.version}" />
</filterset>
</copy>
- <artifact:pom id="plugin-@{name}.pom" file="plugins/@{name}/@{name}-pom-fixed.xml" />
- </sequential>
- </macrodef>
- </target>
- <!-- macros for local deployment -->
- <target name="deploy.local.init" depends="init.maven">
- <!-- Deploy single artifact locally -->
- <macrodef name="deploy-local">
- <attribute name="name" />
- <attribute name="version" />
- <attribute name="repository" />
- <element name="extra-attachments" optional="yes" />
- <sequential>
- <make-pom name="@{name}" version="@{version}" />
- <artifact:install file="@{name}/@{name}.jar">
- <artifact:pom refid="@{name}.pom" />
- <artifact:localRepository path="@{repository}" id="${repository.credentials.id}" />
- <artifact:attach type="jar" file="@{name}/@{name}-src.jar" classifier="sources" />
- <artifact:attach type="jar" file="@{name}/@{name}-docs.jar" classifier="javadoc" />
- <extra-attachments />
- </artifact:install>
- </sequential>
- </macrodef>
-
- <!-- Deploy compiler plugins -->
- <macrodef name="deploy-local-plugin">
- <attribute name="name" />
- <attribute name="version" />
- <attribute name="repository" />
- <element name="extra-attachments" optional="yes" />
- <sequential>
- <make-pom-plugin name="@{name}" version="@{version}" />
- <artifact:install file="plugins/@{name}/@{name}.jar">
- <artifact:pom refid="plugin-@{name}.pom" />
- <artifact:attach type="jar" file="plugins/@{name}/@{name}-src.jar" classifier="sources" />
- <artifact:attach type="jar" file="plugins/@{name}/@{name}-docs.jar" classifier="javadoc" />
- <artifact:localRepository path="@{repository}" id="${repository.credentials.id}" />
- <extra-attachments />
- </artifact:install>
- </sequential>
- </macrodef>
-
-
- <!-- Deploy all artifacts locally -->
- <macrodef name="deploy-local-all">
- <attribute name="repository" />
- <attribute name="version" />
- <sequential>
- <deploy-local name="scala-library" version="@{version}" repository="@{repository}" />
- <deploy-local name="scala-compiler" version="@{version}" repository="@{repository}" />
- <deploy-local-plugin name="continuations" version="@{version}" repository="@{repository}"/>
- <deploy-local name="scala-reflect" version="@{version}" repository="@{repository}" />
- <deploy-local name="scala-actors" version="@{version}" repository="@{repository}" />
- <deploy-local name="scala-swing" version="@{version}" repository="@{repository}"/>
- <deploy-local name="scalap" version="@{version}" repository="@{repository}"/>
- <deploy-local name="scala-partest" version="@{version}" repository="@{repository}"/>
- <deploy-local name="jline" version="@{version}" repository="@{repository}"/>
- </sequential>
- </macrodef>
- </target>
-
- <!-- macros for remote deployment -->
- <target name="deploy.remote.init" depends="init.maven">
- <!-- Deploy single artifact locally -->
- <macrodef name="deploy-remote">
- <attribute name="name" />
- <attribute name="repository" />
- <attribute name="version" />
- <element name="extra-attachments" optional="yes" />
- <sequential>
- <make-pom name="@{name}" version="@{version}" />
- <artifact:deploy file="@{name}/@{name}.jar" settingsFile="${settings.file}">
- <artifact:pom refid="@{name}.pom" />
- <artifact:remoteRepository url="@{repository}" id="${repository.credentials.id}" />
- <artifact:attach type="jar" file="@{name}/@{name}-src.jar" classifier="sources" />
- <artifact:attach type="jar" file="@{name}/@{name}-docs.jar" classifier="javadoc" />
- <extra-attachments />
- </artifact:deploy>
+ <artifact:pom id="@{name}.pom" file="${path}-pom-filtered.xml" />
+
+ <if><equals arg1="@{signed}" arg2="false"/><then>
+ <if><equals arg1="@{local}" arg2="false"/><then>
+ <artifact:deploy file="${path}.jar" settingsFile="${settings.file}">
+ <artifact:remoteRepository url="${remote.repository}" id="${repository.credentials.id}" />
+ <artifact:pom refid="@{name}.pom" />
+ <artifact:attach type="jar" file="${path}-src.jar" classifier="sources" />
+ <artifact:attach type="jar" file="${path}-docs.jar" classifier="javadoc" />
+ </artifact:deploy>
+ </then><else>
+ <artifact:install file="${path}.jar">
+ <artifact:localRepository path="${local.repository}" id="${repository.credentials.id}" />
+ <artifact:pom refid="@{name}.pom" />
+ <artifact:attach type="jar" file="${path}-src.jar" classifier="sources" />
+ <artifact:attach type="jar" file="${path}-docs.jar" classifier="javadoc" />
+ </artifact:install>
+ </else></if>
+ </then><else>
+ <local name="repo"/>
+ <if><equals arg1="@{local}" arg2="false"/><then>
+ <property name="repo" value="${remote.repository}"/>
+ </then><else>
+ <property name="repo" value="${local.repository}"/>
+ </else></if>
+ <artifact:mvn>
+ <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
+ <arg value="-Durl=${repo}" />
+ <arg value="-DrepositoryId=${repository.credentials.id}" />
+ <arg value="-DpomFile=${path}-pom-filtered.xml" />
+ <arg value= "-Dfile=${path}.jar" />
+ <arg value="-Dsources=${path}-src.jar" />
+ <arg value="-Djavadoc=${path}-docs.jar" />
+ <arg value="-Pgpg" />
+ <arg value="-Dgpg.useagent=true" />
+ </artifact:mvn>
+ </else></if>
</sequential>
</macrodef>
- <!-- Deploy compiler plugins -->
- <macrodef name="deploy-remote-plugin">
- <attribute name="name" />
- <attribute name="version" />
- <attribute name="repository" />
- <element name="extra-attachments" optional="yes" />
- <sequential>
- <make-pom-plugin name="@{name}" version="@{version}" />
- <artifact:deploy file="plugins/@{name}/@{name}.jar" settingsFile="${settings.file}">
- <artifact:pom refid="plugin-@{name}.pom" />
- <artifact:attach type="jar" file="plugins/@{name}/@{name}-src.jar" classifier="sources" />
- <artifact:attach type="jar" file="plugins/@{name}/@{name}-docs.jar" classifier="javadoc" />
- <artifact:remoteRepository url="@{repository}" id="${repository.credentials.id}" />
- <extra-attachments />
- </artifact:deploy>
- </sequential>
- </macrodef>
+ <macrodef name="deploy">
+ <attribute name="local" default="false"/>
+ <attribute name="signed" default="false"/>
- <!-- Deploy all artifacts locally -->
- <macrodef name="deploy-remote-all">
- <attribute name="repository" />
- <attribute name="version" />
<sequential>
- <deploy-remote name="scala-library" version="@{version}" repository="@{repository}"/>
- <deploy-remote name="jline" version="@{version}" repository="@{repository}"/>
- <deploy-remote name="scala-reflect" version="@{version}" repository="@{repository}"/>
- <deploy-remote name="scala-compiler" version="@{version}" repository="@{repository}" />
- <deploy-remote name="scala-swing" version="@{version}" repository="@{repository}"/>
- <deploy-remote name="scala-actors" version="@{version}" repository="@{repository}"/>
- <deploy-remote name="scalap" version="@{version}" repository="@{repository}"/>
- <deploy-remote name="scala-partest" version="@{version}" repository="@{repository}"/>
- <deploy-remote-plugin name="continuations" version="@{version}" repository="@{repository}"/>
+ <deploy-one name="scala-actors" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-compiler" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-library" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-xml" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-parser-combinators" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-partest" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-reflect" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scala-swing" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
+ <deploy-one name="scalap" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
+ <deploy-one dir="plugins/" name="continuations" version="${maven.version.number}" local="@{local}" signed="@{signed}"/>
</sequential>
</macrodef>
-
- <!-- PGP Signed deployment -->
- <macrodef name="deploy-remote-signed-single">
- <attribute name="pom" />
- <attribute name="repository" />
- <attribute name="jar" />
- <attribute name="srcjar" />
- <attribute name="docjar" />
- <sequential>
- <artifact:mvn>
- <arg value="org.apache.maven.plugins:maven-gpg-plugin:1.3:sign-and-deploy-file" />
- <arg value="-Durl=@{repository}" />
- <arg value="-DrepositoryId=${repository.credentials.id}" />
- <arg value="-DpomFile=@{pom}" />
- <arg value="-Dfile=@{jar}" />
- <arg value="-Dsources=@{srcjar}" />
- <arg value="-Djavadoc=@{docjar}" />
- <arg value="-Pgpg" />
- <arg value="-Dgpg.useagent=true" />
- </artifact:mvn>
- </sequential>
- </macrodef>
- <macrodef name="deploy-remote-signed">
- <attribute name="name" />
- <attribute name="repository" />
- <attribute name="version" />
- <element name="extra-attachments" optional="yes" />
- <sequential>
- <make-pom name="@{name}" version="@{version}" />
- <deploy-remote-signed-single
- pom="@{name}/@{name}-pom-fixed.xml"
- repository="@{repository}"
- jar="@{name}/@{name}.jar"
- srcjar="@{name}/@{name}-src.jar"
- docjar="@{name}/@{name}-docs.jar" />
- </sequential>
- </macrodef>
- <macrodef name="deploy-remote-plugin-signed">
- <attribute name="name" />
- <attribute name="repository" />
- <attribute name="version" />
- <element name="extra-attachments" optional="yes" />
- <sequential>
- <make-pom-plugin name="@{name}" version="@{version}" />
- <deploy-remote-signed-single
- pom="plugins/@{name}/@{name}-pom-fixed.xml"
- repository="@{repository}"
- jar="plugins/@{name}/@{name}.jar"
- srcjar="plugins/@{name}/@{name}-src.jar"
- docjar="plugins/@{name}/@{name}-docs.jar" />
- </sequential>
- </macrodef>
- <macrodef name="deploy-remote-signed-all">
- <attribute name="repository" />
- <attribute name="version" />
- <sequential>
- <deploy-remote-plugin-signed name="continuations" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="scala-library" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="jline" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="scala-reflect" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="scala-compiler" version="@{version}" repository="@{repository}" />
- <deploy-remote-signed name="scala-swing" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="scala-actors" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="scalap" version="@{version}" repository="@{repository}"/>
- <deploy-remote-signed name="scala-partest" version="@{version}" repository="@{repository}"/>
- </sequential>
- </macrodef>
- </target>
-
- <!-- Local Targets -->
- <target name="deploy.snapshot.local" depends="deploy.local.init" if="version.is.snapshot" description="Deploys the bundled snapshot of the Scala Lanaguage to a local maven repository">
- <deploy-local-all version="${maven.version.number}" repository="${local.snapshot.repository}" />
- </target>
-
- <target name="deploy.release.local" depends="deploy.local.init" unless="version.is.snapshot" description="Deploys the bundled files as a release into the local Maven repository">
- <deploy-local-all version="${maven.version.number}" repository="${local.release.repository}" />
- </target>
- <target name="deploy.local" depends="deploy.snapshot.local, deploy.release.local" description="Deploys the bundle files to the local maven repo."/>
-
- <!-- Remote Signed Targets -->
- <target name="deploy.signed.snapshot" depends="deploy.remote.init" if="version.is.snapshot" description="Deploys the bundled files as a snapshot into the desired remote Maven repository">
- <deploy-remote-signed-all version="${maven.version.number}" repository="${remote.snapshot.repository}" />
</target>
- <target name="deploy.signed.release" depends="deploy.remote.init" unless="version.is.snapshot" description="Deploys the bundled files as a release into the desired remote Maven repository">
- <deploy-remote-signed-all version="${maven.version.number}" repository="${remote.release.repository}" />
- </target>
- <target name="deploy.signed" depends="deploy.signed.release, deploy.signed.snapshot" description="Deploys signed bundles to remote repo"/>
- <!-- Remote unsigned targets -->
- <target name="deploy.snapshot" depends="deploy.remote.init" if="version.is.snapshot" description="Deploys the bundled files as a snapshot into the desired remote Maven repository">
- <deploy-remote-all version="${maven.version.number}" repository="${remote.snapshot.repository}" />
- </target>
-
- <target name="deploy.release" depends="deploy.remote.init" unless="version.is.snapshot" description="Deploys the bundled files as a release into the desired remote Maven repository">
- <deploy-remote-all version="${maven.version.number}" repository="${remote.release.repository}" />
- </target>
- <target name="deploy" depends="deploy.snapshot, deploy.release" description="Deploys unsigned artifacts to the maven repo."/>
+ <target name="deploy" depends="init" description="Deploys unsigned artifacts to the maven repo."> <deploy/> </target>
+ <target name="deploy.local" depends="init" description="Deploys unsigned artifacts to the local maven repo."> <deploy local="true"/> </target>
+ <target name="deploy.signed" depends="init" description="Deploys signed artifacts to the remote maven repo."> <deploy signed="true"/> </target>
</project>
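
Not part of the patch, but for orientation: the single deploy-one macro above replaces the old matrix of local/remote and signed/unsigned targets. The hedged Scala sketch below restates that 2x2 decision with made-up names standing in for the Ant tasks it drives.

// Hypothetical restatement of the deploy-one branching in maven-deploy.xml above.
object DeployOneSketch extends App {
  sealed trait Action
  case object InstallLocally              extends Action // <artifact:install>
  case object DeployRemotely              extends Action // <artifact:deploy>
  case class  SignAndDeploy(repo: String) extends Action // maven-gpg-plugin sign-and-deploy-file

  def deployOne(local: Boolean, signed: Boolean,
                localRepo: String, remoteRepo: String): Action =
    if (!signed) {
      if (local) InstallLocally else DeployRemotely
    } else {
      // signed artifacts always go through the gpg plugin, pointed at the right repo
      SignAndDeploy(if (local) localRepo else remoteRepo)
    }

  // The three targets at the end of the file correspond to:
  println(deployOne(local = false, signed = false, "~/.m2/repository", "sonatype")) // deploy
  println(deployOne(local = true,  signed = false, "~/.m2/repository", "sonatype")) // deploy.local
  println(deployOne(local = false, signed = true,  "~/.m2/repository", "sonatype")) // deploy.signed
}
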
diff --git a/src/build/maven/continuations-plugin-pom.xml b/src/build/maven/plugins/continuations-pom.xml
index 9abb0a36f0..9abb0a36f0 100644
--- a/src/build/maven/continuations-plugin-pom.xml
+++ b/src/build/maven/plugins/continuations-pom.xml
diff --git a/src/build/maven/scala-compiler-pom.xml b/src/build/maven/scala-compiler-pom.xml
index fedc34a5d5..6e7f1a0f2c 100644
--- a/src/build/maven/scala-compiler-pom.xml
+++ b/src/build/maven/scala-compiler-pom.xml
@@ -1,13 +1,13 @@
<project
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.scala-lang</groupId>
<artifactId>scala-compiler</artifactId>
<packaging>jar</packaging>
<version>@VERSION@</version>
- <name>Scala Compiler</name>
- <description>Compiler for the Scala Programming Language</description>
+ <name>Scala Compiler</name>
+ <description>Compiler for the Scala Programming Language</description>
<url>http://www.scala-lang.org/</url>
<inceptionYear>2002</inceptionYear>
<organization>
@@ -23,12 +23,12 @@
</license>
</licenses>
<scm>
- <connection>scm:git:git://github.com/scala/scala.git</connection>
- <url>https://github.com/scala/scala.git</url>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
</scm>
<issueManagement>
- <system>JIRA</system>
- <url>https://issues.scala-lang.org/</url>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
</issueManagement>
<dependencies>
@@ -37,17 +37,27 @@
<artifactId>scala-library</artifactId>
<version>@VERSION@</version>
</dependency>
+ <dependency> <!-- for scaladoc -->
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-xml</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
+ <dependency> <!-- for scaladoc -->
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-parser-combinators</artifactId>
+ <version>@VERSION@</version>
+ </dependency>
<dependency>
<groupId>org.scala-lang</groupId>
<artifactId>scala-reflect</artifactId>
<version>@VERSION@</version>
</dependency>
- <dependency>
- <groupId>org.scala-lang</groupId>
- <artifactId>jline</artifactId>
- <version>@VERSION@</version>
- <optional>true</optional>
- </dependency>
+ <dependency>
+ <groupId>jline</groupId>
+ <artifactId>jline</artifactId>
+ <version>@JLINE_VERSION@</version>
+ <optional>true</optional>
+ </dependency>
</dependencies>
<distributionManagement>
<repository>
@@ -60,14 +70,14 @@
<uniqueVersion>false</uniqueVersion>
</snapshotRepository>
</distributionManagement>
- <developers>
- <developer>
- <id>lamp</id>
- <name>EPFL LAMP</name>
- </developer>
- <developer>
- <id>Typesafe</id>
- <name>Typesafe, Inc.</name>
- </developer>
- </developers>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
</project>
diff --git a/src/build/maven/scala-library-pom.xml b/src/build/maven/scala-library-pom.xml
index fc9964ae92..684474e79a 100644
--- a/src/build/maven/scala-library-pom.xml
+++ b/src/build/maven/scala-library-pom.xml
@@ -34,11 +34,6 @@
<info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
</properties>
<dependencies>
- <!--<dependency>
- <groupId>com.typesafe</groupId>
- <artifactId>config</artifactId>
- <version>0.4.0</version>
- </dependency>-->
</dependencies>
<distributionManagement>
<repository>
diff --git a/src/build/maven/scala-parser-combinators-pom.xml b/src/build/maven/scala-parser-combinators-pom.xml
new file mode 100644
index 0000000000..cddff269c8
--- /dev/null
+++ b/src/build/maven/scala-parser-combinators-pom.xml
@@ -0,0 +1,59 @@
+<project
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-parser-combinators</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
+ <name>Scala Parser Combinators</name>
+ <description>Parser Combinator Library for the Scala Programming Language</description>
+ <url>http://www.scala-lang.org/</url>
+ <inceptionYear>2002</inceptionYear>
+ <organization>
+ <name>LAMP/EPFL</name>
+ <url>http://lamp.epfl.ch/</url>
+ </organization>
+ <licenses>
+ <license>
+ <name>BSD-like</name>
+ <url>http://www.scala-lang.org/downloads/license.html
+ </url>
+ <distribution>repo</distribution>
+ </license>
+ </licenses>
+ <scm>
+ <connection>scm:git:git://github.com/scala/scala.git</connection>
+ <url>https://github.com/scala/scala.git</url>
+ </scm>
+ <issueManagement>
+ <system>JIRA</system>
+ <url>https://issues.scala-lang.org/</url>
+ </issueManagement>
+ <properties>
+ <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
+ </properties>
+ <dependencies>
+ </dependencies>
+ <distributionManagement>
+ <repository>
+ <id>scala-tools.org</id>
+ <url>@RELEASE_REPOSITORY@</url>
+ </repository>
+ <snapshotRepository>
+ <id>scala-tools.org</id>
+ <url>@SNAPSHOT_REPOSITORY@</url>
+ <uniqueVersion>false</uniqueVersion>
+ </snapshotRepository>
+ </distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
+</project>
diff --git a/src/build/maven/scala-dotnet-library-pom.xml b/src/build/maven/scala-xml-pom.xml
index 007e8be173..629872c2e2 100644
--- a/src/build/maven/scala-dotnet-library-pom.xml
+++ b/src/build/maven/scala-xml-pom.xml
@@ -1,15 +1,14 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>org.scala-lang</groupId>
- <artifactId>scala-dotnet-library</artifactId>
- <version>@VERSION@</version>
- <name>Class Library</name>
- <packaging>dotnet:library</packaging>
-
- <url>http://www.scala-lang.org/</url>
+<project
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.scala-lang</groupId>
+ <artifactId>scala-xml</artifactId>
+ <packaging>jar</packaging>
+ <version>@VERSION@</version>
+ <name>Scala XML</name>
+ <description>XML Library for the Scala Programming Language</description>
+ <url>http://www.scala-lang.org/</url>
<inceptionYear>2002</inceptionYear>
<organization>
<name>LAMP/EPFL</name>
@@ -31,6 +30,11 @@
<system>JIRA</system>
<url>https://issues.scala-lang.org/</url>
</issueManagement>
+ <properties>
+ <info.apiURL>http://www.scala-lang.org/api/@VERSION@/</info.apiURL>
+ </properties>
+ <dependencies>
+ </dependencies>
<distributionManagement>
<repository>
<id>scala-tools.org</id>
@@ -41,5 +45,15 @@
<url>@SNAPSHOT_REPOSITORY@</url>
<uniqueVersion>false</uniqueVersion>
</snapshotRepository>
- </distributionManagement>
+ </distributionManagement>
+ <developers>
+ <developer>
+ <id>lamp</id>
+ <name>EPFL LAMP</name>
+ </developer>
+ <developer>
+ <id>Typesafe</id>
+ <name>Typesafe, Inc.</name>
+ </developer>
+ </developers>
</project>
diff --git a/src/build/pack.xml b/src/build/pack.xml
index 20c4034107..6b6579ce12 100644
--- a/src/build/pack.xml
+++ b/src/build/pack.xml
@@ -151,8 +151,9 @@ MAIN DISTRIBUTION PACKAGING
</copy>
</sequential>
</macrodef>
- <mvn-copy-lib mvn.artifact.name="jline"/>
<mvn-copy-lib mvn.artifact.name="scala-library"/>
+ <mvn-copy-lib mvn.artifact.name="scala-xml"/>
+ <mvn-copy-lib mvn.artifact.name="scala-parser-combinators"/>
<mvn-copy-lib mvn.artifact.name="scala-reflect"/>
<mvn-copy-lib mvn.artifact.name="scala-compiler"/>
<mvn-copy-lib mvn.artifact.name="scala-swing"/>
@@ -170,8 +171,8 @@ MAIN DISTRIBUTION PACKAGING
<fileset dir="${dist.dir}/misc/scala-devel/plugins/">
<filename name="@{mvn.artifact.name}.jar"/>
</fileset>
- <fileset dir="${src.dir}/build/maven/">
- <filename name="@{mvn.artifact.name}-plugin-pom.xml"/>
+ <fileset dir="${src.dir}/build/maven/plugins/">
+ <filename name="@{mvn.artifact.name}-pom.xml"/>
</fileset>
</copy>
</sequential>
@@ -181,12 +182,6 @@ MAIN DISTRIBUTION PACKAGING
<target name="pack-maven.srcs" depends="pack-maven.libs">
<!-- Add missing src jars. -->
- <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/jline/jline-src.jar"
- basedir="${src.dir}/jline/src/main/java">
- <include name="**/*"/>
- </jar>
-
-
<!-- Continuations plugin -->
<jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/plugins/continuations/continuations-src.jar"
basedir="${src.dir}/continuations/plugin">
@@ -195,14 +190,18 @@ MAIN DISTRIBUTION PACKAGING
</target>
<target name="pack-maven.docs" depends="pack-maven.libs, pack-maven.plugins">
- <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/jline/jline-docs.jar"
- basedir="${build-docs.dir}/jline">
- <include name="**/*"/>
- </jar>
<jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-library/scala-library-docs.jar"
basedir="${build-docs.dir}/library">
<include name="**/*"/>
</jar>
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-xml/scala-xml-docs.jar"
+ basedir="${build-docs.dir}/xml">
+ <include name="**/*"/>
+ </jar>
+ <jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-parser-combinators/scala-parser-combinators-docs.jar"
+ basedir="${build-docs.dir}/parser-combinators">
+ <include name="**/*"/>
+ </jar>
<jar whenmanifestonly="fail" destfile="${dists.dir}/maven/${version.number}/scala-compiler/scala-compiler-docs.jar"
basedir="${build-docs.dir}/compiler">
<include name="**/*"/>
@@ -243,6 +242,8 @@ MAIN DISTRIBUTION PACKAGING
<target name="pack-maven.scripts" depends="pack-maven.latest.unix,pack-maven.latest.win,pack-maven.srcs">
<copy todir="${dists.dir}/maven/${version.number}"
+ file="${lib-ant.dir}/ant-contrib.jar"/>
+ <copy todir="${dists.dir}/maven/${version.number}"
file="${lib-ant.dir}/maven-ant-tasks-2.1.1.jar"/>
<copy tofile="${dists.dir}/maven/${version.number}/build.xml"
file="${src.dir}/build/maven/maven-deploy.xml"/>
diff --git a/src/compiler/scala/reflect/macros/compiler/Errors.scala b/src/compiler/scala/reflect/macros/compiler/Errors.scala
index dd3142127e..a60a2c2306 100644
--- a/src/compiler/scala/reflect/macros/compiler/Errors.scala
+++ b/src/compiler/scala/reflect/macros/compiler/Errors.scala
@@ -60,8 +60,8 @@ trait Errors extends Traces {
(rtpe, atpe) match {
case _ if rtpe eq atpe => success()
case (TypeRef(_, RepeatedParamClass, rtpe :: Nil), TypeRef(_, RepeatedParamClass, atpe :: Nil)) => check(rtpe, atpe)
- case (ExprClassOf(_), TreeType()) => success()
- case (TreeType(), ExprClassOf(_)) => success()
+ case (ExprClassOf(_), TreeType()) if rtpe.prefix =:= atpe.prefix => success()
+ case (SubtreeType(), ExprClassOf(_)) if rtpe.prefix =:= atpe.prefix => success()
case _ => rtpe <:< atpe
}
}
diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
index 78bdf7e132..3507c2a173 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala
@@ -42,12 +42,6 @@ trait GenTrees {
// the second prototype reified external types, but avoided reifying local ones => this created an ugly irregularity
// current approach is uniform and compact
var rtree = tree match {
- case global.EmptyTree =>
- reifyMirrorObject(EmptyTree)
- case global.emptyValDef =>
- mirrorSelect(nme.emptyValDef)
- case global.pendingSuperCall =>
- mirrorSelect(nme.pendingSuperCall)
case FreeDef(_, _, _, _, _) =>
reifyNestedFreeDef(tree)
case FreeRef(_, _) =>
@@ -56,12 +50,8 @@ trait GenTrees {
reifyBoundTerm(tree)
case BoundType(tree) =>
reifyBoundType(tree)
- case Literal(const @ Constant(_)) =>
- mirrorCall(nme.Literal, reifyProduct(const))
- case Import(expr, selectors) =>
- mirrorCall(nme.Import, reify(expr), mkList(selectors map reifyProduct))
case _ =>
- reifyProduct(tree)
+ reifyTreeSyntactically(tree)
}
// usually we don't reify symbols/types, because they can be re-inferred during subsequent reflective compilation
@@ -78,6 +68,21 @@ trait GenTrees {
rtree
}
+ def reifyTreeSyntactically(tree: Tree) = tree match {
+ case global.EmptyTree =>
+ reifyMirrorObject(EmptyTree)
+ case global.emptyValDef =>
+ mirrorSelect(nme.emptyValDef)
+ case global.pendingSuperCall =>
+ mirrorSelect(nme.pendingSuperCall)
+ case Literal(const @ Constant(_)) =>
+ mirrorCall(nme.Literal, reifyProduct(const))
+ case Import(expr, selectors) =>
+ mirrorCall(nme.Import, reify(expr), mkList(selectors map reifyProduct))
+ case _ =>
+ reifyProduct(tree)
+ }
+
def reifyModifiers(m: global.Modifiers) =
mirrorFactoryCall(nme.Modifiers, mirrorBuildCall(nme.flagsFromBits, reify(m.flags)), reify(m.privateWithin), reify(m.annotations))
diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
index e0570d61f2..de9fec0df5 100644
--- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
+++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala
@@ -42,6 +42,9 @@ trait GenUtils {
def mirrorBuildCall(name: TermName, args: Tree*): Tree =
call("" + nme.UNIVERSE_BUILD_PREFIX + name, args: _*)
+ def reifyBuildCall(name: TermName, args: Any*) =
+ mirrorBuildCall(name, args map reify: _*)
+
def mirrorMirrorCall(name: TermName, args: Tree*): Tree =
call("" + nme.MIRROR_PREFIX + name, args: _*)
diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala
index d3cae3d123..30cfec8e2a 100644
--- a/src/compiler/scala/reflect/reify/package.scala
+++ b/src/compiler/scala/reflect/reify/package.scala
@@ -32,7 +32,7 @@ package object reify {
// If we're in the constructor of an object or others don't have easy access to `this`, we have no good way to grab
       // the class of that object. Instead, we construct an anonymous class and grab its class file, assuming
       // this is enough to get the correct class loader for the class we *want* a mirror for, the object itself.
- rClassTree orElse Apply(Select(treeBuilder.makeAnonymousNew(Nil), sn.GetClass), Nil)
+ rClassTree orElse Apply(Select(gen.mkAnonymousNew(Nil), sn.GetClass), Nil)
}
// JavaUniverse is defined in scala-reflect.jar, so we must be very careful in case someone reifies stuff having only scala-library.jar on the classpath
val isJavaUniverse = JavaUniverseClass != NoSymbol && universe.tpe <:< JavaUniverseClass.toTypeConstructor
diff --git a/src/compiler/scala/tools/ant/sabbus/Settings.scala b/src/compiler/scala/tools/ant/sabbus/Settings.scala
index d0fefdaa03..4cbc03d8d4 100644
--- a/src/compiler/scala/tools/ant/sabbus/Settings.scala
+++ b/src/compiler/scala/tools/ant/sabbus/Settings.scala
@@ -93,4 +93,18 @@ class Settings {
case _ => false
}
+ override lazy val hashCode: Int = Seq(
+ gBf,
+ uncheckedBf,
+ classpathBf,
+ sourcepathBf,
+ sourcedirBf,
+ bootclasspathBf,
+ extdirsBf,
+ dBf,
+ encodingBf,
+ targetBf,
+ optimiseBf,
+ extraParamsBf
+ ).##
}
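
A side note, not from the patch: the override above pairs the sabbus Settings' field-by-field equals with a hashCode computed over the same fields via Seq(...).##. A minimal sketch of that idiom, with made-up fields:

// Minimal illustration of the Seq(...).## idiom used in Settings above:
// hash exactly the fields that participate in equals, so equal values hash alike.
object SeqHashSketch extends App {
  final class Point(val x: Int, val y: Int) {
    override def equals(other: Any): Boolean = other match {
      case p: Point => p.x == x && p.y == y
      case _        => false
    }
    override lazy val hashCode: Int = Seq(x, y).##
  }

  val a = new Point(1, 2)
  val b = new Point(1, 2)
  println(a == b)                   // true
  println(a.hashCode == b.hashCode) // true: consistent with equals
}
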
diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
index 84ccaba749..abf9925ad9 100644
--- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
+++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl
@@ -115,7 +115,7 @@ if [[ -n "$cygwin$mingw" ]]; then
case "$TERM" in
rxvt* | xterm*)
stty -icanon min 1 -echo
- WINDOWS_OPT="-Djline.terminal=scala.tools.jline.UnixTerminal"
+ WINDOWS_OPT="-Djline.terminal=unix"
;;
esac
fi
diff --git a/src/compiler/scala/tools/cmd/Spec.scala b/src/compiler/scala/tools/cmd/Spec.scala
index b761601167..a1cb31f911 100644
--- a/src/compiler/scala/tools/cmd/Spec.scala
+++ b/src/compiler/scala/tools/cmd/Spec.scala
@@ -15,7 +15,7 @@ trait Spec {
def programInfo: Spec.Info
protected def help(str: => String): Unit
- protected def heading(str: => String): Unit = help("\n " + str)
+ protected def heading(str: => String): Unit = help(s"\n $str")
type OptionMagic <: Opt.Implicit
protected implicit def optionMagicAdditions(s: String): OptionMagic
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala
index eafe03d5cd..ea6543bb71 100644
--- a/src/compiler/scala/tools/nsc/Global.scala
+++ b/src/compiler/scala/tools/nsc/Global.scala
@@ -28,6 +28,7 @@ import transform.patmat.PatternMatching
import transform._
import backend.icode.{ ICodes, GenICode, ICodeCheckers }
import backend.{ ScalaPrimitives, Platform, JavaPlatform }
+import backend.jvm.GenBCode
import backend.jvm.GenASM
import backend.opt.{ Inliners, InlineExceptionHandlers, ConstantOptimization, ClosureElimination, DeadCodeElimination }
import backend.icode.analysis._
@@ -102,16 +103,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
typer.typed(mkCast(tree, pt))
}
- /** Trees fresh from the oven, mostly for use by the parser. */
- object treeBuilder extends {
- val global: Global.this.type = Global.this
- } with TreeBuilder {
- def freshName(prefix: String): Name = freshTermName(prefix)
- def freshTermName(prefix: String): TermName = currentUnit.freshTermName(prefix)
- def freshTypeName(prefix: String): TypeName = currentUnit.freshTypeName(prefix)
- def o2p(offset: Int): Position = new OffsetPosition(currentUnit.source, offset)
- def r2p(start: Int, mid: Int, end: Int): Position = rangePos(currentUnit.source, start, mid, end)
- }
+ /** A spare instance of TreeBuilder left for backwards compatibility. */
+ lazy val treeBuilder: TreeBuilder { val global: Global.this.type } = new syntaxAnalyzer.ParserTreeBuilder
/** Fold constants */
object constfold extends {
@@ -619,6 +612,13 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
val runsRightAfter = None
} with GenASM
+ // phaseName = "bcode"
+ object genBCode extends {
+ val global: Global.this.type = Global.this
+ val runsAfter = List("dce")
+ val runsRightAfter = None
+ } with GenBCode
+
// phaseName = "terminal"
object terminal extends {
val global: Global.this.type = Global.this
@@ -1057,6 +1057,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter)
@inline final def enteringMixin[T](op: => T): T = enteringPhase(currentRun.mixinPhase)(op)
@inline final def enteringPickler[T](op: => T): T = enteringPhase(currentRun.picklerPhase)(op)
@inline final def enteringRefchecks[T](op: => T): T = enteringPhase(currentRun.refchecksPhase)(op)
+ @inline final def enteringSpecialize[T](op: => T): T = enteringPhase(currentRun.specializePhase)(op)
@inline final def enteringTyper[T](op: => T): T = enteringPhase(currentRun.typerPhase)(op)
@inline final def enteringUncurry[T](op: => T): T = enteringPhase(currentRun.uncurryPhase)(op)
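
Not part of the patch: the genBCode object above uses Scala's early-definition syntax so that the abstract `global` is bound before the GenBCode body initializes. A hedged, self-contained sketch of that subcomponent wiring pattern, with hypothetical types rather than the compiler's:

// Hypothetical sketch of the `object x extends { ... } with Y` wiring used for
// compiler subcomponents such as genBCode: `global` must be defined early because
// the superclass body may rely on it during construction.
object SubComponentSketch extends App {
  abstract class SubComponent {
    val global: String            // stands in for Global.this.type
    val runsAfter: List[String]
    val phaseName: String
    override def toString = s"$phaseName(after ${runsAfter.mkString(",")}, global=$global)"
  }

  object genBCode extends {
    val global = "the-compiler-instance" // early definition, visible to SubComponent
  } with SubComponent {
    val runsAfter = List("dce")
    val phaseName = "jvm"
  }

  println(genBCode)
}
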
diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
index c28a6ba337..ad1977b9aa 100644
--- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala
+++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala
@@ -112,7 +112,6 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
else AppliedTypeTree(Ident(clazz), targs map TypeTree)
))
}
- def mkSuperInitCall: Select = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
def wildcardStar(tree: Tree) =
atPos(tree.pos) { Typed(tree, Ident(tpnme.WILDCARD_STAR)) }
@@ -255,4 +254,52 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL {
attrThis,
If(cond, Block(syncBody: _*), EmptyTree)) ::
stats: _*)
+
+ /** Creates a tree representing new Object { stats }.
+ * To make sure an anonymous subclass of Object is created,
+ * if there are no stats, a () is added.
+ */
+ def mkAnonymousNew(stats: List[Tree]): Tree = {
+ val stats1 = if (stats.isEmpty) List(Literal(Constant(()))) else stats
+ mkNew(Nil, emptyValDef, stats1, NoPosition, NoPosition)
+ }
+
+  /** Create positioned tree representing an object creation <new parents { stats }>
+ * @param npos the position of the new
+ * @param cpos the position of the anonymous class starting with parents
+ */
+ def mkNew(parents: List[Tree], self: ValDef, stats: List[Tree],
+ npos: Position, cpos: Position): Tree =
+ if (parents.isEmpty)
+ mkNew(List(scalaAnyRefConstr), self, stats, npos, cpos)
+ else if (parents.tail.isEmpty && stats.isEmpty) {
+ // `Parsers.template` no longer differentiates tpts and their argss
+ // e.g. `C()` will be represented as a single tree Apply(Ident(C), Nil)
+ // instead of parents = Ident(C), argss = Nil as before
+ // this change works great for things that are actually templates
+ // but in this degenerate case we need to perform postprocessing
+ val app = treeInfo.dissectApplied(parents.head)
+ atPos(npos union cpos) { New(app.callee, app.argss) }
+ } else {
+ val x = tpnme.ANON_CLASS_NAME
+ atPos(npos union cpos) {
+ Block(
+ List(
+ atPos(cpos) {
+ ClassDef(
+ Modifiers(FINAL), x, Nil,
+ mkTemplate(parents, self, NoMods, ListOfNil, stats, cpos.focus))
+ }),
+ atPos(npos) {
+ New(
+ Ident(x) setPos npos.focus,
+ Nil)
+ }
+ )
+ }
+ }
+
+ def mkSyntheticParam(pname: TermName) =
+ ValDef(Modifiers(PARAM | SYNTHETIC), pname, TypeTree(), EmptyTree)
+
}
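
For orientation only, not part of the patch: mkAnonymousNew and mkNew now live in TreeGen so that both the parser and the reifier can reach them. Assuming a Scala 2.11-style scala-reflect on the classpath, the quasiquote sketch below hand-builds the surface shape those helpers describe; the class name Anon is merely a stand-in for the compiler's ANON_CLASS_NAME.

// Hand-built approximation (via runtime-universe quasiquotes) of the tree shape
// gen.mkAnonymousNew(stats) produces: a final local class wrapping the stats,
// followed by an instantiation of it.
object AnonymousNewShapeSketch extends App {
  import scala.reflect.runtime.universe._

  val stats: List[Tree] = List(q"""println("side effect")""")
  val tree: Tree = q"{ final class Anon extends AnyRef { ..$stats }; new Anon() }"

  println(showCode(tree))
}
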
diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala
index 41d89aa3b4..641ab9c279 100644
--- a/src/compiler/scala/tools/nsc/ast/Trees.scala
+++ b/src/compiler/scala/tools/nsc/ast/Trees.scala
@@ -54,77 +54,6 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
case xs :: rest => rest.foldLeft(Apply(gen.mkSuperInitCall, xs): Tree)(Apply.apply)
}
- /** Generates a template with constructor corresponding to
- *
- * constrmods (vparams1_) ... (vparams_n) preSuper { presupers }
- * extends superclass(args_1) ... (args_n) with mixins { self => body }
- *
- * This gets translated to
- *
- * extends superclass with mixins { self =>
- * presupers' // presupers without rhs
- * vparamss // abstract fields corresponding to value parameters
- * def <init>(vparamss) {
- * presupers
- * super.<init>(args)
- * }
- * body
- * }
- */
- def Template(parents: List[Tree], self: ValDef, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position): Template = {
- /* Add constructor to template */
-
- // create parameters for <init> as synthetic trees.
- var vparamss1 = mmap(vparamss) { vd =>
- atPos(vd.pos.focus) {
- val mods = Modifiers(vd.mods.flags & (IMPLICIT | DEFAULTPARAM | BYNAMEPARAM) | PARAM | PARAMACCESSOR)
- ValDef(mods withAnnotations vd.mods.annotations, vd.name, vd.tpt.duplicate, vd.rhs.duplicate)
- }
- }
- val (edefs, rest) = body span treeInfo.isEarlyDef
- val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef
- val gvdefs = evdefs map {
- case vdef @ ValDef(_, _, tpt, _) =>
- copyValDef(vdef)(
- // atPos for the new tpt is necessary, since the original tpt might have no position
- // (when missing type annotation for ValDef for example), so even though setOriginal modifies the
- // position of TypeTree, it would still be NoPosition. That's what the author meant.
- tpt = atPos(vdef.pos.focus)(TypeTree() setOriginal tpt setPos tpt.pos.focus),
- rhs = EmptyTree
- )
- }
- val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = vdef.mods | PRESUPER) }
-
- val constrs = {
- if (constrMods hasFlag TRAIT) {
- if (body forall treeInfo.isInterfaceMember) List()
- else List(
- atPos(wrappingPos(superPos, lvdefs)) (
- DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, List(), ListOfNil, TypeTree(), Block(lvdefs, Literal(Constant(()))))))
- } else {
- // convert (implicit ... ) to ()(implicit ... ) if its the only parameter section
- if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
- vparamss1 = List() :: vparamss1
- val superCall = pendingSuperCall // we can't know in advance which of the parents will end up as a superclass
- // this requires knowing which of the parents is a type macro and which is not
- // and that's something that cannot be found out before typer
- // (the type macros aren't in the trunk yet, but there is a plan for them to land there soon)
- // this means that we don't know what will be the arguments of the super call
- // therefore here we emit a dummy which gets populated when the template is named and typechecked
- List(
- // TODO: previously this was `wrappingPos(superPos, lvdefs ::: argss.flatten)`
- // is it going to be a problem that we can no longer include the `argss`?
- atPos(wrappingPos(superPos, lvdefs)) (
- DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant(()))))))
- }
- }
- constrs foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus=false))
- // Field definitions for the class - remove defaults.
- val fieldDefs = vparamss.flatten map (vd => copyValDef(vd)(mods = vd.mods &~ DEFAULTPARAM, rhs = EmptyTree))
-
- Template(parents, self, gvdefs ::: fieldDefs ::: constrs ::: etdefs ::: rest)
- }
-
/** Construct class definition with given class symbol, value parameters,
* supercall arguments and template body.
*
@@ -143,9 +72,9 @@ trait Trees extends scala.reflect.internal.Trees { self: Global =>
)
ClassDef(sym,
- Template(sym.info.parents map TypeTree,
- if (sym.thisSym == sym || phase.erasedTypes) emptyValDef else ValDef(sym.thisSym),
- constrMods, vparamss, body, superPos))
+ gen.mkTemplate(sym.info.parents map TypeTree,
+ if (sym.thisSym == sym || phase.erasedTypes) emptyValDef else ValDef(sym.thisSym),
+ constrMods, vparamss, body, superPos))
}
// --- subcomponents --------------------------------------------------
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
index ef5872986c..eb924a811b 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala
@@ -26,13 +26,22 @@ import util.FreshNameCreator
* the beginnings of a campaign against this latest incursion by Cutty
* McPastington and his army of very similar soldiers.
*/
-trait ParsersCommon extends ScannersCommon {
+trait ParsersCommon extends ScannersCommon { self =>
val global : Global
import global._
def newLiteral(const: Any) = Literal(Constant(const))
def literalUnit = newLiteral(())
+ class ParserTreeBuilder extends TreeBuilder {
+ val global: self.global.type = self.global
+ def freshName(prefix: String): Name = freshTermName(prefix)
+ def freshTermName(prefix: String): TermName = currentUnit.freshTermName(prefix)
+ def freshTypeName(prefix: String): TypeName = currentUnit.freshTypeName(prefix)
+ def o2p(offset: Int): Position = new OffsetPosition(currentUnit.source, offset)
+ def r2p(start: Int, mid: Int, end: Int): Position = rangePos(currentUnit.source, start, mid, end)
+ }
+
/** This is now an abstract class, only to work around the optimizer:
* methods in traits are never inlined.
*/
@@ -147,6 +156,17 @@ self =>
def newScanner(): Scanner = new SourceFileScanner(source)
+ /** Scoping operator used to temporarily look into the future.
+ * Backs up scanner data before evaluating a block and restores it after.
+ */
+ def lookingAhead[T](body: => T): T = {
+ val snapshot = (new ScannerData{}).copyFrom(in)
+ in.nextToken()
+ val res = body
+ in copyFrom snapshot
+ res
+ }
+
val in = newScanner()
in.init()
@@ -290,6 +310,7 @@ self =>
/** whether a non-continuable syntax error has been seen */
private var lastErrorOffset : Int = -1
+ val treeBuilder = new ParserTreeBuilder
import treeBuilder.{global => _, _}
/** The types of the context bounds of type parameters of the surrounding class
@@ -399,7 +420,7 @@ self =>
def mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String)))
def mainParameter = List(ValDef(Modifiers(Flags.PARAM), nme.argv, mainParamType, EmptyTree))
def mainSetArgv = List(ValDef(NoMods, nme.args, TypeTree(), Ident(nme.argv)))
- def mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, makeAnonymousNew(stmts)))
+ def mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), Block(mainSetArgv, gen.mkAnonymousNew(stmts)))
// object Main
def moduleName = newTermName(ScriptRunner scriptMain settings)
@@ -604,6 +625,8 @@ self =>
case _ => false
}
+ def isAnnotation: Boolean = in.token == AT
+
def isLocalModifier: Boolean = in.token match {
case ABSTRACT | FINAL | SEALED | IMPLICIT | LAZY => true
case _ => false
@@ -731,7 +754,7 @@ self =>
}
@inline final def commaSeparated[T](part: => T): List[T] = tokenSeparated(COMMA, sepFirst = false, part)
@inline final def caseSeparated[T](part: => T): List[T] = tokenSeparated(CASE, sepFirst = true, part)
- @inline final def readAnnots[T](part: => T): List[T] = tokenSeparated(AT, sepFirst = true, part)
+ def readAnnots(part: => Tree): List[Tree] = tokenSeparated(AT, sepFirst = true, part)
/* --------- OPERAND/OPERATOR STACK --------------------------------------- */
@@ -1365,7 +1388,7 @@ self =>
} else {
syntaxErrorOrIncomplete("`*' expected", skipIt = true)
}
- } else if (in.token == AT) {
+ } else if (isAnnotation) {
t = (t /: annotations(skipNewLines = false))(makeAnnotated)
} else {
t = atPos(t.pos.startOrPoint, colonPos) {
@@ -1501,7 +1524,7 @@ self =>
val pname = freshName("x$")
in.nextToken()
val id = atPos(start) (Ident(pname))
- val param = atPos(id.pos.focus){ makeSyntheticParam(pname.toTermName) }
+ val param = atPos(id.pos.focus){ gen.mkSyntheticParam(pname.toTermName) }
placeholderParams = param :: placeholderParams
id
case LPAREN =>
@@ -1516,7 +1539,7 @@ self =>
val tstart = in.offset
val (parents, self, stats) = template()
val cpos = r2p(tstart, tstart, in.lastOffset max tstart)
- makeNew(parents, self, stats, npos, cpos)
+ gen.mkNew(parents, self, stats, npos, cpos)
case _ =>
syntaxErrorOrIncompleteAnd("illegal start of simple expression", skipIt = true)(errorTermTree)
}
@@ -1602,13 +1625,16 @@ self =>
*/
def block(): Tree = makeBlock(blockStatSeq())
+ def caseClause(): CaseDef =
+ atPos(in.offset)(makeCaseDef(pattern(), guard(), caseBlock()))
+
/** {{{
* CaseClauses ::= CaseClause {CaseClause}
* CaseClause ::= case Pattern [Guard] `=>' Block
* }}}
*/
def caseClauses(): List[CaseDef] = {
- val cases = caseSeparated { atPos(in.offset)(makeCaseDef(pattern(), guard(), caseBlock())) }
+ val cases = caseSeparated { caseClause() }
if (cases.isEmpty) // trigger error if there are no cases
accept(CASE)
@@ -2050,6 +2076,8 @@ self =>
/* -------- PARAMETERS ------------------------------------------- */
+ def allowTypelessParams = false
+
/** {{{
* ParamClauses ::= {ParamClause} [[nl] `(' implicit Params `)']
* ParamClause ::= [nl] `(' [Params] `)'
@@ -2086,7 +2114,7 @@ self =>
val name = ident()
var bynamemod = 0
val tpt =
- if (settings.YmethodInfer && !owner.isTypeName && in.token != COLON) {
+ if (((settings.YmethodInfer && !owner.isTypeName) || allowTypelessParams) && in.token != COLON) {
TypeTree()
} else { // XX-METHOD-INFER
accept(COLON)
@@ -2804,7 +2832,7 @@ self =>
if (inScalaRootPackage && ScalaValueClassNames.contains(name))
Template(parents0, self, anyvalConstructor :: body)
else
- Template(anyrefParents(), self, constrMods, vparamss, body, o2p(tstart))
+ gen.mkTemplate(anyrefParents(), self, constrMods, vparamss, body, o2p(tstart))
}
}
@@ -2867,7 +2895,7 @@ self =>
case IMPORT =>
in.flushDoc
importClause()
- case x if x == AT || isTemplateIntro || isModifier =>
+ case x if isAnnotation || isTemplateIntro || isModifier =>
joinComment(topLevelTmplDef :: Nil)
case _ =>
if (isStatSep) Nil
@@ -2923,11 +2951,11 @@ self =>
if (in.token == IMPORT) {
in.flushDoc
stats ++= importClause()
+ } else if (isDefIntro || isModifier || isAnnotation) {
+ stats ++= joinComment(nonLocalDefOrDcl)
} else if (isExprIntro) {
in.flushDoc
stats += statement(InTemplate)
- } else if (isDefIntro || isModifier || in.token == AT) {
- stats ++= joinComment(nonLocalDefOrDcl)
} else if (!isStatSep) {
syntaxErrorOrIncomplete("illegal start of definition", skipIt = true)
}
@@ -3007,7 +3035,7 @@ self =>
stats += statement(InBlock)
if (in.token != RBRACE && in.token != CASE) acceptStatSep()
}
- else if (isDefIntro || isLocalModifier || in.token == AT) {
+ else if (isDefIntro || isLocalModifier || isAnnotation) {
if (in.token == IMPLICIT) {
val start = in.skipToken()
if (isIdent) stats += implicitClosure(start, InBlock)
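
An aside, not from the patch: lookingAhead, added above, snapshots the scanner, evaluates its block one token into the future, and then restores the saved state. The hedged sketch below reproduces that backup/evaluate/restore pattern with a made-up mini-scanner rather than the compiler's ScannerData.

// Hypothetical mini-scanner illustrating the backup/evaluate/restore pattern
// behind Parser.lookingAhead (the real one snapshots ScannerData, not a List).
object LookingAheadSketch extends App {
  final class MiniScanner(var tokens: List[String]) {
    var token: String = ""
    def nextToken(): Unit = tokens match {
      case t :: rest => token = t; tokens = rest
      case Nil       => token = "<EOF>"
    }
    def copyFrom(other: MiniScanner): this.type = {
      this.token = other.token; this.tokens = other.tokens; this
    }
    def snapshot: MiniScanner = new MiniScanner(tokens).copyFrom(this)
  }

  val in = new MiniScanner(List("case", "x", "=>"))
  in.nextToken() // now looking at "case"

  // Temporarily look one token into the future, then restore the scanner.
  def lookingAhead[T](body: => T): T = {
    val saved = in.snapshot
    in.nextToken()
    val res = body
    in.copyFrom(saved)
    res
  }

  val nextIsIdent = lookingAhead { in.token == "x" }
  println(s"next token is an identifier: $nextIsIdent; still at: ${in.token}") // still at: case
}
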
diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
index 2dca39f7a3..03cdead472 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala
@@ -5,7 +5,7 @@
package scala.tools.nsc
package ast.parser
-import scala.tools.nsc.util.CharArrayReader
+import scala.tools.nsc.util.{ CharArrayReader, CharArrayReaderData }
import scala.reflect.internal.util._
import scala.reflect.internal.Chars._
import Tokens._
@@ -71,17 +71,37 @@ trait Scanners extends ScannersCommon {
/** the base of a number */
var base: Int = 0
- def copyFrom(td: TokenData) = {
+ def copyFrom(td: TokenData): this.type = {
this.token = td.token
this.offset = td.offset
this.lastOffset = td.lastOffset
this.name = td.name
this.strVal = td.strVal
this.base = td.base
+ this
}
}
- abstract class Scanner extends CharArrayReader with TokenData with ScannerCommon {
+ /** An interface to most of mutable data in Scanner defined in TokenData
+  /** An interface to most of the mutable data in Scanner defined in TokenData
+ * to backup/restore data (used by quasiquotes' lookingAhead).
+ */
+ trait ScannerData extends TokenData with CharArrayReaderData {
+ /** we need one token lookahead and one token history
+ */
+ val next: TokenData = new TokenData{}
+ val prev: TokenData = new TokenData{}
+
+ def copyFrom(sd: ScannerData): this.type = {
+ this.next copyFrom sd.next
+ this.prev copyFrom sd.prev
+ super[CharArrayReaderData].copyFrom(sd)
+ super[TokenData].copyFrom(sd)
+ this
+ }
+ }
+
+ abstract class Scanner extends CharArrayReader with TokenData with ScannerData with ScannerCommon {
private def isDigit(c: Char) = java.lang.Character isDigit c
private var openComments = 0
@@ -194,13 +214,6 @@ trait Scanners extends ScannersCommon {
cbuf.clear()
}
- private class TokenData0 extends TokenData
-
- /** we need one token lookahead and one token history
- */
- val next : TokenData = new TokenData0
- val prev : TokenData = new TokenData0
-
/** a stack of tokens which indicates whether line-ends can be statement separators
* also used for keeping track of nesting levels.
* We keep track of the closing symbol of a region. This can be
diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
index 0ef71fa1b5..666f19851d 100644
--- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
+++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala
@@ -190,50 +190,6 @@ abstract class TreeBuilder {
}
}
- /** Creates a tree representing new Object { stats }.
- * To make sure an anonymous subclass of Object is created,
- * if there are no stats, a () is added.
- */
- def makeAnonymousNew(stats: List[Tree]): Tree = {
- val stats1 = if (stats.isEmpty) List(Literal(Constant(()))) else stats
- makeNew(Nil, emptyValDef, stats1, NoPosition, NoPosition)
- }
-
- /** Create positioned tree representing an object creation <new parents { stats }
- * @param npos the position of the new
- * @param cpos the position of the anonymous class starting with parents
- */
- def makeNew(parents: List[Tree], self: ValDef, stats: List[Tree],
- npos: Position, cpos: Position): Tree =
- if (parents.isEmpty)
- makeNew(List(scalaAnyRefConstr), self, stats, npos, cpos)
- else if (parents.tail.isEmpty && stats.isEmpty) {
- // `Parsers.template` no longer differentiates tpts and their argss
- // e.g. `C()` will be represented as a single tree Apply(Ident(C), Nil)
- // instead of parents = Ident(C), argss = Nil as before
- // this change works great for things that are actually templates
- // but in this degenerate case we need to perform postprocessing
- val app = treeInfo.dissectApplied(parents.head)
- atPos(npos union cpos) { New(app.callee, app.argss) }
- } else {
- val x = tpnme.ANON_CLASS_NAME
- atPos(npos union cpos) {
- Block(
- List(
- atPos(cpos) {
- ClassDef(
- Modifiers(FINAL), x, Nil,
- Template(parents, self, NoMods, ListOfNil, stats, cpos.focus))
- }),
- atPos(npos) {
- New(
- Ident(x) setPos npos.focus,
- Nil)
- }
- )
- }
- }
-
/** Create a tree representing an assignment <lhs = rhs> */
def makeAssign(lhs: Tree, rhs: Tree): Tree = lhs match {
case Apply(fn, args) =>
@@ -303,9 +259,6 @@ abstract class TreeBuilder {
def makeParam(pname: TermName, tpe: Tree) =
ValDef(Modifiers(PARAM), pname, tpe, EmptyTree)
- def makeSyntheticParam(pname: TermName) =
- ValDef(Modifiers(PARAM | SYNTHETIC), pname, TypeTree(), EmptyTree)
-
def makeSyntheticTypeParam(pname: TypeName, bounds: Tree) =
TypeDef(Modifiers(DEFERRED | SYNTHETIC), pname, Nil, bounds)
@@ -467,7 +420,7 @@ abstract class TreeBuilder {
val x = freshTermName(prefix)
val id = Ident(x)
val sel = if (checkExhaustive) id else gen.mkUnchecked(id)
- Function(List(makeSyntheticParam(x)), Match(sel, cases))
+ Function(List(gen.mkSyntheticParam(x)), Match(sel, cases))
}
/** Create tree for case definition <case pat if guard => rhs> */
diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
index 00f2933fab..c5fc12e3ec 100644
--- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
+++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala
@@ -38,9 +38,13 @@ trait JavaPlatform extends Platform {
// replaces the tighter abstract definition here. If we had DOT typing rules, the two
// types would be conjoined and everything would work out. Yet another reason to push for DOT.
+ private def classEmitPhase =
+ if (settings.isBCodeActive) genBCode
+ else genASM
+
def platformPhases = List(
flatten, // get rid of inner classes
- genASM // generate .class files
+ classEmitPhase // generate .class files
)
lazy val externalEquals = getDecl(BoxesRunTimeClass, nme.equals_)
diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
index 7263a0d0b9..e6f21fc1e3 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala
@@ -52,6 +52,7 @@ abstract class GenICode extends SubComponent {
}
override def apply(unit: CompilationUnit): Unit = {
+ if (settings.isBCodeActive) { return }
this.unit = unit
unit.icode.clear()
informProgress("Generating icode for " + unit)
@@ -1747,7 +1748,7 @@ abstract class GenICode extends SubComponent {
/////////////////////// Context ////////////////////////////////
- abstract class Cleanup(val value: AnyRef) {
+ sealed abstract class Cleanup(val value: AnyRef) {
def contains(x: AnyRef) = value == x
}
case class MonitorRelease(m: Local) extends Cleanup(m) { }
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
index 4389afb2b7..91bd39232e 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala
@@ -108,6 +108,14 @@ trait Members {
if (symbol eq other.symbol) 0
else if (symbol isLess other.symbol) -1
else 1
+
+ override def equals(other: Any): Boolean =
+ other match {
+ case other: IMember => (this compare other) == 0
+ case _ => false
+ }
+
+ override def hashCode = symbol.##
}
/** Represent a class in ICode */
diff --git a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
index 57a768d9cb..076f84ce7a 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/Opcodes.scala
@@ -725,6 +725,8 @@ trait Opcodes { self: ICodes =>
/** Is this a static method call? */
def isStatic: Boolean = false
+ def isSuper: Boolean = false
+
/** Is this an instance method call? */
def hasInstance: Boolean = true
@@ -758,6 +760,7 @@ trait Opcodes { self: ICodes =>
* On JVM, translated to `invokespecial`.
*/
case class SuperCall(mix: Name) extends InvokeStyle {
+ override def isSuper = true
override def toString(): String = { "super(" + mix + ")" }
}
}
diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
index 0c3f92f13f..9d48d7a0d3 100644
--- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
+++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala
@@ -27,7 +27,7 @@ abstract class CopyPropagation {
case object This extends Location
/** Values that can be on the stack. */
- abstract class Value { }
+ sealed abstract class Value { }
case class Record(cls: Symbol, bindings: mutable.Map[Symbol, Value]) extends Value { }
/** The value of some location in memory. */
case class Deref(l: Location) extends Value
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
new file mode 100644
index 0000000000..a7f43eefed
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala
@@ -0,0 +1,1256 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+
+package scala
+package tools.nsc
+package backend
+package jvm
+
+import scala.collection.{ mutable, immutable }
+import scala.annotation.switch
+
+import scala.tools.asm
+
+/*
+ *
+ * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
+ * @version 1.0
+ *
+ */
+abstract class BCodeBodyBuilder extends BCodeSkelBuilder {
+ import global._
+ import definitions._
+
+ /*
+ * Functionality to build the body of ASM MethodNode, except for `synchronized` and `try` expressions.
+ */
+ abstract class PlainBodyBuilder(cunit: CompilationUnit) extends PlainSkelBuilder(cunit) {
+
+ import icodes.TestOp
+ import icodes.opcodes.InvokeStyle
+
+ /* If the selector type has a member with the right name,
+ * the selector's type symbol is the host class; otherwise the symbol's owner.
+ */
+ def findHostClass(selector: Type, sym: Symbol) = selector member sym.name match {
+ case NoSymbol => log(s"Rejecting $selector as host class for $sym") ; sym.owner
+ case _ => selector.typeSymbol
+ }
+
+ /* ---------------- helper utils for generating methods and code ---------------- */
+
+ def emit(opc: Int) { mnode.visitInsn(opc) }
+ def emit(i: asm.tree.AbstractInsnNode) { mnode.instructions.add(i) }
+ def emit(is: List[asm.tree.AbstractInsnNode]) { for(i <- is) { mnode.instructions.add(i) } }
+
+ def emitZeroOf(tk: BType) {
+ (tk.sort: @switch) match {
+ case asm.Type.BOOLEAN => bc.boolconst(false)
+ case asm.Type.BYTE |
+ asm.Type.SHORT |
+ asm.Type.CHAR |
+ asm.Type.INT => bc.iconst(0)
+ case asm.Type.LONG => bc.lconst(0)
+ case asm.Type.FLOAT => bc.fconst(0)
+ case asm.Type.DOUBLE => bc.dconst(0)
+ case asm.Type.VOID => ()
+ case _ => emit(asm.Opcodes.ACONST_NULL)
+ }
+ }
+
+ /*
+ * Emits code that adds nothing to the operand stack.
+ * Two main cases: `tree` is an assignment,
+ * otherwise an `adapt()` to UNIT is performed if needed.
+ */
+ def genStat(tree: Tree) {
+ lineNumber(tree)
+ tree match {
+ case Assign(lhs @ Select(_, _), rhs) =>
+ val isStatic = lhs.symbol.isStaticMember
+ if (!isStatic) { genLoadQualifier(lhs) }
+ genLoad(rhs, symInfoTK(lhs.symbol))
+ lineNumber(tree)
+ fieldStore(lhs.symbol)
+
+ case Assign(lhs, rhs) =>
+ val s = lhs.symbol
+ val Local(tk, _, idx, _) = locals.getOrMakeLocal(s)
+ genLoad(rhs, tk)
+ lineNumber(tree)
+ bc.store(idx, tk)
+
+ case _ =>
+ genLoad(tree, UNIT)
+ }
+ }
+
+ def genThrow(expr: Tree): BType = {
+ val thrownKind = tpeTK(expr)
+ assert(exemplars.get(thrownKind).isSubtypeOf(ThrowableReference))
+ genLoad(expr, thrownKind)
+ lineNumber(expr)
+ emit(asm.Opcodes.ATHROW) // ICode enters here into enterIgnoreMode, we'll rely instead on DCE at ClassNode level.
+
+ RT_NOTHING // always returns the same, the invoker should know :)
+ }
+
+ /* Generate code for primitive arithmetic operations. */
+ def genArithmeticOp(tree: Tree, code: Int): BType = {
+ val Apply(fun @ Select(larg, _), args) = tree
+ var resKind = tpeTK(larg)
+
+ assert(resKind.isNumericType || (resKind == BOOL),
+ s"$resKind is not a numeric or boolean type [operation: ${fun.symbol}]")
+
+ import scalaPrimitives._
+
+ args match {
+ // unary operation
+ case Nil =>
+ genLoad(larg, resKind)
+ code match {
+ case POS => () // nothing
+ case NEG => bc.neg(resKind)
+ case NOT => bc.genPrimitiveArithmetic(icodes.NOT, resKind)
+ case _ => abort(s"Unknown unary operation: ${fun.symbol.fullName} code: $code")
+ }
+
+ // binary operation
+ case rarg :: Nil =>
+ resKind = maxType(tpeTK(larg), tpeTK(rarg))
+ if (scalaPrimitives.isShiftOp(code) || scalaPrimitives.isBitwiseOp(code)) {
+ assert(resKind.isIntegralType || (resKind == BOOL),
+ s"$resKind incompatible with arithmetic modulo operation.")
+ }
+
+ genLoad(larg, resKind)
+ genLoad(rarg, // check .NET size of shift arguments!
+ if (scalaPrimitives.isShiftOp(code)) INT else resKind)
+
+ (code: @switch) match {
+ case ADD => bc add resKind
+ case SUB => bc sub resKind
+ case MUL => bc mul resKind
+ case DIV => bc div resKind
+ case MOD => bc rem resKind
+
+ case OR | XOR | AND => bc.genPrimitiveLogical(code, resKind)
+
+ case LSL | LSR | ASR => bc.genPrimitiveShift(code, resKind)
+
+ case _ => abort(s"Unknown primitive: ${fun.symbol}[$code]")
+ }
+
+ case _ =>
+ abort(s"Too many arguments for primitive function: $tree")
+ }
+ lineNumber(tree)
+ resKind
+ }
+
+ /* Generate primitive array operations. */
+ def genArrayOp(tree: Tree, code: Int, expectedType: BType): BType = {
+ val Apply(Select(arrayObj, _), args) = tree
+ val k = tpeTK(arrayObj)
+ genLoad(arrayObj, k)
+ val elementType = typeOfArrayOp.getOrElse(code, abort(s"Unknown operation on arrays: $tree code: $code"))
+
+ var generatedType = expectedType
+
+ if (scalaPrimitives.isArrayGet(code)) {
+ // load argument on stack
+ assert(args.length == 1, s"Too many arguments for array get operation: $tree");
+ genLoad(args.head, INT)
+ generatedType = k.getComponentType
+ bc.aload(elementType)
+ }
+ else if (scalaPrimitives.isArraySet(code)) {
+ args match {
+ case a1 :: a2 :: Nil =>
+ genLoad(a1, INT)
+ genLoad(a2)
+ // the following line should really be here, but because of bugs in erasure
+ // we pretend we generate whatever type is expected from us.
+ //generatedType = UNIT
+ bc.astore(elementType)
+ case _ =>
+ abort(s"Too many arguments for array set operation: $tree")
+ }
+ }
+ else {
+ generatedType = INT
+ emit(asm.Opcodes.ARRAYLENGTH)
+ }
+ lineNumber(tree)
+
+ generatedType
+ }
+
+ def genLoadIf(tree: If, expectedType: BType): BType = {
+ val If(condp, thenp, elsep) = tree
+
+ val success = new asm.Label
+ val failure = new asm.Label
+
+ val hasElse = !elsep.isEmpty
+ val postIf = if (hasElse) new asm.Label else failure
+
+ genCond(condp, success, failure)
+
+ val thenKind = tpeTK(thenp)
+ val elseKind = if (!hasElse) UNIT else tpeTK(elsep)
+ def hasUnitBranch = (thenKind == UNIT || elseKind == UNIT)
+ val resKind = if (hasUnitBranch) UNIT else tpeTK(tree)
+
+ markProgramPoint(success)
+ genLoad(thenp, resKind)
+ if (hasElse) { bc goTo postIf }
+ markProgramPoint(failure)
+ if (hasElse) {
+ genLoad(elsep, resKind)
+ markProgramPoint(postIf)
+ }
+
+ resKind
+ }
+
+ def genPrimitiveOp(tree: Apply, expectedType: BType): BType = {
+ val sym = tree.symbol
+ val Apply(fun @ Select(receiver, _), _) = tree
+ val code = scalaPrimitives.getPrimitive(sym, receiver.tpe)
+
+ import scalaPrimitives.{isArithmeticOp, isArrayOp, isLogicalOp, isComparisonOp}
+
+ if (isArithmeticOp(code)) genArithmeticOp(tree, code)
+ else if (code == scalaPrimitives.CONCAT) genStringConcat(tree)
+ else if (code == scalaPrimitives.HASH) genScalaHash(receiver)
+ else if (isArrayOp(code)) genArrayOp(tree, code, expectedType)
+ else if (isLogicalOp(code) || isComparisonOp(code)) {
+ val success, failure, after = new asm.Label
+ genCond(tree, success, failure)
+ // success block
+ markProgramPoint(success)
+ bc boolconst true
+ bc goTo after
+ // failure block
+ markProgramPoint(failure)
+ bc boolconst false
+ // after
+ markProgramPoint(after)
+
+ BOOL
+ }
+ else if (code == scalaPrimitives.SYNCHRONIZED)
+ genSynchronized(tree, expectedType)
+ else if (scalaPrimitives.isCoercion(code)) {
+ genLoad(receiver)
+ lineNumber(tree)
+ genCoercion(code)
+ coercionTo(code)
+ }
+ else abort(
+ s"Primitive operation not handled yet: ${sym.fullName}(${fun.symbol.simpleName}) at: ${tree.pos}"
+ )
+ }
+
+ def genLoad(tree: Tree) {
+ genLoad(tree, tpeTK(tree))
+ }
+
+ /* Generate code for trees that produce values on the stack */
+ def genLoad(tree: Tree, expectedType: BType) {
+ var generatedType = expectedType
+
+ lineNumber(tree)
+
+ tree match {
+ case lblDf : LabelDef => genLabelDef(lblDf, expectedType)
+
+ case ValDef(_, nme.THIS, _, _) =>
+ debuglog("skipping trivial assign to _$this: " + tree)
+
+ case ValDef(_, _, _, rhs) =>
+ val sym = tree.symbol
+ /* most of the time, !locals.contains(sym), unless the current activation of genLoad() is being called
+ while duplicating a finalizer that contains this ValDef. */
+ val Local(tk, _, idx, isSynth) = locals.getOrMakeLocal(sym)
+ if (rhs == EmptyTree) { emitZeroOf(tk) }
+ else { genLoad(rhs, tk) }
+ bc.store(idx, tk)
+ if (!isSynth) { // there are case <synthetic> ValDef's emitted by patmat
+ varsInScope ::= (sym -> currProgramPoint())
+ }
+ generatedType = UNIT
+
+ case t : If =>
+ generatedType = genLoadIf(t, expectedType)
+
+ case r : Return =>
+ genReturn(r)
+ generatedType = expectedType
+
+ case t : Try =>
+ generatedType = genLoadTry(t)
+
+ case Throw(expr) =>
+ generatedType = genThrow(expr)
+
+ case New(tpt) =>
+ abort(s"Unexpected New(${tpt.summaryString}/$tpt) reached GenBCode.\n" +
+ " Call was genLoad" + ((tree, expectedType)))
+
+ case app : Apply =>
+ generatedType = genApply(app, expectedType)
+
+ case ApplyDynamic(qual, args) => sys.error("No invokedynamic support yet.")
+
+ case This(qual) =>
+ val symIsModuleClass = tree.symbol.isModuleClass
+ assert(tree.symbol == claszSymbol || symIsModuleClass,
+ s"Trying to access the this of another class: tree.symbol = ${tree.symbol}, class symbol = $claszSymbol compilation unit: $cunit")
+ if (symIsModuleClass && tree.symbol != claszSymbol) {
+ generatedType = genLoadModule(tree)
+ }
+ else {
+ mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
+ generatedType =
+ if (tree.symbol == ArrayClass) ObjectReference
+ else brefType(thisName) // inner class (if any) for claszSymbol already tracked.
+ }
+
+ case Select(Ident(nme.EMPTY_PACKAGE_NAME), module) =>
+ assert(tree.symbol.isModule, s"Selection of non-module from empty package: $tree sym: ${tree.symbol} at: ${tree.pos}")
+ genLoadModule(tree)
+
+ case Select(qualifier, selector) =>
+ val sym = tree.symbol
+ generatedType = symInfoTK(sym)
+ val hostClass = findHostClass(qualifier.tpe, sym)
+ log(s"Host class of $sym with qual $qualifier (${qualifier.tpe}) is $hostClass")
+ val qualSafeToElide = treeInfo isQualifierSafeToElide qualifier
+
+ def genLoadQualUnlessElidable() { if (!qualSafeToElide) { genLoadQualifier(tree) } }
+
+ if (sym.isModule) {
+ genLoadQualUnlessElidable()
+ genLoadModule(tree)
+ }
+ else if (sym.isStaticMember) {
+ genLoadQualUnlessElidable()
+ fieldLoad(sym, hostClass)
+ }
+ else {
+ genLoadQualifier(tree)
+ fieldLoad(sym, hostClass)
+ }
+
+ case Ident(name) =>
+ val sym = tree.symbol
+ if (!sym.isPackage) {
+ val tk = symInfoTK(sym)
+ if (sym.isModule) { genLoadModule(tree) }
+ else { locals.load(sym) }
+ generatedType = tk
+ }
+
+ case Literal(value) =>
+ if (value.tag != UnitTag) (value.tag, expectedType) match {
+ case (IntTag, LONG ) => bc.lconst(value.longValue); generatedType = LONG
+ case (FloatTag, DOUBLE) => bc.dconst(value.doubleValue); generatedType = DOUBLE
+ case (NullTag, _ ) => bc.emit(asm.Opcodes.ACONST_NULL); generatedType = RT_NULL
+ case _ => genConstant(value); generatedType = tpeTK(tree)
+ }
+
+ case blck : Block => genBlock(blck, expectedType)
+
+ case Typed(Super(_, _), _) => genLoad(This(claszSymbol), expectedType)
+
+ case Typed(expr, _) => genLoad(expr, expectedType)
+
+ case Assign(_, _) =>
+ generatedType = UNIT
+ genStat(tree)
+
+ case av : ArrayValue =>
+ generatedType = genArrayValue(av)
+
+ case mtch : Match =>
+ generatedType = genMatch(mtch)
+
+ case EmptyTree => if (expectedType != UNIT) { emitZeroOf(expectedType) }
+
+ case _ => abort(s"Unexpected tree in genLoad: $tree/${tree.getClass} at: ${tree.pos}")
+ }
+
+ // emit conversion
+ if (generatedType != expectedType) {
+ adapt(generatedType, expectedType)
+ }
+
+ } // end of GenBCode.genLoad()
+
+ // ---------------- field load and store ----------------
+
+ /*
+ * must-single-thread
+ */
+ def fieldLoad( field: Symbol, hostClass: Symbol = null) {
+ fieldOp(field, isLoad = true, hostClass)
+ }
+ /*
+ * must-single-thread
+ */
+ def fieldStore(field: Symbol, hostClass: Symbol = null) {
+ fieldOp(field, isLoad = false, hostClass)
+ }
+
+ /*
+ * must-single-thread
+ */
+ private def fieldOp(field: Symbol, isLoad: Boolean, hostClass: Symbol = null) {
+ // LOAD_FIELD.hostClass, CALL_METHOD.hostClass, and #4283
+ val owner =
+ if (hostClass == null) internalName(field.owner)
+ else internalName(hostClass)
+ val fieldJName = field.javaSimpleName.toString
+ val fieldDescr = symInfoTK(field).getDescriptor
+ val isStatic = field.isStaticMember
+ val opc =
+ if (isLoad) { if (isStatic) asm.Opcodes.GETSTATIC else asm.Opcodes.GETFIELD }
+ else { if (isStatic) asm.Opcodes.PUTSTATIC else asm.Opcodes.PUTFIELD }
+ mnode.visitFieldInsn(opc, owner, fieldJName, fieldDescr)
+
+ }
+
+ // ---------------- emitting constant values ----------------
+
+ /*
+ * For const.tag in {ClazzTag, EnumTag}
+ * must-single-thread
+ * Otherwise it's safe to call from multiple threads.
+ */
+ def genConstant(const: Constant) {
+ (const.tag: @switch) match {
+
+ case BooleanTag => bc.boolconst(const.booleanValue)
+
+ case ByteTag => bc.iconst(const.byteValue)
+ case ShortTag => bc.iconst(const.shortValue)
+ case CharTag => bc.iconst(const.charValue)
+ case IntTag => bc.iconst(const.intValue)
+
+ case LongTag => bc.lconst(const.longValue)
+ case FloatTag => bc.fconst(const.floatValue)
+ case DoubleTag => bc.dconst(const.doubleValue)
+
+ case UnitTag => ()
+
+ case StringTag =>
+ assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant`
+ mnode.visitLdcInsn(const.stringValue) // `stringValue` special-cases null, but not for a const with StringTag
+
+ case NullTag => emit(asm.Opcodes.ACONST_NULL)
+
+ case ClazzTag =>
+ val toPush: BType = {
+ val kind = toTypeKind(const.typeValue)
+ if (kind.isValueType) classLiteral(kind)
+ else kind
+ }
+ mnode.visitLdcInsn(toPush.toASMType)
+
+ case EnumTag =>
+ val sym = const.symbolValue
+ val ownerName = internalName(sym.owner)
+ val fieldName = sym.javaSimpleName.toString
+ val fieldDesc = toTypeKind(sym.tpe.underlying).getDescriptor
+ mnode.visitFieldInsn(
+ asm.Opcodes.GETSTATIC,
+ ownerName,
+ fieldName,
+ fieldDesc
+ )
+
+ case _ => abort(s"Unknown constant value: $const")
+ }
+ }
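A quick sketch of the mapping above for a few tags (the constant values are arbitrary examples):

    genConstant(Constant(true))   // via bc.boolconst(true)
    genConstant(Constant(42))     // via bc.iconst(42)
    genConstant(Constant("hi"))   // LDC "hi" via visitLdcInsn
    genConstant(Constant(null))   // ACONST_NULL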
+
+ private def genLabelDef(lblDf: LabelDef, expectedType: BType) {
+ // duplication of LabelDefs contained in `finally`-clauses is handled when emitting RETURN. No bookkeeping for that required here.
+ // no need to call index() over lblDf.params, on first access that magic happens (moreover, no LocalVariableTable entries needed for them).
+ markProgramPoint(programPoint(lblDf.symbol))
+ lineNumber(lblDf)
+ genLoad(lblDf.rhs, expectedType)
+ }
+
+ private def genReturn(r: Return) {
+ val Return(expr) = r
+ val returnedKind = tpeTK(expr)
+ genLoad(expr, returnedKind)
+ adapt(returnedKind, returnType)
+ val saveReturnValue = (returnType != UNIT)
+ lineNumber(r)
+
+ cleanups match {
+ case Nil =>
+ // not an assertion: !shouldEmitCleanup (at least not yet; pendingCleanups() may still have to run and reset `shouldEmitCleanup`).
+ bc emitRETURN returnType
+ case nextCleanup :: rest =>
+ if (saveReturnValue) {
+ if (insideCleanupBlock) {
+ cunit.warning(r.pos, "Return statement found in finally-clause, discarding its return-value in favor of that of a more deeply nested return.")
+ bc drop returnType
+ } else {
+ // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted.
+ if (earlyReturnVar == null) {
+ earlyReturnVar = locals.makeLocal(returnType, "earlyReturnVar")
+ }
+ locals.store(earlyReturnVar)
+ }
+ }
+ bc goTo nextCleanup
+ shouldEmitCleanup = true
+ }
+
+ } // end of genReturn()
+
+ private def genApply(app: Apply, expectedType: BType): BType = {
+ var generatedType = expectedType
+ lineNumber(app)
+ app match {
+
+ case Apply(TypeApply(fun, targs), _) =>
+
+ val sym = fun.symbol
+ val cast = sym match {
+ case Object_isInstanceOf => false
+ case Object_asInstanceOf => true
+ case _ => abort(s"Unexpected type application $fun[sym: ${sym.fullName}] in: $app")
+ }
+
+ val Select(obj, _) = fun
+ val l = tpeTK(obj)
+ val r = tpeTK(targs.head)
+
+ def genTypeApply(): BType = {
+ genLoadQualifier(fun)
+
+ if (l.isValueType && r.isValueType)
+ genConversion(l, r, cast)
+ else if (l.isValueType) {
+ bc drop l
+ if (cast) {
+ mnode.visitTypeInsn(asm.Opcodes.NEW, classCastExceptionReference.getInternalName)
+ bc dup ObjectReference
+ emit(asm.Opcodes.ATHROW)
+ } else {
+ bc boolconst false
+ }
+ }
+ else if (r.isValueType && cast) {
+ abort(s"Erasure should have added an unboxing operation to prevent this cast. Tree: $app")
+ }
+ else if (r.isValueType) {
+ bc isInstance classLiteral(r)
+ }
+ else {
+ genCast(r, cast)
+ }
+
+ if (cast) r else BOOL
+ } // end of genTypeApply()
+
+ generatedType = genTypeApply()
+
+ // 'super' call: Note: since constructors are supposed to
+ // return an instance of what they construct, we have to take
+ // special care. On JVM they are 'void', and Scala forbids (syntactically)
+ // to call super constructors explicitly and/or use their 'returned' value.
+ // therefore, we can ignore this fact, and generate code that leaves nothing
+ // on the stack (contrary to what the type in the AST says).
+ case Apply(fun @ Select(Super(_, mix), _), args) =>
+ val invokeStyle = icodes.opcodes.SuperCall(mix)
+ // if (fun.symbol.isConstructor) Static(true) else SuperCall(mix);
+ mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
+ genLoadArguments(args, paramTKs(app))
+ genCallMethod(fun.symbol, invokeStyle, pos = app.pos)
+ generatedType = asmMethodType(fun.symbol).getReturnType
+
+ // 'new' constructor call: Note: since constructors are
+ // thought to return an instance of what they construct,
+ // we have to 'simulate' it by DUPlicating the freshly created
+ // instance (on JVM, <init> methods return VOID).
+ case Apply(fun @ Select(New(tpt), nme.CONSTRUCTOR), args) =>
+ val ctor = fun.symbol
+ assert(ctor.isClassConstructor, s"'new' call to non-constructor: ${ctor.name}")
+
+ generatedType = tpeTK(tpt)
+ assert(generatedType.isRefOrArrayType, s"Non reference type cannot be instantiated: $generatedType")
+
+ generatedType match {
+ case arr if generatedType.isArray =>
+ genLoadArguments(args, paramTKs(app))
+ val dims = arr.getDimensions
+ var elemKind = arr.getElementType
+ val argsSize = args.length
+ if (argsSize > dims) {
+ cunit.error(app.pos, s"too many arguments for array constructor: found ${args.length} but array has only $dims dimension(s)")
+ }
+ if (argsSize < dims) {
+ /* In one step:
+ * elemKind = new BType(BType.ARRAY, arr.off + argsSize, arr.len - argsSize)
+ * however the above does not enter a TypeName for each nested arrays in chrs.
+ */
+ for (i <- args.length until dims) elemKind = arrayOf(elemKind)
+ }
+ (argsSize : @switch) match {
+ case 1 => bc newarray elemKind
+ case _ =>
+ val descr = ('[' * argsSize) + elemKind.getDescriptor // denotes the same as: arrayN(elemKind, argsSize).getDescriptor
+ mnode.visitMultiANewArrayInsn(descr, argsSize)
+ }
+
+ case rt if generatedType.hasObjectSort =>
+ assert(exemplar(ctor.owner).c == rt, s"Symbol ${ctor.owner.fullName} is different from $rt")
+ mnode.visitTypeInsn(asm.Opcodes.NEW, rt.getInternalName)
+ bc dup generatedType
+ genLoadArguments(args, paramTKs(app))
+ genCallMethod(ctor, icodes.opcodes.Static(onInstance = true))
+
+ case _ =>
+ abort(s"Cannot instantiate $tpt of kind: $generatedType")
+ }
+
+ case Apply(fun @ _, List(expr)) if definitions.isBox(fun.symbol) =>
+ val nativeKind = tpeTK(expr)
+ genLoad(expr, nativeKind)
+ val MethodNameAndType(mname, mdesc) = asmBoxTo(nativeKind)
+ bc.invokestatic(BoxesRunTime.getInternalName, mname, mdesc)
+ generatedType = boxResultType(fun.symbol) // was toTypeKind(fun.symbol.tpe.resultType)
+
+ case Apply(fun @ _, List(expr)) if definitions.isUnbox(fun.symbol) =>
+ genLoad(expr)
+ val boxType = unboxResultType(fun.symbol) // was toTypeKind(fun.symbol.owner.linkedClassOfClass.tpe)
+ generatedType = boxType
+ val MethodNameAndType(mname, mdesc) = asmUnboxTo(boxType)
+ bc.invokestatic(BoxesRunTime.getInternalName, mname, mdesc)
+
+ case app @ Apply(fun, args) =>
+ val sym = fun.symbol
+
+ if (sym.isLabel) { // jump to a label
+ genLoadLabelArguments(args, labelDef(sym), app.pos)
+ bc goTo programPoint(sym)
+ } else if (isPrimitive(sym)) { // primitive method call
+ generatedType = genPrimitiveOp(app, expectedType)
+ } else { // normal method call
+
+ def genNormalMethodCall() {
+
+ val invokeStyle =
+ if (sym.isStaticMember) icodes.opcodes.Static(onInstance = false)
+ else if (sym.isPrivate || sym.isClassConstructor) icodes.opcodes.Static(onInstance = true)
+ else icodes.opcodes.Dynamic;
+
+ if (invokeStyle.hasInstance) {
+ genLoadQualifier(fun)
+ }
+
+ genLoadArguments(args, paramTKs(app))
+
+ // In "a couple cases", squirrel away a extra information (hostClass, targetTypeKind). TODO Document what "in a couple cases" refers to.
+ var hostClass: Symbol = null
+ var targetTypeKind: BType = null
+ fun match {
+ case Select(qual, _) =>
+ val qualSym = findHostClass(qual.tpe, sym)
+ if (qualSym == ArrayClass) {
+ targetTypeKind = tpeTK(qual)
+ log(s"Stored target type kind for ${sym.fullName} as $targetTypeKind")
+ }
+ else {
+ hostClass = qualSym
+ if (qual.tpe.typeSymbol != qualSym) {
+ log(s"Precisified host class for $sym from ${qual.tpe.typeSymbol.fullName} to ${qualSym.fullName}")
+ }
+ }
+
+ case _ =>
+ }
+ if ((targetTypeKind != null) && (sym == definitions.Array_clone) && invokeStyle.isDynamic) {
+ val target: String = targetTypeKind.getInternalName
+ bc.invokevirtual(target, "clone", "()Ljava/lang/Object;")
+ }
+ else {
+ genCallMethod(sym, invokeStyle, hostClass, app.pos)
+ }
+
+ } // end of genNormalMethodCall()
+
+ genNormalMethodCall()
+
+ generatedType = asmMethodType(sym).getReturnType
+ }
+
+ }
+
+ generatedType
+ } // end of genApply()
+
+ private def genArrayValue(av: ArrayValue): BType = {
+ val ArrayValue(tpt @ TypeTree(), elems) = av
+
+ val elmKind = tpeTK(tpt)
+ val generatedType = arrayOf(elmKind)
+
+ lineNumber(av)
+ bc iconst elems.length
+ bc newarray elmKind
+
+ var i = 0
+ var rest = elems
+ while (!rest.isEmpty) {
+ bc dup generatedType
+ bc iconst i
+ genLoad(rest.head, elmKind)
+ bc astore elmKind
+ rest = rest.tail
+ i = i + 1
+ }
+
+ generatedType
+ }
+
+ /*
+ * A Match node contains one or more case clauses. Each case clause lists one or more
+ * Int values to use as keys, and a code block; the exception is the "default" case clause,
+ * which (if present) lists no Int key.
+ *
+ * On a first pass over the case clauses, we flatten the keys and their targets (the latter represented with asm.Labels).
+ * That representation allows JCodeMethodV to emit a lookupswitch or a tableswitch.
+ *
+ * On a second pass, we emit the switch blocks, one for each different target.
+ */
+ private def genMatch(tree: Match): BType = {
+ lineNumber(tree)
+ genLoad(tree.selector, INT)
+ val generatedType = tpeTK(tree)
+
+ var flatKeys: List[Int] = Nil
+ var targets: List[asm.Label] = Nil
+ var default: asm.Label = null
+ var switchBlocks: List[Pair[asm.Label, Tree]] = Nil
+
+ // collect switch blocks and their keys, but don't emit yet any switch-block.
+ for (caze @ CaseDef(pat, guard, body) <- tree.cases) {
+ assert(guard == EmptyTree, guard)
+ val switchBlockPoint = new asm.Label
+ switchBlocks ::= Pair(switchBlockPoint, body)
+ pat match {
+ case Literal(value) =>
+ flatKeys ::= value.intValue
+ targets ::= switchBlockPoint
+ case Ident(nme.WILDCARD) =>
+ assert(default == null, s"multiple default targets in a Match node, at ${tree.pos}")
+ default = switchBlockPoint
+ case Alternative(alts) =>
+ alts foreach {
+ case Literal(value) =>
+ flatKeys ::= value.intValue
+ targets ::= switchBlockPoint
+ case _ =>
+ abort(s"Invalid alternative in alternative pattern in Match node: $tree at: ${tree.pos}")
+ }
+ case _ =>
+ abort(s"Invalid pattern in Match node: $tree at: ${tree.pos}")
+ }
+ }
+ bc.emitSWITCH(mkArrayReverse(flatKeys), mkArray(targets.reverse), default, MIN_SWITCH_DENSITY)
+
+ // emit switch-blocks.
+ val postMatch = new asm.Label
+ for (sb <- switchBlocks.reverse) {
+ val Pair(caseLabel, caseBody) = sb
+ markProgramPoint(caseLabel)
+ genLoad(caseBody, generatedType)
+ bc goTo postMatch
+ }
+
+ markProgramPoint(postMatch)
+ generatedType
+ }
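As a sketch of the input shape this handles (handler names below are hypothetical), a Match whose cases carry Int literals, alternatives, and a wildcard default:

    (tag: Int) match {
      case 1 | 2 => handleLow()     // Alternative(...): several keys, one switch block
      case 3     => handleThree()   // Literal(3): one key, one switch block
      case _     => handleDefault() // Ident(nme.WILDCARD): the default target
    }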
+
+ def genBlock(tree: Block, expectedType: BType) {
+ val Block(stats, expr) = tree
+ val savedScope = varsInScope
+ varsInScope = Nil
+ stats foreach genStat
+ genLoad(expr, expectedType)
+ val end = currProgramPoint()
+ if (emitVars) { // add entries to LocalVariableTable JVM attribute
+ for (Pair(sym, start) <- varsInScope.reverse) { emitLocalVarScope(sym, start, end) }
+ }
+ varsInScope = savedScope
+ }
+
+ def adapt(from: BType, to: BType) {
+ if (!conforms(from, to)) {
+ to match {
+ case UNIT => bc drop from
+ case _ => bc.emitT2T(from, to)
+ }
+ } else if (from.isNothingType) {
+ emit(asm.Opcodes.ATHROW) // ICode enters here into enterIgnoreMode, we'll rely instead on DCE at ClassNode level.
+ } else if (from.isNullType) {
+ bc drop from
+ mnode.visitInsn(asm.Opcodes.ACONST_NULL)
+ }
+ else (from, to) match {
+ case (BYTE, LONG) | (SHORT, LONG) | (CHAR, LONG) | (INT, LONG) => bc.emitT2T(INT, LONG)
+ case _ => ()
+ }
+ }
+
+ /* Emit code to Load the qualifier of `tree` on top of the stack. */
+ def genLoadQualifier(tree: Tree) {
+ lineNumber(tree)
+ tree match {
+ case Select(qualifier, _) => genLoad(qualifier)
+ case _ => abort(s"Unknown qualifier $tree")
+ }
+ }
+
+ /* Generate code that loads args into label parameters. */
+ def genLoadLabelArguments(args: List[Tree], lblDef: LabelDef, gotoPos: Position) {
+ assert(args forall { a => !a.hasSymbolField || a.hasSymbolWhich( s => !s.isLabel) }, s"SI-6089 at: $gotoPos") // SI-6089
+
+ val aps = {
+ val params: List[Symbol] = lblDef.params.map(_.symbol)
+ assert(args.length == params.length, s"Wrong number of arguments in call to label at: $gotoPos")
+
+ def isTrivial(kv: (Tree, Symbol)) = kv match {
+ case (This(_), p) if p.name == nme.THIS => true
+ case (arg @ Ident(_), p) if arg.symbol == p => true
+ case _ => false
+ }
+
+ (args zip params) filterNot isTrivial
+ }
+
+ // first push *all* arguments. This makes sure multiple uses of the same labelDef-var will all denote the (previous) value.
+ aps foreach { case (arg, param) => genLoad(arg, locals(param).tk) } // `locals` is known to contain `param` because `genDefDef()` visited `labelDefsAtOrUnder`
+
+ // second assign one by one to the LabelDef's variables.
+ aps.reverse foreach {
+ case (_, param) =>
+ // TODO FIXME a "this" param results from tail-call xform. If so, the `else` branch seems perfectly fine. And the `then` branch must be wrong.
+ if (param.name == nme.THIS) mnode.visitVarInsn(asm.Opcodes.ASTORE, 0)
+ else locals.store(param)
+ }
+
+ }
+
+ def genLoadArguments(args: List[Tree], btpes: List[BType]) {
+ (args zip btpes) foreach { case (arg, btpe) => genLoad(arg, btpe) }
+ }
+
+ def genLoadModule(tree: Tree): BType = {
+ // Working around SI-5604. Rather than failing the compile when we see a package here, check if there's a package object.
+ val module = (
+ if (!tree.symbol.isPackageClass) tree.symbol
+ else tree.symbol.info.member(nme.PACKAGE) match {
+ case NoSymbol => abort(s"Cannot use package as value: $tree") ; NoSymbol
+ case s => devWarning("Bug: found package class where package object expected. Converting.") ; s.moduleClass
+ }
+ )
+ lineNumber(tree)
+ genLoadModule(module)
+ symInfoTK(module)
+ }
+
+ def genLoadModule(module: Symbol) {
+ if (claszSymbol == module.moduleClass && jMethodName != "readResolve") {
+ mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
+ } else {
+ val mbt = symInfoTK(module)
+ mnode.visitFieldInsn(
+ asm.Opcodes.GETSTATIC,
+ mbt.getInternalName /* + "$" */ ,
+ strMODULE_INSTANCE_FIELD,
+ mbt.getDescriptor // for nostalgics: toTypeKind(module.tpe).getDescriptor
+ )
+ }
+ }
+
+ def genConversion(from: BType, to: BType, cast: Boolean) {
+ if (cast) { bc.emitT2T(from, to) }
+ else {
+ bc drop from
+ bc boolconst (from == to)
+ }
+ }
+
+ def genCast(to: BType, cast: Boolean) {
+ if (cast) { bc checkCast to }
+ else { bc isInstance to }
+ }
+
+ /* Is the given symbol a primitive operation? */
+ def isPrimitive(fun: Symbol): Boolean = scalaPrimitives.isPrimitive(fun)
+
+ /* Generate coercion denoted by "code" */
+ def genCoercion(code: Int) {
+ import scalaPrimitives._
+ (code: @switch) match {
+ case B2B | S2S | C2C | I2I | L2L | F2F | D2D => ()
+ case _ =>
+ val from = coercionFrom(code)
+ val to = coercionTo(code)
+ bc.emitT2T(from, to)
+ }
+ }
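For example, assuming scalaPrimitives.I2L denotes the Int-to-Long coercion code, the call below would widen the stack top (sketch only):

    genCoercion(scalaPrimitives.I2L)  // coercionFrom == INT, coercionTo == LONG; emitT2T emits I2L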
+
+ def genStringConcat(tree: Tree): BType = {
+ lineNumber(tree)
+ liftStringConcat(tree) match {
+
+ // Optimization for expressions of the form "" + x. We can avoid the StringBuilder.
+ case List(Literal(Constant("")), arg) =>
+ genLoad(arg, ObjectReference)
+ genCallMethod(String_valueOf, icodes.opcodes.Static(onInstance = false))
+
+ case concatenations =>
+ bc.genStartConcat
+ for (elem <- concatenations) {
+ val kind = tpeTK(elem)
+ genLoad(elem, kind)
+ bc.genStringConcat(kind)
+ }
+ bc.genEndConcat
+
+ }
+
+ StringReference
+ }
+
+ def genCallMethod(method: Symbol, style: InvokeStyle, hostClass0: Symbol = null, pos: Position = NoPosition) {
+
+ val siteSymbol = claszSymbol
+ val hostSymbol = if (hostClass0 == null) method.owner else hostClass0;
+ val methodOwner = method.owner
+ // info calls so that types are up to date; erasure may add lateINTERFACE to traits
+ hostSymbol.info ; methodOwner.info
+
+ def needsInterfaceCall(sym: Symbol) = (
+ sym.isInterface
+ || sym.isJavaDefined && sym.isNonBottomSubClass(definitions.ClassfileAnnotationClass)
+ )
+
+ def isAccessibleFrom(target: Symbol, site: Symbol): Boolean = {
+ target.isPublic || target.isProtected && {
+ (site.enclClass isSubClass target.enclClass) ||
+ (site.enclosingPackage == target.privateWithin)
+ }
+ }
+
+ // whether to reference the type of the receiver or
+ // the type of the method owner
+ val useMethodOwner = (
+ style != icodes.opcodes.Dynamic
+ || hostSymbol.isBottomClass
+ || methodOwner == definitions.ObjectClass
+ )
+ val receiver = if (useMethodOwner) methodOwner else hostSymbol
+ val bmOwner = asmClassType(receiver)
+ val jowner = bmOwner.getInternalName
+ val jname = method.javaSimpleName.toString
+ val bmType = asmMethodType(method)
+ val mdescr = bmType.getDescriptor
+
+ def initModule() {
+ // we initialize the MODULE$ field immediately after the super ctor
+ if (!isModuleInitialized &&
+ jMethodName == INSTANCE_CONSTRUCTOR_NAME &&
+ jname == INSTANCE_CONSTRUCTOR_NAME &&
+ isStaticModule(siteSymbol)) {
+ isModuleInitialized = true
+ mnode.visitVarInsn(asm.Opcodes.ALOAD, 0)
+ mnode.visitFieldInsn(
+ asm.Opcodes.PUTSTATIC,
+ thisName,
+ strMODULE_INSTANCE_FIELD,
+ "L" + thisName + ";"
+ )
+ }
+ }
+
+ if (style.isStatic) {
+ if (style.hasInstance) { bc.invokespecial (jowner, jname, mdescr) }
+ else { bc.invokestatic (jowner, jname, mdescr) }
+ }
+ else if (style.isDynamic) {
+ if (needsInterfaceCall(receiver)) { bc.invokeinterface(jowner, jname, mdescr) }
+ else { bc.invokevirtual (jowner, jname, mdescr) }
+ }
+ else {
+ assert(style.isSuper, s"An unknown InvokeStyle: $style")
+ bc.invokespecial(jowner, jname, mdescr)
+ initModule()
+ }
+
+ } // end of genCallMethod()
+
+ /* Generate code for the Scala ## (hash) method. */
+ def genScalaHash(tree: Tree): BType = {
+ genLoadModule(ScalaRunTimeModule) // TODO why load ScalaRunTimeModule if ## has InvokeStyle of Static(false) ?
+ genLoad(tree, ObjectReference)
+ genCallMethod(hashMethodSym, icodes.opcodes.Static(onInstance = false))
+
+ INT
+ }
+
+ /*
+ * Returns the list of trees to be concatenated, from left to right.
+ * It turns a chained call like "a".+("b").+("c") into a list of arguments.
+ */
+ def liftStringConcat(tree: Tree): List[Tree] = tree match {
+ case Apply(fun @ Select(larg, method), rarg) =>
+ if (isPrimitive(fun.symbol) &&
+ scalaPrimitives.getPrimitive(fun.symbol) == scalaPrimitives.CONCAT)
+ liftStringConcat(larg) ::: rarg
+ else
+ tree :: Nil
+ case _ =>
+ tree :: Nil
+ }
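For instance (a sketch, assuming the + calls resolve to the string-concat primitive; b stands for an arbitrary subtree):

    // "a" + b + "c"   parses as   "a".+(b).+("c")
    // liftStringConcat of that Apply tree  ~>  List(Literal("a"), b, Literal("c"))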
+
+ /* Some useful equality helpers. */
+ def isNull(t: Tree) = {
+ t match {
+ case Literal(Constant(null)) => true
+ case _ => false
+ }
+ }
+
+ /* If l or r is constant null, returns the other; otherwise null */
+ def ifOneIsNull(l: Tree, r: Tree) = if (isNull(l)) r else if (isNull(r)) l else null
+
+ /* Emit code to compare the two top-most stack values using the 'op' operator. */
+ private def genCJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType) {
+ if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+ bc.emitIF_ICMP(op, success)
+ } else if (tk.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+ bc.emitIF_ACMP(op, success)
+ } else {
+ (tk: @unchecked) match {
+ case LONG => emit(asm.Opcodes.LCMP)
+ case FLOAT =>
+ if (op == icodes.LT || op == icodes.LE) emit(asm.Opcodes.FCMPG)
+ else emit(asm.Opcodes.FCMPL)
+ case DOUBLE =>
+ if (op == icodes.LT || op == icodes.LE) emit(asm.Opcodes.DCMPG)
+ else emit(asm.Opcodes.DCMPL)
+ }
+ bc.emitIF(op, success)
+ }
+ bc goTo failure
+ }
+
+ /* Emits code to compare (and consume) stack-top and zero using the 'op' operator */
+ private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType) {
+ if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT
+ bc.emitIF(op, success)
+ } else if (tk.isRefOrArrayType) { // REFERENCE(_) | ARRAY(_)
+ // @unchecked because references aren't compared with GT, GE, LT, LE.
+ (op : @unchecked) match {
+ case icodes.EQ => bc emitIFNULL success
+ case icodes.NE => bc emitIFNONNULL success
+ }
+ } else {
+ (tk: @unchecked) match {
+ case LONG =>
+ emit(asm.Opcodes.LCONST_0)
+ emit(asm.Opcodes.LCMP)
+ case FLOAT =>
+ emit(asm.Opcodes.FCONST_0)
+ if (op == icodes.LT || op == icodes.LE) emit(asm.Opcodes.FCMPG)
+ else emit(asm.Opcodes.FCMPL)
+ case DOUBLE =>
+ emit(asm.Opcodes.DCONST_0)
+ if (op == icodes.LT || op == icodes.LE) emit(asm.Opcodes.DCMPG)
+ else emit(asm.Opcodes.DCMPL)
+ }
+ bc.emitIF(op, success)
+ }
+ bc goTo failure
+ }
+
+ val testOpForPrimitive: Array[TestOp] = Array(
+ icodes.EQ, icodes.NE, icodes.EQ, icodes.NE, icodes.LT, icodes.LE, icodes.GE, icodes.GT
+ )
+
+ /*
+ * Generate code for conditional expressions.
+ * The branch targets of the test are `success` (the then-branch) and `failure` (the else-branch).
+ */
+ private def genCond(tree: Tree, success: asm.Label, failure: asm.Label) {
+
+ def genComparisonOp(l: Tree, r: Tree, code: Int) {
+ val op: TestOp = testOpForPrimitive(code - scalaPrimitives.ID)
+ // special-case reference (in)equality test for null (null eq x, x eq null)
+ var nonNullSide: Tree = null
+ if (scalaPrimitives.isReferenceEqualityOp(code) &&
+ { nonNullSide = ifOneIsNull(l, r); nonNullSide != null }
+ ) {
+ genLoad(nonNullSide, ObjectReference)
+ genCZJUMP(success, failure, op, ObjectReference)
+ }
+ else {
+ val tk = maxType(tpeTK(l), tpeTK(r))
+ genLoad(l, tk)
+ genLoad(r, tk)
+ genCJUMP(success, failure, op, tk)
+ }
+ }
+
+ def default() = {
+ genLoad(tree, BOOL)
+ genCZJUMP(success, failure, icodes.NE, BOOL)
+ }
+
+ lineNumber(tree)
+ tree match {
+
+ case Apply(fun, args) if isPrimitive(fun.symbol) =>
+ import scalaPrimitives.{ ZNOT, ZAND, ZOR, EQ, getPrimitive }
+
+ // lhs and rhs of test
+ lazy val Select(lhs, _) = fun
+ val rhs = if (args.isEmpty) EmptyTree else args.head; // args.isEmpty only for ZNOT
+
+ def genZandOrZor(and: Boolean) { // TODO WRONG
+ // reaching "keepGoing" indicates the rhs should be evaluated too (ie not short-circuited).
+ val keepGoing = new asm.Label
+
+ if (and) genCond(lhs, keepGoing, failure)
+ else genCond(lhs, success, keepGoing)
+
+ markProgramPoint(keepGoing)
+ genCond(rhs, success, failure)
+ }
+
+ getPrimitive(fun.symbol) match {
+ case ZNOT => genCond(lhs, failure, success)
+ case ZAND => genZandOrZor(and = true)
+ case ZOR => genZandOrZor(and = false)
+ case code =>
+ // TODO !!!!!!!!!! isReferenceType, in the sense of TypeKind? (ie non-array, non-boxed, non-nothing, may be null)
+ if (scalaPrimitives.isUniversalEqualityOp(code) && tpeTK(lhs).hasObjectSort) {
+ // `lhs` has reference type
+ if (code == EQ) genEqEqPrimitive(lhs, rhs, success, failure)
+ else genEqEqPrimitive(lhs, rhs, failure, success)
+ }
+ else if (scalaPrimitives.isComparisonOp(code))
+ genComparisonOp(lhs, rhs, code)
+ else
+ default
+ }
+
+ case _ => default
+ }
+
+ } // end of genCond()
+
+ /*
+ * Generate the "==" code for object references. It is equivalent to
+ * if (l eq null) r eq null else l.equals(r);
+ *
+ * @param l left-hand-side of the '=='
+ * @param r right-hand-side of the '=='
+ */
+ def genEqEqPrimitive(l: Tree, r: Tree, success: asm.Label, failure: asm.Label) {
+
+ /* True if the equality comparison is between values that require the use of the rich equality
+ * comparator (scala.runtime.Comparator.equals). This is the case when either side of the
+ * comparison might have a run-time type subtype of java.lang.Number or java.lang.Character.
+ * When it is statically known that both sides have the same final type and are subtypes of Number or Character,
+ * the rich equality check can be skipped (their own equals method will do).
+ */
+ val mustUseAnyComparator: Boolean = {
+ val areSameFinals = l.tpe.isFinalType && r.tpe.isFinalType && (l.tpe =:= r.tpe)
+
+ !areSameFinals && platform.isMaybeBoxed(l.tpe.typeSymbol) && platform.isMaybeBoxed(r.tpe.typeSymbol)
+ }
+
+ if (mustUseAnyComparator) {
+ val equalsMethod = {
+
+ def default = platform.externalEquals
+
+ platform match {
+ case x: JavaPlatform =>
+ import x._
+ if (l.tpe <:< BoxedNumberClass.tpe) {
+ if (r.tpe <:< BoxedNumberClass.tpe) externalEqualsNumNum
+ else if (r.tpe <:< BoxedCharacterClass.tpe) externalEqualsNumChar
+ else externalEqualsNumObject
+ }
+ else default
+
+ case _ => default
+ }
+ }
+ genLoad(l, ObjectReference)
+ genLoad(r, ObjectReference)
+ genCallMethod(equalsMethod, icodes.opcodes.Static(onInstance = false))
+ genCZJUMP(success, failure, icodes.NE, BOOL)
+ }
+ else {
+ if (isNull(l)) {
+ // null == expr -> expr eq null
+ genLoad(r, ObjectReference)
+ genCZJUMP(success, failure, icodes.EQ, ObjectReference)
+ } else if (isNull(r)) {
+ // expr == null -> expr eq null
+ genLoad(l, ObjectReference)
+ genCZJUMP(success, failure, icodes.EQ, ObjectReference)
+ } else {
+ // l == r -> if (l eq null) r eq null else l.equals(r)
+ val eqEqTempLocal = locals.makeLocal(AnyRefReference, nme.EQEQ_LOCAL_VAR.toString)
+ val lNull = new asm.Label
+ val lNonNull = new asm.Label
+
+ genLoad(l, ObjectReference)
+ genLoad(r, ObjectReference)
+ locals.store(eqEqTempLocal)
+ bc dup ObjectReference
+ genCZJUMP(lNull, lNonNull, icodes.EQ, ObjectReference)
+
+ markProgramPoint(lNull)
+ bc drop ObjectReference
+ locals.load(eqEqTempLocal)
+ genCZJUMP(success, failure, icodes.EQ, ObjectReference)
+
+ markProgramPoint(lNonNull)
+ locals.load(eqEqTempLocal)
+ genCallMethod(Object_equals, icodes.opcodes.Dynamic)
+ genCZJUMP(success, failure, icodes.NE, BOOL)
+ }
+ }
+ }
+
+ /* can-multi-thread */
+ def getMaxType(ts: List[Type]): BType = {
+ ts map toTypeKind reduceLeft maxType
+ }
+
+ def genSynchronized(tree: Apply, expectedType: BType): BType
+ def genLoadTry(tree: Try): BType
+
+ }
+
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala
new file mode 100644
index 0000000000..f95ceef678
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeGlue.scala
@@ -0,0 +1,881 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala
+package tools.nsc
+package backend.jvm
+
+import scala.tools.asm
+import scala.annotation.switch
+import scala.collection.{ immutable, mutable }
+
+/*
+ * Immutable representations of bytecode-level types.
+ *
+ * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded
+ * @version 1.0
+ *
+ */
+abstract class BCodeGlue extends SubComponent {
+
+ import global._
+
+ object BType {
+
+ import global.chrs
+
+ // ------------- sorts -------------
+
+ val VOID : Int = 0
+ val BOOLEAN: Int = 1
+ val CHAR : Int = 2
+ val BYTE : Int = 3
+ val SHORT : Int = 4
+ val INT : Int = 5
+ val FLOAT : Int = 6
+ val LONG : Int = 7
+ val DOUBLE : Int = 8
+ val ARRAY : Int = 9
+ val OBJECT : Int = 10
+ val METHOD : Int = 11
+
+ // ------------- primitive types -------------
+
+ val VOID_TYPE = new BType(VOID, ('V' << 24) | (5 << 16) | (0 << 8) | 0, 1)
+ val BOOLEAN_TYPE = new BType(BOOLEAN, ('Z' << 24) | (0 << 16) | (5 << 8) | 1, 1)
+ val CHAR_TYPE = new BType(CHAR, ('C' << 24) | (0 << 16) | (6 << 8) | 1, 1)
+ val BYTE_TYPE = new BType(BYTE, ('B' << 24) | (0 << 16) | (5 << 8) | 1, 1)
+ val SHORT_TYPE = new BType(SHORT, ('S' << 24) | (0 << 16) | (7 << 8) | 1, 1)
+ val INT_TYPE = new BType(INT, ('I' << 24) | (0 << 16) | (0 << 8) | 1, 1)
+ val FLOAT_TYPE = new BType(FLOAT, ('F' << 24) | (2 << 16) | (2 << 8) | 1, 1)
+ val LONG_TYPE = new BType(LONG, ('J' << 24) | (1 << 16) | (1 << 8) | 2, 1)
+ val DOUBLE_TYPE = new BType(DOUBLE, ('D' << 24) | (3 << 16) | (3 << 8) | 2, 1)
+
+ /*
+ * Returns the Java type corresponding to the given type descriptor.
+ *
+ * @param off the offset of this descriptor in the chrs buffer.
+ * @return the Java type corresponding to the given type descriptor.
+ *
+ * can-multi-thread
+ */
+ def getType(off: Int): BType = {
+ var len = 0
+ chrs(off) match {
+ case 'V' => VOID_TYPE
+ case 'Z' => BOOLEAN_TYPE
+ case 'C' => CHAR_TYPE
+ case 'B' => BYTE_TYPE
+ case 'S' => SHORT_TYPE
+ case 'I' => INT_TYPE
+ case 'F' => FLOAT_TYPE
+ case 'J' => LONG_TYPE
+ case 'D' => DOUBLE_TYPE
+ case '[' =>
+ len = 1
+ while (chrs(off + len) == '[') {
+ len += 1
+ }
+ if (chrs(off + len) == 'L') {
+ len += 1
+ while (chrs(off + len) != ';') {
+ len += 1
+ }
+ }
+ new BType(ARRAY, off, len + 1)
+ case 'L' =>
+ len = 1
+ while (chrs(off + len) != ';') {
+ len += 1
+ }
+ new BType(OBJECT, off + 1, len - 1)
+ // case '(':
+ case _ =>
+ assert(chrs(off) == '(')
+ var resPos = off + 1
+ while (chrs(resPos) != ')') { resPos += 1 }
+ val resType = getType(resPos + 1)
+ val len = resPos - off + 1 + resType.len;
+ new BType(
+ METHOD,
+ off,
+ if (resType.hasObjectSort) {
+ len + 2 // "+ 2" accounts for the "L ... ;" in a descriptor for a non-array reference.
+ } else {
+ len
+ }
+ )
+ }
+ }
+
+ /* Params denote an internal name.
+ * can-multi-thread
+ */
+ def getObjectType(index: Int, length: Int): BType = {
+ val sort = if (chrs(index) == '[') ARRAY else OBJECT;
+ new BType(sort, index, length)
+ }
+
+ /*
+ * @param typeDescriptor a field or method type descriptor.
+ *
+ * must-single-thread
+ */
+ def getType(typeDescriptor: String): BType = {
+ val n = global.newTypeName(typeDescriptor)
+ getType(n.start)
+ }
+
+ /*
+ * @param methodDescriptor a method descriptor.
+ *
+ * must-single-thread
+ */
+ def getMethodType(methodDescriptor: String): BType = {
+ val n = global.newTypeName(methodDescriptor)
+ new BType(BType.METHOD, n.start, n.length) // TODO assert isValidMethodDescriptor
+ }
+
+ /*
+ * Returns the Java method type corresponding to the given argument and return types.
+ *
+ * @param returnType the return type of the method.
+ * @param argumentTypes the argument types of the method.
+ * @return the Java type corresponding to the given argument and return types.
+ *
+ * must-single-thread
+ */
+ def getMethodType(returnType: BType, argumentTypes: Array[BType]): BType = {
+ val n = global.newTypeName(getMethodDescriptor(returnType, argumentTypes))
+ new BType(BType.METHOD, n.start, n.length)
+ }
+
+ /*
+ * Returns the Java types corresponding to the argument types of method descriptor whose first argument starts at idx0.
+ *
+ * @param idx0 index into chrs of the first argument.
+ * @return the Java types corresponding to the argument types of the given method descriptor.
+ *
+ * can-multi-thread
+ */
+ private def getArgumentTypes(idx0: Int): Array[BType] = {
+ assert(chrs(idx0 - 1) == '(', "doesn't look like a method descriptor.")
+ val args = new Array[BType](getArgumentCount(idx0))
+ var off = idx0
+ var size = 0
+ while (chrs(off) != ')') {
+ args(size) = getType(off)
+ off += args(size).len
+ if (args(size).sort == OBJECT) { off += 2 }
+ // debug: assert("LVZBSCIJFD[)".contains(chrs(off)))
+ size += 1
+ }
+ // debug: var check = 0; while (check < args.length) { assert(args(check) != null); check += 1 }
+ args
+ }
+
+ /*
+ * Returns the Java types corresponding to the argument types of the given
+ * method descriptor.
+ *
+ * @param methodDescriptor a method descriptor.
+ * @return the Java types corresponding to the argument types of the given method descriptor.
+ *
+ * must-single-thread
+ */
+ def getArgumentTypes(methodDescriptor: String): Array[BType] = {
+ val n = global.newTypeName(methodDescriptor)
+ getArgumentTypes(n.start + 1)
+ }
+
+ /*
+ * Returns the number of argument types of this method type, whose first argument starts at idx0.
+ *
+ * @param idx0 index into chrs of the first argument.
+ * @return the number of argument types of this method type.
+ *
+ * can-multi-thread
+ */
+ private def getArgumentCount(idx0: Int): Int = {
+ assert(chrs(idx0 - 1) == '(', "doesn't look like a method descriptor.")
+ var off = idx0
+ var size = 0
+ var keepGoing = true
+ while (keepGoing) {
+ val car = chrs(off)
+ off += 1
+ if (car == ')') {
+ keepGoing = false
+ } else if (car == 'L') {
+ while (chrs(off) != ';') { off += 1 }
+ off += 1
+ size += 1
+ } else if (car != '[') {
+ size += 1
+ }
+ }
+
+ size
+ }
+
+ /*
+ * Returns the Java type corresponding to the return type of the given
+ * method descriptor.
+ *
+ * @param methodDescriptor a method descriptor.
+ * @return the Java type corresponding to the return type of the given method descriptor.
+ *
+ * must-single-thread
+ */
+ def getReturnType(methodDescriptor: String): BType = {
+ val n = global.newTypeName(methodDescriptor)
+ val delta = n.pos(')') // `delta` is relative to the Name's zero-based start position, not a valid index into chrs.
+ assert(delta < n.length, s"not a valid method descriptor: $methodDescriptor")
+ getType(n.start + delta + 1)
+ }
+
+ /*
+ * Returns the descriptor corresponding to the given argument and return types.
+ * Note: no BType is created here for the resulting method descriptor,
+ * if that's desired the invoker is responsible for that.
+ *
+ * @param returnType the return type of the method.
+ * @param argumentTypes the argument types of the method.
+ * @return the descriptor corresponding to the given argument and return types.
+ *
+ * can-multi-thread
+ */
+ def getMethodDescriptor(
+ returnType: BType,
+ argumentTypes: Array[BType]): String =
+ {
+ val buf = new StringBuffer()
+ buf.append('(')
+ var i = 0
+ while (i < argumentTypes.length) {
+ argumentTypes(i).getDescriptor(buf)
+ i += 1
+ }
+ buf.append(')')
+ returnType.getDescriptor(buf)
+ buf.toString()
+ }
+
+ } // end of object BType
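A few descriptor shapes these helpers consume and produce, as a quick reference (standard JVM descriptors, independent of this code):

    // field/type descriptors: "I", "Z", "Ljava/lang/String;", "[I", "[[Ljava/lang/Object;"
    // method descriptor:      getMethodDescriptor(VOID_TYPE, Array(INT_TYPE, BOOLEAN_TYPE)) == "(IZ)V"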
+
+ /*
+ * Based on ASM's Type class. Namer's chrs is used in this class for the same purposes as the `buf` char array in asm.Type.
+ *
+ * All methods of this class can-multi-thread
+ */
+ final class BType(val sort: Int, val off: Int, val len: Int) {
+
+ import global.chrs
+
+ /*
+ * can-multi-thread
+ */
+ def toASMType: scala.tools.asm.Type = {
+ import scala.tools.asm
+ // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
+ (sort: @switch) match {
+ case asm.Type.VOID => asm.Type.VOID_TYPE
+ case asm.Type.BOOLEAN => asm.Type.BOOLEAN_TYPE
+ case asm.Type.CHAR => asm.Type.CHAR_TYPE
+ case asm.Type.BYTE => asm.Type.BYTE_TYPE
+ case asm.Type.SHORT => asm.Type.SHORT_TYPE
+ case asm.Type.INT => asm.Type.INT_TYPE
+ case asm.Type.FLOAT => asm.Type.FLOAT_TYPE
+ case asm.Type.LONG => asm.Type.LONG_TYPE
+ case asm.Type.DOUBLE => asm.Type.DOUBLE_TYPE
+ case asm.Type.ARRAY |
+ asm.Type.OBJECT => asm.Type.getObjectType(getInternalName)
+ case asm.Type.METHOD => asm.Type.getMethodType(getDescriptor)
+ }
+ }
+
+ /*
+ * Unlike for ICode's REFERENCE, isBoxedType(t) implies isReferenceType(t)
+ * Also, `isReferenceType(RT_NOTHING) == true` , similarly for RT_NULL.
+ * Use isNullType(), isNothingType() to detect Null and Nothing.
+ *
+ * can-multi-thread
+ */
+ def hasObjectSort = (sort == BType.OBJECT)
+
+ /*
+ * Returns the number of dimensions of this array type. This method should
+ * only be used for an array type.
+ *
+ * @return the number of dimensions of this array type.
+ *
+ * can-multi-thread
+ */
+ def getDimensions: Int = {
+ var i = 1
+ while (chrs(off + i) == '[') {
+ i += 1
+ }
+ i
+ }
+
+ /*
+ * Returns the (ultimate) element type of this array type.
+ * This method should only be used for an array type.
+ *
+ * @return Returns the type of the elements of this array type.
+ *
+ * can-multi-thread
+ */
+ def getElementType: BType = {
+ assert(isArray, s"Asked for the element type of a non-array type: $this")
+ BType.getType(off + getDimensions)
+ }
+
+ /*
+ * Returns the internal name of the class corresponding to this object or
+ * array type. The internal name of a class is its fully qualified name (as
+ * returned by Class.getName(), where '.' is replaced by '/'). This method
+ * should only be used for an object or array type.
+ *
+ * @return the internal name of the class corresponding to this object type.
+ *
+ * can-multi-thread
+ */
+ def getInternalName: String = {
+ new String(chrs, off, len)
+ }
+
+ /*
+ * @return the prefix of the internal name until the last '/' (if '/' present), empty string otherwise.
+ *
+ * can-multi-thread
+ */
+ def getRuntimePackage: String = {
+ assert(hasObjectSort, s"not of object sort: $toString")
+ val iname = getInternalName
+ val idx = iname.lastIndexOf('/')
+ if (idx == -1) ""
+ else iname.substring(0, idx)
+ }
+
+ /*
+ * @return the suffix of the internal name until the last '/' (if '/' present), internal name otherwise.
+ *
+ * can-multi-thread
+ */
+ def getSimpleName: String = {
+ assert(hasObjectSort, s"not of object sort: $toString")
+ val iname = getInternalName
+ val idx = iname.lastIndexOf('/')
+ if (idx == -1) iname
+ else iname.substring(idx + 1)
+ }
+
+ /*
+ * Returns the argument types of methods of this type.
+ * This method should only be used for method types.
+ *
+ * @return the argument types of methods of this type.
+ *
+ * can-multi-thread
+ */
+ def getArgumentTypes: Array[BType] = {
+ BType.getArgumentTypes(off + 1)
+ }
+
+ /*
+ * Returns the number of arguments of methods of this type.
+ * This method should only be used for method types.
+ *
+ * @return the number of arguments of methods of this type.
+ *
+ * can-multi-thread
+ */
+ def getArgumentCount: Int = {
+ BType.getArgumentCount(off + 1)
+ }
+
+ /*
+ * Returns the return type of methods of this type.
+ * This method should only be used for method types.
+ *
+ * @return the return type of methods of this type.
+ *
+ * can-multi-thread
+ */
+ def getReturnType: BType = {
+ assert(chrs(off) == '(', s"doesn't look like a method descriptor: $toString")
+ var resPos = off + 1
+ while (chrs(resPos) != ')') { resPos += 1 }
+ BType.getType(resPos + 1)
+ }
+
+ /*
+ * Given a zero-based formal-param-position, return its corresponding local-var-index,
+ * taking into account the JVM-type-sizes of preceding formal params.
+ */
+ def convertFormalParamPosToLocalVarIdx(paramPos: Int, isInstanceMethod: Boolean): Int = {
+ assert(sort == asm.Type.METHOD)
+ val paramTypes = getArgumentTypes
+ var local = 0
+ (0 until paramPos) foreach { argPos => local += paramTypes(argPos).getSize }
+
+ local + (if (isInstanceMethod) 1 else 0)
+ }
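For example, for an instance method with descriptor (JI)V (a long followed by an int), the mapping works out as follows (sketch derived from the size table above):

    // convertFormalParamPosToLocalVarIdx(0, isInstanceMethod = true) == 1   (slot 0 holds `this`)
    // convertFormalParamPosToLocalVarIdx(1, isInstanceMethod = true) == 3   (the long occupies slots 1 and 2)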
+
+ /*
+ * Given a local-var-index, return its corresponding zero-based formal-param-position,
+ * taking into account the JVM-type-sizes of preceding formal params.
+ */
+ def convertLocalVarIdxToFormalParamPos(localIdx: Int, isInstanceMethod: Boolean): Int = {
+ assert(sort == asm.Type.METHOD)
+ val paramTypes = getArgumentTypes
+ var remaining = (if (isInstanceMethod) (localIdx - 1) else localIdx)
+ assert(remaining >= 0)
+ var result = 0
+ while (remaining > 0) {
+ remaining -= paramTypes(result).getSize
+ result += 1
+ }
+ assert(remaining == 0)
+
+ result
+ }
+
+ // ------------------------------------------------------------------------
+ // Inspector methods
+ // ------------------------------------------------------------------------
+
+ def isPrimitiveOrVoid = (sort < BType.ARRAY) // can-multi-thread
+ def isValueType = (sort < BType.ARRAY) // can-multi-thread
+ def isArray = (sort == BType.ARRAY) // can-multi-thread
+ def isUnitType = (sort == BType.VOID) // can-multi-thread
+
+ def isRefOrArrayType = { hasObjectSort || isArray } // can-multi-thread
+ def isNonUnitValueType = { isValueType && !isUnitType } // can-multi-thread
+
+ def isNonSpecial = { !isValueType && !isArray && !isPhantomType } // can-multi-thread
+ def isNothingType = { (this == RT_NOTHING) || (this == CT_NOTHING) } // can-multi-thread
+ def isNullType = { (this == RT_NULL) || (this == CT_NULL) } // can-multi-thread
+ def isPhantomType = { isNothingType || isNullType } // can-multi-thread
+
+ /*
+ * can-multi-thread
+ */
+ def isBoxed = {
+ this match {
+ case BOXED_UNIT | BOXED_BOOLEAN | BOXED_CHAR |
+ BOXED_BYTE | BOXED_SHORT | BOXED_INT |
+ BOXED_FLOAT | BOXED_LONG | BOXED_DOUBLE
+ => true
+ case _
+ => false
+ }
+ }
+
+ /* On the JVM,
+ * BOOL, BYTE, CHAR, SHORT, and INT
+ * are like Ints for the purpose of lub calculation.
+ *
+ * can-multi-thread
+ */
+ def isIntSizedType = {
+ (sort : @switch) match {
+ case BType.BOOLEAN | BType.CHAR |
+ BType.BYTE | BType.SHORT | BType.INT
+ => true
+ case _
+ => false
+ }
+ }
+
+ /* On the JVM, similar to isIntSizedType except that BOOL isn't integral while LONG is.
+ *
+ * can-multi-thread
+ */
+ def isIntegralType = {
+ (sort : @switch) match {
+ case BType.CHAR |
+ BType.BYTE | BType.SHORT | BType.INT |
+ BType.LONG
+ => true
+ case _
+ => false
+ }
+ }
+
+ /* On the JVM, FLOAT and DOUBLE.
+ *
+ * can-multi-thread
+ */
+ def isRealType = { (sort == BType.FLOAT ) || (sort == BType.DOUBLE) }
+
+ def isNumericType = (isIntegralType || isRealType) // can-multi-thread
+
+ /* Is this type a category 2 type in JVM terms? (i.e., is it LONG or DOUBLE?)
+ *
+ * can-multi-thread
+ */
+ def isWideType = (getSize == 2)
+
+ def isCapturedCellRef: Boolean = {
+ this == srBooleanRef || this == srByteRef ||
+ this == srCharRef ||
+ this == srIntRef ||
+ this == srLongRef ||
+ this == srFloatRef || this == srDoubleRef
+ }
+
+ /*
+ * Element vs. Component type of an array:
+ * Quoting from the JVMS, Sec. 2.4 "Reference Types and Values"
+ *
+ * An array type consists of a component type with a single dimension (whose
+ * length is not given by the type). The component type of an array type may itself be
+ * an array type. If, starting from any array type, one considers its component type,
+ * and then (if that is also an array type) the component type of that type, and so on,
+ * eventually one must reach a component type that is not an array type; this is called
+ * the element type of the array type. The element type of an array type is necessarily
+ * either a primitive type, or a class type, or an interface type.
+ *
+ */
+
+ /* The type of items this array holds.
+ *
+ * can-multi-thread
+ */
+ def getComponentType: BType = {
+ assert(isArray, s"Asked for the component type of a non-array type: $this")
+ BType.getType(off + 1)
+ }
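+
+ /* Example (illustrative): for the array type with descriptor "[[I", getComponentType
+  * returns the BType for "[I", whereas the element type in the sense of the JVMS quote
+  * above is the primitive int "I".
+  */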
+
+ // ------------------------------------------------------------------------
+ // Conversion to type descriptors
+ // ------------------------------------------------------------------------
+
+ /*
+ * @return the descriptor corresponding to this Java type.
+ *
+ * can-multi-thread
+ */
+ def getDescriptor: String = {
+ val buf = new StringBuffer()
+ getDescriptor(buf)
+ buf.toString()
+ }
+
+ /*
+ * Appends the descriptor corresponding to this Java type to the given string buffer.
+ *
+ * @param buf the string buffer to which the descriptor must be appended.
+ *
+ * can-multi-thread
+ */
+ private def getDescriptor(buf: StringBuffer) {
+ if (isPrimitiveOrVoid) {
+ // descriptor is in byte 3 of 'off' for primitive types (buf == null)
+ buf.append(((off & 0xFF000000) >>> 24).asInstanceOf[Char])
+ } else if (sort == BType.OBJECT) {
+ buf.append('L')
+ buf.append(chrs, off, len)
+ buf.append(';')
+ } else { // sort == ARRAY || sort == METHOD
+ buf.append(chrs, off, len)
+ }
+ }
+
+ // ------------------------------------------------------------------------
+ // Corresponding size and opcodes
+ // ------------------------------------------------------------------------
+
+ /*
+ * Returns the size of values of this type.
+ * This method must not be used for method types.
+ *
+ * @return the size of values of this type, i.e., 2 for <tt>long</tt> and
+ * <tt>double</tt>, 0 for <tt>void</tt> and 1 otherwise.
+ *
+ * can-multi-thread
+ */
+ def getSize: Int = {
+ // the size is in byte 0 of 'off' for primitive types (buf == null)
+ if (isPrimitiveOrVoid) (off & 0xFF) else 1
+ }
+
+ /*
+ * Returns a JVM instruction opcode adapted to this Java type. This method
+ * must not be used for method types.
+ *
+ * @param opcode a JVM instruction opcode. This opcode must be one of ILOAD,
+ * ISTORE, IALOAD, IASTORE, IADD, ISUB, IMUL, IDIV, IREM, INEG, ISHL,
+ * ISHR, IUSHR, IAND, IOR, IXOR and IRETURN.
+ * @return an opcode that is similar to the given opcode, but adapted to
+ * this Java type. For example, if this type is <tt>float</tt> and
+ * <tt>opcode</tt> is IRETURN, this method returns FRETURN.
+ *
+ * can-multi-thread
+ */
+ def getOpcode(opcode: Int): Int = {
+ import scala.tools.asm.Opcodes
+ if (opcode == Opcodes.IALOAD || opcode == Opcodes.IASTORE) {
+ // the offset for IALOAD or IASTORE is in byte 1 of 'off' for
+ // primitive types (buf == null)
+ opcode + (if (isPrimitiveOrVoid) (off & 0xFF00) >> 8 else 4)
+ } else {
+ // the offset for other instructions is in byte 2 of 'off' for
+ // primitive types (buf == null)
+ opcode + (if (isPrimitiveOrVoid) (off & 0xFF0000) >> 16 else 4)
+ }
+ }
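+
+ /* Examples (illustrative): FLOAT.getOpcode(Opcodes.IRETURN) yields FRETURN,
+  * LONG.getOpcode(Opcodes.IALOAD) yields LALOAD, and for any reference or array type
+  * getOpcode(Opcodes.ILOAD) yields ALOAD (the fixed offset of 4).
+  */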
+
+ // ------------------------------------------------------------------------
+ // Equals, hashCode and toString
+ // ------------------------------------------------------------------------
+
+ /*
+ * Tests if the given object is equal to this type.
+ *
+ * @param o the object to be compared to this type.
+ * @return <tt>true</tt> if the given object is equal to this type.
+ *
+ * can-multi-thread
+ */
+ override def equals(o: Any): Boolean = {
+ if (!(o.isInstanceOf[BType])) {
+ return false
+ }
+ val t = o.asInstanceOf[BType]
+ if (this eq t) {
+ return true
+ }
+ if (sort != t.sort) {
+ return false
+ }
+ if (sort >= BType.ARRAY) {
+ if (len != t.len) {
+ return false
+ }
+ // sort checked already
+ if (off == t.off) {
+ return true
+ }
+ var i = 0
+ while (i < len) {
+ if (chrs(off + i) != chrs(t.off + i)) {
+ return false
+ }
+ i += 1
+ }
+ // If we reach here, we could update the largest of (this.off, t.off) to match the other, so as to simplify future == comparisons.
+ // But that would require a var rather than val.
+ }
+ true
+ }
+
+ /*
+ * @return a hash code value for this type.
+ *
+ * can-multi-thread
+ */
+ override def hashCode(): Int = {
+ var hc = 13 * sort;
+ if (sort >= BType.ARRAY) {
+ var i = off
+ val end = i + len
+ while (i < end) {
+ hc = 17 * (hc + chrs(i))
+ i += 1
+ }
+ }
+ hc
+ }
+
+ /*
+ * @return the descriptor of this type.
+ *
+ * can-multi-thread
+ */
+ override def toString: String = { getDescriptor }
+
+ }
+
+ /*
+ * Creates a TypeName and the BType token for it.
+ * This method does not add to `innerClassBufferASM`, use `internalName()` or `asmType()` or `toTypeKind()` for that.
+ *
+ * must-single-thread
+ */
+ def brefType(iname: String): BType = { brefType(newTypeName(iname.toCharArray(), 0, iname.length())) }
+
+ /*
+ * Creates a BType token for the TypeName received as argument.
+ * This method does not add to `innerClassBufferASM`, use `internalName()` or `asmType()` or `toTypeKind()` for that.
+ *
+ * can-multi-thread
+ */
+ def brefType(iname: TypeName): BType = { BType.getObjectType(iname.start, iname.length) }
+
+ // due to keyboard economy only
+ val UNIT = BType.VOID_TYPE
+ val BOOL = BType.BOOLEAN_TYPE
+ val CHAR = BType.CHAR_TYPE
+ val BYTE = BType.BYTE_TYPE
+ val SHORT = BType.SHORT_TYPE
+ val INT = BType.INT_TYPE
+ val LONG = BType.LONG_TYPE
+ val FLOAT = BType.FLOAT_TYPE
+ val DOUBLE = BType.DOUBLE_TYPE
+
+ val BOXED_UNIT = brefType("java/lang/Void")
+ val BOXED_BOOLEAN = brefType("java/lang/Boolean")
+ val BOXED_BYTE = brefType("java/lang/Byte")
+ val BOXED_SHORT = brefType("java/lang/Short")
+ val BOXED_CHAR = brefType("java/lang/Character")
+ val BOXED_INT = brefType("java/lang/Integer")
+ val BOXED_LONG = brefType("java/lang/Long")
+ val BOXED_FLOAT = brefType("java/lang/Float")
+ val BOXED_DOUBLE = brefType("java/lang/Double")
+
+ /*
+ * RT_NOTHING and RT_NULL exist at run-time only.
+ * They are the bytecode-level manifestation (in method signatures only) of what shows up as NothingClass resp. NullClass in Scala ASTs.
+ * Therefore, when RT_NOTHING or RT_NULL are to be emitted,
+ * a mapping is needed: the internal names of NothingClass and NullClass can't be emitted as-is.
+ */
+ val RT_NOTHING = brefType("scala/runtime/Nothing$")
+ val RT_NULL = brefType("scala/runtime/Null$")
+ val CT_NOTHING = brefType("scala/Nothing") // TODO needed?
+ val CT_NULL = brefType("scala/Null") // TODO needed?
+
+ val srBooleanRef = brefType("scala/runtime/BooleanRef")
+ val srByteRef = brefType("scala/runtime/ByteRef")
+ val srCharRef = brefType("scala/runtime/CharRef")
+ val srIntRef = brefType("scala/runtime/IntRef")
+ val srLongRef = brefType("scala/runtime/LongRef")
+ val srFloatRef = brefType("scala/runtime/FloatRef")
+ val srDoubleRef = brefType("scala/runtime/DoubleRef")
+
+ /* Map from type kinds to the Java reference types.
+ * Useful when pushing class literals onto the operand stack (ldc instruction taking a class literal).
+ * @see Predef.classOf
+ * @see genConstant()
+ */
+ val classLiteral = immutable.Map[BType, BType](
+ UNIT -> BOXED_UNIT,
+ BOOL -> BOXED_BOOLEAN,
+ BYTE -> BOXED_BYTE,
+ SHORT -> BOXED_SHORT,
+ CHAR -> BOXED_CHAR,
+ INT -> BOXED_INT,
+ LONG -> BOXED_LONG,
+ FLOAT -> BOXED_FLOAT,
+ DOUBLE -> BOXED_DOUBLE
+ )
+
+ case class MethodNameAndType(mname: String, mdesc: String)
+
+ val asmBoxTo: Map[BType, MethodNameAndType] = {
+ Map(
+ BOOL -> MethodNameAndType("boxToBoolean", "(Z)Ljava/lang/Boolean;" ) ,
+ BYTE -> MethodNameAndType("boxToByte", "(B)Ljava/lang/Byte;" ) ,
+ CHAR -> MethodNameAndType("boxToCharacter", "(C)Ljava/lang/Character;") ,
+ SHORT -> MethodNameAndType("boxToShort", "(S)Ljava/lang/Short;" ) ,
+ INT -> MethodNameAndType("boxToInteger", "(I)Ljava/lang/Integer;" ) ,
+ LONG -> MethodNameAndType("boxToLong", "(J)Ljava/lang/Long;" ) ,
+ FLOAT -> MethodNameAndType("boxToFloat", "(F)Ljava/lang/Float;" ) ,
+ DOUBLE -> MethodNameAndType("boxToDouble", "(D)Ljava/lang/Double;" )
+ )
+ }
+
+ val asmUnboxTo: Map[BType, MethodNameAndType] = {
+ Map(
+ BOOL -> MethodNameAndType("unboxToBoolean", "(Ljava/lang/Object;)Z") ,
+ BYTE -> MethodNameAndType("unboxToByte", "(Ljava/lang/Object;)B") ,
+ CHAR -> MethodNameAndType("unboxToChar", "(Ljava/lang/Object;)C") ,
+ SHORT -> MethodNameAndType("unboxToShort", "(Ljava/lang/Object;)S") ,
+ INT -> MethodNameAndType("unboxToInt", "(Ljava/lang/Object;)I") ,
+ LONG -> MethodNameAndType("unboxToLong", "(Ljava/lang/Object;)J") ,
+ FLOAT -> MethodNameAndType("unboxToFloat", "(Ljava/lang/Object;)F") ,
+ DOUBLE -> MethodNameAndType("unboxToDouble", "(Ljava/lang/Object;)D")
+ )
+ }
+
+ /*
+ * can-multi-thread
+ */
+ def toBType(t: asm.Type): BType = {
+ (t.getSort: @switch) match {
+ case asm.Type.VOID => BType.VOID_TYPE
+ case asm.Type.BOOLEAN => BType.BOOLEAN_TYPE
+ case asm.Type.CHAR => BType.CHAR_TYPE
+ case asm.Type.BYTE => BType.BYTE_TYPE
+ case asm.Type.SHORT => BType.SHORT_TYPE
+ case asm.Type.INT => BType.INT_TYPE
+ case asm.Type.FLOAT => BType.FLOAT_TYPE
+ case asm.Type.LONG => BType.LONG_TYPE
+ case asm.Type.DOUBLE => BType.DOUBLE_TYPE
+ case asm.Type.ARRAY |
+ asm.Type.OBJECT |
+ asm.Type.METHOD =>
+ // TODO confirm whether this also takes care of the phantom types.
+ val key =
+ if (t.getSort == asm.Type.METHOD) t.getDescriptor
+ else t.getInternalName
+
+ val n = global.lookupTypeName(key.toCharArray)
+ new BType(t.getSort, n.start, n.length)
+ }
+ }
+
+ /*
+ * ASM trees represent types as strings (internal names, descriptors).
+ * Given that we operate instead on BTypes, conversion is needed when visiting MethodNodes outside GenBCode.
+ *
+ * can-multi-thread
+ */
+ def descrToBType(typeDescriptor: String): BType = {
+ val c: Char = typeDescriptor(0)
+ c match {
+ case 'V' => BType.VOID_TYPE
+ case 'Z' => BType.BOOLEAN_TYPE
+ case 'C' => BType.CHAR_TYPE
+ case 'B' => BType.BYTE_TYPE
+ case 'S' => BType.SHORT_TYPE
+ case 'I' => BType.INT_TYPE
+ case 'F' => BType.FLOAT_TYPE
+ case 'J' => BType.LONG_TYPE
+ case 'D' => BType.DOUBLE_TYPE
+ case 'L' =>
+ val iname = typeDescriptor.substring(1, typeDescriptor.length() - 1)
+ val n = global.lookupTypeName(iname.toCharArray)
+ new BType(asm.Type.OBJECT, n.start, n.length)
+ case _ =>
+ val n = global.lookupTypeName(typeDescriptor.toCharArray)
+ BType.getType(n.start)
+ }
+ }
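+
+ /* Illustrative examples (not part of the original sources):
+  *   descrToBType("I")                  yields BType.INT_TYPE
+  *   descrToBType("Ljava/lang/String;") yields an OBJECT BType for java/lang/String
+  *   descrToBType("[I")                 falls through to the default case and yields an ARRAY BType
+  */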
+
+ /*
+ * Use only to lookup reference types, otherwise use `descrToBType()`
+ *
+ * can-multi-thread
+ */
+ def lookupRefBType(iname: String): BType = {
+ import global.chrs
+ val n = global.lookupTypeName(iname.toCharArray)
+ val sort = if (chrs(n.start) == '[') BType.ARRAY else BType.OBJECT;
+ new BType(sort, n.start, n.length)
+ }
+
+ def lookupRefBTypeIfExisting(iname: String): BType = {
+ import global.chrs
+ val n = global.lookupTypeNameIfExisting(iname.toCharArray, false)
+ if (n == null) { return null }
+ val sort = if (chrs(n.start) == '[') BType.ARRAY else BType.OBJECT;
+ new BType(sort, n.start, n.length)
+ }
+
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
new file mode 100644
index 0000000000..62270b7c0a
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala
@@ -0,0 +1,1329 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala
+package tools.nsc
+package backend.jvm
+
+import scala.tools.asm
+import scala.annotation.switch
+import scala.collection.{ immutable, mutable }
+import scala.tools.nsc.io.AbstractFile
+
+/*
+ * Traits encapsulating functionality to convert Scala AST Trees into ASM ClassNodes.
+ *
+ * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded
+ * @version 1.0
+ *
+ */
+abstract class BCodeHelpers extends BCodeTypes with BytecodeWriters {
+
+ import global._
+
+ /*
+ * must-single-thread
+ */
+ def getFileForClassfile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = {
+ getFile(base, clsName, suffix)
+ }
+
+ /*
+ * must-single-thread
+ */
+ def getOutFolder(csym: Symbol, cName: String, cunit: CompilationUnit): _root_.scala.tools.nsc.io.AbstractFile = {
+ try {
+ outputDirectory(csym)
+ } catch {
+ case ex: Throwable =>
+ cunit.error(cunit.body.pos, s"Couldn't create file for class $cName\n${ex.getMessage}")
+ null
+ }
+ }
+
+ var pickledBytes = 0 // statistics
+
+ // -----------------------------------------------------------------------------------------
+ // finding the least upper bound in agreement with the bytecode verifier (given two internal names handed by ASM)
+ // Background:
+ // http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf
+ // http://comments.gmane.org/gmane.comp.java.vm.languages/2293
+ // https://issues.scala-lang.org/browse/SI-3872
+ // -----------------------------------------------------------------------------------------
+
+ /*
+ * can-multi-thread
+ */
+ def firstCommonSuffix(as: List[Tracked], bs: List[Tracked]): BType = {
+ var chainA = as
+ var chainB = bs
+ var fcs: Tracked = null
+ do {
+ if (chainB contains chainA.head) fcs = chainA.head
+ else if (chainA contains chainB.head) fcs = chainB.head
+ else {
+ chainA = chainA.tail
+ chainB = chainB.tail
+ }
+ } while (fcs == null)
+ fcs.c
+ }
+
+ /* An `asm.ClassWriter` that uses `jvmWiseLUB()` to compute the internal name of the
+  * least common ancestor of the types given by inameA and inameB.
+  * That internal name is what ASM needs in order to compute stack map frames, http://asm.ow2.org/doc/developer-guide.html#controlflow
+ */
+ final class CClassWriter(flags: Int) extends asm.ClassWriter(flags) {
+
+ /*
+ * This method is thread re-entrant because chrs never grows during its operation (that's because all TypeNames being looked up have already been entered).
+ * To stress this point, rather than using `newTypeName()` we use `lookupTypeName()`
+ *
+ * can-multi-thread
+ */
+ override def getCommonSuperClass(inameA: String, inameB: String): String = {
+ val a = brefType(lookupTypeName(inameA.toCharArray))
+ val b = brefType(lookupTypeName(inameB.toCharArray))
+ val lca = jvmWiseLUB(a, b)
+ val lcaName = lca.getInternalName // don't call javaName because that side-effects innerClassBuffer.
+ assert(lcaName != "scala/Any")
+
+ lcaName // ASM caches the answer during the lifetime of a ClassWriter. We outlive that. Not sure whether caching on our side would improve things.
+ }
+
+ }
+
+ /*
+ * Finding the least upper bound in agreement with the bytecode verifier (given two internal names handed out by ASM)
+ * Background:
+ * http://gallium.inria.fr/~xleroy/publi/bytecode-verification-JAR.pdf
+ * http://comments.gmane.org/gmane.comp.java.vm.languages/2293
+ * https://issues.scala-lang.org/browse/SI-3872
+ *
+ * can-multi-thread
+ */
+ def jvmWiseLUB(a: BType, b: BType): BType = {
+
+ assert(a.isNonSpecial, s"jvmWiseLUB() received a non-plain-class $a")
+ assert(b.isNonSpecial, s"jvmWiseLUB() received a non-plain-class $b")
+
+ val ta = exemplars.get(a)
+ val tb = exemplars.get(b)
+
+ val res = Pair(ta.isInterface, tb.isInterface) match {
+ case (true, true) =>
+ // exercised by test/files/run/t4761.scala
+ if (tb.isSubtypeOf(ta.c)) ta.c
+ else if (ta.isSubtypeOf(tb.c)) tb.c
+ else ObjectReference
+ case (true, false) =>
+ if (tb.isSubtypeOf(a)) a else ObjectReference
+ case (false, true) =>
+ if (ta.isSubtypeOf(b)) b else ObjectReference
+ case _ =>
+ firstCommonSuffix(ta :: ta.superClasses, tb :: tb.superClasses)
+ }
+ assert(res.isNonSpecial, "jvmWiseLUB() returned a non-plain-class.")
+ res
+ }
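+
+ /* Worked example (illustrative): for two plain classes C and D that both extend a
+  * common superclass A (no interfaces involved), the last case applies and
+  * firstCommonSuffix() drops heads from both superclass chains until one chain's head
+  * occurs in the other, yielding A (in the worst case java/lang/Object). When an
+  * interface is involved and no subtyping relation holds, ObjectReference is returned.
+  */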
+
+ /*
+ * must-single-thread
+ */
+ object isJavaEntryPoint {
+
+ /*
+ * must-single-thread
+ */
+ def apply(sym: Symbol, csymCompUnit: CompilationUnit): Boolean = {
+ def fail(msg: String, pos: Position = sym.pos) = {
+ csymCompUnit.warning(sym.pos,
+ sym.name +
+ s" has a main method with parameter type Array[String], but ${sym.fullName('.')} will not be a runnable program.\n Reason: $msg"
+ // TODO: make this next claim true, if possible
+ // by generating valid main methods as static in module classes
+ // not sure what the jvm allows here
+ // + " You can still run the program by calling it as " + sym.javaSimpleName + " instead."
+ )
+ false
+ }
+ def failNoForwarder(msg: String) = {
+ fail(s"$msg, which means no static forwarder can be generated.\n")
+ }
+ val possibles = if (sym.hasModuleFlag) (sym.tpe nonPrivateMember nme.main).alternatives else Nil
+ val hasApproximate = possibles exists { m =>
+ m.info match {
+ case MethodType(p :: Nil, _) => p.tpe.typeSymbol == definitions.ArrayClass
+ case _ => false
+ }
+ }
+ // At this point it's a module with a main-looking method, so either succeed or warn that it isn't.
+ hasApproximate && {
+ // Before erasure so we can identify generic mains.
+ enteringErasure {
+ val companion = sym.linkedClassOfClass
+
+ if (definitions.hasJavaMainMethod(companion))
+ failNoForwarder("companion contains its own main method")
+ else if (companion.tpe.member(nme.main) != NoSymbol)
+ // this is only because forwarders aren't smart enough yet
+ failNoForwarder("companion contains its own main method (implementation restriction: no main is allowed, regardless of signature)")
+ else if (companion.isTrait)
+ failNoForwarder("companion is a trait")
+ // Now either succeed, or issue some additional warnings for things that look like
+ // attempts to be Java main methods.
+ else (possibles exists definitions.isJavaMainMethod) || {
+ possibles exists { m =>
+ m.info match {
+ case PolyType(_, _) =>
+ fail("main methods cannot be generic.")
+ case MethodType(params, res) =>
+ if (res.typeSymbol :: params exists (_.isAbstractType))
+ fail("main methods cannot refer to type parameters or abstract types.", m.pos)
+ else
+ definitions.isJavaMainMethod(m) || fail("main method must have exact signature (Array[String])Unit", m.pos)
+ case tp =>
+ fail(s"don't know what this is: $tp", m.pos)
+ }
+ }
+ }
+ }
+ }
+ }
+
+ }
+
+ /*
+ * must-single-thread
+ */
+ def initBytecodeWriter(entryPoints: List[Symbol]): BytecodeWriter = {
+ settings.outputDirs.getSingleOutput match {
+ case Some(f) if f hasExtension "jar" =>
+ // If no main class was specified, see if there's only one
+ // entry point among the classes going into the jar.
+ if (settings.mainClass.isDefault) {
+ entryPoints map (_.fullName('.')) match {
+ case Nil =>
+ log("No Main-Class designated or discovered.")
+ case name :: Nil =>
+ log(s"Unique entry point: setting Main-Class to $name")
+ settings.mainClass.value = name
+ case names =>
+ log(s"No Main-Class due to multiple entry points:\n ${names.mkString("\n ")}")
+ }
+ }
+ else log(s"Main-Class was specified: ${settings.mainClass.value}")
+
+ new DirectToJarfileWriter(f.file)
+
+ case _ => factoryNonJarBytecodeWriter()
+ }
+ }
+
+ /*
+ * must-single-thread
+ */
+ def fieldSymbols(cls: Symbol): List[Symbol] = {
+ for (f <- cls.info.decls.toList ;
+ if !f.isMethod && f.isTerm && !f.isModule
+ ) yield f;
+ }
+
+ /*
+ * can-multi-thread
+ */
+ def methodSymbols(cd: ClassDef): List[Symbol] = {
+ cd.impl.body collect { case dd: DefDef => dd.symbol }
+ }
+
+ /*
+ * Populates the InnerClasses JVM attribute with `refedInnerClasses`.
+ * In addition to inner classes mentioned somewhere in `jclass` (where `jclass` is a class file being emitted),
+ * `refedInnerClasses` should contain those inner classes defined as direct member classes of `jclass`
+ * but otherwise not mentioned in `jclass`.
+ *
+ * `refedInnerClasses` may contain duplicates,
+ * need not contain the enclosing inner classes of each inner class it lists (those are looked up for consistency).
+ *
+ * This method serializes in the InnerClasses JVM attribute in an appropriate order,
+ * not necessarily that given by `refedInnerClasses`.
+ *
+ * can-multi-thread
+ */
+ final def addInnerClassesASM(jclass: asm.ClassVisitor, refedInnerClasses: Iterable[BType]) {
+ // used to detect duplicates.
+ val seen = mutable.Map.empty[String, String]
+ // result without duplicates, not yet sorted.
+ val result = mutable.Set.empty[InnerClassEntry]
+
+ for(s: BType <- refedInnerClasses;
+ e: InnerClassEntry <- exemplars.get(s).innersChain) {
+
+ assert(e.name != null, "saveInnerClassesFor() is broken.") // documentation
+ val doAdd = seen.get(e.name) match {
+ // TODO is it ok for prevOName to be null? (Someone should really document the invariants of the InnerClasses bytecode attribute)
+ case Some(prevOName) =>
+ // this occurs e.g. when innerClassBuffer contains both class Thread$State, object Thread$State,
+ // i.e. for them it must be the case that oname == java/lang/Thread
+ assert(prevOName == e.outerName, "duplicate")
+ false
+ case None => true
+ }
+
+ if (doAdd) {
+ seen += (e.name -> e.outerName)
+ result += e
+ }
+
+ }
+ // sorting ensures inner classes are listed after their enclosing class, thus satisfying the Eclipse Java compiler
+ for(e <- result.toList sortBy (_.name.toString)) {
+ jclass.visitInnerClass(e.name, e.outerName, e.innerName, e.access)
+ }
+
+ } // end of method addInnerClassesASM()
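+
+ /* Illustrative note (not part of the original sources): sorting by name means that,
+  * e.g., an entry for Outer$Inner always precedes the entry for Outer$Inner$Innermost,
+  * so each nested class appears after the class that encloses it.
+  */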
+
+ /*
+ * Custom attribute (JVMS 4.7.1) "ScalaSig" used as marker only
+ * i.e., the pickle is contained in a custom annotation, see:
+ * (1) `addAnnotations()`,
+ * (2) SID # 10 (draft) - Storage of pickled Scala signatures in class files, http://www.scala-lang.org/sid/10
+ * (3) SID # 5 - Internals of Scala Annotations, http://www.scala-lang.org/sid/5
+ * That annotation in turn is not related to the "java-generic-signature" (JVMS 4.7.9)
+ * other than both ending up encoded as attributes (JVMS 4.7)
+ * (with the caveat that the "ScalaSig" attribute is associated to some classes,
+ * while the "Signature" attribute can be associated to classes, methods, and fields.)
+ *
+ */
+ trait BCPickles {
+
+ import scala.reflect.internal.pickling.{ PickleFormat, PickleBuffer }
+
+ val versionPickle = {
+ val vp = new PickleBuffer(new Array[Byte](16), -1, 0)
+ assert(vp.writeIndex == 0, vp)
+ vp writeNat PickleFormat.MajorVersion
+ vp writeNat PickleFormat.MinorVersion
+ vp writeNat 0
+ vp
+ }
+
+ /*
+ * can-multi-thread
+ */
+ def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = {
+ val dest = new Array[Byte](len);
+ System.arraycopy(b, offset, dest, 0, len);
+ new asm.CustomAttr(name, dest)
+ }
+
+ /*
+ * can-multi-thread
+ */
+ def pickleMarkerLocal = {
+ createJAttribute(tpnme.ScalaSignatureATTR.toString, versionPickle.bytes, 0, versionPickle.writeIndex)
+ }
+
+ /*
+ * can-multi-thread
+ */
+ def pickleMarkerForeign = {
+ createJAttribute(tpnme.ScalaATTR.toString, new Array[Byte](0), 0, 0)
+ }
+
+ /* Returns a ScalaSignature annotation if it must be added to this class, none otherwise.
+ * This annotation must be added to the class' annotations list when generating them.
+ *
+ * Depending on whether the returned option is defined, it adds to `jclass` one of:
+ * (a) the ScalaSig marker attribute
+ * (indicating that a scala-signature-annotation aka pickle is present in this class); or
+ * (b) the Scala marker attribute
+ * (indicating that a scala-signature-annotation aka pickle is to be found in another file).
+ *
+ *
+ * @param jclassName The class file that is being readied.
+ * @param sym The symbol for which the signature has been entered in the symData map.
+ * This is different than the symbol
+ * that is being generated in the case of a mirror class.
+ * @return An option that is:
+ * - defined and contains an AnnotationInfo of the ScalaSignature type,
+ * instantiated with the pickle signature for sym.
+ * - empty if the jclass/sym pair must not contain a pickle.
+ *
+ * must-single-thread
+ */
+ def getAnnotPickle(jclassName: String, sym: Symbol): Option[AnnotationInfo] = {
+ currentRun.symData get sym match {
+ case Some(pickle) if !nme.isModuleName(newTermName(jclassName)) =>
+ val scalaAnnot = {
+ val sigBytes = ScalaSigBytes(pickle.bytes.take(pickle.writeIndex))
+ AnnotationInfo(sigBytes.sigAnnot, Nil, (nme.bytes, sigBytes) :: Nil)
+ }
+ pickledBytes += pickle.writeIndex
+ currentRun.symData -= sym
+ currentRun.symData -= sym.companionSymbol
+ Some(scalaAnnot)
+ case _ =>
+ None
+ }
+ }
+
+ } // end of trait BCPickles
+
+ trait BCInnerClassGen {
+
+ def debugLevel = settings.debuginfo.indexOfChoice
+
+ val emitSource = debugLevel >= 1
+ val emitLines = debugLevel >= 2
+ val emitVars = debugLevel >= 3
+
+ /*
+ * Contains class-symbols that:
+ * (a) are known to denote inner classes
+ * (b) are mentioned somewhere in the class being generated.
+ *
+ * In other words, the lifetime of `innerClassBufferASM` is associated to "the class being generated".
+ */
+ val innerClassBufferASM = mutable.Set.empty[BType]
+
+ /*
+ * Tracks (if needed) the inner class given by `sym`.
+ *
+ * must-single-thread
+ */
+ final def internalName(sym: Symbol): String = { asmClassType(sym).getInternalName }
+
+ /*
+ * Tracks (if needed) the inner class given by `sym`.
+ *
+ * must-single-thread
+ */
+ final def asmClassType(sym: Symbol): BType = {
+ assert(
+ hasInternalName(sym),
+ {
+ val msg0 = if (sym.isAbstractType) "An AbstractTypeSymbol (SI-7122) " else "A symbol ";
+ msg0 + s"has reached the bytecode emitter, for which no JVM-level internal name can be found: ${sym.fullName}"
+ }
+ )
+ val phantOpt = phantomTypeMap.get(sym)
+ if (phantOpt.isDefined) {
+ return phantOpt.get
+ }
+ val tracked = exemplar(sym)
+ val tk = tracked.c
+ if (tracked.isInnerClass) {
+ innerClassBufferASM += tk
+ }
+
+ tk
+ }
+
+ /*
+ * Returns the BType for the given type.
+ * Tracks (if needed) the inner class given by `t`.
+ *
+ * must-single-thread
+ */
+ final def toTypeKind(t: Type): BType = {
+
+ /* Interfaces have to be handled delicately to avoid introducing spurious errors,
+ * but if we treat them all as AnyRef we lose too much information.
+ */
+ def newReference(sym0: Symbol): BType = {
+ assert(!primitiveTypeMap.contains(sym0), "Use primitiveTypeMap instead.")
+ assert(sym0 != definitions.ArrayClass, "Use arrayOf() instead.")
+
+ if (sym0 == definitions.NullClass) return RT_NULL;
+ if (sym0 == definitions.NothingClass) return RT_NOTHING;
+
+ // Working around SI-5604. Rather than failing the compile when we see
+ // a package here, check if there's a package object.
+ val sym = (
+ if (!sym0.isPackageClass) sym0
+ else sym0.info.member(nme.PACKAGE) match {
+ case NoSymbol => abort(s"Cannot use package as value: ${sym0.fullName}")
+ case s => devWarning("Bug: found package class where package object expected. Converting.") ; s.moduleClass
+ }
+ )
+
+ // Can't call .toInterface (at this phase) or we trip an assertion.
+ // See PackratParser#grow for a method which fails with an apparent mismatch
+ // between "object PackratParsers$class" and "trait PackratParsers"
+ if (sym.isImplClass) {
+ // pos/spec-List.scala is the sole failure if we don't check for NoSymbol
+ val traitSym = sym.owner.info.decl(tpnme.interfaceName(sym.name))
+ if (traitSym != NoSymbol) {
+ // this tracks the inner class in innerClassBufferASM, if needed.
+ return asmClassType(traitSym)
+ }
+ }
+
+ assert(hasInternalName(sym), s"Invoked for a symbol lacking JVM internal name: ${sym.fullName}")
+ assert(!phantomTypeMap.contains(sym), "phantom types not supposed to reach here.")
+
+ val tracked = exemplar(sym)
+ val tk = tracked.c
+ if (tracked.isInnerClass) {
+ innerClassBufferASM += tk
+ }
+
+ tk
+ }
+
+ def primitiveOrRefType(sym: Symbol): BType = {
+ assert(sym != definitions.ArrayClass, "Use primitiveOrArrayOrRefType() instead.")
+
+ primitiveTypeMap.getOrElse(sym, newReference(sym))
+ }
+
+ def primitiveOrRefType2(sym: Symbol): BType = {
+ primitiveTypeMap.get(sym) match {
+ case Some(pt) => pt
+ case None =>
+ sym match {
+ case definitions.NullClass => RT_NULL
+ case definitions.NothingClass => RT_NOTHING
+ case _ if sym.isClass => newReference(sym)
+ case _ =>
+ assert(sym.isType, sym) // it must be compiling Array[a]
+ ObjectReference
+ }
+ }
+ }
+
+ import definitions.ArrayClass
+
+ // Call to .normalize fixes #3003 (follow type aliases). Otherwise, primitiveOrArrayOrRefType() would return ObjectReference.
+ t.normalize match {
+
+ case ThisType(sym) =>
+ if (sym == ArrayClass) ObjectReference
+ else phantomTypeMap.getOrElse(sym, exemplar(sym).c)
+
+ case SingleType(_, sym) => primitiveOrRefType(sym)
+
+ case _: ConstantType => toTypeKind(t.underlying)
+
+ case TypeRef(_, sym, args) =>
+ if (sym == ArrayClass) arrayOf(toTypeKind(args.head))
+ else primitiveOrRefType2(sym)
+
+ case ClassInfoType(_, _, sym) =>
+ assert(sym != ArrayClass, "ClassInfoType to ArrayClass!")
+ primitiveOrRefType(sym)
+
+ // !!! Iulian says types which make no sense after erasure should not reach here, which includes the ExistentialType, AnnotatedType, RefinedType.
+ case ExistentialType(_, t) => toTypeKind(t) // TODO shouldn't get here but the following does: akka-actor/src/main/scala/akka/util/WildcardTree.scala
+ case AnnotatedType(_, w, _) => toTypeKind(w) // TODO test/files/jvm/annotations.scala causes an AnnotatedType to reach here.
+ case RefinedType(parents, _) => parents map toTypeKind reduceLeft jvmWiseLUB
+
+ // For sure WildcardTypes shouldn't reach here either, but when debugging such situations this may come in handy.
+ // case WildcardType => REFERENCE(ObjectClass)
+ case norm => abort(
+ s"Unknown type: $t, $norm [${t.getClass}, ${norm.getClass}] TypeRef? ${t.isInstanceOf[TypeRef]}"
+ )
+ }
+
+ } // end of method toTypeKind()
+
+ /*
+ * must-single-thread
+ */
+ def asmMethodType(msym: Symbol): BType = {
+ assert(msym.isMethod, s"not a method-symbol: $msym")
+ val resT: BType =
+ if (msym.isClassConstructor || msym.isConstructor) BType.VOID_TYPE
+ else toTypeKind(msym.tpe.resultType);
+ BType.getMethodType( resT, mkArray(msym.tpe.paramTypes map toTypeKind) )
+ }
+
+ /*
+ * Returns all direct member inner classes of `csym`,
+ * thus making sure they get entries in the InnerClasses JVM attribute
+ * even if otherwise not mentioned in the class being built.
+ *
+ * must-single-thread
+ */
+ final def trackMemberClasses(csym: Symbol, lateClosuresBTs: List[BType]): List[BType] = {
+ val lateInnerClasses = exitingErasure {
+ for (sym <- List(csym, csym.linkedClassOfClass); memberc <- sym.info.decls.map(innerClassSymbolFor) if memberc.isClass)
+ yield memberc
+ }
+ // as a precaution, do the following outside the above `exitingErasure` otherwise funny internal names might be computed.
+ val result = for(memberc <- lateInnerClasses) yield {
+ val tracked = exemplar(memberc)
+ val memberCTK = tracked.c
+ assert(tracked.isInnerClass, s"saveInnerClassesFor() says this was no inner-class after all: ${memberc.fullName}")
+
+ memberCTK
+ }
+
+ exemplar(csym).directMemberClasses = (result ::: lateClosuresBTs)
+
+ result
+ }
+
+ /*
+ * Tracks (if needed) the inner class given by `t`.
+ *
+ * must-single-thread
+ */
+ final def descriptor(t: Type): String = { toTypeKind(t).getDescriptor }
+
+ /*
+ * Tracks (if needed) the inner class given by `sym`.
+ *
+ * must-single-thread
+ */
+ final def descriptor(sym: Symbol): String = { asmClassType(sym).getDescriptor }
+
+ } // end of trait BCInnerClassGen
+
+ trait BCAnnotGen extends BCInnerClassGen {
+
+ /*
+ * can-multi-thread
+ */
+ def ubytesToCharArray(bytes: Array[Byte]): Array[Char] = {
+ val ca = new Array[Char](bytes.length)
+ var idx = 0
+ while (idx < bytes.length) {
+ val b: Byte = bytes(idx)
+ assert((b & ~0x7f) == 0)
+ ca(idx) = b.asInstanceOf[Char]
+ idx += 1
+ }
+
+ ca
+ }
+
+ /*
+ * can-multi-thread
+ */
+ private def arrEncode(sb: ScalaSigBytes): Array[String] = {
+ var strs: List[String] = Nil
+ val bSeven: Array[Byte] = sb.sevenBitsMayBeZero
+ // chop into slices of at most 65535 bytes, counting 0x00 as taking two bytes (as per JVMS 4.4.7 The CONSTANT_Utf8_info Structure)
+ var prevOffset = 0
+ var offset = 0
+ var encLength = 0
+ while (offset < bSeven.size) {
+ val deltaEncLength = (if (bSeven(offset) == 0) 2 else 1)
+ val newEncLength = encLength.toLong + deltaEncLength
+ if (newEncLength >= 65535) {
+ val ba = bSeven.slice(prevOffset, offset)
+ strs ::= new java.lang.String(ubytesToCharArray(ba))
+ encLength = 0
+ prevOffset = offset
+ } else {
+ encLength += deltaEncLength
+ offset += 1
+ }
+ }
+ if (prevOffset < offset) {
+ assert(offset == bSeven.length)
+ val ba = bSeven.slice(prevOffset, offset)
+ strs ::= new java.lang.String(ubytesToCharArray(ba))
+ }
+ assert(strs.size > 1, "encode instead as one String via strEncode()") // TODO too strict?
+ mkArrayReverse(strs)
+ }
+
+ /*
+ * can-multi-thread
+ */
+ private def strEncode(sb: ScalaSigBytes): String = {
+ val ca = ubytesToCharArray(sb.sevenBitsMayBeZero)
+ new java.lang.String(ca)
+ // debug val bvA = new asm.ByteVector; bvA.putUTF8(s)
+ // debug val enc: Array[Byte] = scala.reflect.internal.pickling.ByteCodecs.encode(bytes)
+ // debug assert(enc(idx) == bvA.getByte(idx + 2))
+ // debug assert(bvA.getLength == enc.size + 2)
+ }
+
+ /*
+ * For arg a LiteralAnnotArg(const) with const.tag in {ClazzTag, EnumTag},
+ * as well as for arg a NestedAnnotArg
+ * must-single-thread
+ * Otherwise it's safe to call from multiple threads.
+ */
+ def emitArgument(av: asm.AnnotationVisitor,
+ name: String,
+ arg: ClassfileAnnotArg) {
+ arg match {
+
+ case LiteralAnnotArg(const) =>
+ if (const.isNonUnitAnyVal) { av.visit(name, const.value) }
+ else {
+ const.tag match {
+ case StringTag =>
+ assert(const.value != null, const) // TODO this invariant isn't documented in `case class Constant`
+ av.visit(name, const.stringValue) // `stringValue` special-cases null, but that execution path isn't exercised for a const with StringTag
+ case ClazzTag => av.visit(name, toTypeKind(const.typeValue).toASMType)
+ case EnumTag =>
+ val edesc = descriptor(const.tpe) // the class descriptor of the enumeration class.
+ val evalue = const.symbolValue.name.toString // the actual enumeration value.
+ av.visitEnum(name, edesc, evalue)
+ }
+ }
+
+ case sb @ ScalaSigBytes(bytes) =>
+ // see http://www.scala-lang.org/sid/10 (Storage of pickled Scala signatures in class files)
+ // also JVMS Sec. 4.7.16.1 The element_value structure and JVMS Sec. 4.4.7 The CONSTANT_Utf8_info Structure.
+ if (sb.fitsInOneString) {
+ av.visit(name, strEncode(sb))
+ } else {
+ val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name)
+ for(arg <- arrEncode(sb)) { arrAnnotV.visit(name, arg) }
+ arrAnnotV.visitEnd()
+ } // for the lazy val in ScalaSigBytes to be GC'ed, the invoker of emitAnnotations() should hold the ScalaSigBytes in a method-local var that doesn't escape.
+
+ case ArrayAnnotArg(args) =>
+ val arrAnnotV: asm.AnnotationVisitor = av.visitArray(name)
+ for(arg <- args) { emitArgument(arrAnnotV, null, arg) }
+ arrAnnotV.visitEnd()
+
+ case NestedAnnotArg(annInfo) =>
+ val AnnotationInfo(typ, args, assocs) = annInfo
+ assert(args.isEmpty, args)
+ val desc = descriptor(typ) // the class descriptor of the nested annotation class
+ val nestedVisitor = av.visitAnnotation(name, desc)
+ emitAssocs(nestedVisitor, assocs)
+ }
+ }
+
+ /* Whether an annotation should be emitted as a Java annotation
+ * .initialize: if 'annot' is read from pickle, atp might be un-initialized
+ *
+ * must-single-thread
+ */
+ private def shouldEmitAnnotation(annot: AnnotationInfo) =
+ annot.symbol.initialize.isJavaDefined &&
+ annot.matches(definitions.ClassfileAnnotationClass) &&
+ annot.args.isEmpty &&
+ !annot.matches(definitions.DeprecatedAttr)
+
+ /*
+ * In general,
+ * must-single-thread
+ * but not necessarily always.
+ */
+ def emitAssocs(av: asm.AnnotationVisitor, assocs: List[(Name, ClassfileAnnotArg)]) {
+ for ((name, value) <- assocs) {
+ emitArgument(av, name.toString(), value)
+ }
+ av.visitEnd()
+ }
+
+ /*
+ * must-single-thread
+ */
+ def emitAnnotations(cw: asm.ClassVisitor, annotations: List[AnnotationInfo]) {
+ for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+ val AnnotationInfo(typ, args, assocs) = annot
+ assert(args.isEmpty, args)
+ val av = cw.visitAnnotation(descriptor(typ), true)
+ emitAssocs(av, assocs)
+ }
+ }
+
+ /*
+ * must-single-thread
+ */
+ def emitAnnotations(mw: asm.MethodVisitor, annotations: List[AnnotationInfo]) {
+ for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+ val AnnotationInfo(typ, args, assocs) = annot
+ assert(args.isEmpty, args)
+ val av = mw.visitAnnotation(descriptor(typ), true)
+ emitAssocs(av, assocs)
+ }
+ }
+
+ /*
+ * must-single-thread
+ */
+ def emitAnnotations(fw: asm.FieldVisitor, annotations: List[AnnotationInfo]) {
+ for(annot <- annotations; if shouldEmitAnnotation(annot)) {
+ val AnnotationInfo(typ, args, assocs) = annot
+ assert(args.isEmpty, args)
+ val av = fw.visitAnnotation(descriptor(typ), true)
+ emitAssocs(av, assocs)
+ }
+ }
+
+ /*
+ * must-single-thread
+ */
+ def emitParamAnnotations(jmethod: asm.MethodVisitor, pannotss: List[List[AnnotationInfo]]) {
+ val annotationss = pannotss map (_ filter shouldEmitAnnotation)
+ if (annotationss forall (_.isEmpty)) return
+ for (Pair(annots, idx) <- annotationss.zipWithIndex;
+ annot <- annots) {
+ val AnnotationInfo(typ, args, assocs) = annot
+ assert(args.isEmpty, args)
+ val pannVisitor: asm.AnnotationVisitor = jmethod.visitParameterAnnotation(idx, descriptor(typ), true)
+ emitAssocs(pannVisitor, assocs)
+ }
+ }
+
+ } // end of trait BCAnnotGen
+
+ trait BCJGenSigGen {
+
+ // @M don't generate java generics sigs for (members of) implementation
+ // classes, as they are monomorphic (TODO: ok?)
+ /*
+ * must-single-thread
+ */
+ private def needsGenericSignature(sym: Symbol) = !(
+ // PP: This condition used to include sym.hasExpandedName, but this leads
+ // to the total loss of generic information if a private member is
+ // accessed from a closure: both the field and the accessor were generated
+ // without it. This is particularly bad because the availability of
+ // generic information could disappear as a consequence of a seemingly
+ // unrelated change.
+ settings.Ynogenericsig
+ || sym.isArtifact
+ || sym.isLiftedMethod
+ || sym.isBridge
+ || (sym.ownerChain exists (_.isImplClass))
+ )
+
+ def getCurrentCUnit(): CompilationUnit
+
+ /* @return
+ * - `null` if no Java signature is to be added (`null` is what ASM expects in these cases).
+ * - otherwise the signature in question
+ *
+ * must-single-thread
+ */
+ def getGenericSignature(sym: Symbol, owner: Symbol): String = {
+
+ if (!needsGenericSignature(sym)) { return null }
+
+ val memberTpe = enteringErasure(owner.thisType.memberInfo(sym))
+
+ val jsOpt: Option[String] = erasure.javaSig(sym, memberTpe)
+ if (jsOpt.isEmpty) { return null }
+
+ val sig = jsOpt.get
+ log(sig) // This seems useful enough in the general case.
+
+ def wrap(op: => Unit) = {
+ try { op; true }
+ catch { case _: Throwable => false }
+ }
+
+ if (settings.Xverify) {
+ // Run the signature parser to catch bogus signatures.
+ val isValidSignature = wrap {
+ // Alternative: scala.tools.reflect.SigParser (frontend to sun.reflect.generics.parser.SignatureParser)
+ import scala.tools.asm.util.CheckClassAdapter
+ if (sym.isMethod) { CheckClassAdapter checkMethodSignature sig }
+ else if (sym.isTerm) { CheckClassAdapter checkFieldSignature sig }
+ else { CheckClassAdapter checkClassSignature sig }
+ }
+
+ if (!isValidSignature) {
+ getCurrentCUnit().warning(sym.pos,
+ """|compiler bug: created invalid generic signature for %s in %s
+ |signature: %s
+ |if this is reproducible, please report bug at https://issues.scala-lang.org/
+ """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig))
+ return null
+ }
+ }
+
+ if ((settings.check containsName phaseName)) {
+ val normalizedTpe = enteringErasure(erasure.prepareSigMap(memberTpe))
+ val bytecodeTpe = owner.thisType.memberInfo(sym)
+ if (!sym.isType && !sym.isConstructor && !(erasure.erasure(sym)(normalizedTpe) =:= bytecodeTpe)) {
+ getCurrentCUnit().warning(sym.pos,
+ """|compiler bug: created generic signature for %s in %s that does not conform to its erasure
+ |signature: %s
+ |original type: %s
+ |normalized type: %s
+ |erasure type: %s
+ |if this is reproducible, please report bug at http://issues.scala-lang.org/
+ """.trim.stripMargin.format(sym, sym.owner.skipPackageObject.fullName, sig, memberTpe, normalizedTpe, bytecodeTpe))
+ return null
+ }
+ }
+
+ sig
+ }
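+
+ /* Example (illustrative): for a method like `def identity[T](x: T): T`, the signature
+  * handed to ASM would look like "<T:Ljava/lang/Object;>(TT;)TT;".
+  */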
+
+ } // end of trait BCJGenSigGen
+
+ trait BCForwardersGen extends BCAnnotGen with BCJGenSigGen {
+
+ // -----------------------------------------------------------------------------------------
+ // Static forwarders (related to mirror classes but also present in
+ // a plain class lacking companion module, for details see `isCandidateForForwarders`).
+ // -----------------------------------------------------------------------------------------
+
+ val ExcludedForwarderFlags = {
+ import symtab.Flags._
+ // Should include DEFERRED but this breaks findMember.
+ ( CASE | SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO )
+ }
+
+ /* Adds a @throws(classOf[java.rmi.RemoteException]) annotation to `meth` when the enclosing
+  * class is remote (or `meth` itself is remote and public) and no such annotation is present yet.
+ *
+ * Invoked from genMethod() and addForwarder().
+ *
+ * must-single-thread
+ */
+ def addRemoteExceptionAnnot(isRemoteClass: Boolean, isJMethodPublic: Boolean, meth: Symbol) {
+ val needsAnnotation = (
+ ( isRemoteClass ||
+ isRemote(meth) && isJMethodPublic
+ ) && !(meth.throwsAnnotations contains definitions.RemoteExceptionClass)
+ )
+ if (needsAnnotation) {
+ val c = Constant(definitions.RemoteExceptionClass.tpe)
+ val arg = Literal(c) setType c.tpe
+ meth.addAnnotation(appliedType(definitions.ThrowsClass, c.tpe), arg)
+ }
+ }
+
+ /* Add a forwarder for method m. Used only from addForwarders().
+ *
+ * must-single-thread
+ */
+ private def addForwarder(isRemoteClass: Boolean, jclass: asm.ClassVisitor, module: Symbol, m: Symbol) {
+ val moduleName = internalName(module)
+ val methodInfo = module.thisType.memberInfo(m)
+ val paramJavaTypes: List[BType] = methodInfo.paramTypes map toTypeKind
+ // val paramNames = 0 until paramJavaTypes.length map ("x_" + _)
+
+ /* Forwarders must not be marked final,
+ * as the JVM will not allow redefinition of a final static method,
+ * and we don't know what classes might be subclassing the companion class. See SI-4827.
+ */
+ // TODO: evaluate the other flags we might be dropping on the floor here.
+ // TODO: ACC_SYNTHETIC ?
+ val flags = PublicStatic | (
+ if (m.isVarargsMethod) asm.Opcodes.ACC_VARARGS else 0
+ )
+
+ // TODO needed? for(ann <- m.annotations) { ann.symbol.initialize }
+ val jgensig = if (m.isDeferred) null else getGenericSignature(m, module); // only add generic signature if method concrete; bug #1745
+ addRemoteExceptionAnnot(isRemoteClass, hasPublicBitSet(flags), m)
+ val (throws, others) = m.annotations partition (_.symbol == definitions.ThrowsClass)
+ val thrownExceptions: List[String] = getExceptions(throws)
+
+ val jReturnType = toTypeKind(methodInfo.resultType)
+ val mdesc = BType.getMethodType(jReturnType, mkArray(paramJavaTypes)).getDescriptor
+ val mirrorMethodName = m.javaSimpleName.toString
+ val mirrorMethod: asm.MethodVisitor = jclass.visitMethod(
+ flags,
+ mirrorMethodName,
+ mdesc,
+ jgensig,
+ mkArray(thrownExceptions)
+ )
+
+ emitAnnotations(mirrorMethod, others)
+ emitParamAnnotations(mirrorMethod, m.info.params.map(_.annotations))
+
+ mirrorMethod.visitCode()
+
+ mirrorMethod.visitFieldInsn(asm.Opcodes.GETSTATIC, moduleName, strMODULE_INSTANCE_FIELD, descriptor(module))
+
+ var index = 0
+ for(jparamType <- paramJavaTypes) {
+ mirrorMethod.visitVarInsn(jparamType.getOpcode(asm.Opcodes.ILOAD), index)
+ assert(jparamType.sort != BType.METHOD, jparamType)
+ index += jparamType.getSize
+ }
+
+ mirrorMethod.visitMethodInsn(asm.Opcodes.INVOKEVIRTUAL, moduleName, mirrorMethodName, asmMethodType(m).getDescriptor)
+ mirrorMethod.visitInsn(jReturnType.getOpcode(asm.Opcodes.IRETURN))
+
+ mirrorMethod.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+ mirrorMethod.visitEnd()
+
+ }
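+
+ /* Illustrative sketch (not part of the original sources) of the forwarder emitted above:
+  * for `object Foo { def bar(i: Int): Int = ... }` the mirror/companion class gets roughly
+  *   public static int bar(int);
+  *     GETSTATIC     Foo$.MODULE$ : LFoo$;
+  *     ILOAD 0
+  *     INVOKEVIRTUAL Foo$.bar (I)I
+  *     IRETURN
+  */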
+
+ /* Add forwarders for all methods defined in `module` that don't conflict
+ * with methods in the companion class of `module`. A conflict arises when
+ * a method with the same name is defined both in a class and its companion object:
+ * method signature is not taken into account.
+ *
+ * must-single-thread
+ */
+ def addForwarders(isRemoteClass: Boolean, jclass: asm.ClassVisitor, jclassName: String, moduleClass: Symbol) {
+ assert(moduleClass.isModuleClass, moduleClass)
+ debuglog(s"Dumping mirror class for object: $moduleClass")
+
+ val linkedClass = moduleClass.companionClass
+ lazy val conflictingNames: Set[Name] = {
+ (linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name }).toSet
+ }
+ debuglog(s"Potentially conflicting names for forwarders: $conflictingNames")
+
+ for (m <- moduleClass.info.membersBasedOnFlags(ExcludedForwarderFlags, symtab.Flags.METHOD)) {
+ if (m.isType || m.isDeferred || (m.owner eq definitions.ObjectClass) || m.isConstructor)
+ debuglog(s"No forwarder for '$m' from $jclassName to '$moduleClass'")
+ else if (conflictingNames(m.name))
+ log(s"No forwarder for $m due to conflict with ${linkedClass.info.member(m.name)}")
+ else if (m.hasAccessBoundary)
+ log(s"No forwarder for non-public member $m")
+ else {
+ log(s"Adding static forwarder for '$m' from $jclassName to '$moduleClass'")
+ addForwarder(isRemoteClass, jclass, moduleClass, m)
+ }
+ }
+ }
+
+ /*
+ * Quoting from JVMS 4.7.5 The Exceptions Attribute
+ * "The Exceptions attribute indicates which checked exceptions a method may throw.
+ * There may be at most one Exceptions attribute in each method_info structure."
+ *
+ * The contents of that attribute are determined by the `String[] exceptions` argument to ASM's ClassVisitor.visitMethod()
+ * This method returns such list of internal names.
+ *
+ * must-single-thread
+ */
+ def getExceptions(excs: List[AnnotationInfo]): List[String] = {
+ for (ThrownException(exc) <- excs.distinct)
+ yield internalName(exc)
+ }
+
+ } // end of trait BCForwardersGen
+
+ trait BCClassGen extends BCInnerClassGen {
+
+ // Used as threshold above which a tableswitch bytecode instruction is preferred over a lookupswitch.
+ // There's a space tradeoff between these multi-branch instructions (details in the JVM spec).
+ // The particular value in use for `MIN_SWITCH_DENSITY` reflects a heuristic.
+ val MIN_SWITCH_DENSITY = 0.7
+
+ /*
+ * must-single-thread
+ */
+ def serialVUID(csym: Symbol): Option[Long] = csym getAnnotation definitions.SerialVersionUIDAttr collect {
+ case AnnotationInfo(_, Literal(const) :: _, _) => const.longValue
+ }
+
+ /*
+ * Add public static final field serialVersionUID with value `id`
+ *
+ * can-multi-thread
+ */
+ def addSerialVUID(id: Long, jclass: asm.ClassVisitor) {
+ // add static serialVersionUID field if `clasz` annotated with `@SerialVersionUID(uid: Long)`
+ jclass.visitField(
+ PublicStaticFinal,
+ "serialVersionUID",
+ "J",
+ null, // no java-generic-signature
+ new java.lang.Long(id)
+ ).visitEnd()
+ }
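+
+ /* Illustrative result: the field emitted above is equivalent to the Java declaration
+  *   public static final long serialVersionUID = <id>L;
+  */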
+
+ /*
+ * @param owner internal name of the enclosing class of the class.
+ *
+ * @param name the name of the method that contains the class.
+ *
+ * @param methodType the method that contains the class.
+ */
+ case class EnclMethodEntry(owner: String, name: String, methodType: BType)
+
+ /*
+ * @return null if the current class is not internal to a method
+ *
+ * Quoting from JVMS 4.7.7 The EnclosingMethod Attribute
+ * A class must have an EnclosingMethod attribute if and only if it is a local class or an anonymous class.
+ * A class may have no more than one EnclosingMethod attribute.
+ *
+ * must-single-thread
+ */
+ def getEnclosingMethodAttribute(clazz: Symbol): EnclMethodEntry = { // JVMS 4.7.7
+
+ def newEEE(eClass: Symbol, m: Symbol) = {
+ EnclMethodEntry(
+ internalName(eClass),
+ m.javaSimpleName.toString,
+ asmMethodType(m)
+ )
+ }
+
+ var res: EnclMethodEntry = null
+ val sym = clazz.originalEnclosingMethod
+ if (sym.isMethod) {
+ debuglog(s"enclosing method for $clazz is $sym (in ${sym.enclClass})")
+ res = newEEE(sym.enclClass, sym)
+ } else if (clazz.isAnonymousClass) {
+ val enclClass = clazz.rawowner
+ assert(enclClass.isClass, enclClass)
+ val sym = enclClass.primaryConstructor
+ if (sym == NoSymbol) {
+ log(s"Ran out of room looking for an enclosing method for $clazz: no constructor here: $enclClass.")
+ } else {
+ debuglog(s"enclosing method for $clazz is $sym (in $enclClass)")
+ res = newEEE(enclClass, sym)
+ }
+ }
+
+ res
+ }
+
+ } // end of trait BCClassGen
+
+ /* basic functionality for class file building of plain, mirror, and beaninfo classes. */
+ abstract class JBuilder extends BCInnerClassGen {
+
+ } // end of class JBuilder
+
+ /* functionality for building plain and mirror classes */
+ abstract class JCommonBuilder
+ extends JBuilder
+ with BCAnnotGen
+ with BCForwardersGen
+ with BCPickles { }
+
+ /* builder of mirror classes */
+ class JMirrorBuilder extends JCommonBuilder {
+
+ private var cunit: CompilationUnit = _
+ def getCurrentCUnit(): CompilationUnit = cunit;
+
+ /* Generate a mirror class for a top-level module. A mirror class is a class
+ * containing only static methods that forward to the corresponding method
+ * on the MODULE instance of the given Scala object. It will only be
+ * generated if there is no companion class: if there is, an attempt will
+ * instead be made to add the forwarder methods to the companion class.
+ *
+ * must-single-thread
+ */
+ def genMirrorClass(modsym: Symbol, cunit: CompilationUnit): asm.tree.ClassNode = {
+ assert(modsym.companionClass == NoSymbol, modsym)
+ innerClassBufferASM.clear()
+ this.cunit = cunit
+ val moduleName = internalName(modsym) // + "$"
+ val mirrorName = moduleName.substring(0, moduleName.length() - 1)
+
+ val flags = (asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL)
+ val mirrorClass = new asm.tree.ClassNode
+ mirrorClass.visit(
+ classfileVersion,
+ flags,
+ mirrorName,
+ null /* no java-generic-signature */,
+ JAVA_LANG_OBJECT.getInternalName,
+ EMPTY_STRING_ARRAY
+ )
+
+ if (emitSource) {
+ mirrorClass.visitSource("" + cunit.source,
+ null /* SourceDebugExtension */)
+ }
+
+ val ssa = getAnnotPickle(mirrorName, modsym.companionSymbol)
+ mirrorClass.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign)
+ emitAnnotations(mirrorClass, modsym.annotations ++ ssa)
+
+ addForwarders(isRemote(modsym), mirrorClass, mirrorName, modsym)
+
+ innerClassBufferASM ++= trackMemberClasses(modsym, Nil /* TODO what about Late-Closure-Classes */ )
+ addInnerClassesASM(mirrorClass, innerClassBufferASM.toList)
+
+ mirrorClass.visitEnd()
+
+ ("" + modsym.name) // this side-effect is necessary, really.
+
+ mirrorClass
+ }
+
+ } // end of class JMirrorBuilder
+
+ /* builder of bean info classes */
+ class JBeanInfoBuilder extends JBuilder {
+
+ /*
+ * Generate a bean info class that describes the given class.
+ *
+ * @author Ross Judson (ross.judson@soletta.com)
+ *
+ * must-single-thread
+ */
+ def genBeanInfoClass(cls: Symbol, cunit: CompilationUnit, fieldSymbols: List[Symbol], methodSymbols: List[Symbol]): asm.tree.ClassNode = {
+
+ def javaSimpleName(s: Symbol): String = { s.javaSimpleName.toString }
+
+ innerClassBufferASM.clear()
+
+ val flags = mkFlags(
+ javaFlags(cls),
+ if (isDeprecated(cls)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+
+ val beanInfoName = (internalName(cls) + "BeanInfo")
+ val beanInfoClass = new asm.tree.ClassNode
+ beanInfoClass.visit(
+ classfileVersion,
+ flags,
+ beanInfoName,
+ null, // no java-generic-signature
+ "scala/beans/ScalaBeanInfo",
+ EMPTY_STRING_ARRAY
+ )
+
+ beanInfoClass.visitSource(
+ cunit.source.toString,
+ null /* SourceDebugExtension */
+ )
+
+ var fieldList = List[String]()
+
+ for (f <- fieldSymbols if f.hasGetter;
+ g = f.getter(cls);
+ s = f.setter(cls);
+ if g.isPublic && !(f.name startsWith "$")
+ ) {
+ // inserting $outer breaks the bean
+ fieldList = javaSimpleName(f) :: javaSimpleName(g) :: (if (s != NoSymbol) javaSimpleName(s) else null) :: fieldList
+ }
+
+ val methodList: List[String] =
+ for (m <- methodSymbols
+ if !m.isConstructor &&
+ m.isPublic &&
+ !(m.name startsWith "$") &&
+ !m.isGetter &&
+ !m.isSetter)
+ yield javaSimpleName(m)
+
+ val constructor = beanInfoClass.visitMethod(
+ asm.Opcodes.ACC_PUBLIC,
+ INSTANCE_CONSTRUCTOR_NAME,
+ "()V",
+ null, // no java-generic-signature
+ EMPTY_STRING_ARRAY // no throwable exceptions
+ )
+
+ val stringArrayJType: BType = arrayOf(JAVA_LANG_STRING)
+ val conJType: BType =
+ BType.getMethodType(
+ BType.VOID_TYPE,
+ Array(exemplar(definitions.ClassClass).c, stringArrayJType, stringArrayJType)
+ )
+
+ def push(lst: List[String]) {
+ var fi = 0
+ for (f <- lst) {
+ constructor.visitInsn(asm.Opcodes.DUP)
+ constructor.visitLdcInsn(new java.lang.Integer(fi))
+ if (f == null) { constructor.visitInsn(asm.Opcodes.ACONST_NULL) }
+ else { constructor.visitLdcInsn(f) }
+ constructor.visitInsn(JAVA_LANG_STRING.getOpcode(asm.Opcodes.IASTORE))
+ fi += 1
+ }
+ }
+
+ constructor.visitCode()
+
+ constructor.visitVarInsn(asm.Opcodes.ALOAD, 0)
+ // push the class
+ constructor.visitLdcInsn(exemplar(cls).c)
+
+ // push the string array of field information
+ constructor.visitLdcInsn(new java.lang.Integer(fieldList.length))
+ constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, JAVA_LANG_STRING.getInternalName)
+ push(fieldList)
+
+ // push the string array of method information
+ constructor.visitLdcInsn(new java.lang.Integer(methodList.length))
+ constructor.visitTypeInsn(asm.Opcodes.ANEWARRAY, JAVA_LANG_STRING.getInternalName)
+ push(methodList)
+
+ // invoke the superclass constructor, which will do the
+ // necessary java reflection and create Method objects.
+ constructor.visitMethodInsn(asm.Opcodes.INVOKESPECIAL, "scala/beans/ScalaBeanInfo", INSTANCE_CONSTRUCTOR_NAME, conJType.getDescriptor)
+ constructor.visitInsn(asm.Opcodes.RETURN)
+
+ constructor.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+ constructor.visitEnd()
+
+ innerClassBufferASM ++= trackMemberClasses(cls, Nil /* TODO what about Late-Closure-Classes */ )
+ addInnerClassesASM(beanInfoClass, innerClassBufferASM.toList)
+
+ beanInfoClass.visitEnd()
+
+ beanInfoClass
+ }
+
+ } // end of class JBeanInfoBuilder
+
+ trait JAndroidBuilder {
+ self: BCInnerClassGen =>
+
+ /* From the reference documentation of the Android SDK:
+ * The `Parcelable` interface identifies classes whose instances can be written to and restored from a `Parcel`.
+ * Classes implementing the `Parcelable` interface must also have a static field called `CREATOR`,
+ * which is an object implementing the `Parcelable.Creator` interface.
+ */
+ val androidFieldName = newTermName("CREATOR")
+
+ /*
+ * must-single-thread
+ */
+ def isAndroidParcelableClass(sym: Symbol) =
+ (AndroidParcelableInterface != NoSymbol) &&
+ (sym.parentSymbols contains AndroidParcelableInterface)
+
+ /*
+ * must-single-thread
+ */
+ def legacyAddCreatorCode(clinit: asm.MethodVisitor, cnode: asm.tree.ClassNode, thisName: String) {
+ // this tracks the inner class in innerClassBufferASM, if needed.
+ val androidCreatorType = asmClassType(AndroidCreatorClass)
+ val tdesc_creator = androidCreatorType.getDescriptor
+
+ cnode.visitField(
+ PublicStaticFinal,
+ "CREATOR",
+ tdesc_creator,
+ null, // no java-generic-signature
+ null // no initial value
+ ).visitEnd()
+
+ val moduleName = (thisName + "$")
+
+ // GETSTATIC `moduleName`.MODULE$ : `moduleName`;
+ clinit.visitFieldInsn(
+ asm.Opcodes.GETSTATIC,
+ moduleName,
+ strMODULE_INSTANCE_FIELD,
+ "L" + moduleName + ";"
+ )
+
+ // INVOKEVIRTUAL `moduleName`.CREATOR() : android.os.Parcelable$Creator;
+ val bt = BType.getMethodType(androidCreatorType, Array.empty[BType])
+ clinit.visitMethodInsn(
+ asm.Opcodes.INVOKEVIRTUAL,
+ moduleName,
+ "CREATOR",
+ bt.getDescriptor
+ )
+
+ // PUTSTATIC `thisName`.CREATOR;
+ clinit.visitFieldInsn(
+ asm.Opcodes.PUTSTATIC,
+ thisName,
+ "CREATOR",
+ tdesc_creator
+ )
+ }
+
+ } // end of trait JAndroidBuilder
+
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
new file mode 100644
index 0000000000..eda17c6e32
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala
@@ -0,0 +1,844 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala
+package tools.nsc
+package backend.jvm
+
+import scala.tools.asm
+import scala.annotation.switch
+import scala.collection.{ immutable, mutable }
+import collection.convert.Wrappers.JListWrapper
+
+/*
+ * A high-level facade to the ASM API for bytecode generation.
+ *
+ * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded
+ * @version 1.0
+ *
+ */
+abstract class BCodeIdiomatic extends BCodeGlue {
+
+ import global._
+
+ val classfileVersion: Int = settings.target.value match {
+ case "jvm-1.5" => asm.Opcodes.V1_5
+ case "jvm-1.6" => asm.Opcodes.V1_6
+ case "jvm-1.7" => asm.Opcodes.V1_7
+ }
+
+ val majorVersion: Int = (classfileVersion & 0xFF)
+ val emitStackMapFrame = (majorVersion >= 50)
+
+ def mkFlags(args: Int*) = args.foldLeft(0)(_ | _)
+
+ val extraProc: Int = mkFlags(
+ asm.ClassWriter.COMPUTE_MAXS,
+ if (emitStackMapFrame) asm.ClassWriter.COMPUTE_FRAMES else 0
+ )
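+  // For example, under -target:jvm-1.6 classfileVersion is asm.Opcodes.V1_6 (major version 50),
+  // so emitStackMapFrame holds and extraProc requests COMPUTE_FRAMES in addition to COMPUTE_MAXS.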
+
+ val StringBuilderClassName = "scala/collection/mutable/StringBuilder"
+
+ val CLASS_CONSTRUCTOR_NAME = "<clinit>"
+ val INSTANCE_CONSTRUCTOR_NAME = "<init>"
+
+ val ObjectReference = brefType("java/lang/Object")
+ val AnyRefReference = ObjectReference
+ val objArrayReference = arrayOf(ObjectReference)
+
+ val JAVA_LANG_OBJECT = ObjectReference
+ val JAVA_LANG_STRING = brefType("java/lang/String")
+
+ var StringBuilderReference: BType = null
+
+ val EMPTY_STRING_ARRAY = Array.empty[String]
+ val EMPTY_INT_ARRAY = Array.empty[Int]
+ val EMPTY_LABEL_ARRAY = Array.empty[asm.Label]
+ val EMPTY_BTYPE_ARRAY = Array.empty[BType]
+
+ /* can-multi-thread */
+ final def mkArray(xs: List[BType]): Array[BType] = {
+ if (xs.isEmpty) { return EMPTY_BTYPE_ARRAY }
+ val a = new Array[BType](xs.size); xs.copyToArray(a); a
+ }
+ /* can-multi-thread */
+ final def mkArray(xs: List[String]): Array[String] = {
+ if (xs.isEmpty) { return EMPTY_STRING_ARRAY }
+ val a = new Array[String](xs.size); xs.copyToArray(a); a
+ }
+ /* can-multi-thread */
+ final def mkArray(xs: List[asm.Label]): Array[asm.Label] = {
+ if (xs.isEmpty) { return EMPTY_LABEL_ARRAY }
+ val a = new Array[asm.Label](xs.size); xs.copyToArray(a); a
+ }
+ /* can-multi-thread */
+ final def mkArray(xs: List[Int]): Array[Int] = {
+ if (xs.isEmpty) { return EMPTY_INT_ARRAY }
+ val a = new Array[Int](xs.size); xs.copyToArray(a); a
+ }
+
+ /*
+ * can-multi-thread
+ */
+ final def mkArrayReverse(xs: List[String]): Array[String] = {
+ val len = xs.size
+ if (len == 0) { return EMPTY_STRING_ARRAY }
+ val a = new Array[String](len)
+ var i = len - 1
+ var rest = xs
+ while (!rest.isEmpty) {
+ a(i) = rest.head
+ rest = rest.tail
+ i -= 1
+ }
+ a
+ }
+
+ /*
+ * can-multi-thread
+ */
+ final def mkArrayReverse(xs: List[Int]): Array[Int] = {
+ val len = xs.size
+ if (len == 0) { return EMPTY_INT_ARRAY }
+ val a = new Array[Int](len)
+ var i = len - 1
+ var rest = xs
+ while (!rest.isEmpty) {
+ a(i) = rest.head
+ rest = rest.tail
+ i -= 1
+ }
+ a
+ }
+
+ /*
+ * can-multi-thread
+ */
+ final def mkArrayReverse(xs: List[asm.Label]): Array[asm.Label] = {
+ val len = xs.size
+ if (len == 0) { return EMPTY_LABEL_ARRAY }
+ val a = new Array[asm.Label](len)
+ var i = len - 1
+ var rest = xs
+ while (!rest.isEmpty) {
+ a(i) = rest.head
+ rest = rest.tail
+ i -= 1
+ }
+ a
+ }
+
+ /*
+ * The type of 1-dimensional arrays of `elem` type.
+ * The invoker is responsible for tracking (if needed) the inner class given by the elem BType.
+ *
+ * must-single-thread
+ */
+ final def arrayOf(elem: BType): BType = {
+ assert(!(elem.isUnitType), s"The element type of an array can't be: $elem")
+ brefType("[" + elem.getDescriptor)
+ }
+
+ /*
+ * The type of N-dimensional arrays of `elem` type.
+ * The invoker is responsible for tracking (if needed) the inner class given by the elem BType.
+ *
+ * must-single-thread
+ */
+ final def arrayN(elem: BType, dims: Int): BType = {
+ assert(dims > 0)
+ assert(!(elem.isUnitType) && !(elem.isPhantomType),
+ "The element type of an array type is necessarily either a primitive type, or a class type, or an interface type.")
+ val desc = ("[" * dims) + elem.getDescriptor
+ brefType(desc)
+ }
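+  // For example, arrayOf(INT) denotes the descriptor "[I" and arrayN(INT, 2) denotes "[[I".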
+
+ /* Just a namespace for utilities that encapsulate MethodVisitor idioms.
+ * In the ASM world, org.objectweb.asm.commons.InstructionAdapter plays a similar role,
+ * but the methods here allow choosing when to transition from ICode to ASM types
+ * (including not at all, e.g. for performance).
+ */
+ abstract class JCodeMethodN {
+
+ def jmethod: asm.MethodVisitor
+
+ import asm.Opcodes;
+ import icodes.opcodes.{ InvokeStyle, Static, Dynamic, SuperCall }
+
+ final def emit(opc: Int) { jmethod.visitInsn(opc) }
+
+ /*
+ * can-multi-thread
+ */
+ final def genPrimitiveArithmetic(op: icodes.ArithmeticOp, kind: BType) {
+
+ import icodes.{ ADD, SUB, MUL, DIV, REM, NOT }
+
+ op match {
+
+ case ADD => add(kind)
+ case SUB => sub(kind)
+ case MUL => mul(kind)
+ case DIV => div(kind)
+ case REM => rem(kind)
+
+ case NOT =>
+ if (kind.isIntSizedType) {
+ emit(Opcodes.ICONST_M1)
+ emit(Opcodes.IXOR)
+ } else if (kind == LONG) {
+ jmethod.visitLdcInsn(new java.lang.Long(-1))
+ jmethod.visitInsn(Opcodes.LXOR)
+ } else {
+ abort(s"Impossible to negate an $kind")
+ }
+
+ case _ =>
+ abort(s"Unknown arithmetic primitive $op")
+ }
+
+ } // end of method genPrimitiveArithmetic()
+
+ /*
+ * can-multi-thread
+ */
+ final def genPrimitiveLogical(op: /* LogicalOp */ Int, kind: BType) {
+
+ import scalaPrimitives.{ AND, OR, XOR }
+
+ ((op, kind): @unchecked) match {
+ case (AND, LONG) => emit(Opcodes.LAND)
+ case (AND, INT) => emit(Opcodes.IAND)
+ case (AND, _) =>
+ emit(Opcodes.IAND)
+ if (kind != BOOL) { emitT2T(INT, kind) }
+
+ case (OR, LONG) => emit(Opcodes.LOR)
+ case (OR, INT) => emit(Opcodes.IOR)
+ case (OR, _) =>
+ emit(Opcodes.IOR)
+ if (kind != BOOL) { emitT2T(INT, kind) }
+
+ case (XOR, LONG) => emit(Opcodes.LXOR)
+ case (XOR, INT) => emit(Opcodes.IXOR)
+ case (XOR, _) =>
+ emit(Opcodes.IXOR)
+ if (kind != BOOL) { emitT2T(INT, kind) }
+ }
+
+ } // end of method genPrimitiveLogical()
+
+ /*
+ * can-multi-thread
+ */
+ final def genPrimitiveShift(op: /* ShiftOp */ Int, kind: BType) {
+
+ import scalaPrimitives.{ LSL, ASR, LSR }
+
+ ((op, kind): @unchecked) match {
+ case (LSL, LONG) => emit(Opcodes.LSHL)
+ case (LSL, INT) => emit(Opcodes.ISHL)
+ case (LSL, _) =>
+ emit(Opcodes.ISHL)
+ emitT2T(INT, kind)
+
+ case (ASR, LONG) => emit(Opcodes.LSHR)
+ case (ASR, INT) => emit(Opcodes.ISHR)
+ case (ASR, _) =>
+ emit(Opcodes.ISHR)
+ emitT2T(INT, kind)
+
+ case (LSR, LONG) => emit(Opcodes.LUSHR)
+ case (LSR, INT) => emit(Opcodes.IUSHR)
+ case (LSR, _) =>
+ emit(Opcodes.IUSHR)
+ emitT2T(INT, kind)
+ }
+
+ } // end of method genPrimitiveShift()
+
+ /*
+ * can-multi-thread
+ */
+ final def genPrimitiveComparison(op: icodes.ComparisonOp, kind: BType) {
+
+ import icodes.{ CMPL, CMP, CMPG }
+
+ ((op, kind): @unchecked) match {
+ case (CMP, LONG) => emit(Opcodes.LCMP)
+ case (CMPL, FLOAT) => emit(Opcodes.FCMPL)
+ case (CMPG, FLOAT) => emit(Opcodes.FCMPG)
+ case (CMPL, DOUBLE) => emit(Opcodes.DCMPL)
+        case (CMPG, DOUBLE) => emit(Opcodes.DCMPL) // TODO bug? why not DCMPG? see http://docs.oracle.com/javase/specs/jvms/se5.0/html/Instructions2.doc3.html
+ }
+
+ } // end of method genPrimitiveComparison()
+
+ /*
+ * can-multi-thread
+ */
+ final def genStartConcat {
+ jmethod.visitTypeInsn(Opcodes.NEW, StringBuilderClassName)
+ jmethod.visitInsn(Opcodes.DUP)
+ invokespecial(
+ StringBuilderClassName,
+ INSTANCE_CONSTRUCTOR_NAME,
+ "()V"
+ )
+ }
+
+ /*
+ * can-multi-thread
+ */
+ final def genStringConcat(el: BType) {
+
+ val jtype =
+ if (el.isArray || el.hasObjectSort) JAVA_LANG_OBJECT
+ else el;
+
+ val bt = BType.getMethodType(StringBuilderReference, Array(jtype))
+
+ invokevirtual(StringBuilderClassName, "append", bt.getDescriptor)
+ }
+
+ /*
+ * can-multi-thread
+ */
+ final def genEndConcat {
+ invokevirtual(StringBuilderClassName, "toString", "()Ljava/lang/String;")
+ }
+
+ /*
+ * Emits one or more conversion instructions based on the types given as arguments.
+ *
+ * @param from The type of the value to be converted into another type.
+ * @param to The type the value will be converted into.
+ *
+ * can-multi-thread
+ */
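+    /* For example, emitT2T(INT, BYTE) emits a single I2B, while emitT2T(FLOAT, SHORT)
+     * emits F2I followed by I2S (conversions to sub-int types go through INT).
+     */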
+ final def emitT2T(from: BType, to: BType) {
+
+ assert(
+ from.isNonUnitValueType && to.isNonUnitValueType,
+ s"Cannot emit primitive conversion from $from to $to"
+ )
+
+ def pickOne(opcs: Array[Int]) { // TODO index on to.sort
+ val chosen = (to: @unchecked) match {
+ case BYTE => opcs(0)
+ case SHORT => opcs(1)
+ case CHAR => opcs(2)
+ case INT => opcs(3)
+ case LONG => opcs(4)
+ case FLOAT => opcs(5)
+ case DOUBLE => opcs(6)
+ }
+ if (chosen != -1) { emit(chosen) }
+ }
+
+ if (from == to) { return }
+ // the only conversion involving BOOL that is allowed is (BOOL -> BOOL)
+ assert(from != BOOL && to != BOOL, s"inconvertible types : $from -> $to")
+
+ // We're done with BOOL already
+ (from.sort: @switch) match {
+
+ // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
+
+ case asm.Type.BYTE => pickOne(JCodeMethodN.fromByteT2T)
+ case asm.Type.SHORT => pickOne(JCodeMethodN.fromShortT2T)
+ case asm.Type.CHAR => pickOne(JCodeMethodN.fromCharT2T)
+ case asm.Type.INT => pickOne(JCodeMethodN.fromIntT2T)
+
+ case asm.Type.FLOAT =>
+ import asm.Opcodes.{ F2L, F2D, F2I }
+ (to.sort: @switch) match {
+ case asm.Type.LONG => emit(F2L)
+ case asm.Type.DOUBLE => emit(F2D)
+ case _ => emit(F2I); emitT2T(INT, to)
+ }
+
+ case asm.Type.LONG =>
+ import asm.Opcodes.{ L2F, L2D, L2I }
+ (to.sort: @switch) match {
+ case asm.Type.FLOAT => emit(L2F)
+ case asm.Type.DOUBLE => emit(L2D)
+ case _ => emit(L2I); emitT2T(INT, to)
+ }
+
+ case asm.Type.DOUBLE =>
+ import asm.Opcodes.{ D2L, D2F, D2I }
+ (to.sort: @switch) match {
+ case asm.Type.FLOAT => emit(D2F)
+ case asm.Type.LONG => emit(D2L)
+ case _ => emit(D2I); emitT2T(INT, to)
+ }
+ }
+ } // end of emitT2T()
+
+ // can-multi-thread
+ final def aconst(cst: AnyRef) {
+ if (cst == null) { emit(Opcodes.ACONST_NULL) }
+ else { jmethod.visitLdcInsn(cst) }
+ }
+
+ // can-multi-thread
+ final def boolconst(b: Boolean) { iconst(if (b) 1 else 0) }
+
+ // can-multi-thread
+ final def iconst(cst: Int) {
+ if (cst >= -1 && cst <= 5) {
+ emit(Opcodes.ICONST_0 + cst)
+ } else if (cst >= java.lang.Byte.MIN_VALUE && cst <= java.lang.Byte.MAX_VALUE) {
+ jmethod.visitIntInsn(Opcodes.BIPUSH, cst)
+ } else if (cst >= java.lang.Short.MIN_VALUE && cst <= java.lang.Short.MAX_VALUE) {
+ jmethod.visitIntInsn(Opcodes.SIPUSH, cst)
+ } else {
+ jmethod.visitLdcInsn(new Integer(cst))
+ }
+ }
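+    // For example, iconst(3) emits ICONST_3, iconst(100) emits BIPUSH 100,
+    // and iconst(40000) falls back to an LDC of the boxed Integer.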
+
+ // can-multi-thread
+ final def lconst(cst: Long) {
+ if (cst == 0L || cst == 1L) {
+ emit(Opcodes.LCONST_0 + cst.asInstanceOf[Int])
+ } else {
+ jmethod.visitLdcInsn(new java.lang.Long(cst))
+ }
+ }
+
+ // can-multi-thread
+ final def fconst(cst: Float) {
+ val bits: Int = java.lang.Float.floatToIntBits(cst)
+ if (bits == 0L || bits == 0x3f800000 || bits == 0x40000000) { // 0..2
+ emit(Opcodes.FCONST_0 + cst.asInstanceOf[Int])
+ } else {
+ jmethod.visitLdcInsn(new java.lang.Float(cst))
+ }
+ }
+
+ // can-multi-thread
+ final def dconst(cst: Double) {
+ val bits: Long = java.lang.Double.doubleToLongBits(cst)
+ if (bits == 0L || bits == 0x3ff0000000000000L) { // +0.0d and 1.0d
+ emit(Opcodes.DCONST_0 + cst.asInstanceOf[Int])
+ } else {
+ jmethod.visitLdcInsn(new java.lang.Double(cst))
+ }
+ }
+
+ // can-multi-thread
+ final def newarray(elem: BType) {
+ if (elem.isRefOrArrayType || elem.isPhantomType ) {
+ /* phantom type at play in `Array(null)`, SI-1513. On the other hand, Array(()) has element type `scala.runtime.BoxedUnit` which hasObjectSort. */
+ jmethod.visitTypeInsn(Opcodes.ANEWARRAY, elem.getInternalName)
+ } else {
+ val rand = {
+ // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
+ (elem.sort: @switch) match {
+ case asm.Type.BOOLEAN => Opcodes.T_BOOLEAN
+ case asm.Type.BYTE => Opcodes.T_BYTE
+ case asm.Type.SHORT => Opcodes.T_SHORT
+ case asm.Type.CHAR => Opcodes.T_CHAR
+ case asm.Type.INT => Opcodes.T_INT
+ case asm.Type.LONG => Opcodes.T_LONG
+ case asm.Type.FLOAT => Opcodes.T_FLOAT
+ case asm.Type.DOUBLE => Opcodes.T_DOUBLE
+ }
+ }
+ jmethod.visitIntInsn(Opcodes.NEWARRAY, rand)
+ }
+ }
+
+
+ final def load( idx: Int, tk: BType) { emitVarInsn(Opcodes.ILOAD, idx, tk) } // can-multi-thread
+ final def store(idx: Int, tk: BType) { emitVarInsn(Opcodes.ISTORE, idx, tk) } // can-multi-thread
+
+ final def aload( tk: BType) { emitTypeBased(JCodeMethodN.aloadOpcodes, tk) } // can-multi-thread
+ final def astore(tk: BType) { emitTypeBased(JCodeMethodN.astoreOpcodes, tk) } // can-multi-thread
+
+ final def neg(tk: BType) { emitPrimitive(JCodeMethodN.negOpcodes, tk) } // can-multi-thread
+ final def add(tk: BType) { emitPrimitive(JCodeMethodN.addOpcodes, tk) } // can-multi-thread
+ final def sub(tk: BType) { emitPrimitive(JCodeMethodN.subOpcodes, tk) } // can-multi-thread
+ final def mul(tk: BType) { emitPrimitive(JCodeMethodN.mulOpcodes, tk) } // can-multi-thread
+ final def div(tk: BType) { emitPrimitive(JCodeMethodN.divOpcodes, tk) } // can-multi-thread
+ final def rem(tk: BType) { emitPrimitive(JCodeMethodN.remOpcodes, tk) } // can-multi-thread
+
+ // can-multi-thread
+ final def invokespecial(owner: String, name: String, desc: String) {
+ jmethod.visitMethodInsn(Opcodes.INVOKESPECIAL, owner, name, desc)
+ }
+ // can-multi-thread
+ final def invokestatic(owner: String, name: String, desc: String) {
+ jmethod.visitMethodInsn(Opcodes.INVOKESTATIC, owner, name, desc)
+ }
+ // can-multi-thread
+ final def invokeinterface(owner: String, name: String, desc: String) {
+ jmethod.visitMethodInsn(Opcodes.INVOKEINTERFACE, owner, name, desc)
+ }
+ // can-multi-thread
+ final def invokevirtual(owner: String, name: String, desc: String) {
+ jmethod.visitMethodInsn(Opcodes.INVOKEVIRTUAL, owner, name, desc)
+ }
+
+ // can-multi-thread
+ final def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) }
+ // can-multi-thread
+ final def emitIF(cond: icodes.TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIF, label) }
+ // can-multi-thread
+ final def emitIF_ICMP(cond: icodes.TestOp, label: asm.Label) { jmethod.visitJumpInsn(cond.opcodeIFICMP, label) }
+ // can-multi-thread
+ final def emitIF_ACMP(cond: icodes.TestOp, label: asm.Label) {
+ assert((cond == icodes.EQ) || (cond == icodes.NE), cond)
+ val opc = (if (cond == icodes.EQ) Opcodes.IF_ACMPEQ else Opcodes.IF_ACMPNE)
+ jmethod.visitJumpInsn(opc, label)
+ }
+ // can-multi-thread
+ final def emitIFNONNULL(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.IFNONNULL, label) }
+ // can-multi-thread
+ final def emitIFNULL (label: asm.Label) { jmethod.visitJumpInsn(Opcodes.IFNULL, label) }
+
+ // can-multi-thread
+ final def emitRETURN(tk: BType) {
+ if (tk == UNIT) { emit(Opcodes.RETURN) }
+ else { emitTypeBased(JCodeMethodN.returnOpcodes, tk) }
+ }
+
+    /* Emits one of tableswitch or lookupswitch.
+ *
+ * can-multi-thread
+ */
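+    /* For example, keys 1, 2, 3, 10 span a range of 10 with only 4 entries (density 0.4),
+     * so for a minDensity of, say, 0.7 a LOOKUPSWITCH is emitted, whereas keys 1, 2, 3, 4
+     * (density 1.0) produce a TABLESWITCH.
+     */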
+ final def emitSWITCH(keys: Array[Int], branches: Array[asm.Label], defaultBranch: asm.Label, minDensity: Double) {
+ assert(keys.length == branches.length)
+
+      // For empty keys, it makes sense to emit LOOKUPSWITCH with just the defaultBranch.
+ // Similar to what javac emits for a switch statement consisting only of a default case.
+ if (keys.length == 0) {
+ jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches)
+ return
+ }
+
+ // sort `keys` by increasing key, keeping `branches` in sync. TODO FIXME use quicksort
+ var i = 1
+ while (i < keys.length) {
+ var j = 1
+ while (j <= keys.length - i) {
+ if (keys(j) < keys(j - 1)) {
+ val tmp = keys(j)
+ keys(j) = keys(j - 1)
+ keys(j - 1) = tmp
+ val tmpL = branches(j)
+ branches(j) = branches(j - 1)
+ branches(j - 1) = tmpL
+ }
+ j += 1
+ }
+ i += 1
+ }
+
+ // check for duplicate keys to avoid "VerifyError: unsorted lookupswitch" (SI-6011)
+ i = 1
+ while (i < keys.length) {
+ if (keys(i-1) == keys(i)) {
+ abort("duplicate keys in SWITCH, can't pick arbitrarily one of them to evict, see SI-6011.")
+ }
+ i += 1
+ }
+
+ val keyMin = keys(0)
+ val keyMax = keys(keys.length - 1)
+
+ val isDenseEnough: Boolean = {
+ /* Calculate in long to guard against overflow. TODO what overflow? */
+ val keyRangeD: Double = (keyMax.asInstanceOf[Long] - keyMin + 1).asInstanceOf[Double]
+ val klenD: Double = keys.length
+ val kdensity: Double = (klenD / keyRangeD)
+
+ kdensity >= minDensity
+ }
+
+ if (isDenseEnough) {
+ // use a table in which holes are filled with defaultBranch.
+ val keyRange = (keyMax - keyMin + 1)
+ val newBranches = new Array[asm.Label](keyRange)
+ var oldPos = 0
+ var i = 0
+ while (i < keyRange) {
+ val key = keyMin + i;
+ if (keys(oldPos) == key) {
+ newBranches(i) = branches(oldPos)
+ oldPos += 1
+ } else {
+ newBranches(i) = defaultBranch
+ }
+ i += 1
+ }
+ assert(oldPos == keys.length, "emitSWITCH")
+ jmethod.visitTableSwitchInsn(keyMin, keyMax, defaultBranch, newBranches: _*)
+ } else {
+ jmethod.visitLookupSwitchInsn(defaultBranch, keys, branches)
+ }
+ }
+
+ // internal helpers -- not part of the public API of `jcode`
+ // don't make private otherwise inlining will suffer
+
+ // can-multi-thread
+ final def emitVarInsn(opc: Int, idx: Int, tk: BType) {
+ assert((opc == Opcodes.ILOAD) || (opc == Opcodes.ISTORE), opc)
+ jmethod.visitVarInsn(tk.getOpcode(opc), idx)
+ }
+
+ // ---------------- array load and store ----------------
+
+ // can-multi-thread
+ final def emitTypeBased(opcs: Array[Int], tk: BType) {
+ assert(tk != UNIT, tk)
+ val opc = {
+ if (tk.isRefOrArrayType) { opcs(0) }
+ else if (tk.isIntSizedType) {
+ (tk: @unchecked) match {
+ case BOOL | BYTE => opcs(1)
+ case SHORT => opcs(2)
+ case CHAR => opcs(3)
+ case INT => opcs(4)
+ }
+ } else {
+ (tk: @unchecked) match {
+ case LONG => opcs(5)
+ case FLOAT => opcs(6)
+ case DOUBLE => opcs(7)
+ }
+ }
+ }
+ emit(opc)
+ }
+
+ // ---------------- primitive operations ----------------
+
+ // can-multi-thread
+ final def emitPrimitive(opcs: Array[Int], tk: BType) {
+ val opc = {
+ // using `asm.Type.SHORT` instead of `BType.SHORT` because otherwise "warning: could not emit switch for @switch annotated match"
+ (tk.sort: @switch) match {
+ case asm.Type.LONG => opcs(1)
+ case asm.Type.FLOAT => opcs(2)
+ case asm.Type.DOUBLE => opcs(3)
+ case _ => opcs(0)
+ }
+ }
+ emit(opc)
+ }
+
+ // can-multi-thread
+ final def drop(tk: BType) { emit(if (tk.isWideType) Opcodes.POP2 else Opcodes.POP) }
+
+ // can-multi-thread
+ final def dup(tk: BType) { emit(if (tk.isWideType) Opcodes.DUP2 else Opcodes.DUP) }
+
+ // ---------------- type checks and casts ----------------
+
+ // can-multi-thread
+ final def isInstance(tk: BType) {
+ jmethod.visitTypeInsn(Opcodes.INSTANCEOF, tk.getInternalName)
+ }
+
+ // can-multi-thread
+ final def checkCast(tk: BType) {
+ assert(tk.isRefOrArrayType, s"checkcast on primitive type: $tk")
+ // TODO ICode also requires: but that's too much, right? assert(!isBoxedType(tk), "checkcast on boxed type: " + tk)
+ jmethod.visitTypeInsn(Opcodes.CHECKCAST, tk.getInternalName)
+ }
+
+ } // end of class JCodeMethodN
+
+ /* Constant-valued val-members of JCodeMethodN at the companion object, so as to avoid re-initializing them multiple times. */
+ object JCodeMethodN {
+
+ import asm.Opcodes._
+
+ // ---------------- conversions ----------------
+
+ val fromByteT2T = { Array( -1, -1, I2C, -1, I2L, I2F, I2D) } // do nothing for (BYTE -> SHORT) and for (BYTE -> INT)
+ val fromCharT2T = { Array(I2B, I2S, -1, -1, I2L, I2F, I2D) } // for (CHAR -> INT) do nothing
+ val fromShortT2T = { Array(I2B, -1, I2C, -1, I2L, I2F, I2D) } // for (SHORT -> INT) do nothing
+ val fromIntT2T = { Array(I2B, I2S, I2C, -1, I2L, I2F, I2D) }
+
+ // ---------------- array load and store ----------------
+
+ val aloadOpcodes = { Array(AALOAD, BALOAD, SALOAD, CALOAD, IALOAD, LALOAD, FALOAD, DALOAD) }
+ val astoreOpcodes = { Array(AASTORE, BASTORE, SASTORE, CASTORE, IASTORE, LASTORE, FASTORE, DASTORE) }
+ val returnOpcodes = { Array(ARETURN, IRETURN, IRETURN, IRETURN, IRETURN, LRETURN, FRETURN, DRETURN) }
+
+ // ---------------- primitive operations ----------------
+
+ val negOpcodes: Array[Int] = { Array(INEG, LNEG, FNEG, DNEG) }
+ val addOpcodes: Array[Int] = { Array(IADD, LADD, FADD, DADD) }
+ val subOpcodes: Array[Int] = { Array(ISUB, LSUB, FSUB, DSUB) }
+ val mulOpcodes: Array[Int] = { Array(IMUL, LMUL, FMUL, DMUL) }
+ val divOpcodes: Array[Int] = { Array(IDIV, LDIV, FDIV, DDIV) }
+ val remOpcodes: Array[Int] = { Array(IREM, LREM, FREM, DREM) }
+
+ } // end of object JCodeMethodN
+
+ // ---------------- adapted from scalaPrimitives ----------------
+
+ /* Given `code` reports the src TypeKind of the coercion indicated by `code`.
+ * To find the dst TypeKind, `ScalaPrimitives.generatedKind(code)` can be used.
+ *
+ * can-multi-thread
+ */
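+  // For example, coercionFrom(scalaPrimitives.I2L) == INT, while coercionTo() below
+  // yields LONG for the same code.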
+ final def coercionFrom(code: Int): BType = {
+ import scalaPrimitives._
+ (code: @switch) match {
+ case B2B | B2C | B2S | B2I | B2L | B2F | B2D => BYTE
+ case S2B | S2S | S2C | S2I | S2L | S2F | S2D => SHORT
+ case C2B | C2S | C2C | C2I | C2L | C2F | C2D => CHAR
+ case I2B | I2S | I2C | I2I | I2L | I2F | I2D => INT
+ case L2B | L2S | L2C | L2I | L2L | L2F | L2D => LONG
+ case F2B | F2S | F2C | F2I | F2L | F2F | F2D => FLOAT
+ case D2B | D2S | D2C | D2I | D2L | D2F | D2D => DOUBLE
+ }
+ }
+
+ /* If code is a coercion primitive, the result type.
+ *
+ * can-multi-thread
+ */
+ final def coercionTo(code: Int): BType = {
+ import scalaPrimitives._
+ (code: @scala.annotation.switch) match {
+ case B2B | C2B | S2B | I2B | L2B | F2B | D2B => BYTE
+ case B2C | C2C | S2C | I2C | L2C | F2C | D2C => CHAR
+ case B2S | C2S | S2S | I2S | L2S | F2S | D2S => SHORT
+ case B2I | C2I | S2I | I2I | L2I | F2I | D2I => INT
+ case B2L | C2L | S2L | I2L | L2L | F2L | D2L => LONG
+ case B2F | C2F | S2F | I2F | L2F | F2F | D2F => FLOAT
+ case B2D | C2D | S2D | I2D | L2D | F2D | D2D => DOUBLE
+ }
+ }
+
+ final val typeOfArrayOp: Map[Int, BType] = {
+ import scalaPrimitives._
+ Map(
+ (List(ZARRAY_LENGTH, ZARRAY_GET, ZARRAY_SET) map (_ -> BOOL)) ++
+ (List(BARRAY_LENGTH, BARRAY_GET, BARRAY_SET) map (_ -> BYTE)) ++
+ (List(SARRAY_LENGTH, SARRAY_GET, SARRAY_SET) map (_ -> SHORT)) ++
+ (List(CARRAY_LENGTH, CARRAY_GET, CARRAY_SET) map (_ -> CHAR)) ++
+ (List(IARRAY_LENGTH, IARRAY_GET, IARRAY_SET) map (_ -> INT)) ++
+ (List(LARRAY_LENGTH, LARRAY_GET, LARRAY_SET) map (_ -> LONG)) ++
+ (List(FARRAY_LENGTH, FARRAY_GET, FARRAY_SET) map (_ -> FLOAT)) ++
+ (List(DARRAY_LENGTH, DARRAY_GET, DARRAY_SET) map (_ -> DOUBLE)) ++
+ (List(OARRAY_LENGTH, OARRAY_GET, OARRAY_SET) map (_ -> ObjectReference)) : _*
+ )
+ }
+
+ /*
+ * Collects (in `result`) all LabelDef nodes enclosed (directly or not) by each node it visits.
+ *
+ * In other words, this traverser prepares a map giving
+ * all labelDefs (the entry-value) having a Tree node (the entry-key) as ancestor.
+ * The entry-value for a LabelDef entry-key always contains the entry-key.
+ *
+ */
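+  /* For example, for a right-hand side of the shape
+   *
+   *   Block(stats, LabelDef(L1, params, rhs))   // no further LabelDefs inside
+   *
+   * both the Block and the LabelDef are mapped to List(L1), while subtrees enclosing
+   * no LabelDef get no entry at all.
+   */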
+ class LabelDefsFinder extends Traverser {
+ val result = mutable.Map.empty[Tree, List[LabelDef]]
+ var acc: List[LabelDef] = Nil
+
+ /*
+ * can-multi-thread
+ */
+ override def traverse(tree: Tree) {
+ val saved = acc
+ acc = Nil
+ super.traverse(tree)
+ // acc contains all LabelDefs found under (but not at) `tree`
+ tree match {
+ case lblDf: LabelDef => acc ::= lblDf
+ case _ => ()
+ }
+ if (acc.isEmpty) {
+ acc = saved
+ } else {
+ result += (tree -> acc)
+ acc = acc ::: saved
+ }
+ }
+ }
+
+ implicit class MethodIterClassNode(cnode: asm.tree.ClassNode) {
+
+ @inline final def foreachMethod(f: (asm.tree.MethodNode) => Unit) { toMethodList.foreach(f) }
+
+ @inline final def toMethodList: List[asm.tree.MethodNode] = { JListWrapper(cnode.methods).toList }
+
+ @inline final def toFieldList: List[asm.tree.FieldNode] = { JListWrapper(cnode.fields).toList }
+
+ }
+
+ implicit class InsnIterMethodNode(mnode: asm.tree.MethodNode) {
+
+ @inline final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit) { mnode.instructions.foreachInsn(f) }
+
+ @inline final def toList: List[asm.tree.AbstractInsnNode] = { mnode.instructions.toList }
+
+ }
+
+ implicit class InsnIterInsnList(lst: asm.tree.InsnList) {
+
+ @inline final def foreachInsn(f: (asm.tree.AbstractInsnNode) => Unit) {
+ val insnIter = lst.iterator()
+ while (insnIter.hasNext) {
+ f(insnIter.next())
+ }
+ }
+
+ @inline final def toList: List[asm.tree.AbstractInsnNode] = {
+ var result: List[asm.tree.AbstractInsnNode] = Nil
+ lst foreachInsn { insn => if (insn != null) { result ::= insn } }
+ result.reverse
+ }
+
+ }
+
+ /*
+   * Upon finding a name already seen among previous list elements, appends a numeric postfix to make it unique.
+ */
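+  // For example, uniquify(List("x", "x", "y", "x")) == List("x", "x$1", "y", "x$2").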
+ def uniquify(names: List[String]): List[String] = {
+ val seen = mutable.Set.empty[String]
+
+ @scala.annotation.tailrec def uniquified(current: String, attempt: Int): String = {
+ if (seen contains current) {
+ val currentBis = (current + "$" + attempt.toString)
+ if (seen contains currentBis) {
+ uniquified(current, attempt + 1)
+ } else currentBis
+ } else current
+ }
+
+ var rest = names
+ var result: List[String] = Nil
+ while (rest.nonEmpty) {
+ val u = uniquified(rest.head.trim, 1)
+ seen += u
+ result ::= u
+ rest = rest.tail
+ }
+
+ result.reverse
+ }
+
+ def allDifferent[ElemType](xs: Iterable[ElemType]): Boolean = {
+ val seen = mutable.Set.empty[ElemType]
+ val iter = xs.iterator
+ while (iter.hasNext) {
+ val nxt = iter.next()
+ if (seen contains nxt) { return false }
+ seen += nxt
+ }
+ true
+ }
+
+}
\ No newline at end of file
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
new file mode 100644
index 0000000000..8b6b4ab9ce
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala
@@ -0,0 +1,727 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+
+package scala
+package tools.nsc
+package backend
+package jvm
+
+import scala.collection.{ mutable, immutable }
+import scala.tools.nsc.symtab._
+import scala.annotation.switch
+
+import scala.tools.asm
+
+/*
+ *
+ * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
+ * @version 1.0
+ *
+ */
+abstract class BCodeSkelBuilder extends BCodeHelpers {
+ import global._
+ import definitions._
+
+ /*
+ * There's a dedicated PlainClassBuilder for each CompilationUnit,
+   * which simplifies the initialization of per-class data structures in `genPlainClass()`, which in turn delegates to `initJClass()`.
+ *
+ * The entry-point to emitting bytecode instructions is `genDefDef()` where the per-method data structures are initialized,
+ * including `resetMethodBookkeeping()` and `initJMethod()`.
+ * Once that's been done, and assuming the method being visited isn't abstract, `emitNormalMethodBody()` populates
+ * the ASM MethodNode instance with ASM AbstractInsnNodes.
+ *
+ * Given that CleanUp delivers trees that produce values on the stack,
+ * the entry-point to all-things instruction-emit is `genLoad()`.
+   * There, an operation taking N arguments results in recursively emitting instructions to load each of them,
+ * followed by emitting instructions to process those arguments (to be found at run-time on the operand-stack).
+ *
+ * In a few cases the above recipe deserves more details, as provided in the documentation for:
+ * - `genLoadTry()`
+   *   - `genSynchronized()`
+ * - `jumpDest` , `cleanups` , `labelDefsAtOrUnder`
+ */
+ abstract class PlainSkelBuilder(cunit: CompilationUnit)
+ extends BCClassGen
+ with BCAnnotGen
+ with BCInnerClassGen
+ with JAndroidBuilder
+ with BCForwardersGen
+ with BCPickles
+ with BCJGenSigGen {
+
+    // The JVM limits a method to 255 parameter slots; reserving 1 slot for "this" leaves 254.
+ final val MaximumJvmParameters = 254
+
+ // current class
+ var cnode: asm.tree.ClassNode = null
+ var thisName: String = null // the internal name of the class being emitted
+
+ var claszSymbol: Symbol = null
+ var isCZParcelable = false
+ var isCZStaticModule = false
+ var isCZRemote = false
+
+ /* ---------------- idiomatic way to ask questions to typer ---------------- */
+
+ def paramTKs(app: Apply): List[BType] = {
+ val Apply(fun, _) = app
+ val funSym = fun.symbol
+ (funSym.info.paramTypes map toTypeKind) // this tracks mentioned inner classes (in innerClassBufferASM)
+ }
+
+ def symInfoTK(sym: Symbol): BType = {
+ toTypeKind(sym.info) // this tracks mentioned inner classes (in innerClassBufferASM)
+ }
+
+ def tpeTK(tree: Tree): BType = { toTypeKind(tree.tpe) }
+
+ def log(msg: => AnyRef) {
+ global synchronized { global.log(msg) }
+ }
+
+ override def getCurrentCUnit(): CompilationUnit = { cunit }
+
+    /* ---------------- helper utils for generating classes and fields ---------------- */
+
+ def genPlainClass(cd: ClassDef) {
+ assert(cnode == null, "GenBCode detected nested methods.")
+ innerClassBufferASM.clear()
+
+ claszSymbol = cd.symbol
+ isCZParcelable = isAndroidParcelableClass(claszSymbol)
+ isCZStaticModule = isStaticModule(claszSymbol)
+ isCZRemote = isRemote(claszSymbol)
+ thisName = internalName(claszSymbol)
+
+ cnode = new asm.tree.ClassNode()
+
+ initJClass(cnode)
+
+ val hasStaticCtor = methodSymbols(cd) exists (_.isStaticConstructor)
+ if (!hasStaticCtor) {
+ // but needs one ...
+ if (isCZStaticModule || isCZParcelable) {
+ fabricateStaticInit()
+ }
+ }
+
+ val optSerial: Option[Long] = serialVUID(claszSymbol)
+ if (optSerial.isDefined) { addSerialVUID(optSerial.get, cnode)}
+
+ addClassFields()
+
+ innerClassBufferASM ++= trackMemberClasses(claszSymbol, Nil)
+
+ gen(cd.impl)
+
+ assert(cd.symbol == claszSymbol, "Someone messed up BCodePhase.claszSymbol during genPlainClass().")
+
+ } // end of method genPlainClass()
+
+ /*
+ * must-single-thread
+ */
+ private def initJClass(jclass: asm.ClassVisitor) {
+
+ val ps = claszSymbol.info.parents
+ val superClass: String = if (ps.isEmpty) JAVA_LANG_OBJECT.getInternalName else internalName(ps.head.typeSymbol);
+ val ifaces: Array[String] = {
+ val arrIfacesTr: Array[Tracked] = exemplar(claszSymbol).ifaces
+ val arrIfaces = new Array[String](arrIfacesTr.length)
+ var i = 0
+ while (i < arrIfacesTr.length) {
+ val ifaceTr = arrIfacesTr(i)
+ val bt = ifaceTr.c
+ if (ifaceTr.isInnerClass) { innerClassBufferASM += bt }
+ arrIfaces(i) = bt.getInternalName
+ i += 1
+ }
+ arrIfaces
+ }
+ // `internalName()` tracks inner classes.
+
+ val flags = mkFlags(
+ javaFlags(claszSymbol),
+ if (isDeprecated(claszSymbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+
+ val thisSignature = getGenericSignature(claszSymbol, claszSymbol.owner)
+ cnode.visit(classfileVersion, flags,
+ thisName, thisSignature,
+ superClass, ifaces)
+
+ if (emitSource) {
+ cnode.visitSource(cunit.source.toString, null /* SourceDebugExtension */)
+ }
+
+ val enclM = getEnclosingMethodAttribute(claszSymbol)
+ if (enclM != null) {
+ val EnclMethodEntry(className, methodName, methodType) = enclM
+ cnode.visitOuterClass(className, methodName, methodType.getDescriptor)
+ }
+
+ val ssa = getAnnotPickle(thisName, claszSymbol)
+ cnode.visitAttribute(if (ssa.isDefined) pickleMarkerLocal else pickleMarkerForeign)
+ emitAnnotations(cnode, claszSymbol.annotations ++ ssa)
+
+ if (isCZStaticModule || isCZParcelable) {
+
+ if (isCZStaticModule) { addModuleInstanceField() }
+
+ } else {
+
+ val skipStaticForwarders = (claszSymbol.isInterface || settings.noForwarders)
+ if (!skipStaticForwarders) {
+ val lmoc = claszSymbol.companionModule
+ // add static forwarders if there are no name conflicts; see bugs #363 and #1735
+ if (lmoc != NoSymbol) {
+ // it must be a top level class (name contains no $s)
+ val isCandidateForForwarders = {
+ exitingPickler { !(lmoc.name.toString contains '$') && lmoc.hasModuleFlag && !lmoc.isImplClass && !lmoc.isNestedClass }
+ }
+ if (isCandidateForForwarders) {
+ log(s"Adding static forwarders from '$claszSymbol' to implementations in '$lmoc'")
+ addForwarders(isRemote(claszSymbol), cnode, thisName, lmoc.moduleClass)
+ }
+ }
+ }
+
+ }
+
+ // the invoker is responsible for adding a class-static constructor.
+
+ } // end of method initJClass
+
+ /*
+ * can-multi-thread
+ */
+ private def addModuleInstanceField() {
+ val fv =
+ cnode.visitField(PublicStaticFinal, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
+ strMODULE_INSTANCE_FIELD,
+ "L" + thisName + ";",
+ null, // no java-generic-signature
+ null // no initial value
+ )
+
+ fv.visitEnd()
+ }
+
+ /*
+ * must-single-thread
+ */
+ private def fabricateStaticInit() {
+
+ val clinit: asm.MethodVisitor = cnode.visitMethod(
+ PublicStatic, // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED
+ CLASS_CONSTRUCTOR_NAME,
+ "()V",
+ null, // no java-generic-signature
+ null // no throwable exceptions
+ )
+ clinit.visitCode()
+
+ /* "legacy static initialization" */
+ if (isCZStaticModule) {
+ clinit.visitTypeInsn(asm.Opcodes.NEW, thisName)
+ clinit.visitMethodInsn(asm.Opcodes.INVOKESPECIAL,
+ thisName, INSTANCE_CONSTRUCTOR_NAME, "()V")
+ }
+ if (isCZParcelable) { legacyAddCreatorCode(clinit, cnode, thisName) }
+ clinit.visitInsn(asm.Opcodes.RETURN)
+
+ clinit.visitMaxs(0, 0) // just to follow protocol, dummy arguments
+ clinit.visitEnd()
+ }
+
+ def addClassFields() {
+ /* Non-method term members are fields, except for module members. Module
+ * members can only happen on .NET (no flatten) for inner traits. There,
+ * a module symbol is generated (transformInfo in mixin) which is used
+ * as owner for the members of the implementation class (so that the
+ * backend emits them as static).
+ * No code is needed for this module symbol.
+ */
+ for (f <- fieldSymbols(claszSymbol)) {
+ val javagensig = getGenericSignature(f, claszSymbol)
+ val flags = mkFlags(
+ javaFieldFlags(f),
+ if (isDeprecated(f)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+
+ val jfield = new asm.tree.FieldNode(
+ flags,
+ f.javaSimpleName.toString,
+ symInfoTK(f).getDescriptor,
+ javagensig,
+ null // no initial value
+ )
+ cnode.fields.add(jfield)
+ emitAnnotations(jfield, f.annotations)
+ }
+
+ } // end of method addClassFields()
+
+ // current method
+ var mnode: asm.tree.MethodNode = null
+ var jMethodName: String = null
+ var isMethSymStaticCtor = false
+ var isMethSymBridge = false
+ var returnType: BType = null
+ var methSymbol: Symbol = null
+ // in GenASM this is local to genCode(), ie should get false whenever a new method is emitted (including fabricated ones eg addStaticInit())
+ var isModuleInitialized = false
+ // used by genLoadTry() and genSynchronized()
+ var earlyReturnVar: Symbol = null
+ var shouldEmitCleanup = false
+ var insideCleanupBlock = false
+ // line numbers
+ var lastEmittedLineNr = -1
+
+ object bc extends JCodeMethodN {
+ override def jmethod = PlainSkelBuilder.this.mnode
+ }
+
+ /* ---------------- Part 1 of program points, ie Labels in the ASM world ---------------- */
+
+ /*
+ * A jump is represented as an Apply node whose symbol denotes a LabelDef, the target of the jump.
+ * The `jumpDest` map is used to:
+ * (a) find the asm.Label for the target, given an Apply node's symbol;
+ * (b) anchor an asm.Label in the instruction stream, given a LabelDef node.
+ * In other words, (a) is necessary when visiting a jump-source, and (b) when visiting a jump-target.
+ * A related map is `labelDef`: it has the same keys as `jumpDest` but its values are LabelDef nodes not asm.Labels.
+ *
+ */
+ var jumpDest: immutable.Map[ /* LabelDef */ Symbol, asm.Label ] = null
+ def programPoint(labelSym: Symbol): asm.Label = {
+ assert(labelSym.isLabel, s"trying to map a non-label symbol to an asm.Label, at: ${labelSym.pos}")
+ jumpDest.getOrElse(labelSym, {
+ val pp = new asm.Label
+ jumpDest += (labelSym -> pp)
+ pp
+ })
+ }
+
+ /*
+ * A program point may be lexically nested (at some depth)
+ * (a) in the try-clause of a try-with-finally expression
+ * (b) in a synchronized block.
+ * Each of the constructs above establishes a "cleanup block" to execute upon
+ * both normal-exit, early-return, and abrupt-termination of the instructions it encloses.
+ *
+ * The `cleanups` LIFO queue represents the nesting of active (for the current program point)
+ * pending cleanups. For each such cleanup an asm.Label indicates the start of its cleanup-block.
+ * At any given time during traversal of the method body,
+ * the head of `cleanups` denotes the cleanup-block for the closest enclosing try-with-finally or synchronized-expression.
+ *
+ * `cleanups` is used:
+ *
+ * (1) upon visiting a Return statement.
+ * In case of pending cleanups, we can't just emit a RETURN instruction, but must instead:
+ * - store the result (if any) in `earlyReturnVar`, and
+ * - jump to the next pending cleanup.
+ * See `genReturn()`
+ *
+ * (2) upon emitting a try-with-finally or a synchronized-expr,
+ * In these cases, the targets of the above jumps are emitted,
+ * provided an early exit was actually encountered somewhere in the protected clauses.
+ * See `genLoadTry()` and `genSynchronized()`
+ *
+ * The code thus emitted for jumps and targets covers the early-return case.
+ * The case of abrupt (ie exceptional) termination is covered by exception handlers
+ * emitted for that purpose as described in `genLoadTry()` and `genSynchronized()`.
+ */
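+    /* As a sketch, while emitting the early return in
+     *
+     *   try { try { return 1 } finally { f1() } } finally { f2() }
+     *
+     * (f1 and f2 being placeholder calls) `cleanups` holds the labels of f1's and f2's
+     * cleanup-blocks, innermost first: the result is stored in `earlyReturnVar` and
+     * control jumps to f1's cleanup, which then continues to f2's cleanup before the
+     * method actually returns.
+     */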
+ var cleanups: List[asm.Label] = Nil
+ def registerCleanup(finCleanup: asm.Label) {
+ if (finCleanup != null) { cleanups = finCleanup :: cleanups }
+ }
+ def unregisterCleanup(finCleanup: asm.Label) {
+ if (finCleanup != null) {
+ assert(cleanups.head eq finCleanup,
+ s"Bad nesting of cleanup operations: $cleanups trying to unregister: $finCleanup")
+ cleanups = cleanups.tail
+ }
+ }
+
+ /* ---------------- local variables and params ---------------- */
+
+ case class Local(tk: BType, name: String, idx: Int, isSynth: Boolean)
+
+ /*
+ * Bookkeeping for method-local vars and method-params.
+ */
+ object locals {
+
+ private val slots = mutable.Map.empty[Symbol, Local] // (local-or-param-sym -> Local(BType, name, idx, isSynth))
+
+ private var nxtIdx = -1 // next available index for local-var
+
+ def reset(isStaticMethod: Boolean) {
+ slots.clear()
+ nxtIdx = if (isStaticMethod) 0 else 1
+ }
+
+ def contains(locSym: Symbol): Boolean = { slots.contains(locSym) }
+
+ def apply(locSym: Symbol): Local = { slots.apply(locSym) }
+
+ /* Make a fresh local variable, ensuring a unique name.
+ * The invoker must make sure inner classes are tracked for the sym's tpe.
+ */
+ def makeLocal(tk: BType, name: String): Symbol = {
+ val locSym = methSymbol.newVariable(cunit.freshTermName(name), NoPosition, Flags.SYNTHETIC) // setInfo tpe
+ makeLocal(locSym, tk)
+ locSym
+ }
+
+ def makeLocal(locSym: Symbol): Local = {
+ makeLocal(locSym, symInfoTK(locSym))
+ }
+
+ def getOrMakeLocal(locSym: Symbol): Local = {
+ // `getOrElse` below has the same effect as `getOrElseUpdate` because `makeLocal()` adds an entry to the `locals` map.
+ slots.getOrElse(locSym, makeLocal(locSym))
+ }
+
+ private def makeLocal(sym: Symbol, tk: BType): Local = {
+ assert(!slots.contains(sym), "attempt to create duplicate local var.")
+ assert(nxtIdx != -1, "not a valid start index")
+ val loc = Local(tk, sym.javaSimpleName.toString, nxtIdx, sym.isSynthetic)
+ slots += (sym -> loc)
+ assert(tk.getSize > 0, "makeLocal called for a symbol whose type is Unit.")
+ nxtIdx += tk.getSize
+ loc
+ }
+
+ // not to be confused with `fieldStore` and `fieldLoad` which also take a symbol but a field-symbol.
+ def store(locSym: Symbol) {
+ val Local(tk, _, idx, _) = slots(locSym)
+ bc.store(idx, tk)
+ }
+
+ def load(locSym: Symbol) {
+ val Local(tk, _, idx, _) = slots(locSym)
+ bc.load(idx, tk)
+ }
+
+ }
+
+ /* ---------------- Part 2 of program points, ie Labels in the ASM world ---------------- */
+
+ /*
+ * The semantics of try-with-finally and synchronized-expr require their cleanup code
+ * to be present in three forms in the emitted bytecode:
+ * (a) as normal-exit code, reached via fall-through from the last program point being protected,
+ * (b) as code reached upon early-return from an enclosed return statement.
+ * The only difference between (a) and (b) is their next program-point:
+ * the former must continue with fall-through while
+ * the latter must continue to the next early-return cleanup (if any, otherwise return from the method).
+ * Otherwise they are identical.
+ * (c) as exception-handler, reached via exceptional control flow,
+ * which rethrows the caught exception once it's done with the cleanup code.
+ *
+ * A particular cleanup may in general contain LabelDefs. Care is needed when duplicating such jump-targets,
+     * so as to preserve agreement with the (also duplicated) jump-sources.
+ * This is achieved based on the bookkeeping provided by two maps:
+ * - `labelDefsAtOrUnder` lists all LabelDefs enclosed by a given Tree node (the key)
+ * - `labelDef` provides the LabelDef node whose symbol is used as key.
+ * As a sidenote, a related map is `jumpDest`: it has the same keys as `labelDef` but its values are asm.Labels not LabelDef nodes.
+ *
+ * Details in `emitFinalizer()`, which is invoked from `genLoadTry()` and `genSynchronized()`.
+ */
+ var labelDefsAtOrUnder: scala.collection.Map[Tree, List[LabelDef]] = null
+    var labelDef: scala.collection.Map[Symbol, LabelDef] = null // (LabelDef-sym -> LabelDef)
+
+ // bookkeeping the scopes of non-synthetic local vars, to emit debug info (`emitVars`).
+ var varsInScope: List[Pair[Symbol, asm.Label]] = null // (local-var-sym -> start-of-scope)
+
+ // helpers around program-points.
+ def lastInsn: asm.tree.AbstractInsnNode = {
+ mnode.instructions.getLast
+ }
+ def currProgramPoint(): asm.Label = {
+ lastInsn match {
+ case labnode: asm.tree.LabelNode => labnode.getLabel
+ case _ =>
+ val pp = new asm.Label
+ mnode visitLabel pp
+ pp
+ }
+ }
+ def markProgramPoint(lbl: asm.Label) {
+ val skip = (lbl == null) || isAtProgramPoint(lbl)
+ if (!skip) { mnode visitLabel lbl }
+ }
+ def isAtProgramPoint(lbl: asm.Label): Boolean = {
+ (lastInsn match { case labnode: asm.tree.LabelNode => (labnode.getLabel == lbl); case _ => false } )
+ }
+ def lineNumber(tree: Tree) {
+ if (!emitLines || !tree.pos.isDefined) return;
+ val nr = tree.pos.line
+ if (nr != lastEmittedLineNr) {
+ lastEmittedLineNr = nr
+ lastInsn match {
+ case lnn: asm.tree.LineNumberNode =>
+ // overwrite previous landmark as no instructions have been emitted for it
+ lnn.line = nr
+ case _ =>
+ mnode.visitLineNumber(nr, currProgramPoint())
+ }
+ }
+ }
+
+ // on entering a method
+ def resetMethodBookkeeping(dd: DefDef) {
+ locals.reset(isStaticMethod = methSymbol.isStaticMember)
+ jumpDest = immutable.Map.empty[ /* LabelDef */ Symbol, asm.Label ]
+ // populate labelDefsAtOrUnder
+ val ldf = new LabelDefsFinder
+ ldf.traverse(dd.rhs)
+ labelDefsAtOrUnder = ldf.result.withDefaultValue(Nil)
+ labelDef = labelDefsAtOrUnder(dd.rhs).map(ld => (ld.symbol -> ld)).toMap
+ // check previous invocation of genDefDef exited as many varsInScope as it entered.
+ assert(varsInScope == null, "Unbalanced entering/exiting of GenBCode's genBlock().")
+ // check previous invocation of genDefDef unregistered as many cleanups as it registered.
+ assert(cleanups == Nil, "Previous invocation of genDefDef didn't unregister as many cleanups as it registered.")
+ isModuleInitialized = false
+ earlyReturnVar = null
+ shouldEmitCleanup = false
+
+ lastEmittedLineNr = -1
+ }
+
+ /* ---------------- top-down traversal invoking ASM Tree API along the way ---------------- */
+
+ def gen(tree: Tree) {
+ tree match {
+ case EmptyTree => ()
+
+ case _: ModuleDef => abort(s"Modules should have been eliminated by refchecks: $tree")
+
+ case ValDef(mods, name, tpt, rhs) => () // fields are added in `genPlainClass()`, via `addClassFields()`
+
+ case dd : DefDef => genDefDef(dd)
+
+ case Template(_, _, body) => body foreach gen
+
+ case _ => abort(s"Illegal tree in gen: $tree")
+ }
+ }
+
+ /*
+ * must-single-thread
+ */
+ def initJMethod(flags: Int, paramAnnotations: List[List[AnnotationInfo]]) {
+
+ val jgensig = getGenericSignature(methSymbol, claszSymbol)
+ addRemoteExceptionAnnot(isCZRemote, hasPublicBitSet(flags), methSymbol)
+ val (excs, others) = methSymbol.annotations partition (_.symbol == definitions.ThrowsClass)
+ val thrownExceptions: List[String] = getExceptions(excs)
+
+ val bytecodeName =
+ if (isMethSymStaticCtor) CLASS_CONSTRUCTOR_NAME
+ else jMethodName
+
+ val mdesc = asmMethodType(methSymbol).getDescriptor
+ mnode = cnode.visitMethod(
+ flags,
+ bytecodeName,
+ mdesc,
+ jgensig,
+ mkArray(thrownExceptions)
+ ).asInstanceOf[asm.tree.MethodNode]
+
+ // TODO param names: (m.params map (p => javaName(p.sym)))
+
+ emitAnnotations(mnode, others)
+ emitParamAnnotations(mnode, paramAnnotations)
+
+ } // end of method initJMethod
+
+
+ def genDefDef(dd: DefDef) {
+ // the only method whose implementation is not emitted: getClass()
+ if (definitions.isGetClass(dd.symbol)) { return }
+ assert(mnode == null, "GenBCode detected nested method.")
+
+ methSymbol = dd.symbol
+ jMethodName = methSymbol.javaSimpleName.toString
+ returnType = asmMethodType(dd.symbol).getReturnType
+ isMethSymStaticCtor = methSymbol.isStaticConstructor
+ isMethSymBridge = methSymbol.isBridge
+
+ resetMethodBookkeeping(dd)
+
+ // add method-local vars for params
+ val DefDef(_, _, _, vparamss, _, rhs) = dd
+ assert(vparamss.isEmpty || vparamss.tail.isEmpty, s"Malformed parameter list: $vparamss")
+ val params = if (vparamss.isEmpty) Nil else vparamss.head
+ for (p <- params) { locals.makeLocal(p.symbol) }
+ // debug assert((params.map(p => locals(p.symbol).tk)) == asmMethodType(methSymbol).getArgumentTypes.toList, "debug")
+
+ if (params.size > MaximumJvmParameters) {
+ // SI-7324
+ cunit.error(methSymbol.pos, s"Platform restriction: a parameter list's length cannot exceed $MaximumJvmParameters.")
+ return
+ }
+
+ val isNative = methSymbol.hasAnnotation(definitions.NativeAttr)
+ val isAbstractMethod = (methSymbol.isDeferred || methSymbol.owner.isInterface)
+ val flags = mkFlags(
+ javaFlags(methSymbol),
+ if (claszSymbol.isInterface) asm.Opcodes.ACC_ABSTRACT else 0,
+ if (methSymbol.isStrictFP) asm.Opcodes.ACC_STRICT else 0,
+ if (isNative) asm.Opcodes.ACC_NATIVE else 0, // native methods of objects are generated in mirror classes
+ if (isDeprecated(methSymbol)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+
+ // TODO needed? for(ann <- m.symbol.annotations) { ann.symbol.initialize }
+ initJMethod(flags, params.map(p => p.symbol.annotations))
+
+ /* Add method-local vars for LabelDef-params.
+ *
+ * This makes sure that:
+ * (1) upon visiting any "forward-jumping" Apply (ie visited before its target LabelDef), and after
+ * (2) grabbing the corresponding param symbols,
+ * those param-symbols can be used to access method-local vars.
+ *
+ * When duplicating a finally-contained LabelDef, another program-point is needed for the copy (each such copy has its own asm.Label),
+ * but the same vars (given by the LabelDef's params) can be reused,
+ * because no LabelDef ends up nested within itself after such duplication.
+ */
+ for(ld <- labelDefsAtOrUnder(dd.rhs); ldp <- ld.params; if !locals.contains(ldp.symbol)) {
+ // the tail-calls xform results in symbols shared btw method-params and labelDef-params, thus the guard above.
+ locals.makeLocal(ldp.symbol)
+ }
+
+ if (!isAbstractMethod && !isNative) {
+
+ def emitNormalMethodBody() {
+ val veryFirstProgramPoint = currProgramPoint()
+ genLoad(rhs, returnType)
+
+ rhs match {
+ case Block(_, Return(_)) => ()
+ case Return(_) => ()
+ case EmptyTree =>
+ globalError("Concrete method has no definition: " + dd + (
+ if (settings.debug) "(found: " + methSymbol.owner.info.decls.toList.mkString(", ") + ")"
+ else "")
+ )
+ case _ =>
+ bc emitRETURN returnType
+ }
+ if (emitVars) {
+ // add entries to LocalVariableTable JVM attribute
+ val onePastLastProgramPoint = currProgramPoint()
+ val hasStaticBitSet = ((flags & asm.Opcodes.ACC_STATIC) != 0)
+ if (!hasStaticBitSet) {
+ mnode.visitLocalVariable(
+ "this",
+ "L" + thisName + ";",
+ null,
+ veryFirstProgramPoint,
+ onePastLastProgramPoint,
+ 0
+ )
+ }
+ for (p <- params) { emitLocalVarScope(p.symbol, veryFirstProgramPoint, onePastLastProgramPoint, force = true) }
+ }
+
+ if (isMethSymStaticCtor) { appendToStaticCtor(dd) }
+ } // end of emitNormalMethodBody()
+
+ lineNumber(rhs)
+ emitNormalMethodBody()
+
+ // Note we don't invoke visitMax, thus there are no FrameNode among mnode.instructions.
+ // The only non-instruction nodes to be found are LabelNode and LineNumberNode.
+ }
+ mnode = null
+ } // end of method genDefDef()
+
+ /*
+ * must-single-thread
+ *
+ * TODO document, explain interplay with `fabricateStaticInit()`
+ */
+ private def appendToStaticCtor(dd: DefDef) {
+
+ def insertBefore(
+ location: asm.tree.AbstractInsnNode,
+ i0: asm.tree.AbstractInsnNode,
+ i1: asm.tree.AbstractInsnNode) {
+ if (i0 != null) {
+ mnode.instructions.insertBefore(location, i0.clone(null))
+ mnode.instructions.insertBefore(location, i1.clone(null))
+ }
+ }
+
+ // collect all return instructions
+ var rets: List[asm.tree.AbstractInsnNode] = Nil
+ mnode foreachInsn { i => if (i.getOpcode() == asm.Opcodes.RETURN) { rets ::= i } }
+ if (rets.isEmpty) { return }
+
+ var insnModA: asm.tree.AbstractInsnNode = null
+ var insnModB: asm.tree.AbstractInsnNode = null
+ // call object's private ctor from static ctor
+ if (isCZStaticModule) {
+ // NEW `moduleName`
+ val className = internalName(methSymbol.enclClass)
+ insnModA = new asm.tree.TypeInsnNode(asm.Opcodes.NEW, className)
+ // INVOKESPECIAL <init>
+ val callee = methSymbol.enclClass.primaryConstructor
+ val jname = callee.javaSimpleName.toString
+ val jowner = internalName(callee.owner)
+ val jtype = asmMethodType(callee).getDescriptor
+ insnModB = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESPECIAL, jowner, jname, jtype)
+ }
+
+ var insnParcA: asm.tree.AbstractInsnNode = null
+ var insnParcB: asm.tree.AbstractInsnNode = null
+ // android creator code
+ if (isCZParcelable) {
+ // add a static field ("CREATOR") to this class to cache android.os.Parcelable$Creator
+ val andrFieldDescr = asmClassType(AndroidCreatorClass).getDescriptor
+ cnode.visitField(
+ asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL,
+ "CREATOR",
+ andrFieldDescr,
+ null,
+ null
+ )
+ // INVOKESTATIC CREATOR(): android.os.Parcelable$Creator; -- TODO where does this Android method come from?
+ val callee = definitions.getMember(claszSymbol.companionModule, androidFieldName)
+ val jowner = internalName(callee.owner)
+ val jname = callee.javaSimpleName.toString
+ val jtype = asmMethodType(callee).getDescriptor
+ insnParcA = new asm.tree.MethodInsnNode(asm.Opcodes.INVOKESTATIC, jowner, jname, jtype)
+ // PUTSTATIC `thisName`.CREATOR;
+ insnParcB = new asm.tree.FieldInsnNode(asm.Opcodes.PUTSTATIC, thisName, "CREATOR", andrFieldDescr)
+ }
+
+ // insert a few instructions for initialization before each return instruction
+ for(r <- rets) {
+ insertBefore(r, insnModA, insnModB)
+ insertBefore(r, insnParcA, insnParcB)
+ }
+
+ }
+
+ def emitLocalVarScope(sym: Symbol, start: asm.Label, end: asm.Label, force: Boolean = false) {
+ val Local(tk, name, idx, isSynth) = locals(sym)
+ if (force || !isSynth) {
+ mnode.visitLocalVariable(name, tk.getDescriptor, null, start, end, idx)
+ }
+ }
+
+ def genLoad(tree: Tree, expectedType: BType)
+
+ } // end of class PlainSkelBuilder
+
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
new file mode 100644
index 0000000000..439be77b31
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala
@@ -0,0 +1,401 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+
+package scala
+package tools.nsc
+package backend
+package jvm
+
+import scala.collection.{ mutable, immutable }
+import scala.annotation.switch
+
+import scala.tools.asm
+
+/*
+ *
+ * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
+ * @version 1.0
+ *
+ */
+abstract class BCodeSyncAndTry extends BCodeBodyBuilder {
+ import global._
+
+
+ /*
+ * Functionality to lower `synchronized` and `try` expressions.
+ */
+ abstract class SyncAndTryBuilder(cunit: CompilationUnit) extends PlainBodyBuilder(cunit) {
+
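+    /* Schematically (eliding the handling of a non-Unit result), the code emitted by
+     * genSynchronized() below has the shape:
+     *
+     *   <load receiver>; DUP; ASTORE monitor; MONITORENTER
+     *   <body>                                         // the protected region (2)
+     *   ALOAD monitor; MONITOREXIT; GOTO post          // normal exit (3)
+     *   handler: ALOAD monitor; MONITOREXIT; ATHROW    // abrupt termination (4)
+     *   post: ...
+     *
+     * plus, when needed, a cleanup version of monitor-exit for early returns (5).
+     */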
+ def genSynchronized(tree: Apply, expectedType: BType): BType = {
+ val Apply(fun, args) = tree
+ val monitor = locals.makeLocal(ObjectReference, "monitor")
+ val monCleanup = new asm.Label
+
+ // if the synchronized block returns a result, store it in a local variable.
+ // Just leaving it on the stack is not valid in MSIL (stack is cleaned when leaving try-blocks).
+ val hasResult = (expectedType != UNIT)
+ val monitorResult: Symbol = if (hasResult) locals.makeLocal(tpeTK(args.head), "monitorResult") else null;
+
+ /* ------ (1) pushing and entering the monitor, also keeping a reference to it in a local var. ------ */
+ genLoadQualifier(fun)
+ bc dup ObjectReference
+ locals.store(monitor)
+ emit(asm.Opcodes.MONITORENTER)
+
+ /* ------ (2) Synchronized block.
+ * Reached by fall-through from (1).
+ * Protected by:
+ * (2.a) the EH-version of the monitor-exit, and
+ * (2.b) whatever protects the whole synchronized expression.
+ * ------
+ */
+ val startProtected = currProgramPoint()
+ registerCleanup(monCleanup)
+ genLoad(args.head, expectedType /* toTypeKind(tree.tpe.resultType) */)
+ unregisterCleanup(monCleanup)
+ if (hasResult) { locals.store(monitorResult) }
+ nopIfNeeded(startProtected)
+ val endProtected = currProgramPoint()
+
+ /* ------ (3) monitor-exit after normal, non-early-return, termination of (2).
+ * Reached by fall-through from (2).
+ * Protected by whatever protects the whole synchronized expression.
+ * ------
+ */
+ locals.load(monitor)
+ emit(asm.Opcodes.MONITOREXIT)
+ if (hasResult) { locals.load(monitorResult) }
+ val postHandler = new asm.Label
+ bc goTo postHandler
+
+ /* ------ (4) exception-handler version of monitor-exit code.
+ * Reached upon abrupt termination of (2).
+ * Protected by whatever protects the whole synchronized expression.
+ * ------
+ */
+ protect(startProtected, endProtected, currProgramPoint(), ThrowableReference)
+ locals.load(monitor)
+ emit(asm.Opcodes.MONITOREXIT)
+ emit(asm.Opcodes.ATHROW)
+
+ /* ------ (5) cleanup version of monitor-exit code.
+ * Reached upon early-return from (2).
+ * Protected by whatever protects the whole synchronized expression.
+ * ------
+ */
+ if (shouldEmitCleanup) {
+ markProgramPoint(monCleanup)
+ locals.load(monitor)
+ emit(asm.Opcodes.MONITOREXIT)
+ pendingCleanups()
+ }
+
+ /* ------ (6) normal exit of the synchronized expression.
+ * Reached after normal, non-early-return, termination of (3).
+ * Protected by whatever protects the whole synchronized expression.
+ * ------
+ */
+ mnode visitLabel postHandler
+
+ lineNumber(tree)
+
+ expectedType
+ }
+
+ /*
+ * Detects whether no instructions have been emitted since label `lbl` and if so emits a NOP.
+ * Useful to avoid emitting an empty try-block being protected by exception handlers,
+ * which results in "java.lang.ClassFormatError: Illegal exception table range". See SI-6102.
+ */
+ def nopIfNeeded(lbl: asm.Label) {
+ val noInstructionEmitted = isAtProgramPoint(lbl)
+ if (noInstructionEmitted) { emit(asm.Opcodes.NOP) }
+ }
+
+ /*
+ * Emitting try-catch is easy, emitting try-catch-finally not quite so.
+ * A finally-block (which always has type Unit, thus leaving the operand stack unchanged)
+ * affects control-transfer from protected regions, as follows:
+ *
+ * (a) `return` statement:
+ *
+ * First, the value to return (if any) is evaluated.
+ * Afterwards, all enclosing finally-blocks are run, from innermost to outermost.
+ * Only then is the return value (if any) returned.
+ *
+ * Some terminology:
+ * (a.1) Executing a return statement that is protected
+ * by one or more finally-blocks is called "early return"
+ * (a.2) the chain of code sections (a code section for each enclosing finally-block)
+ * to run upon early returns is called "cleanup chain"
+ *
+ * As an additional spin, consider a return statement in a finally-block.
+ * In this case, the value to return depends on how control arrived at that statement:
+ * in case it arrived via a previous return, the previous return enjoys priority:
+ * the value to return is given by that statement.
+ *
+ * (b) A finally-block protects both the try-clause and the catch-clauses.
+ *
+ * Sidenote:
+ * A try-clause may contain an empty block. On CLR, a finally-block has special semantics
+ * regarding Abort interruptions; but on the JVM it's safe to elide an exception-handler
+ * that protects an "empty" range ("empty" as in "containing NOPs only",
+ * see `asm.optimiz.DanglingExcHandlers` and SI-6720).
+ *
+ * This means the instructions of a finally-block can be reached in three ways:
+ * (b.1) Upon normal (non-early-returning) completion of the try-clause or a catch-clause
+ * In this case, the next-program-point is that following the try-catch-finally expression.
+ * (b.2) Upon early-return initiated in the try-clause or a catch-clause
+ * In this case, the next-program-point is the enclosing cleanup section (if any), otherwise return.
+ * (b.3) Upon abrupt termination (due to unhandled exception) of the try-clause or a catch-clause
+ * In this case, the unhandled exception must be re-thrown after running the finally-block.
+ *
+ * (c) finally-blocks are implicit to `synchronized` (a finally-block is added just to release the lock);
+ * that's why `genSynchronized()` also emits cleanup-sections.
+ *
+ * A number of code patterns can be emitted to realize the intended semantics.
+ *
+ * A popular alternative (GenICode, javac) consists in duplicating the cleanup-chain at each early-return position.
+ * The principle at work is that once control is transferred to a cleanup-section,
+ * control will always stay within the cleanup-chain.
+ * That is, barring an exception being thrown in a cleanup-section, in which case the enclosing try-block
+ * (reached via abrupt termination) takes over.
+ *
+ * The observations above hint at another code layout, less verbose, for the cleanup-chain.
+ *
+ * The code layout that GenBCode emits takes into account that once a cleanup section has been reached,
+ * jumping to the next cleanup-section (and so on, until the outermost one) realizes the correct semantics.
+ *
+ * There is still code duplication in that two cleanup-chains are needed (but this is unavoidable, anyway):
+ * one for normal control flow and another chain consisting of exception handlers.
+ * The in-line comments below refer to them as
+ * - "early-return-cleanups" and
+ * - "exception-handler-version-of-finally-block" respectively.
+ *
+ */
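+ /*
+ * As a source-level illustration (hypothetical method, not part of this file), the following
+ * exercises both an early return and the normal exit path. With the layout that `genLoadTry()`
+ * emits (see regions (3.A), (3.B), and (4) below), its finally-block appears once as the
+ * exception-handler-version (3.A), once as the early-return-cleanup (3.B), and once for the
+ * normal, non-early-return exit (4):
+ *
+ * {{{
+ * def f(x: Int): Int = {
+ * try {
+ * if (x > 0) return x // early return, routed through the cleanup-chain
+ * -1
+ * } catch {
+ * case _: RuntimeException => -2
+ * } finally {
+ * println("done") // conceptually runs on every exit path
+ * }
+ * }
+ * }}}
+ */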
+ def genLoadTry(tree: Try): BType = {
+
+ val Try(block, catches, finalizer) = tree
+ val kind = tpeTK(tree)
+
+ val caseHandlers: List[EHClause] =
+ for (CaseDef(pat, _, caseBody) <- catches) yield {
+ pat match {
+ case Typed(Ident(nme.WILDCARD), tpt) => NamelessEH(tpeTK(tpt), caseBody)
+ case Ident(nme.WILDCARD) => NamelessEH(ThrowableReference, caseBody)
+ case Bind(_, _) => BoundEH (pat.symbol, caseBody)
+ }
+ }
+
+ // ------ (0) locals used later ------
+
+ /*
+ * `postHandlers` is a program point denoting:
+ * (a) the finally-clause conceptually reached via fall-through from try-catch-finally
+ * (in case a finally-block is present); or
+ * (b) the program point right after the try-catch
+ * (in case there's no finally-block).
+ * The name choice emphasizes that the code section lies "after all exception handlers",
+ * where "all exception handlers" includes those derived from catch-clauses as well as from finally-blocks.
+ */
+ val postHandlers = new asm.Label
+
+ val hasFinally = (finalizer != EmptyTree)
+
+ /*
+ * used in the finally-clause reached via fall-through from try-catch, if any.
+ */
+ val guardResult = hasFinally && (kind != UNIT) && mayCleanStack(finalizer)
+
+ /*
+ * please notice `tmp` has type tree.tpe, while `earlyReturnVar` has the method return type.
+ * Because those two types can be different, dedicated vars are needed.
+ */
+ val tmp = if (guardResult) locals.makeLocal(tpeTK(tree), "tmp") else null;
+
+ /*
+ * upon early return from the try-body or one of its EHs (but not the EH-version of the finally-clause)
+ * AND hasFinally, a cleanup is needed.
+ */
+ val finCleanup = if (hasFinally) new asm.Label else null
+
+ /* ------ (1) try-block, protected by:
+ * (1.a) the EHs due to case-clauses, emitted in (2),
+ * (1.b) the EH due to finally-clause, emitted in (3.A)
+ * (1.c) whatever protects the whole try-catch-finally expression.
+ * ------
+ */
+
+ val startTryBody = currProgramPoint()
+ registerCleanup(finCleanup)
+ genLoad(block, kind)
+ unregisterCleanup(finCleanup)
+ nopIfNeeded(startTryBody)
+ val endTryBody = currProgramPoint()
+ bc goTo postHandlers
+
+ /* ------ (2) One EH for each case-clause (this does not include the EH-version of the finally-clause)
+ * An EH in (2) is reached upon abrupt termination of (1).
+ * An EH in (2) is protected by:
+ * (2.a) the EH-version of the finally-clause, if any.
+ * (2.b) whatever protects the whole try-catch-finally expression.
+ * ------
+ */
+
+ for (ch <- caseHandlers) {
+
+ // (2.a) emit case clause proper
+ val startHandler = currProgramPoint()
+ var endHandler: asm.Label = null
+ var excType: BType = null
+ registerCleanup(finCleanup)
+ ch match {
+ case NamelessEH(typeToDrop, caseBody) =>
+ bc drop typeToDrop
+ genLoad(caseBody, kind) // adapts caseBody to `kind`, thus it can be stored, if `guardResult`, in `tmp`.
+ nopIfNeeded(startHandler)
+ endHandler = currProgramPoint()
+ excType = typeToDrop
+
+ case BoundEH (patSymbol, caseBody) =>
+ // test/files/run/contrib674.scala , a local-var already exists for patSymbol.
+ // rather than creating on first-access, we do it right away to emit debug-info for the created local var.
+ val Local(patTK, _, patIdx, _) = locals.getOrMakeLocal(patSymbol)
+ bc.store(patIdx, patTK)
+ genLoad(caseBody, kind)
+ nopIfNeeded(startHandler)
+ endHandler = currProgramPoint()
+ emitLocalVarScope(patSymbol, startHandler, endHandler)
+ excType = patTK
+ }
+ unregisterCleanup(finCleanup)
+ // (2.b) mark the try-body as protected by this case clause.
+ protect(startTryBody, endTryBody, startHandler, excType)
+ // (2.c) emit jump to the program point where the finally-clause-for-normal-exit starts, or in effect `after` if no finally-clause was given.
+ bc goTo postHandlers
+
+ }
+
+ /* ------ (3.A) The exception-handler-version of the finally-clause.
+ * Reached upon abrupt termination of (1) or one of the EHs in (2).
+ * Protected only by whatever protects the whole try-catch-finally expression.
+ * ------
+ */
+
+ // a note on terminology: this is not "postHandlers", despite appearances.
+ // "postHandlers" refers to the source-code view, and from that perspective both (3.A) and (3.B) are invisible implementation artifacts.
+ if (hasFinally) {
+ nopIfNeeded(startTryBody)
+ val finalHandler = currProgramPoint() // version of the finally-clause reached via unhandled exception.
+ protect(startTryBody, finalHandler, finalHandler, null)
+ val Local(eTK, _, eIdx, _) = locals(locals.makeLocal(ThrowableReference, "exc"))
+ bc.store(eIdx, eTK)
+ emitFinalizer(finalizer, null, isDuplicate = true)
+ bc.load(eIdx, eTK)
+ emit(asm.Opcodes.ATHROW)
+ }
+
+ /* ------ (3.B) Cleanup-version of the finally-clause.
+ * Reached upon early RETURN from (1) or upon early RETURN from one of the EHs in (2)
+ * (and only from there, ie reached only upon early RETURN from
+ * program regions bracketed by registerCleanup/unregisterCleanup).
+ * Protected only by whatever protects the whole try-catch-finally expression.
+ *
+ * Given that control arrives to a cleanup section only upon early RETURN,
+ * the value to return (if any) is always available. Therefore, a further RETURN
+ * found in a cleanup section is always ignored (a warning is displayed, @see `genReturn()`).
+ * In order for `genReturn()` to know whether the return statement is enclosed in a cleanup section,
+ * the variable `insideCleanupBlock` is used.
+ * ------
+ */
+
+ // this is not "postHandlers" either.
+ // `shouldEmitCleanup` can be set, and at the same time this try expression may lack a finally-clause.
+ // In other words, all combinations of (hasFinally, shouldEmitCleanup) are valid.
+ if (hasFinally && shouldEmitCleanup) {
+ val savedInsideCleanup = insideCleanupBlock
+ insideCleanupBlock = true
+ markProgramPoint(finCleanup)
+ // regarding return value, the protocol is: in place of a `return-stmt`, a sequence of `adapt, store, jump` are inserted.
+ emitFinalizer(finalizer, null, isDuplicate = true)
+ pendingCleanups()
+ insideCleanupBlock = savedInsideCleanup
+ }
+
+ /* ------ (4) finally-clause-for-normal-nonEarlyReturn-exit
+ * Reached upon normal, non-early-return termination of (1) or of an EH in (2).
+ * Protected only by whatever protects the whole try-catch-finally expression.
+ * TODO explain what happens upon RETURN contained in (4)
+ * ------
+ */
+
+ markProgramPoint(postHandlers)
+ if (hasFinally) {
+ emitFinalizer(finalizer, tmp, isDuplicate = false) // the only invocation of emitFinalizer with `isDuplicate == false`
+ }
+
+ kind
+ } // end of genLoadTry()
+
+ /* if no more pending cleanups, all that remains to do is return. Otherwise jump to the next (outer) pending cleanup. */
+ private def pendingCleanups() {
+ cleanups match {
+ case Nil =>
+ if (earlyReturnVar != null) {
+ locals.load(earlyReturnVar)
+ bc.emitRETURN(locals(earlyReturnVar).tk)
+ } else {
+ bc emitRETURN UNIT
+ }
+ shouldEmitCleanup = false
+
+ case nextCleanup :: _ =>
+ bc goTo nextCleanup
+ }
+ }
+
+ def protect(start: asm.Label, end: asm.Label, handler: asm.Label, excType: BType) {
+ val excInternalName: String =
+ if (excType == null) null
+ else excType.getInternalName
+ assert(start != end, "protecting a range of zero instructions leads to illegal class format. Solution: add a NOP to that range.")
+ mnode.visitTryCatchBlock(start, end, handler, excInternalName)
+ }
+
+ /* `tmp` (if non-null) is the symbol of the local-var used to preserve the result of the try-body, see `guardResult` */
+ def emitFinalizer(finalizer: Tree, tmp: Symbol, isDuplicate: Boolean) {
+ var saved: immutable.Map[ /* LabelDef */ Symbol, asm.Label ] = null
+ if (isDuplicate) {
+ saved = jumpDest
+ for(ldef <- labelDefsAtOrUnder(finalizer)) {
+ jumpDest -= ldef.symbol
+ }
+ }
+ // when duplicating, the above guarantees new asm.Labels are used for LabelDefs contained in the finalizer (their vars are reused, that's ok)
+ if (tmp != null) { locals.store(tmp) }
+ genLoad(finalizer, UNIT)
+ if (tmp != null) { locals.load(tmp) }
+ if (isDuplicate) {
+ jumpDest = saved
+ }
+ }
+
+ /* Does this tree have a try-catch block? */
+ def mayCleanStack(tree: Tree): Boolean = tree exists { t => t.isInstanceOf[Try] }
+
+ abstract class Cleanup(val value: AnyRef) {
+ def contains(x: AnyRef) = value == x
+ }
+ case class MonitorRelease(v: Symbol) extends Cleanup(v) { }
+ case class Finalizer(f: Tree) extends Cleanup (f) { }
+
+ trait EHClause
+ case class NamelessEH(typeToDrop: BType, caseBody: Tree) extends EHClause
+ case class BoundEH (patSymbol: Symbol, caseBody: Tree) extends EHClause
+
+ }
+
+}
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala
new file mode 100644
index 0000000000..542a90fa85
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeTypes.scala
@@ -0,0 +1,991 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+package scala
+package tools.nsc
+package backend.jvm
+
+import scala.tools.asm
+import scala.collection.{ immutable, mutable }
+
+/*
+ * Utilities to mediate between types as represented in Scala ASTs and ASM trees.
+ *
+ * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded
+ * @version 1.0
+ *
+ */
+abstract class BCodeTypes extends BCodeIdiomatic {
+
+ import global._
+
+ // when compiling the Scala library, some assertions don't hold (e.g., scala.Boolean has null superClass although it's not an interface)
+ val isCompilingStdLib = !(settings.sourcepath.isDefault)
+
+ val srBoxedUnit = brefType("scala/runtime/BoxedUnit")
+
+ // special names
+ var StringReference : BType = null
+ var ThrowableReference : BType = null
+ var jlCloneableReference : BType = null // java/lang/Cloneable
+ var jlNPEReference : BType = null // java/lang/NullPointerException
+ var jioSerializableReference : BType = null // java/io/Serializable
+ var scalaSerializableReference : BType = null // scala/Serializable
+ var classCastExceptionReference : BType = null // java/lang/ClassCastException
+
+ var lateClosureInterfaces: Array[Tracked] = null // the only interface a Late-Closure-Class implements is scala.Serializable
+
+ /* A map from scala primitive type-symbols to BTypes */
+ var primitiveTypeMap: Map[Symbol, BType] = null
+ /* A map from scala type-symbols for Nothing and Null to (runtime version) BTypes */
+ var phantomTypeMap: Map[Symbol, BType] = null
+ /* Maps the method symbol for a box method to the boxed type of the result.
+ * For example, the method symbol for `Byte.box()` is mapped to the BType `Ljava/lang/Byte;`. */
+ var boxResultType: Map[Symbol, BType] = null
+ /* Maps the method symbol for an unbox method to the primitive type of the result.
+ * For example, the method symbol for `Byte.unbox()` is mapped to the BType BYTE. */
+ var unboxResultType: Map[Symbol, BType] = null
+
+ var hashMethodSym: Symbol = null // scala.runtime.ScalaRunTime.hash
+
+ var AndroidParcelableInterface: Symbol = null
+ var AndroidCreatorClass : Symbol = null // this is an inner class, use asmType() to get hold of its BType while tracking in innerClassBufferASM
+ var androidCreatorType : BType = null
+
+ var BeanInfoAttr: Symbol = null
+
+ /* The Object => String overload. */
+ var String_valueOf: Symbol = null
+
+ var ArrayInterfaces: Set[Tracked] = null
+
+ // scala.FunctionX and scala.runtime.AbstractFunctionX
+ val FunctionReference = new Array[Tracked](definitions.MaxFunctionArity + 1)
+ val AbstractFunctionReference = new Array[Tracked](definitions.MaxFunctionArity + 1)
+ val abstractFunctionArityMap = mutable.Map.empty[BType, Int]
+
+ var PartialFunctionReference: BType = null // scala.PartialFunction
+ var AbstractPartialFunctionReference: BType = null // scala.runtime.AbstractPartialFunction
+
+ var BoxesRunTime: BType = null
+
+ /*
+ * must-single-thread
+ */
+ def initBCodeTypes() {
+
+ import definitions._
+
+ primitiveTypeMap =
+ Map(
+ UnitClass -> UNIT,
+ BooleanClass -> BOOL,
+ CharClass -> CHAR,
+ ByteClass -> BYTE,
+ ShortClass -> SHORT,
+ IntClass -> INT,
+ LongClass -> LONG,
+ FloatClass -> FLOAT,
+ DoubleClass -> DOUBLE
+ )
+
+ phantomTypeMap =
+ Map(
+ NothingClass -> RT_NOTHING,
+ NullClass -> RT_NULL,
+ NothingClass -> RT_NOTHING, // we map on purpose to RT_NOTHING, getting rid of the distinction compile-time vs. runtime for NullClass.
+ NullClass -> RT_NULL // ditto.
+ )
+
+ boxResultType =
+ for(Pair(csym, msym) <- definitions.boxMethod)
+ yield (msym -> classLiteral(primitiveTypeMap(csym)))
+
+ unboxResultType =
+ for(Pair(csym, msym) <- definitions.unboxMethod)
+ yield (msym -> primitiveTypeMap(csym))
+
+ // boxed classes are looked up in the `exemplars` map by jvmWiseLUB().
+ // Other than that, they aren't needed there (e.g., `isSubtypeOf()` special-cases boxed classes, similarly for others).
+ val boxedClasses = List(BoxedBooleanClass, BoxedCharacterClass, BoxedByteClass, BoxedShortClass, BoxedIntClass, BoxedLongClass, BoxedFloatClass, BoxedDoubleClass)
+ for(csym <- boxedClasses) {
+ val key = brefType(csym.javaBinaryName.toTypeName)
+ val tr = buildExemplar(key, csym)
+ symExemplars.put(csym, tr)
+ exemplars.put(tr.c, tr)
+ }
+
+ // reversePrimitiveMap = (primitiveTypeMap map { case (s, pt) => (s.tpe, pt) } map (_.swap)).toMap
+
+ hashMethodSym = getMember(ScalaRunTimeModule, nme.hash_)
+
+ // TODO avoid going through missingHook for every line in the REPL: https://github.com/scala/scala/commit/8d962ed4ddd310cc784121c426a2e3f56a112540
+ AndroidParcelableInterface = rootMirror.getClassIfDefined("android.os.Parcelable")
+ AndroidCreatorClass = rootMirror.getClassIfDefined("android.os.Parcelable$Creator")
+
+ // the following couldn't be eager vals in Phase constructors:
+ // that might cause cycles before Global has finished initialization.
+ BeanInfoAttr = rootMirror.getRequiredClass("scala.beans.BeanInfo")
+
+ String_valueOf = {
+ getMember(StringModule, nme.valueOf) filter (sym =>
+ sym.info.paramTypes match {
+ case List(pt) => pt.typeSymbol == ObjectClass
+ case _ => false
+ }
+ )
+ }
+
+ ArrayInterfaces = Set(JavaCloneableClass, JavaSerializableClass) map exemplar
+
+ StringReference = exemplar(StringClass).c
+ StringBuilderReference = exemplar(StringBuilderClass).c
+ ThrowableReference = exemplar(ThrowableClass).c
+ jlCloneableReference = exemplar(JavaCloneableClass).c
+ jlNPEReference = exemplar(NullPointerExceptionClass).c
+ jioSerializableReference = exemplar(JavaSerializableClass).c
+ scalaSerializableReference = exemplar(SerializableClass).c
+ classCastExceptionReference = exemplar(ClassCastExceptionClass).c
+
+ lateClosureInterfaces = Array(exemplar(SerializableClass))
+
+ /*
+ * The bytecode emitter special-cases String concatenation, in that three methods of `JCodeMethodN`
+ * ( `genStartConcat()` , `genStringConcat()` , and `genEndConcat()` )
+ * don't obtain the method descriptor of the callee via `asmMethodType()` (as normally done)
+ * but directly emit callsites on StringBuilder using literal constant for method descriptors.
+ * In order to make sure those method descriptors are available as BTypes, they are initialized here.
+ */
+ BType.getMethodType("()V") // necessary for JCodeMethodN.genStartConcat
+ BType.getMethodType("()Ljava/lang/String;") // necessary for JCodeMethodN.genEndConcat
+
+ PartialFunctionReference = exemplar(PartialFunctionClass).c
+ for(idx <- 0 to definitions.MaxFunctionArity) {
+ FunctionReference(idx) = exemplar(FunctionClass(idx))
+ AbstractFunctionReference(idx) = exemplar(AbstractFunctionClass(idx))
+ abstractFunctionArityMap += (AbstractFunctionReference(idx).c -> idx)
+ AbstractPartialFunctionReference = exemplar(AbstractPartialFunctionClass).c
+ }
+
+ // later a few analyses (e.g. refreshInnerClasses) will look up BTypes based on descriptors in instructions
+ // we make sure those BTypes can be found via lookup as opposed to creating them on the fly.
+ BoxesRunTime = brefType("scala/runtime/BoxesRunTime")
+ asmBoxTo.values foreach { mnat: MethodNameAndType => BType.getMethodType(mnat.mdesc) }
+ asmUnboxTo.values foreach { mnat: MethodNameAndType => BType.getMethodType(mnat.mdesc) }
+
+ }
+
+ /*
+ * must-single-thread
+ */
+ def clearBCodeTypes() {
+ symExemplars.clear()
+ exemplars.clear()
+ }
+
+ val PublicStatic = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC
+ val PublicStaticFinal = asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_FINAL
+
+ val strMODULE_INSTANCE_FIELD = nme.MODULE_INSTANCE_FIELD.toString
+
+ // ------------------------------------------------
+ // accessory maps tracking the isInterface, innerClasses, superClass, and supportedInterfaces relations,
+ // allowing answering `conforms()` without resorting to typer.
+ // ------------------------------------------------
+
+ val exemplars = new java.util.concurrent.ConcurrentHashMap[BType, Tracked]
+ val symExemplars = new java.util.concurrent.ConcurrentHashMap[Symbol, Tracked]
+
+ /*
+ * Typically, a question about a BType can be answered only by using the BType as lookup key in one or more maps.
+ * A `Tracked` object saves time by holding together information required to answer those questions:
+ *
+ * - `sc` denotes the bytecode-level superclass if any, null otherwise
+ *
+ * - `ifaces` denotes the interfaces explicitly declared.
+ * Not included are those transitively supported, but the utility method `allLeafIfaces()` can be used for that.
+ *
+ * - `innersChain` denotes the containing classes for a non-package-level class `c`, null otherwise.
+ * Note: the optimizer may inline anonymous closures, thus eliding those inner classes
+ * (no physical class file is emitted for elided classes).
+ * Before committing `innersChain` to bytecode, cross-check with the list of elided classes (SI-6546).
+ *
+ * All methods of this class can-multi-thread
+ */
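+ /*
+ * For illustration (hypothetical class, standard-library symbols assumed, not part of this file):
+ *
+ * {{{
+ * class Box extends java.io.Serializable
+ * }}}
+ *
+ * is summarized by a Tracked whose `c` is the reference BType for Box, whose `sc` is the Tracked
+ * for java/lang/Object, whose `ifaces` holds the Tracked for java/io/Serializable,
+ * and whose `innersChain` is null because Box is a top-level class.
+ */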
+ case class Tracked(c: BType, flags: Int, sc: Tracked, ifaces: Array[Tracked], innersChain: Array[InnerClassEntry]) {
+
+ // not a case-field because we initialize it only for JVM classes we emit.
+ private var _directMemberClasses: List[BType] = null
+
+ def directMemberClasses: List[BType] = {
+ assert(_directMemberClasses != null, s"getter directMemberClasses() invoked too early for $c")
+ _directMemberClasses
+ }
+
+ def directMemberClasses_=(bs: List[BType]) {
+ if (_directMemberClasses != null) {
+ // TODO we enter here when both mirror class and plain class are emitted for the same ModuleClassSymbol.
+ assert(_directMemberClasses == bs.sortBy(_.off))
+ }
+ _directMemberClasses = bs.sortBy(_.off)
+ }
+
+ /* `isCompilingStdLib` saves the day when compiling:
+ * (1) scala.Nothing (the test `c.isNonSpecial` fails for it)
+ * (2) scala.Boolean (it has null superClass and is not an interface)
+ */
+ assert(c.isNonSpecial || isCompilingStdLib /*(1)*/, s"non well-formed plain-type: $this")
+ assert(
+ if (sc == null) { (c == ObjectReference) || isInterface || isCompilingStdLib /*(2)*/ }
+ else { (c != ObjectReference) && !sc.isInterface }
+ , "non well-formed plain-type: " + this
+ )
+ assert(ifaces.forall(i => i.c.isNonSpecial && i.isInterface), s"non well-formed plain-type: $this")
+
+ import asm.Opcodes._
+ def hasFlags(mask: Int) = (flags & mask) != 0
+ def isPrivate = hasFlags(ACC_PRIVATE)
+ def isPublic = hasFlags(ACC_PUBLIC)
+ def isAbstract = hasFlags(ACC_ABSTRACT)
+ def isInterface = hasFlags(ACC_INTERFACE)
+ def isFinal = hasFlags(ACC_FINAL)
+ def isSynthetic = hasFlags(ACC_SYNTHETIC)
+ def isSuper = hasFlags(ACC_SUPER)
+ def isDeprecated = hasFlags(ACC_DEPRECATED)
+ def isInnerClass = { innersChain != null }
+ def isTraditionalClosureClass = {
+ isInnerClass && isFinal && (c.getSimpleName.contains(tpnme.ANON_FUN_NAME.toString)) && isFunctionType(c)
+ }
+ def isLambda = {
+ // ie isLCC || isTraditionalClosureClass
+ isFinal && (c.getSimpleName.contains(tpnme.ANON_FUN_NAME.toString)) && isFunctionType(c)
+ }
+ def isSerializable = { isSubtypeOf(jioSerializableReference) }
+
+ /* can-multi-thread */
+ def superClasses: List[Tracked] = {
+ if (sc == null) Nil else sc :: sc.superClasses
+ }
+
+ /* can-multi-thread */
+ def isSubtypeOf(other: BType): Boolean = {
+ assert(other.isNonSpecial, "so called special cases have to be handled in BCodeTypes.conforms()")
+
+ if (c == other) return true;
+
+ val otherIsIface = exemplars.get(other).isInterface
+
+ if (this.isInterface) {
+ if (other == ObjectReference) return true;
+ if (!otherIsIface) return false;
+ }
+ else {
+ if (sc != null && sc.isSubtypeOf(other)) return true;
+ if (!otherIsIface) return false;
+ }
+
+ var idx = 0
+ while (idx < ifaces.length) {
+ if (ifaces(idx).isSubtypeOf(other)) return true;
+ idx += 1
+ }
+
+ false
+ }
+
+ /*
+ * The `ifaces` field lists only those interfaces declared by `c`.
+ * From the set of all supported interfaces, this method discards those which are supertypes of others in the set.
+ */
+ def allLeafIfaces: Set[Tracked] = {
+ if (sc == null) { ifaces.toSet }
+ else { minimizeInterfaces(ifaces.toSet ++ sc.allLeafIfaces) }
+ }
+
+ /*
+ * This type may not support in its entirety the interface given by the argument; however, it may support some of its super-interfaces.
+ * We visualize each such supported subset of the argument's functionality as a "branch". This method returns all such branches.
+ *
+ * In other words, let Ri be a branch supported by `ib`,
+ * this method returns all Ri such that this <:< Ri, where each Ri is maximally deep.
+ */
+ def supportedBranches(ib: Tracked): Set[Tracked] = {
+ assert(ib.isInterface, s"Non-interface argument: $ib")
+
+ val result: Set[Tracked] =
+ if (this.isSubtypeOf(ib.c)) { Set(ib) }
+ else { ib.ifaces.toSet[Tracked].flatMap( bi => supportedBranches(bi) ) }
+
+ checkAllInterfaces(result)
+
+ result
+ }
+
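+ /*
+ * Illustration (hypothetical types, not part of this file): given
+ *
+ * {{{
+ * trait T1; trait T2
+ * trait TB extends T1 with T2
+ * class C extends T1
+ * }}}
+ *
+ * the Tracked for C does not support TB in its entirety, but supportedBranches (applied to
+ * the Tracked for TB) returns the branch it does support, namely the singleton set holding T1.
+ */
+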
+ override def toString = { c.toString }
+
+ }
+
+ /* must-single-thread */
+ final def isDeprecated(sym: Symbol): Boolean = { sym.annotations exists (_ matches definitions.DeprecatedAttr) }
+
+ /* must-single-thread */
+ final def hasInternalName(sym: Symbol) = { sym.isClass || (sym.isModule && !sym.isMethod) }
+
+ /* must-single-thread */
+ def getSuperInterfaces(csym: Symbol): List[Symbol] = {
+
+ // Additional interface parents based on annotations and other cues
+ def newParentForAttr(ann: AnnotationInfo): Symbol = ann.symbol match {
+ case definitions.RemoteAttr => definitions.RemoteInterfaceClass
+ case _ => NoSymbol
+ }
+
+ /* Drop redundant interfaces (which are implemented by some other parent) from the immediate parents.
+ * In other words, no two interfaces in the result are related by subtyping.
+ * This method works on Symbols, a similar one (not duplicate) works on Tracked instances.
+ */
+ def minimizeInterfaces(lstIfaces: List[Symbol]): List[Symbol] = {
+ var rest = lstIfaces
+ var leaves = List.empty[Symbol]
+ while (!rest.isEmpty) {
+ val candidate = rest.head
+ val nonLeaf = leaves exists { lsym => lsym isSubClass candidate }
+ if (!nonLeaf) {
+ leaves = candidate :: (leaves filterNot { lsym => candidate isSubClass lsym })
+ }
+ rest = rest.tail
+ }
+
+ leaves
+ }
+
+ val superInterfaces0: List[Symbol] = csym.mixinClasses
+ val superInterfaces = existingSymbols(superInterfaces0 ++ csym.annotations.map(newParentForAttr)).distinct
+
+ assert(!superInterfaces.contains(NoSymbol), s"found NoSymbol among: ${superInterfaces.mkString}")
+ assert(superInterfaces.forall(s => s.isInterface || s.isTrait), s"found non-interface among: ${superInterfaces.mkString}")
+
+ minimizeInterfaces(superInterfaces)
+ }
+
+ final def exemplarIfExisting(iname: String): Tracked = {
+ val bt = lookupRefBTypeIfExisting(iname)
+ if (bt != null) exemplars.get(bt)
+ else null
+ }
+
+ final def lookupExemplar(iname: String) = {
+ exemplars.get(lookupRefBType(iname))
+ }
+
+ /*
+ * Records the superClass and supportedInterfaces relations,
+ * so that afterwards queries can be answered without resorting to typer.
+ * This method does not add to `innerClassBufferASM`, use `internalName()` or `asmType()` or `toTypeKind()` for that.
+ * On the other hand, this method does record the inner-class status of the argument, via `buildExemplar()`.
+ *
+ * must-single-thread
+ */
+ final def exemplar(csym0: Symbol): Tracked = {
+ assert(csym0 != NoSymbol, "NoSymbol can't be tracked")
+
+ val csym = {
+ if (csym0.isJavaDefined && csym0.isModuleClass) csym0.linkedClassOfClass
+ else if (csym0.isModule) csym0.moduleClass
+ else csym0 // we track only module-classes and plain-classes
+ }
+
+ assert(!primitiveTypeMap.contains(csym) || isCompilingStdLib, s"primitive types not tracked here: ${csym.fullName}")
+ assert(!phantomTypeMap.contains(csym), s"phantom types not tracked here: ${csym.fullName}")
+
+ val opt = symExemplars.get(csym)
+ if (opt != null) {
+ return opt
+ }
+
+ val key = brefType(csym.javaBinaryName.toTypeName)
+ assert(key.isNonSpecial || isCompilingStdLib, s"Not a class to track: ${csym.fullName}")
+
+ // TODO accommodate the fix for SI-5031 of https://github.com/scala/scala/commit/0527b2549bcada2fda2201daa630369b377d0877
+ // TODO Weaken this assertion? buildExemplar() needs to be updated, too. In the meantime, pos/t5031_3 has been moved to test/disabled/pos.
+ val whatWasInExemplars = exemplars.get(key)
+ assert(whatWasInExemplars == null, "Maps `symExemplars` and `exemplars` got out of synch.")
+ val tr = buildExemplar(key, csym)
+ symExemplars.put(csym, tr)
+ if (csym != csym0) { symExemplars.put(csym0, tr) }
+ exemplars.put(tr.c, tr) // tr.c is the hash-consed, internalized, canonical representative for csym's key.
+ tr
+ }
+
+ val EMPTY_TRACKED_SET = Set.empty[Tracked]
+
+ val EMPTY_TRACKED_ARRAY = Array.empty[Tracked]
+ val EMPTY_InnerClassEntry_ARRAY = Array.empty[InnerClassEntry]
+
+ /*
+ * must-single-thread
+ */
+ private def buildExemplar(key: BType, csym: Symbol): Tracked = {
+ val sc =
+ if (csym.isImplClass) definitions.ObjectClass
+ else csym.superClass
+ assert(
+ if (csym == definitions.ObjectClass)
+ sc == NoSymbol
+ else if (csym.isInterface)
+ sc == definitions.ObjectClass
+ else
+ ((sc != NoSymbol) && !sc.isInterface) || isCompilingStdLib,
+ "superClass out of order"
+ )
+ val ifaces = getSuperInterfaces(csym) map exemplar;
+ val ifacesArr =
+ if (ifaces.isEmpty) EMPTY_TRACKED_ARRAY
+ else {
+ val arr = new Array[Tracked](ifaces.size)
+ ifaces.copyToArray(arr)
+ arr
+ }
+
+ val flags = mkFlags(
+ javaFlags(csym),
+ if (isDeprecated(csym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo access flag
+ )
+
+ val tsc = if (sc == NoSymbol) null else exemplar(sc)
+
+ val innersChain = saveInnerClassesFor(csym, key)
+
+ Tracked(key, flags, tsc, ifacesArr, innersChain)
+ }
+
+ /* can-multi-thread */
+ final def mkArray(xs: List[Tracked]): Array[Tracked] = {
+ if (xs.isEmpty) { return EMPTY_TRACKED_ARRAY }
+ val a = new Array[Tracked](xs.size); xs.copyToArray(a); a
+ }
+
+ // ---------------- utilities around interfaces represented by Tracked instances. ----------------
+
+ /* Drop redundant interfaces (those which are implemented by some other interface in the set).
+ * In other words, no two interfaces in the result are related by subtyping.
+ * This method works on Tracked elements, a similar one (not duplicate) works on Symbols.
+ */
+ def minimizeInterfaces(lstIfaces: Set[Tracked]): Set[Tracked] = {
+ checkAllInterfaces(lstIfaces)
+ var rest = lstIfaces.toList
+ var leaves = List.empty[Tracked]
+ while (!rest.isEmpty) {
+ val candidate = rest.head
+ val nonLeaf = leaves exists { leaf => leaf.isSubtypeOf(candidate.c) }
+ if (!nonLeaf) {
+ leaves = candidate :: (leaves filterNot { leaf => candidate.isSubtypeOf(leaf.c) })
+ }
+ rest = rest.tail
+ }
+
+ leaves.toSet
+ }
+
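+ /*
+ * Illustration (hypothetical types, not part of this file): given
+ *
+ * {{{
+ * trait A
+ * trait B extends A
+ * }}}
+ *
+ * minimizeInterfaces keeps only B out of the set {A, B}, because A is a supertype of B and thus redundant.
+ */
+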
+ def allInterfaces(is: Iterable[Tracked]): Boolean = { is forall { i => i.isInterface } }
+ def nonInterfaces(is: Iterable[Tracked]): Iterable[Tracked] = { is filterNot { i => i.isInterface } }
+
+ def checkAllInterfaces(ifaces: Iterable[Tracked]) {
+ assert(allInterfaces(ifaces), s"Non-interfaces: ${nonInterfaces(ifaces).mkString}")
+ }
+
+ /*
+ * Returns the intersection of two sets of interfaces.
+ */
+ def intersection(ifacesA: Set[Tracked], ifacesB: Set[Tracked]): Set[Tracked] = {
+ var acc: Set[Tracked] = Set()
+ for(ia <- ifacesA; ib <- ifacesB) {
+ val ab = ia.supportedBranches(ib)
+ val ba = ib.supportedBranches(ia)
+ acc = minimizeInterfaces(acc ++ ab ++ ba)
+ }
+ checkAllInterfaces(acc)
+
+ acc
+ }
+
+ /*
+ * Subtype check `a <:< b` on BTypes that takes into account the JVM built-in numeric promotions (e.g. BYTE to INT).
+ * Its operation can be visualized more easily in terms of the Java bytecode type hierarchy.
+ * This method used to be called, in the ICode world, TypeKind.<:<()
+ *
+ * can-multi-thread
+ */
+ final def conforms(a: BType, b: BType): Boolean = {
+ if (a.isArray) { // may be null
+ /* Array subtyping is covariant here, as in Java bytecode. Also necessary for Java interop. */
+ if ((b == jlCloneableReference) ||
+ (b == jioSerializableReference) ||
+ (b == AnyRefReference)) { true }
+ else if (b.isArray) { conforms(a.getComponentType, b.getComponentType) }
+ else { false }
+ }
+ else if (a.isBoxed) { // may be null
+ if (b.isBoxed) { a == b }
+ else if (b == AnyRefReference) { true }
+ else if (!(b.hasObjectSort)) { false }
+ else { exemplars.get(a).isSubtypeOf(b) } // e.g., java/lang/Double conforms to java/lang/Number
+ }
+ else if (a.isNullType) { // known to be null
+ if (b.isNothingType) { false }
+ else if (b.isValueType) { false }
+ else { true }
+ }
+ else if (a.isNothingType) { // known to be Nothing
+ true
+ }
+ else if (a.isUnitType) {
+ b.isUnitType
+ }
+ else if (a.hasObjectSort) { // may be null
+ if (a.isNothingType) { true }
+ else if (b.hasObjectSort) { exemplars.get(a).isSubtypeOf(b) }
+ else if (b.isArray) { a.isNullType } // documentation only, because `if(a.isNullType)` (above) covers this case already.
+ else { false }
+ }
+ else {
+
+ def msg = s"(a: $a, b: $b)"
+
+ assert(a.isNonUnitValueType, s"a isn't a non-Unit value type. $msg")
+ assert(b.isValueType, s"b isn't a value type. $msg")
+
+ (a eq b) || (a match {
+ case BOOL | BYTE | SHORT | CHAR => b == INT || b == LONG // TODO Actually, BOOL does NOT conform to LONG. Even with adapt().
+ case _ => a == b
+ })
+ }
+ }
+
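+ /*
+ * A few illustrative consequences of the rules above (not exhaustive):
+ * - conforms(BYTE, INT) is true, reflecting the built-in widening of BYTE to INT;
+ * - conforms(INT, LONG) is false, because widening INT to LONG requires an explicit instruction;
+ * - a null-typed value conforms to any reference type, but not to Nothing or to a value type;
+ * - an array type conforms to java/lang/Cloneable, java/io/Serializable, and AnyRef,
+ * besides conforming covariantly to other array types.
+ */
+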
+ /* The maxValueType of (Char, Byte) and of (Char, Short) is Int, to encompass the negative values of Byte and Short. See ticket #2087.
+ *
+ * can-multi-thread
+ */
+ def maxValueType(a: BType, other: BType): BType = {
+ assert(a.isValueType, "maxValueType() is defined only for 1st arg valuetypes (2nd arg doesn't matter).")
+
+ def uncomparable: Nothing = {
+ abort(s"Uncomparable BTypes: $a with $other")
+ }
+
+ if (a.isNothingType) return other;
+ if (other.isNothingType) return a;
+ if (a == other) return a;
+
+ a match {
+
+ case UNIT => uncomparable
+ case BOOL => uncomparable
+
+ case BYTE =>
+ if (other == CHAR) INT
+ else if (other.isNumericType) other
+ else uncomparable
+
+ case SHORT =>
+ other match {
+ case BYTE => SHORT
+ case CHAR => INT
+ case INT | LONG | FLOAT | DOUBLE => other
+ case _ => uncomparable
+ }
+
+ case CHAR =>
+ other match {
+ case BYTE | SHORT => INT
+ case INT | LONG | FLOAT | DOUBLE => other
+ case _ => uncomparable
+ }
+
+ case INT =>
+ other match {
+ case BYTE | SHORT | CHAR => INT
+ case LONG | FLOAT | DOUBLE => other
+ case _ => uncomparable
+ }
+
+ case LONG =>
+ if (other.isIntegralType) LONG
+ else if (other.isRealType) DOUBLE
+ else uncomparable
+
+ case FLOAT =>
+ if (other == DOUBLE) DOUBLE
+ else if (other.isNumericType) FLOAT
+ else uncomparable
+
+ case DOUBLE =>
+ if (other.isNumericType) DOUBLE
+ else uncomparable
+
+ case _ => uncomparable
+ }
+ }
+
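+ /*
+ * Illustrative results of the cases above:
+ * maxValueType(BYTE, CHAR) == INT, maxValueType(SHORT, INT) == INT,
+ * maxValueType(INT, FLOAT) == FLOAT, and maxValueType(LONG, FLOAT) == DOUBLE.
+ */
+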
+ /* Takes promotions of numeric primitives into account.
+ *
+ * can-multi-thread
+ */
+ final def maxType(a: BType, other: BType): BType = {
+ if (a.isValueType) { maxValueType(a, other) }
+ else {
+ if (a.isNothingType) return other;
+ if (other.isNothingType) return a;
+ if (a == other) return a;
+ // Approximate `lub`. The common type of two references is always AnyRef.
+ // For the 'real' least upper bound wrt subclassing, use method 'lub'.
+ assert(a.isArray || a.isBoxed || a.hasObjectSort, s"This is not a valuetype and it's not something else, what is it? $a")
+ // TODO For some reason, ICode thinks `REFERENCE(...).maxType(BOXED(whatever))` is `uncomparable`. Here, that has maxType AnyRefReference.
+ // BTW, when swapping arguments, ICode says BOXED(whatever).maxType(REFERENCE(...)) == AnyRefReference, so I guess the above was an oversight in REFERENCE.maxType()
+ if (other.isRefOrArrayType) { AnyRefReference }
+ else { abort(s"Uncomparable BTypes: $a with $other") }
+ }
+ }
+
+ /*
+ * Whether the argument (the signature of a method) takes one or more arguments
+ * of Function or PartialFunction type (in particular, an anonymous closure).
+ *
+ * can-multi-thread
+ */
+ final def isHigherOrderMethod(mtype: BType): Boolean = {
+ assert(mtype.sort == BType.METHOD)
+
+ val ats = mtype.getArgumentTypes
+ var idx = 0
+ while (idx < ats.length) {
+ val t = ats(idx)
+ if (isFunctionType(t) || isPartialFunctionType(t)) {
+ return true
+ }
+ idx += 1
+ }
+ false
+ }
+
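+ /*
+ * For example (hypothetical method, not part of this file), a method declared as
+ *
+ * {{{
+ * def applyTwice(f: Int => Int, x: Int): Int = f(f(x))
+ * }}}
+ *
+ * erases to a descriptor whose first argument is scala/Function1, so isHigherOrderMethod
+ * returns true for the corresponding method BType.
+ */
+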
+ /*
+ * Whether the argument is a subtype of
+ * scala.PartialFunction[-A, +B] extends (A => B)
+ * N.B.: this method returns true for a scala.runtime.AbstractPartialFunction
+ *
+ * can-multi-thread
+ */
+ def isPartialFunctionType(t: BType): Boolean = {
+ (t.hasObjectSort) && exemplars.get(t).isSubtypeOf(PartialFunctionReference)
+ }
+
+ /*
+ * Whether the argument is a subtype of
+ * scala.runtime.AbstractPartialFunction[-T1, +R] extends Function1[T1, R] with PartialFunction[T1, R]
+ *
+ * can-multi-thread
+ */
+ def isAbstractPartialFunctionType(t: BType): Boolean = {
+ (t.hasObjectSort) && exemplars.get(t).isSubtypeOf(AbstractPartialFunctionReference)
+ }
+
+ /*
+ * Whether the argument is a subtype of scala.FunctionX where 0 <= X <= definitions.MaxFunctionArity
+ *
+ * can-multi-thread
+ */
+ def isFunctionType(t: BType): Boolean = {
+ if (!t.hasObjectSort) return false
+ var idx = 0
+ val et: Tracked = exemplars.get(t)
+ while (idx <= definitions.MaxFunctionArity) {
+ if (et.isSubtypeOf(FunctionReference(idx).c)) {
+ return true
+ }
+ idx += 1
+ }
+ false
+ }
+
+ def isClosureClass(bt: BType): Boolean = {
+ val tr = exemplars.get(bt); (tr != null && tr.isLambda)
+ }
+
+ /*
+ * Whether the argument is a subtype of scala.runtime.AbstractFunctionX where 0 <= X <= definitions.MaxFunctionArity
+ *
+ * can-multi-thread
+ */
+ def isAbstractFunctionType(t: BType): Boolean = {
+ if (!t.hasObjectSort) return false
+ var idx = 0
+ val et: Tracked = exemplars.get(t)
+ while (idx <= definitions.MaxFunctionArity) {
+ if (et.isSubtypeOf(AbstractFunctionReference(idx).c)) {
+ return true
+ }
+ idx += 1
+ }
+ false
+ }
+
+ /*
+ * For an argument of exactly one of the types
+ * scala.runtime.AbstractFunctionX where 0 <= X <= definitions.MaxFunctionArity
+ * returns the function arity, -1 otherwise.
+ *
+ * can-multi-thread
+ */
+ def abstractFunctionArity(t: BType): Int = {
+ abstractFunctionArityMap.getOrElse(t, -1)
+ }
+
+ /*
+ * must-single-thread
+ */
+ def isTopLevelModule(sym: Symbol): Boolean = {
+ exitingPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
+ }
+
+ /*
+ * must-single-thread
+ */
+ def isStaticModule(sym: Symbol): Boolean = {
+ sym.isModuleClass && !sym.isImplClass && !sym.isLifted
+ }
+
+ // ---------------------------------------------------------------------
+ // ---------------- InnerClasses attribute (JVMS 4.7.6) ----------------
+ // ---------------------------------------------------------------------
+
+ val INNER_CLASSES_FLAGS =
+ (asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_PRIVATE | asm.Opcodes.ACC_PROTECTED |
+ asm.Opcodes.ACC_STATIC | asm.Opcodes.ACC_INTERFACE | asm.Opcodes.ACC_ABSTRACT | asm.Opcodes.ACC_FINAL)
+
+ /*
+ * @param name the internal name of an inner class.
+ * @param outerName the internal name of the class to which the inner class belongs.
+ * May be `null` for non-member inner classes (ie for a Java local class or a Java anonymous class).
+ * @param innerName the (simple) name of the inner class inside its enclosing class. It's `null` for anonymous inner classes.
+ * @param access the access flags of the inner class as originally declared in the enclosing class.
+ */
+ case class InnerClassEntry(name: String, outerName: String, innerName: String, access: Int) {
+ assert(name != null, "Null isn't good as class name in an InnerClassEntry.")
+ }
+
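+ /*
+ * For example (hypothetical classes, not part of this file), given
+ *
+ * {{{
+ * class A { class B }
+ * }}}
+ *
+ * the entry describing B has name "A$B", outerName "A", innerName "B", and B's access flags,
+ * whereas an anonymous class defined inside a method of A gets a null outerName and a null innerName.
+ */
+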
+ /* For a given symbol, return the symbol corresponding to the class that should be declared as an inner class.
+ *
+ * For example:
+ * class A {
+ * class B
+ * object C
+ * }
+ *
+ * then this method will return:
+ * NoSymbol for A,
+ * the same symbol for A.B (corresponding to A$B class), and
+ * A$C$ symbol for A.C.
+ *
+ * must-single-thread
+ */
+ def innerClassSymbolFor(s: Symbol): Symbol =
+ if (s.isClass) s else if (s.isModule) s.moduleClass else NoSymbol
+
+ /*
+ * Computes the chain of inner-classes (over the is-member-of relation) for the given argument.
+ * The resulting chain will be cached in `exemplars`.
+ *
+ * The chain thus cached is valid during this compiler run, see in contrast
+ * `innerClassBufferASM` for a cache that is valid only for the class being emitted.
+ *
+ * The argument can be any symbol, but given that this method is invoked only from `buildExemplar()`,
+ * in practice it has been vetted to be a class-symbol.
+ *
+ * Returns:
+ *
+ * - a non-empty array of entries for an inner-class argument.
+ * The array's first element is the outermost top-level class,
+ * the array's last element corresponds to csym.
+ *
+ * - null otherwise.
+ *
+ * This method does not add to `innerClassBufferASM`, use instead `exemplar()` for that.
+ *
+ * must-single-thread
+ */
+ final def saveInnerClassesFor(csym: Symbol, csymTK: BType): Array[InnerClassEntry] = {
+
+ val ics = innerClassSymbolFor(csym)
+ if (ics == NoSymbol) {
+ return null
+ }
+ assert(ics == csym, s"Disagreement between innerClassSymbolFor() and exemplar()'s tracked symbol for the same input: ${csym.fullName}")
+
+ var chain: List[Symbol] = Nil
+ var x = ics
+ while (x ne NoSymbol) {
+ assert(x.isClass, s"not a class symbol: ${x.fullName}")
+ val isInner = !x.rawowner.isPackageClass
+ if (isInner) {
+ chain ::= x
+ x = innerClassSymbolFor(x.rawowner)
+ } else {
+ x = NoSymbol
+ }
+ }
+
+ // now that we have all of `ics`, `csym`, and soon the inner-classes-chain, it's too tempting not to cache.
+ if (chain.isEmpty) { null }
+ else {
+ val arr = new Array[InnerClassEntry](chain.size)
+ (chain map toInnerClassEntry).copyToArray(arr)
+
+ arr
+ }
+ }
+
+ /*
+ * must-single-thread
+ */
+ private def toInnerClassEntry(innerSym: Symbol): InnerClassEntry = {
+
+ /* The outer name for this inner class. Note that it returns null
+ * when the inner class should not get an index in the constant pool.
+ * That means non-member classes (anonymous). See Section 4.7.5 in the JVMS.
+ */
+ def outerName(innerSym: Symbol): Name = {
+ if (innerSym.originalEnclosingMethod != NoSymbol)
+ null
+ else {
+ val outerName = innerSym.rawowner.javaBinaryName
+ if (isTopLevelModule(innerSym.rawowner)) nme.stripModuleSuffix(outerName)
+ else outerName
+ }
+ }
+
+ def innerName(innerSym: Symbol): String = {
+ if (innerSym.isAnonymousClass || innerSym.isAnonymousFunction)
+ null
+ else
+ innerSym.rawname + innerSym.moduleSuffix
+ }
+
+ val flagsWithFinal: Int = mkFlags(
+ if (innerSym.rawowner.hasModuleFlag) asm.Opcodes.ACC_STATIC else 0,
+ javaFlags(innerSym),
+ if (isDeprecated(innerSym)) asm.Opcodes.ACC_DEPRECATED else 0 // ASM pseudo-access flag
+ ) & (INNER_CLASSES_FLAGS | asm.Opcodes.ACC_DEPRECATED)
+ val flags = if (innerSym.isModuleClass) flagsWithFinal & ~asm.Opcodes.ACC_FINAL else flagsWithFinal // For SI-5676, object overriding.
+
+ val jname = innerSym.javaBinaryName.toString // never null
+ val oname = { // null when method-enclosed
+ val on = outerName(innerSym)
+ if (on == null) null else on.toString
+ }
+ val iname = { // null for anonymous inner class
+ val in = innerName(innerSym)
+ if (in == null) null else in.toString
+ }
+
+ InnerClassEntry(jname, oname, iname, flags)
+ }
+
+ // --------------------------------------------
+ // ---------------- Java flags ----------------
+ // --------------------------------------------
+
+ /*
+ * can-multi-thread
+ */
+ final def hasPublicBitSet(flags: Int) = ((flags & asm.Opcodes.ACC_PUBLIC) != 0)
+
+ /*
+ * must-single-thread
+ */
+ final def isRemote(s: Symbol) = (s hasAnnotation definitions.RemoteAttr)
+
+ /*
+ * Return the Java modifiers for the given symbol.
+ * Java modifiers for classes:
+ * - public, abstract, final, strictfp (not used)
+ * for interfaces:
+ * - the same as for classes, without 'final'
+ * for fields:
+ * - public, private (*)
+ * - static, final
+ * for methods:
+ * - the same as for fields, plus:
+ * - abstract, synchronized (not used), strictfp (not used), native (not used)
+ *
+ * (*) protected cannot be used, since inner classes 'see' protected members,
+ * and they would fail verification after being lifted.
+ *
+ * must-single-thread
+ */
+ def javaFlags(sym: Symbol): Int = {
+ // constructors of module classes should be private
+ // PP: why are they only being marked private at this stage and not earlier?
+ val privateFlag =
+ sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModule(sym.owner))
+
+ // Final: the only fields which can receive ACC_FINAL are eager vals.
+ // Neither vars nor lazy vals can, because:
+ //
+ // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3
+ // "Another problem is that the specification allows aggressive
+ // optimization of final fields. Within a thread, it is permissible to
+ // reorder reads of a final field with those modifications of a final
+ // field that do not take place in the constructor."
+ //
+ // A var or lazy val which is marked final still has meaning to the
+ // scala compiler. The word final is heavily overloaded unfortunately;
+ // for us it means "not overridable". At present you can't override
+ // vars regardless; this may change.
+ //
+ // The logic does not check .isFinal (which checks flags for the FINAL flag,
+ // and includes symbols marked lateFINAL); instead it inspects rawflags, so
+ // we can exclude lateFINAL. Such symbols are eligible for inlining, but to
+ // avoid breaking proxy software which depends on subclassing, we do not
+ // emit ACC_FINAL.
+ // Nested objects won't receive ACC_FINAL in order to allow for their overriding.
+
+ val finalFlag = (
+ (((sym.rawflags & symtab.Flags.FINAL) != 0) || isTopLevelModule(sym))
+ && !sym.enclClass.isInterface
+ && !sym.isClassConstructor
+ && !sym.isMutable // lazy vals and vars both
+ )
+
+ // Primitives are "abstract final" to prohibit instantiation
+ // without having to provide any implementations, but that is an
+ // illegal combination of modifiers at the bytecode level so
+ // suppress final if abstract is present.
+ import asm.Opcodes._
+ mkFlags(
+ if (privateFlag) ACC_PRIVATE else ACC_PUBLIC,
+ if (sym.isDeferred || sym.hasAbstractFlag) ACC_ABSTRACT else 0,
+ if (sym.isInterface) ACC_INTERFACE else 0,
+ if (finalFlag && !sym.hasAbstractFlag) ACC_FINAL else 0,
+ if (sym.isStaticMember) ACC_STATIC else 0,
+ if (sym.isBridge) ACC_BRIDGE | ACC_SYNTHETIC else 0,
+ if (sym.isArtifact) ACC_SYNTHETIC else 0,
+ if (sym.isClass && !sym.isInterface) ACC_SUPER else 0,
+ if (sym.isVarargsMethod) ACC_VARARGS else 0,
+ if (sym.hasFlag(symtab.Flags.SYNCHRONIZED)) ACC_SYNCHRONIZED else 0
+ )
+ }
+
+ /*
+ * must-single-thread
+ */
+ def javaFieldFlags(sym: Symbol) = {
+ javaFlags(sym) | mkFlags(
+ if (sym hasAnnotation definitions.TransientAttr) asm.Opcodes.ACC_TRANSIENT else 0,
+ if (sym hasAnnotation definitions.VolatileAttr) asm.Opcodes.ACC_VOLATILE else 0,
+ if (sym.isMutable) 0 else asm.Opcodes.ACC_FINAL
+ )
+ }
+
+} // end of class BCodeTypes
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
index 0df828393d..8e6c09213f 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BytecodeWriters.scala
@@ -22,13 +22,13 @@ trait BytecodeWriters {
val global: Global
import global._
- private def outputDirectory(sym: Symbol): AbstractFile =
+ def outputDirectory(sym: Symbol): AbstractFile =
settings.outputDirs outputDirFor enteringFlatten(sym.sourceFile)
/**
* @param clsName cls.getName
*/
- private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = {
+ def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = {
def ensureDirectory(dir: AbstractFile): AbstractFile =
if (dir.isDirectory) dir
else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory", dir)
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
new file mode 100644
index 0000000000..e55a3baed0
--- /dev/null
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala
@@ -0,0 +1,203 @@
+/* NSC -- new Scala compiler
+ * Copyright 2005-2012 LAMP/EPFL
+ * @author Martin Odersky
+ */
+
+
+package scala
+package tools.nsc
+package backend
+package jvm
+
+import scala.collection.{ mutable, immutable }
+import scala.annotation.switch
+
+import scala.tools.asm
+
+/*
+ * Prepare in-memory representations of classfiles using the ASM Tree API, and serialize them to disk.
+ *
+ * `BCodePhase.apply(CompilationUnit)` is invoked by some external force and that sets in motion:
+ * - visiting each ClassDef contained in that CompilationUnit
+ * - lowering the ClassDef into:
+ * (a) an optional mirror class,
+ * (b) a plain class, and
+ * (c) an optional bean class.
+ * - each of the ClassNodes above is lowered into a byte-array (ie into a classfile) and serialized.
+ *
+ * Plain, mirror, and bean classes are built respectively by PlainClassBuilder, JMirrorBuilder, and JBeanInfoBuilder.
+ *
+ * @author Miguel Garcia, http://lamp.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/
+ * @version 1.0
+ *
+ */
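+/*
+ * For illustration (hypothetical object, not part of this file): a top-level
+ *
+ * {{{
+ * object Printer { def greet() = println("hi") }
+ * }}}
+ *
+ * with no companion class is lowered to a plain class Printer$ and a mirror class Printer
+ * (holding static forwarders), while a class carrying the @scala.beans.BeanInfo annotation
+ * additionally gets a bean info class.
+ */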
+abstract class GenBCode extends BCodeSyncAndTry {
+ import global._
+
+ val phaseName = "jvm"
+
+ override def newPhase(prev: Phase) = new BCodePhase(prev)
+
+ final class PlainClassBuilder(cunit: CompilationUnit) extends SyncAndTryBuilder(cunit)
+
+ class BCodePhase(prev: Phase) extends StdPhase(prev) {
+
+ override def name = phaseName
+ override def description = "Generate bytecode from ASTs using the ASM library"
+ override def erasedTypes = true
+
+ private var bytecodeWriter : BytecodeWriter = null
+ private var mirrorCodeGen : JMirrorBuilder = null
+ private var beanInfoCodeGen : JBeanInfoBuilder = null
+
+ private var needsOutFolder = false // whether getOutFolder(claszSymbol) should be invoked for each claszSymbol
+
+ val caseInsensitively = mutable.Map.empty[String, Symbol]
+
+ /*
+ * Checks for duplicate internal names case-insensitively,
+ * builds ASM ClassNodes for mirror, plain, and bean classes.
+ *
+ */
+ def visit(arrivalPos: Int, cd: ClassDef, cunit: CompilationUnit) {
+ val claszSymbol = cd.symbol
+
+ // GenASM checks this before classfiles are emitted, https://github.com/scala/scala/commit/e4d1d930693ac75d8eb64c2c3c69f2fc22bec739
+ val lowercaseJavaClassName = claszSymbol.javaClassName.toLowerCase
+ caseInsensitively.get(lowercaseJavaClassName) match {
+ case None =>
+ caseInsensitively.put(lowercaseJavaClassName, claszSymbol)
+ case Some(dupClassSym) =>
+ cunit.warning(
+ claszSymbol.pos,
+ s"Class ${claszSymbol.javaClassName} differs only in case from ${dupClassSym.javaClassName}. " +
+ "Such classes will overwrite one another on case-insensitive filesystems."
+ )
+ }
+
+ // -------------- mirror class, if needed --------------
+ val mirrorC =
+ if (isStaticModule(claszSymbol) && isTopLevelModule(claszSymbol)) {
+ if (claszSymbol.companionClass == NoSymbol) {
+ mirrorCodeGen.genMirrorClass(claszSymbol, cunit)
+ } else {
+ log(s"No mirror class for module with linked class: ${claszSymbol.fullName}");
+ null
+ }
+ } else null
+
+ // -------------- "plain" class --------------
+ val pcb = new PlainClassBuilder(cunit)
+ pcb.genPlainClass(cd)
+ val outF = if (needsOutFolder) getOutFolder(claszSymbol, pcb.thisName, cunit) else null;
+ val plainC = pcb.cnode
+
+ // -------------- bean info class, if needed --------------
+ val beanC =
+ if (claszSymbol hasAnnotation BeanInfoAttr) {
+ beanInfoCodeGen.genBeanInfoClass(
+ claszSymbol, cunit,
+ fieldSymbols(claszSymbol),
+ methodSymbols(cd)
+ )
+ } else null
+
+ // ----------- serialize classfiles to disk
+
+ def getByteArray(cn: asm.tree.ClassNode): Array[Byte] = {
+ val cw = new CClassWriter(extraProc)
+ cn.accept(cw)
+ cw.toByteArray
+ }
+
+ if (mirrorC != null) {
+ sendToDisk(mirrorC.name, getByteArray(mirrorC), outF)
+ }
+ sendToDisk(plainC.name, getByteArray(plainC), outF)
+ if (beanC != null) {
+ sendToDisk(beanC.name, getByteArray(beanC), outF)
+ }
+
+ } // end of method visit()
+
+ var arrivalPos = 0
+
+ /*
+ * A run of the BCodePhase phase comprises:
+ *
+ * (a) set-up steps (most notably supporting maps in `BCodeTypes`,
+ * but also "the" writer where class files in byte-array form go)
+ *
+ * (b) building of ASM ClassNodes, their optimization and serialization.
+ *
+ * (c) tear down (closing the classfile-writer and clearing maps)
+ *
+ */
+ override def run() {
+
+ arrivalPos = 0 // just in case
+ scalaPrimitives.init
+ initBCodeTypes()
+
+ // initBytecodeWriter invokes fullName, thus we have to run it before the typer-dependent thread is activated.
+ bytecodeWriter = initBytecodeWriter(cleanup.getEntryPoints)
+ mirrorCodeGen = new JMirrorBuilder
+ beanInfoCodeGen = new JBeanInfoBuilder
+
+ needsOutFolder = bytecodeWriter.isInstanceOf[ClassBytecodeWriter]
+
+ super.run()
+
+ // closing output files.
+ bytecodeWriter.close()
+
+ caseInsensitively.clear()
+
+ /* TODO Bytecode can be verified (now that all classfiles have been written to disk)
+ *
+ * (1) asm.util.CheckClassAdapter.verify()
+ * public static void verify(ClassReader cr, ClassLoader loader, boolean dump, PrintWriter pw)
+ * passing a custom ClassLoader to verify inter-dependent classes.
+ * Alternatively,
+ * - an offline-bytecode verifier could be used (e.g. Maxine brings one as a separate tool).
+ * - -Xverify:all
+ *
+ * (2) if requested, check-java-signatures, over and beyond the syntactic checks in `getGenericSignature()`
+ *
+ */
+
+ // clearing maps
+ clearBCodeTypes()
+ }
+
+ def sendToDisk(jclassName: String, jclassBytes: Array[Byte], outFolder: _root_.scala.tools.nsc.io.AbstractFile) {
+ try {
+ val outFile =
+ if (outFolder == null) null
+ else getFileForClassfile(outFolder, jclassName, ".class")
+ bytecodeWriter.writeClass(jclassName, jclassName, jclassBytes, outFile)
+ }
+ catch {
+ case e: FileConflictException =>
+ error(s"error writing $jclassName: ${e.getMessage}")
+ }
+ }
+
+ override def apply(cunit: CompilationUnit): Unit = {
+
+ def gen(tree: Tree) {
+ tree match {
+ case EmptyTree => ()
+ case PackageDef(_, stats) => stats foreach gen
+ case cd: ClassDef =>
+ visit(arrivalPos, cd, cunit)
+ arrivalPos += 1
+ }
+ }
+
+ gen(cunit.body)
+ }
+
+ } // end of class BCodePhase
+
+} // end of class GenBCode
diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
index a6eedbd07e..56191cc981 100644
--- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
+++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala
@@ -679,9 +679,18 @@ abstract class Inliners extends SubComponent {
}
*/
- def checkField(f: Symbol) = check(f, f.isPrivate && !canMakePublic(f))
- def checkSuper(n: Symbol) = check(n, n.isPrivate || !n.isClassConstructor)
- def checkMethod(n: Symbol) = check(n, n.isPrivate)
+
+ def isPrivateForInlining(sym: Symbol): Boolean = {
+ if (sym.isJavaDefined) {
+ def check(sym: Symbol) = !(sym.isPublic || sym.isProtected)
+ check(sym) || check(sym.owner) // SI-7582 Must check the enclosing class *and* the symbol for Java.
+ }
+ else sym.isPrivate // Scala never emits package-private bytecode
+ }
+
+ def checkField(f: Symbol) = check(f, isPrivateForInlining(f) && !canMakePublic(f))
+ def checkSuper(n: Symbol) = check(n, isPrivateForInlining(n) || !n.isClassConstructor)
+ def checkMethod(n: Symbol) = check(n, isPrivateForInlining(n))
def getAccess(i: Instruction) = i match {
case CALL_METHOD(n, SuperCall(_)) => checkSuper(n)
diff --git a/src/compiler/scala/tools/nsc/dependencies/Changes.scala b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
index c341d33a62..81d64421b3 100644
--- a/src/compiler/scala/tools/nsc/dependencies/Changes.scala
+++ b/src/compiler/scala/tools/nsc/dependencies/Changes.scala
@@ -16,7 +16,7 @@ abstract class Changes {
import compiler._
import symtab.Flags._
- abstract class Change
+ sealed abstract class Change
private lazy val annotationsChecked =
List(definitions.SpecializedClass) // Any others that should be checked?
@@ -38,7 +38,7 @@ abstract class Changes {
/** An entity in source code, either a class or a member definition.
* Name is fully-qualified.
*/
- abstract class Entity
+ sealed abstract class Entity
case class Class(name: String) extends Entity
case class Definition(name: String) extends Entity
diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
index b5cc89c0c8..0536be92cf 100644
--- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala
@@ -9,8 +9,9 @@ package nsc
package settings
import io.{ AbstractFile, Jar, Path, PlainFile, VirtualDirectory }
-import scala.reflect.internal.util.StringOps
+import scala.collection.generic.Clearable
import scala.io.Source
+import scala.reflect.internal.util.StringOps
import scala.reflect.{ ClassTag, classTag }
/** A mutable Settings object.
@@ -542,7 +543,7 @@ class MutableSettings(val errorFn: String => Unit)
name: String,
val arg: String,
descr: String)
- extends Setting(name, descr) {
+ extends Setting(name, descr) with Clearable {
type T = List[String]
protected var v: T = Nil
def appendToValue(str: String) { value ++= List(str) }
@@ -555,6 +556,7 @@ class MutableSettings(val errorFn: String => Unit)
}
override def tryToSetColon(args: List[String]) = tryToSet(args)
override def tryToSetFromPropertyValue(s: String) = tryToSet(s.trim.split(',').toList) // used from ide
+ def clear(): Unit = (v = Nil)
def unparse: List[String] = value map (name + ":" + _)
withHelpSyntax(name + ":<" + arg + ">")
@@ -608,44 +610,49 @@ class MutableSettings(val errorFn: String => Unit)
name: String,
descr: String,
default: String
- ) extends Setting(name, mkPhasesHelp(descr, default)) {
+ ) extends Setting(name, mkPhasesHelp(descr, default)) with Clearable {
private[nsc] def this(name: String, descr: String) = this(name, descr, "")
type T = List[String]
- protected var v: T = Nil
- override def value = if (v contains "all") List("all") else super.value
- private lazy val (numericValues, stringValues) =
- value filterNot (_ == "" ) partition (_ forall (ch => ch.isDigit || ch == '-'))
-
- /** A little ad-hoc parsing. If a string is not the name of a phase, it can also be:
- * a phase id: 5
- * a phase id range: 5-10 (inclusive of both ends)
- * a range with no start: -5 means up to and including 5
- * a range with no end: 10- means 10 until completion.
- */
- private def stringToPhaseIdTest(s: String): Int => Boolean = (s indexOf '-') match {
- case -1 => (_ == s.toInt)
- case 0 => (_ <= s.tail.toInt)
- case idx =>
- if (s.last == '-') (_ >= s.init.toInt)
- else (s splitAt idx) match {
- case (s1, s2) => (id => id >= s1.toInt && id <= s2.tail.toInt)
- }
- }
- private lazy val phaseIdTest: Int => Boolean =
- (numericValues map stringToPhaseIdTest) match {
- case Nil => _ => false
- case fns => fns.reduceLeft((f1, f2) => id => f1(id) || f2(id))
+ private[this] var _v: T = Nil
+ private[this] var _numbs: List[(Int,Int)] = Nil
+ private[this] var _names: T = Nil
+ //protected var v: T = Nil
+ protected def v: T = _v
+ protected def v_=(t: T): Unit = {
+ // throws NumberFormat on bad range (like -5-6)
+ def asRange(s: String): (Int,Int) = (s indexOf '-') match {
+ case -1 => (s.toInt, s.toInt)
+ case 0 => (-1, s.tail.toInt)
+ case i if s.last == '-' => (s.init.toInt, Int.MaxValue)
+ case i => (s.take(i).toInt, s.drop(i+1).toInt)
}
+ val numsAndStrs = t filter (_.nonEmpty) partition (_ forall (ch => ch.isDigit || ch == '-'))
+ _numbs = numsAndStrs._1 map asRange
+ _names = numsAndStrs._2
+ _v = t
+ }
+ override def value = if (v contains "all") List("all") else super.value // i.e., v
+ private def numericValues = _numbs
+ private def stringValues = _names
+ private def phaseIdTest(i: Int): Boolean = numericValues exists (_ match {
+ case (min, max) => min <= i && i <= max
+ })
def tryToSet(args: List[String]) =
if (default == "") errorAndValue("missing phase", None)
- else { tryToSetColon(List(default)) ; Some(args) }
+ else tryToSetColon(List(default)) map (_ => args)
+
+ override def tryToSetColon(args: List[String]) = try {
+ args match {
+ case Nil => if (default == "") errorAndValue("missing phase", None)
+ else tryToSetColon(List(default))
+ case xs => value = (value ++ xs).distinct.sorted ; Some(Nil)
+ }
+ } catch { case _: NumberFormatException => None }
+
+ def clear(): Unit = (v = Nil)
- override def tryToSetColon(args: List[String]) = args match {
- case Nil => if (default == "") errorAndValue("missing phase", None) else tryToSetColon(List(default))
- case xs => value = (value ++ xs).distinct.sorted ; Some(Nil)
- }
// we slightly abuse the usual meaning of "contains" here by returning
// true if our phase list contains "all", regardless of the incoming argument
def contains(phName: String) = doAllPhases || containsName(phName)
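The new v_= above parses numeric phase specifiers eagerly into inclusive ranges. A self-contained sketch of that parsing (hypothetical object name, not part of the patch): "5" selects exactly phase 5, "-5" everything up to and including 5, "10-" everything from 10 on, and "5-10" an inclusive range; malformed input such as "-5-6" throws NumberFormatException and is rejected by tryToSetColon.

object PhaseRangeSketch {
  // throws NumberFormatException on a malformed range such as "-5-6"
  def asRange(s: String): (Int, Int) = (s indexOf '-') match {
    case -1                 => (s.toInt, s.toInt)                     // "5"
    case 0                  => (-1, s.tail.toInt)                     // "-5"
    case _ if s.last == '-' => (s.init.toInt, Int.MaxValue)           // "10-"
    case i                  => (s.take(i).toInt, s.drop(i + 1).toInt) // "5-10"
  }

  def phaseIdTest(ranges: List[(Int, Int)])(id: Int): Boolean =
    ranges exists { case (min, max) => min <= id && id <= max }

  def main(args: Array[String]): Unit = {
    val ranges = List("4", "7-9", "15-") map asRange
    println((1 to 20) filter phaseIdTest(ranges)) // Vector(4, 7, 8, 9, 15, 16, 17, 18, 19, 20)
  }
}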
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index fe9165203f..993f735c72 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -39,7 +39,7 @@ trait ScalaSettings extends AbsScalaSettings
protected def futureSettings = List[BooleanSetting]()
/** Enabled under -optimise. */
- protected def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce, YconstOptimization)
+ def optimiseSettings = List[BooleanSetting](inline, inlineHandlers, Xcloselim, Xdce, YconstOptimization)
/** Internal use - syntax enhancements. */
private class EnableSettings[T <: BooleanSetting](val s: T) {
@@ -190,6 +190,7 @@ trait ScalaSettings extends AbsScalaSettings
val Yreifydebug = BooleanSetting("-Yreify-debug", "Trace reification.")
val Ytyperdebug = BooleanSetting("-Ytyper-debug", "Trace all type assignments.")
val Ypatmatdebug = BooleanSetting("-Ypatmat-debug", "Trace pattern matching translation.")
+ val Yquasiquotedebug = BooleanSetting("-Yquasiquote-debug", "Trace quasiquote-related activities.")
/** Groups of Settings.
*/
@@ -198,6 +199,12 @@ trait ScalaSettings extends AbsScalaSettings
val nooptimise = BooleanSetting("-Ynooptimise", "Clears all the flags set by -optimise. Useful for testing optimizations in isolation.") withAbbreviation "-Ynooptimize" disabling optimise::optimiseSettings
val Xexperimental = BooleanSetting("-Xexperimental", "Enable experimental extensions.") enabling experimentalSettings
+ /**
+ * Settings motivated by GenBCode
+ */
+ val Ybackend = ChoiceSetting ("-Ybackend", "choice of bytecode emitter", "Choice of bytecode emitter.",
+ List("GenASM", "GenBCode"),
+ "GenASM")
// Feature extensions
val XmacroSettings = MultiStringSetting("-Xmacro-settings", "option", "Custom settings for macros.")
@@ -220,4 +227,12 @@ trait ScalaSettings extends AbsScalaSettings
/** Test whether this is scaladoc we're looking at */
def isScaladoc = false
+
+ /**
+ * Helper utilities for use by checkConflictingSettings()
+ */
+ def isBCodeActive = !isICodeAskedFor
+ def isBCodeAskedFor = (Ybackend.value != "GenASM")
+ def isICodeAskedFor = ((Ybackend.value == "GenASM") || optimiseSettings.exists(_.value) || writeICode.isSetByUser)
+
}
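A hedged, simplified mirror of how these helpers interact (the real checkConflictingSettings() is not part of this diff): ICode is needed whenever GenASM, the optimizer, or ICode printing is requested, so GenBCode only becomes active when none of those apply.

object BackendChoiceSketch {
  // hypothetical simplification: backend name plus the two ICode-dependent requests
  final case class Settings(backend: String, optimise: Boolean, writeICode: Boolean) {
    def isICodeAskedFor = backend == "GenASM" || optimise || writeICode
    def isBCodeAskedFor = backend != "GenASM"
    def isBCodeActive   = !isICodeAskedFor
    def isConflicting   = isBCodeAskedFor && isICodeAskedFor // e.g. -Ybackend:GenBCode with -optimise
  }

  def main(args: Array[String]): Unit = {
    println(Settings("GenBCode", optimise = false, writeICode = false).isBCodeActive) // true
    println(Settings("GenBCode", optimise = true,  writeICode = false).isConflicting) // true
    println(Settings("GenASM",   optimise = true,  writeICode = false).isConflicting) // false
  }
}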
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
index da1cc0c4cf..4f45043c5e 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala
@@ -11,7 +11,7 @@ package tools.nsc.settings
* Represents a single Scala version in a manner that
* supports easy comparison and sorting.
*/
-abstract class ScalaVersion extends Ordered[ScalaVersion] {
+sealed abstract class ScalaVersion extends Ordered[ScalaVersion] {
def unparse: String
}
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index cbfe5460f6..4c0c16690f 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -517,7 +517,7 @@ abstract class ClassfileParser {
skipMembers() // methods
if (!isScala) {
clazz setFlag sflags
- propagatePackageBoundary(jflags, clazz, staticModule)
+ propagatePackageBoundary(jflags, clazz, staticModule, staticModule.moduleClass)
clazz setInfo classInfo
moduleClass setInfo staticInfo
staticModule setInfo moduleClass.tpe
diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
index a37ef29355..b16ba91916 100644
--- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala
+++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala
@@ -20,6 +20,18 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
/** the following two members override abstract members in Transform */
val phaseName: String = "cleanup"
+ /* used in GenBCode: collects ClassDef symbols owning a main(Array[String]) method */
+ private var entryPoints: List[Symbol] = null
+ def getEntryPoints: List[Symbol] = {
+ assert(settings.isBCodeActive, "Candidate Java entry points are collected here only when GenBCode is in use.")
+ assert(settings.isBCodeActive, "Candidate Java entry points are collected here only when GenBCode is in use.")
+ entryPoints sortBy ("" + _.fullName) // For predictably ordered error messages.
+ }
+
+ override def newPhase(prev: scala.tools.nsc.Phase): StdPhase = {
+ entryPoints = if (settings.isBCodeActive) Nil else null;
+ super.newPhase(prev)
+ }
+
protected def newTransformer(unit: CompilationUnit): Transformer =
new CleanUpTransformer(unit)
@@ -390,6 +402,15 @@ abstract class CleanUp extends Transform with ast.TreeDSL {
override def transform(tree: Tree): Tree = tree match {
+ case _: ClassDef
+ if (entryPoints != null) &&
+ genBCode.isJavaEntryPoint(tree.symbol, currentUnit)
+ =>
+ // collecting symbols for entry points here (as opposed to GenBCode where they are used)
+ // has the advantage of saving an additional pass over all ClassDefs.
+ entryPoints ::= tree.symbol
+ super.transform(tree)
+
/* Transforms dynamic calls (i.e. calls to methods that are undefined
* in the erased type space) to -- dynamically -- unsafe calls using
* reflection. This is used for structural sub-typing of refinement
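The comment above explains why the entry points are gathered here: the cleanup transformer already visits every tree, so collecting during that traversal saves a later pass over all ClassDefs. A hedged, stand-alone sketch of that piggy-backing pattern over a toy tree type:

object CollectWhileTransforming {
  sealed trait Node
  final case class Leaf(n: Int) extends Node
  final case class Branch(l: Node, r: Node) extends Node

  private var evens: List[Int] = Nil // collected as a side effect of the traversal

  def transform(t: Node): Node = t match {
    case Leaf(n) =>
      if (n % 2 == 0) evens ::= n    // gather here, consume in a later phase
      Leaf(n + 1)
    case Branch(l, r) => Branch(transform(l), transform(r))
  }

  def main(args: Array[String]): Unit = {
    transform(Branch(Leaf(1), Branch(Leaf(2), Leaf(4))))
    println(evens.sorted) // List(2, 4)
  }
}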
diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala
index 75fb043070..7dfa7cdf8d 100644
--- a/src/compiler/scala/tools/nsc/transform/Constructors.scala
+++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala
@@ -9,7 +9,6 @@ package transform
import scala.collection.{ mutable, immutable }
import scala.collection.mutable.ListBuffer
import symtab.Flags._
-import util.TreeSet
/** This phase converts classes with parameters into Java-like classes with
* fields, which are assigned to from constructors.
@@ -239,7 +238,8 @@ abstract class Constructors extends Transform with ast.TreeDSL {
// ----------- avoid making parameter-accessor fields for symbols accessed only within the primary constructor --------------
// A sorted set of symbols that are known to be accessed outside the primary constructor.
- val accessedSyms = new TreeSet[Symbol]((x, y) => x isLess y)
+ val ord = Ordering.fromLessThan[Symbol](_ isLess _)
+ val accessedSyms = mutable.TreeSet.empty[Symbol](ord)
// a list of outer accessor symbols and their bodies
var outerAccessors: List[(Symbol, Tree)] = List()
@@ -271,7 +271,7 @@ abstract class Constructors extends Transform with ast.TreeDSL {
case Select(_, _) =>
if (!mustbeKept(tree.symbol)) {
debuglog("accessedSyms += " + tree.symbol.fullName)
- accessedSyms addEntry tree.symbol
+ accessedSyms += tree.symbol
}
super.traverse(tree)
case _ =>
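The same two-line recipe replaces the hand-rolled util.TreeSet both here and in LambdaLift: Ordering.fromLessThan converts the old (x, y) => x isLess y comparison into the Ordering that scala.collection.mutable.TreeSet requires, and += replaces addEntry. A sketch with String standing in for Symbol:

import scala.collection.mutable

object SortedSymSetSketch {
  def main(args: Array[String]): Unit = {
    val ord = Ordering.fromLessThan[String](_ < _) // stand-in for (_ isLess _) on Symbol
    val accessedSyms = mutable.TreeSet.empty[String](ord)
    accessedSyms += "Outer.this.x" // was: accessedSyms addEntry sym
    accessedSyms += "Outer.this.a"
    println(accessedSyms.toList)   // List(Outer.this.a, Outer.this.x), kept sorted by ord
  }
}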
diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
index 672d9d232a..56ec49e962 100644
--- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
+++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala
@@ -208,6 +208,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
companion.moduleClass.newMethod(extensionName, origMeth.pos, origMeth.flags & ~OVERRIDE & ~PROTECTED | FINAL)
setAnnotations origMeth.annotations
)
+ origMeth.removeAnnotation(TailrecClass) // it's on the extension method, now.
companion.info.decls.enter(extensionMeth)
}
@@ -221,15 +222,16 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
val extensionParams = allParameters(extensionMono)
val extensionThis = gen.mkAttributedStableRef(thiz setPos extensionMeth.pos)
- val extensionBody = (
- rhs
+ val extensionBody: Tree = {
+ val tree = rhs
.substituteSymbols(origTpeParams, extensionTpeParams)
.substituteSymbols(origParams, extensionParams)
.substituteThis(origThis, extensionThis)
.changeOwner(origMeth -> extensionMeth)
- )
+ new SubstututeRecursion(origMeth, extensionMeth, unit).transform(tree)
+ }
- // Record the extension method ( FIXME: because... ? )
+ // Record the extension method. Later, in `Extender#transformStats`, these will be added to the companion object.
extensionDefs(companion) += atPos(tree.pos)(DefDef(extensionMeth, extensionBody))
// These three lines are assembling Foo.bar$extension[T1, T2, ...]($this)
@@ -264,4 +266,33 @@ abstract class ExtensionMethods extends Transform with TypingTransformers {
stat
}
}
+
+ final class SubstututeRecursion(origMeth: Symbol, extensionMeth: Symbol,
+ unit: CompilationUnit) extends TypingTransformer(unit) {
+ override def transform(tree: Tree): Tree = tree match {
+ // SI-6574 Rewrite recursive calls against the extension method so they can
+ // be tail call optimized later. The tailcalls phase comes before
+ // erasure, which performs this translation more generally at all call
+ // sites.
+ //
+ // // Source
+ // class C[C] { def meth[M](a: A) = { { <expr>: C[C'] }.meth[M'] } }
+ //
+ // // Translation
+ // class C[C] { def meth[M](a: A) = { { <expr>: C[C'] }.meth[M'](a1) } }
+ // object C { def meth$extension[M, C](this$: C[C], a: A)
+ // = { meth$extension[M', C']({ <expr>: C[C'] })(a1) } }
+ case treeInfo.Applied(sel @ Select(qual, _), targs, argss) if sel.symbol == origMeth =>
+ import gen.CODE._
+ localTyper.typedPos(tree.pos) {
+ val allArgss = List(qual) :: argss
+ val origThis = extensionMeth.owner.companionClass
+ val baseType = qual.tpe.baseType(origThis)
+ val allTargs = targs.map(_.tpe) ::: baseType.typeArgs
+ val fun = gen.mkAttributedTypeApply(THIS(extensionMeth.owner), extensionMeth, allTargs)
+ allArgss.foldLeft(fun)(Apply(_, _))
+ }
+ case _ => super.transform(tree)
+ }
+ }
}
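A hedged illustration of what the SubstututeRecursion rewrite buys (hypothetical example, not from the patch): with the self-recursive call redirected to the generated $extension method before the tailcalls phase runs, @tailrec can now be satisfied on a value-class method.

import scala.annotation.tailrec

// The recursive call below is rewritten to Counter.countDown$extension(...),
// a self tail call of the extension method, so @tailrec succeeds.
class Counter(val n: Int) extends AnyVal {
  @tailrec final def countDown: Int =
    if (n <= 0) n else new Counter(n - 1).countDown
}

object CounterDemo {
  def main(args: Array[String]): Unit =
    println(new Counter(5).countDown) // 0
}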
diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
index 7888198531..ce495ca8ca 100644
--- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
+++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala
@@ -8,9 +8,8 @@ package transform
import symtab._
import Flags._
-import util.TreeSet
import scala.collection.{ mutable, immutable }
-import scala.collection.mutable.{ LinkedHashMap, LinkedHashSet }
+import scala.collection.mutable.{ LinkedHashMap, LinkedHashSet, TreeSet }
abstract class LambdaLift extends InfoTransform {
import global._
@@ -56,6 +55,8 @@ abstract class LambdaLift extends InfoTransform {
class LambdaLifter(unit: CompilationUnit) extends explicitOuter.OuterPathTransformer(unit) {
+ private type SymSet = TreeSet[Symbol]
+
/** A map storing free variables of functions and classes */
private val free = new LinkedHashMap[Symbol, SymSet]
@@ -68,6 +69,12 @@ abstract class LambdaLift extends InfoTransform {
/** Symbols that are called from an inner class. */
private val calledFromInner = new LinkedHashSet[Symbol]
+ private val ord = Ordering.fromLessThan[Symbol](_ isLess _)
+ private def newSymSet = TreeSet.empty[Symbol](ord)
+
+ private def symSet(f: LinkedHashMap[Symbol, SymSet], sym: Symbol): SymSet =
+ f.getOrElseUpdate(sym, newSymSet)
+
/** The set of symbols that need to be renamed. */
private val renamable = newSymSet
@@ -107,13 +114,6 @@ abstract class LambdaLift extends InfoTransform {
/** Buffers for lifted out classes and methods */
private val liftedDefs = new LinkedHashMap[Symbol, List[Tree]]
- private type SymSet = TreeSet[Symbol]
-
- private def newSymSet = new TreeSet[Symbol](_ isLess _)
-
- private def symSet(f: LinkedHashMap[Symbol, SymSet], sym: Symbol): SymSet =
- f.getOrElseUpdate(sym, newSymSet)
-
private def isSameOwnerEnclosure(sym: Symbol) =
sym.owner.logicallyEnclosingMember == currentOwner.logicallyEnclosingMember
@@ -155,8 +155,8 @@ abstract class LambdaLift extends InfoTransform {
else {
val ss = symSet(free, enclosure)
if (!ss(sym)) {
- ss addEntry sym
- renamable addEntry sym
+ ss += sym
+ renamable += sym
changedFreeVars = true
debuglog("" + sym + " is free in " + enclosure)
if (sym.isVariable) sym setFlag CAPTURED
@@ -168,7 +168,7 @@ abstract class LambdaLift extends InfoTransform {
private def markCalled(sym: Symbol, owner: Symbol) {
debuglog("mark called: " + sym + " of " + sym.owner + " is called by " + owner)
- symSet(called, owner) addEntry sym
+ symSet(called, owner) += sym
if (sym.enclClass != owner.enclClass) calledFromInner += sym
}
@@ -195,17 +195,17 @@ abstract class LambdaLift extends InfoTransform {
if (sym.isImplClass)
localImplClasses((sym.owner, tpnme.interfaceName(sym.name))) = sym
else {
- renamable addEntry sym
+ renamable += sym
if (sym.isTrait)
localTraits((sym, sym.name)) = sym.owner
}
}
case DefDef(_, _, _, _, _, _) =>
if (sym.isLocal) {
- renamable addEntry sym
+ renamable += sym
sym setFlag (PrivateLocal | FINAL)
} else if (sym.isPrimaryConstructor) {
- symSet(called, sym) addEntry sym.owner
+ symSet(called, sym) += sym.owner
}
case Ident(name) =>
if (sym == NoSymbol) {
@@ -214,7 +214,7 @@ abstract class LambdaLift extends InfoTransform {
val owner = currentOwner.logicallyEnclosingMember
if (sym.isTerm && !sym.isMethod) markFree(sym, owner)
else if (sym.isMethod) markCalled(sym, owner)
- //symSet(called, owner) addEntry sym
+ //symSet(called, owner) += sym
}
case Select(_, _) =>
if (sym.isConstructor && sym.owner.isLocal)
@@ -224,7 +224,7 @@ abstract class LambdaLift extends InfoTransform {
super.traverse(tree)
} catch {//debug
case ex: Throwable =>
- Console.println("exception when traversing " + tree)
+ Console.println(s"$ex while traversing $tree")
throw ex
}
}
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala
index e0b1d9ea80..1c44e86aca 100644
--- a/src/compiler/scala/tools/nsc/transform/Mixin.scala
+++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala
@@ -119,7 +119,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL {
* @param mixinClass The mixin class that produced the superaccessor
*/
private def rebindSuper(base: Symbol, member: Symbol, mixinClass: Symbol): Symbol =
- exitingPickler {
+ exitingSpecialize {
var bcs = base.info.baseClasses.dropWhile(mixinClass != _).tail
var sym: Symbol = NoSymbol
debuglog("starting rebindsuper " + base + " " + member + ":" + member.tpe +
diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
index d14fcb3eb1..4bc4e06fa7 100644
--- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
+++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala
@@ -1267,7 +1267,35 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
}
protected override def newBodyDuplicator(context: Context) = new BodyDuplicator(context)
+ }
+ /** Introduced to fix SI-7343: Phase ordering problem between Duplicators and Specialization.
+ * Brief explanation: specialization rewires class parents during info transformation, and
+ * the new info then guides the tree changes. But if a symbol is created during duplication,
+ * which runs after specialization, its info is not visited and thus the corresponding tree
+ * is not specialized. One manifestation is the following:
+ * ```
+ * object Test {
+ * class Parent[@specialized(Int) T]
+ *
+ * def spec_method[@specialized(Int) T](t: T, expectedXSuper: String) = {
+ * class X extends Parent[T]()
+ * // even in the specialized variant, the local X class
+ * // doesn't extend Parent$mcI$sp, since its symbol has
+ * // been created after specialization and was not seen
+ // by specialization's info transformer.
+ * ...
+ * }
+ * }
+ * ```
+ * We fix this by forcing duplication to take place before specialization.
+ *
+ * Note: The constructors phase (which also uses duplication) comes after erasure and uses the
+ * post-erasure typer => we must protect it from the beforeSpecialization phase shifting.
+ */
+ class SpecializationDuplicator(casts: Map[Symbol, Type]) extends Duplicator(casts) {
+ override def retyped(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol, env: scala.collection.Map[Symbol, Type]): Tree =
+ enteringSpecialize(super.retyped(context, tree, oldThis, newThis, env))
}
/** A tree symbol substituter that substitutes on type skolems.
@@ -1664,7 +1692,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val tree1 = deriveValDef(tree)(_ => body(symbol.alias).duplicate)
debuglog("now typing: " + tree1 + " in " + tree.symbol.owner.fullName)
- val d = new Duplicator(emptyEnv)
+ val d = new SpecializationDuplicator(emptyEnv)
val newValDef = d.retyped(
localTyper.context1.asInstanceOf[d.Context],
tree1,
@@ -1723,7 +1751,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers {
val symbol = tree.symbol
val meth = addBody(tree, source)
- val d = new Duplicator(castmap)
+ val d = new SpecializationDuplicator(castmap)
debuglog("-->d DUPLICATING: " + meth)
d.retyped(
localTyper.context1.asInstanceOf[d.Context],
diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
index bce0a077fb..baccdcf544 100644
--- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
+++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala
@@ -79,7 +79,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
def chainBefore(next: Tree)(casegen: Casegen): Tree
}
- trait NoNewBinders extends TreeMaker {
+ sealed trait NoNewBinders extends TreeMaker {
protected val localSubstitution: Substitution = EmptySubstitution
}
@@ -105,12 +105,12 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
override def toString = "S"+ localSubstitution
}
- abstract class FunTreeMaker extends TreeMaker {
+ sealed abstract class FunTreeMaker extends TreeMaker {
val nextBinder: Symbol
def pos = nextBinder.pos
}
- abstract class CondTreeMaker extends FunTreeMaker {
+ sealed abstract class CondTreeMaker extends FunTreeMaker {
val prevBinder: Symbol
val nextBinderTp: Type
val cond: Tree
@@ -126,7 +126,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging {
// unless we're optimizing, emit local variable bindings for all subpatterns of extractor/case class patterns
protected val debugInfoEmitVars = !settings.optimise.value
- trait PreserveSubPatBinders extends TreeMaker {
+ sealed trait PreserveSubPatBinders extends TreeMaker {
val subPatBinders: List[Symbol]
val subPatRefs: List[Tree]
val ignoredSubPatBinders: Set[Symbol]
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 7fa199afaf..81f5545695 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -21,13 +21,13 @@ trait ContextErrors {
import global._
import definitions._
- abstract class AbsTypeError extends Throwable {
+ sealed abstract class AbsTypeError extends Throwable {
def errPos: Position
def errMsg: String
override def toString() = "[Type error at:" + errPos + "] " + errMsg
}
- abstract class TreeTypeError extends AbsTypeError {
+ sealed abstract class TreeTypeError extends AbsTypeError {
def underlyingTree: Tree
def errPos = underlyingTree.pos
}
@@ -697,7 +697,7 @@ trait ContextErrors {
protected def macroExpansionError(expandee: Tree, msg: String, pos: Position = NoPosition) = {
def msgForLog = if (msg != null && (msg contains "exception during macro expansion")) msg.split(EOL).drop(1).headOption.getOrElse("?") else msg
macroLogLite("macro expansion has failed: %s".format(msgForLog))
- if (msg != null) context.error(pos, msg) // issueTypeError(PosAndMsgTypeError(..)) won't work => swallows positions
+ if (msg != null) context.error(if (pos.isDefined) pos else expandee.pos, msg) // issueTypeError(PosAndMsgTypeError(..)) won't work => swallows positions
setError(expandee)
throw MacroExpansionException
}
@@ -741,7 +741,7 @@ trait ContextErrors {
try {
// [Eugene] is there a better way?
// [Paul] See Exceptional.scala and Origins.scala.
- val relevancyThreshold = realex.getStackTrace().indexWhere(_.getMethodName endsWith "macroExpand1")
+ val relevancyThreshold = realex.getStackTrace().indexWhere(_.getMethodName endsWith "macroExpandWithRuntime")
if (relevancyThreshold == -1) None
else {
var relevantElements = realex.getStackTrace().take(relevancyThreshold + 1)
@@ -782,13 +782,16 @@ trait ContextErrors {
}
def MacroExpansionHasInvalidTypeError(expandee: Tree, expanded: Any) = {
+ def isUnaffiliatedExpr = expanded.isInstanceOf[scala.reflect.api.Exprs#Expr[_]]
+ def isUnaffiliatedTree = expanded.isInstanceOf[scala.reflect.api.Trees#TreeApi]
val expected = "expr or tree"
- val isPathMismatch = expanded != null && expanded.isInstanceOf[scala.reflect.api.Exprs#Expr[_]]
+ val actual = if (isUnaffiliatedExpr) "an expr" else if (isUnaffiliatedTree) "a tree" else "unexpected"
+ val isPathMismatch = expanded != null && (isUnaffiliatedExpr || isUnaffiliatedTree)
macroExpansionError(expandee,
s"macro must return a compiler-specific $expected; returned value is " + (
if (expanded == null) "null"
- else if (isPathMismatch) s" $expected, but it doesn't belong to this compiler"
- else " of " + expanded.getClass
+ else if (isPathMismatch) s"$actual, but it doesn't belong to this compiler's universe"
+ else "of " + expanded.getClass
))
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
index 1f4ff7cc2d..1f8f13ae02 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala
@@ -99,22 +99,13 @@ trait Contexts { self: Analyzer =>
// there must be a scala.xml package when xml literals were parsed in this unit
if (unit.hasXml && ScalaXmlPackage == NoSymbol)
- unit.error(unit.firstXmlPos, "XML literals may only be used if the package scala.xml is present in the compilation classpath.")
-
- // TODO: remove the def below and drop `|| predefDefinesDollarScope` in the condition for `contextWithXML`
- // as soon as 2.11.0-M4 is released and used as STARR (and $scope is no longer defined in Predef)
- // Until then, to allow compiling quick with pre-2.11.0-M4 STARR,
- // which relied on Predef defining `val $scope`, we've left it in place.
- // Since the new scheme also imports $scope (as an alias for scala.xml.TopScope),
- // we must check whether it is still there and not import the alias to avoid ambiguity.
- // (All of this is only necessary to compile the full quick stage with STARR.
- // if using locker, Predef.$scope is no longer needed.)
- def predefDefinesDollarScope = definitions.getMemberIfDefined(PredefModule, nme.dollarScope) != NoSymbol
-
- // hack for the old xml library (detected by looking for scala.xml.TopScope, which needs to be in scope as $scope)
- // import scala.xml.{TopScope => $scope}
+ unit.error(unit.firstXmlPos, "To compile XML syntax, the scala.xml package must be on the classpath.\nPlease see https://github.com/scala/scala/wiki/Scala-2.11#xml.")
+
+ // scala-xml needs `scala.xml.TopScope` to be in scope globally as `$scope`
+ // We detect `scala-xml` by looking for `scala.xml.TopScope` and
+ // inject the equivalent of `import scala.xml.{TopScope => $scope}`
val contextWithXML =
- if (!unit.hasXml || ScalaXmlTopScope == NoSymbol || predefDefinesDollarScope) rootImportsContext
+ if (!unit.hasXml || ScalaXmlTopScope == NoSymbol) rootImportsContext
else rootImportsContext.make(gen.mkImport(ScalaXmlPackage, nme.TopScope, nme.dollarScope))
val c = contextWithXML.make(tree, unit = unit)
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
index 95b771a8a5..0a2628b482 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala
@@ -32,6 +32,7 @@ abstract class Duplicators extends Analyzer {
envSubstitution = new SubstSkolemsTypeMap(env.keysIterator.toList, env.valuesIterator.toList)
debuglog("retyped with env: " + env)
+
newBodyDuplicator(context).typed(tree)
}
@@ -365,7 +366,8 @@ abstract class Duplicators extends Analyzer {
tree.symbol = NoSymbol // maybe we can find a more specific member in a subclass of Any (see AnyVal members, like ==)
}
val ntree = castType(tree, pt)
- super.typed(ntree, mode, pt)
+ val res = super.typed(ntree, mode, pt)
+ res
}
}
diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
index 86ba3d2164..6b9537e27d 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala
@@ -142,7 +142,7 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
case Literal(Constant(s: String)) => s
case Literal(Constant(d: Double)) => d
case Literal(Constant(b: Boolean)) => b
- case Literal(Constant(i: Int)) => new Fingerprint(i)
+ case Literal(Constant(i: Int)) => Fingerprint(i)
}
def pickle(macroImplRef: Tree): Tree = {
@@ -464,9 +464,9 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
/** Describes the role that the macro expandee is performing.
*/
- type MacroRole = String
- final def APPLY_ROLE: MacroRole = "APPLY_ROLE"
- private val roleNames = Map(APPLY_ROLE -> "apply")
+ type MacroRole = scala.tools.nsc.typechecker.MacroRole
+ final def APPLY_ROLE = MacroRole.Apply
+ final def UNAPPLY_ROLE = MacroRole.Unapply
/** Performs macro expansion:
*
@@ -482,9 +482,10 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
* ========= Macro expansion =========
*
* First of all `macroExpandXXX`:
- * 1) If necessary desugars the `expandee` to fit into `macroExpand1`
+ * 1) If necessary desugars the `expandee` to fit into the default expansion scheme
+ * that is understood by `macroExpandWithRuntime` / `macroExpandWithoutRuntime`
*
- * Then `macroExpand1`:
+ * Then `macroExpandWithRuntime`:
* 2) Checks whether the expansion needs to be delayed
* 3) Loads macro implementation using `macroMirror`
* 4) Synthesizes invocation arguments for the macro implementation
@@ -532,26 +533,41 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
def summary() = s"expander = $this, expandee = ${showDetailed(expandee)}, desugared = ${if (expandee == desugared) () else showDetailed(desugared)}"
if (macroDebugVerbose) println(s"macroExpand: ${summary()}")
assert(allowExpandee(expandee), summary())
+ linkExpandeeAndDesugared(expandee, desugared, role)
val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null
if (Statistics.canEnable) Statistics.incCounter(macroExpandCount)
try {
- linkExpandeeAndDesugared(expandee, desugared, role)
- macroExpand1(typer, desugared) match {
- case Success(expanded) =>
- if (allowExpanded(expanded)) {
- // also see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc
- val expanded1 = try onSuccess(duplicateAndKeepPositions(expanded)) finally popMacroContext()
- if (!hasMacroExpansionAttachment(expanded1)) linkExpandeeAndExpanded(expandee, expanded1)
- if (allowResult(expanded1)) expanded1 else onFailure(expanded)
- } else {
- typer.TyperErrorGen.MacroInvalidExpansionError(expandee, roleNames(role), allowedExpansions)
- onFailure(expanded)
+ withInfoLevel(nodePrinters.InfoLevel.Quiet) { // verbose printing might cause recursive macro expansions
+ if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) {
+ val reason = if (expandee.symbol.isErroneous) "not found or incompatible macro implementation" else "erroneous arguments"
+ macroLogVerbose(s"cancelled macro expansion because of $reason: $expandee")
+ onFailure(typer.infer.setError(expandee))
+ } else try {
+ val expanded = {
+ val runtime = macroRuntime(expandee.symbol)
+ if (runtime != null) macroExpandWithRuntime(typer, expandee, runtime)
+ else macroExpandWithoutRuntime(typer, expandee)
+ }
+ expanded match {
+ case Success(expanded) =>
+ if (allowExpanded(expanded)) {
+ // also see http://groups.google.com/group/scala-internals/browse_thread/thread/492560d941b315cc
+ val expanded1 = try onSuccess(duplicateAndKeepPositions(expanded)) finally popMacroContext()
+ if (!hasMacroExpansionAttachment(expanded1)) linkExpandeeAndExpanded(expandee, expanded1)
+ if (allowResult(expanded1)) expanded1 else onFailure(expanded)
+ } else {
+ typer.TyperErrorGen.MacroInvalidExpansionError(expandee, role.name, allowedExpansions)
+ onFailure(expanded)
+ }
+ case Fallback(fallback) => onFallback(fallback)
+ case Delayed(delayed) => onDelayed(delayed)
+ case Skipped(skipped) => onSkipped(skipped)
+ case Failure(failure) => onFailure(failure)
}
- case Fallback(fallback) => onFallback(fallback)
- case Delayed(delayed) => onDelayed(delayed)
- case Skipped(skipped) => onSkipped(skipped)
- case Failure(failure) => onFailure(failure)
+ } catch {
+ case typer.TyperErrorGen.MacroExpansionException => onFailure(expandee)
+ }
}
} finally {
if (Statistics.canEnable) Statistics.stopTimer(macroExpandNanos, start)
@@ -622,8 +638,21 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
expander(expandee)
}
- /** Captures statuses of macro expansions performed by `macroExpand1'.
+ /** Expands a term macro used in unapply role as `u.Quasiquote(StringContext("", "")).q.unapply(x)` in `case q"$x" => ...`.
+ * @see MacroExpander
*/
+ def macroExpandUnapply(typer: Typer, original: Tree, fun: Tree, unapply: Symbol, args: List[Tree], mode: Mode, pt: Type) = {
+ val expandee = treeCopy.Apply(original, gen.mkAttributedSelect(fun, unapply), args)
+ object expander extends TermMacroExpander(UNAPPLY_ROLE, typer, expandee, mode, pt) {
+ override def allowedExpansions: String = "unapply trees"
+ override def allowExpandee(expandee: Tree) = expandee.isInstanceOf[Apply]
+ private def unsupported(what: String) = abort("unapply macros currently don't support " + what)
+ override def onFallback(fallback: Tree) = unsupported("fallback")
+ override def onDelayed(delayed: Tree) = unsupported("advanced interaction with type inference")
+ }
+ expander(original)
+ }
+
private sealed abstract class MacroStatus(val result: Tree)
private case class Success(expanded: Tree) extends MacroStatus(expanded)
private case class Fallback(fallback: Tree) extends MacroStatus(fallback) { currentRun.seenMacroExpansionsFallingBack = true }
@@ -632,28 +661,6 @@ trait Macros extends FastTrack with MacroRuntimes with Traces with Helpers {
private case class Failure(failure: Tree) extends MacroStatus(failure)
private def Delay(expanded: Tree) = Delayed(expanded)
private def Skip(expanded: Tree) = Skipped(expanded)
- private def Cancel(expandee: Tree) = Failure(expandee)
-
- /** Does the same as `macroExpand`, but without typechecking the expansion
- * Meant for internal use within the macro infrastructure, don't use it elsewhere.
- */
- private def macroExpand1(typer: Typer, expandee: Tree): MacroStatus = {
- // verbose printing might cause recursive macro expansions, so I'm shutting it down here
- withInfoLevel(nodePrinters.InfoLevel.Quiet) {
- if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) {
- val reason = if (expandee.symbol.isErroneous) "not found or incompatible macro implementation" else "erroneous arguments"
- macroLogVerbose(s"cancelled macro expansion because of $reason: $expandee")
- Cancel(typer.infer.setError(expandee))
- }
- else try {
- val runtime = macroRuntime(expandee.symbol)
- if (runtime != null) macroExpandWithRuntime(typer, expandee, runtime)
- else macroExpandWithoutRuntime(typer, expandee)
- } catch {
- case typer.TyperErrorGen.MacroExpansionException => Failure(expandee)
- }
- }
- }
/** Expands a macro when a runtime (i.e. the macro implementation) can be successfully loaded
* Meant for internal use within the macro infrastructure, don't use it elsewhere.
@@ -804,7 +811,7 @@ object MacrosStats {
val macroExpandNanos = Statistics.newSubTimer("time spent in macroExpand", typerNanos)
}
-class Fingerprint(val value: Int) extends AnyVal {
+class Fingerprint private[Fingerprint](val value: Int) extends AnyVal {
def paramPos = { assert(isTag, this); value }
def isTag = value >= 0
def isOther = this == Other
@@ -819,8 +826,18 @@ class Fingerprint(val value: Int) extends AnyVal {
}
object Fingerprint {
+ def apply(value: Int) = new Fingerprint(value)
def Tagged(tparamPos: Int) = new Fingerprint(tparamPos)
val Other = new Fingerprint(-1)
val LiftedTyped = new Fingerprint(-2)
val LiftedUntyped = new Fingerprint(-3)
}
+
+class MacroRole private[MacroRole](val name: String) extends AnyVal {
+ override def toString = name
+}
+
+object MacroRole {
+ val Apply = new MacroRole("apply")
+ val Unapply = new MacroRole("unapply")
+}
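Fingerprint and MacroRole both use the same idiom: an AnyVal wrapper whose constructor is private to the companion, so the only values in circulation are the named constants (plus, for Fingerprint, whatever the companion's apply admits). A minimal sketch of the idiom with a hypothetical Color type:

// hypothetical illustration of the value-class-with-private-constructor idiom
class Color private[Color] (val name: String) extends AnyVal {
  override def toString = name
}

object Color {
  val Red  = new Color("red")
  val Blue = new Color("blue")
  // `new Color("green")` elsewhere fails to compile: the constructor is not accessible
}

object ColorDemo {
  def main(args: Array[String]): Unit = println(Color.Red) // red
}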
diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
index 646bf3a153..546186479f 100644
--- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala
@@ -259,7 +259,7 @@ trait MethodSynthesis {
* So it's important that creating an instance of Derived does not have a side effect,
* or if it has a side effect, control that it is done only once.
*/
- trait Derived {
+ sealed trait Derived {
/** The tree from which we are deriving a synthetic member. Typically, that's
* given as an argument of the instance. */
@@ -288,7 +288,7 @@ trait MethodSynthesis {
def derivedTree: Tree
}
- trait DerivedFromMemberDef extends Derived {
+ sealed trait DerivedFromMemberDef extends Derived {
def tree: MemberDef
def enclClass: Symbol
@@ -297,12 +297,12 @@ trait MethodSynthesis {
final def basisSym = tree.symbol
}
- trait DerivedFromClassDef extends DerivedFromMemberDef {
+ sealed trait DerivedFromClassDef extends DerivedFromMemberDef {
def tree: ClassDef
final def enclClass = basisSym.owner.enclClass
}
- trait DerivedFromValDef extends DerivedFromMemberDef {
+ sealed trait DerivedFromValDef extends DerivedFromMemberDef {
def tree: ValDef
final def enclClass = basisSym.enclClass
@@ -341,10 +341,10 @@ trait MethodSynthesis {
logDerived(derivedTree)
}
}
- trait DerivedGetter extends DerivedFromValDef {
+ sealed trait DerivedGetter extends DerivedFromValDef {
// TODO
}
- trait DerivedSetter extends DerivedFromValDef {
+ sealed trait DerivedSetter extends DerivedFromValDef {
override def isSetter = true
private def setterParam = derivedSym.paramss match {
case (p :: Nil) :: _ => p
@@ -378,7 +378,7 @@ trait MethodSynthesis {
def name: TermName = tree.name.toTermName
}
- abstract class BaseGetter(tree: ValDef) extends DerivedGetter {
+ sealed abstract class BaseGetter(tree: ValDef) extends DerivedGetter {
def name = tree.name
def category = GetterTargetClass
def flagsMask = GetterFlags
@@ -510,7 +510,7 @@ trait MethodSynthesis {
def flagsExtra = 0
override def derivedSym = enclClass.info decl name
}
- trait AnyBeanGetter extends BeanAccessor with DerivedGetter {
+ sealed trait AnyBeanGetter extends BeanAccessor with DerivedGetter {
def category = BeanGetterTargetClass
override def validate() {
if (derivedSym == NoSymbol) {
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 0305aab844..1282cfb416 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -1408,11 +1408,20 @@ trait Namers extends MethodSynthesis {
if (!annotated.isInitialized) tree match {
case defn: MemberDef =>
val ainfos = defn.mods.annotations filterNot (_ eq null) map { ann =>
+ val ctx = typer.context
+ val annCtx = ctx.make(ann)
+ annCtx.setReportErrors()
// need to be lazy, #1782. beforeTyper to allow inferView in annotation args, SI-5892.
AnnotationInfo lazily {
- val context1 = typer.context.make(ann)
- context1.setReportErrors()
- enteringTyper(newTyper(context1) typedAnnotation ann)
+ if (typer.context ne ctx)
+ log(sm"""|The var `typer.context` in ${Namer.this} was mutated before the annotation ${ann} was forced.
+ |
+ |current value = ${typer.context}
+ |original value = $ctx
+ |
+ |This confirms the hypothesis for the cause of SI-7603. If you see this message, please comment on that ticket.""")
+
+ enteringTyper(newTyper(annCtx) typedAnnotation ann)
}
}
if (ainfos.nonEmpty) {
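The defensive change above captures typer.context into annCtx eagerly, because the AnnotationInfo body is forced lazily and a var read inside a deferred block observes whatever value the var holds at forcing time. A hedged, stand-alone sketch of that hazard:

object LazyCaptureSketch {
  var context: String = "namer"
  val capturedNow       = context // evaluated immediately
  lazy val capturedLate = context // evaluated only when first forced

  def main(args: Array[String]): Unit = {
    context = "typer"      // mutation happens before the lazy val is forced
    println(capturedNow)   // namer
    println(capturedLate)  // typer -- the deferred read saw the mutated var
  }
}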
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index cb3a12b60d..1a9a30c2ad 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -1431,8 +1431,8 @@ trait Typers extends Adaptations with Tags {
implRestriction(tree, "nested object")
//see https://issues.scala-lang.org/browse/SI-6444
//see https://issues.scala-lang.org/browse/SI-6463
- case _: ClassDef =>
- implRestriction(tree, "nested class")
+ case cd: ClassDef if !cd.symbol.isAnonymousClass => // Don't warn about partial functions, etc. SI-7571
+ implRestriction(tree, "nested class") // avoiding Type Tests that might check the $outer pointer.
case Select(sup @ Super(qual, mix), selector) if selector != nme.CONSTRUCTOR && qual.symbol == clazz && mix != tpnme.EMPTY =>
//see https://issues.scala-lang.org/browse/SI-6483
implRestriction(sup, "qualified super reference")
@@ -2797,16 +2797,11 @@ trait Typers extends Adaptations with Tags {
if (numVparams > definitions.MaxFunctionArity)
return MaxFunctionArityError(fun)
- def decompose(pt: Type): (Symbol, List[Type], Type) =
- if ((isFunctionType(pt) || (pt.typeSymbol == PartialFunctionClass && numVparams == 1 && fun.body.isInstanceOf[Match])) && // see bug901 for a reason why next conditions are needed
- ( pt.dealiasWiden.typeArgs.length - 1 == numVparams
- || fun.vparams.exists(_.tpt.isEmpty)
- ))
- (pt.typeSymbol, pt.dealiasWiden.typeArgs.init, pt.dealiasWiden.typeArgs.last)
- else
- (FunctionClass(numVparams), fun.vparams map (x => NoType), WildcardType)
-
- val (clazz, argpts, respt) = decompose(pt)
+ val FunctionSymbol = FunctionClass(numVparams)
+ val (argpts, respt) = pt baseType FunctionSymbol match {
+ case TypeRef(_, FunctionSymbol, args :+ res) => (args, res)
+ case _ => (fun.vparams map (_ => NoType), WildcardType)
+ }
if (argpts.lengthCompare(numVparams) != 0)
WrongNumberOfParametersError(fun, argpts)
else {
@@ -2856,7 +2851,7 @@ trait Typers extends Adaptations with Tags {
val formals = vparamSyms map (_.tpe)
val body1 = typed(fun.body, respt)
val restpe = packedType(body1, fun.symbol).deconst.resultType
- val funtpe = appliedType(clazz, formals :+ restpe: _*)
+ val funtpe = appliedType(FunctionSymbol, formals :+ restpe: _*)
treeCopy.Function(fun, vparams, body1) setType funtpe
}
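The rewrite above leans on baseType: viewing the expected type pt as an instance of the right FunctionN directly yields the argument prototypes and the result prototype, instead of the previous ad-hoc decomposition. A hedged sketch of the idea using runtime reflection (assumes scala-reflect on the classpath; names are hypothetical):

import scala.reflect.runtime.universe._

object BaseTypeSketch {
  trait MyFun[-A, +B] extends (A => B)

  def main(args: Array[String]): Unit = {
    val pt = typeOf[MyFun[Int, String]]
    val Function1Sym = typeOf[Int => Any].typeSymbol
    // baseType views pt as a Function1, exposing argument and result prototypes
    pt.baseType(Function1Sym) match {
      case TypeRef(_, _, args :+ res) => println((args, res)) // (List(Int),String)
      case _                          => println("not usable as a Function1")
    }
  }
}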
@@ -3385,8 +3380,9 @@ trait Typers extends Adaptations with Tags {
if (!tree.isErrorTyped) setError(tree) else tree
// @H change to setError(treeCopy.Apply(tree, fun, args))
- case otpe if mode.inPatternMode && unapplyMember(otpe).exists =>
- doTypedUnapply(tree, fun0, fun, args, mode, pt)
+ case ExtractorType(unapply) if mode.inPatternMode =>
+ if (unapply == QuasiquoteClass_api_unapply) macroExpandUnapply(this, tree, fun, unapply, args, mode, pt)
+ else doTypedUnapply(tree, fun0, fun, args, mode, pt)
case _ =>
if (treeInfo.isMacroApplication(tree)) duplErrorTree(MacroTooManyArgumentListsError(tree, fun.symbol))
diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
index af3f772f79..47c859bb5c 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala
@@ -51,6 +51,14 @@ trait Unapplies extends ast.TreeDSL
case NoSymbol => tp member nme.unapplySeq
case unapp => unapp
}
+
+ object ExtractorType {
+ def unapply(tp: Type): Option[Symbol] = {
+ val member = unapplyMember(tp)
+ if (member.exists) Some(member) else None
+ }
+ }
+
/** returns unapply member's parameter type. */
def unapplyParameterType(extractor: Symbol) = extractor.tpe.params match {
case p :: Nil => p.tpe.typeSymbol
@@ -142,7 +150,7 @@ trait Unapplies extends ast.TreeDSL
ModuleDef(
Modifiers(cdef.mods.flags & AccessFlags | SYNTHETIC, cdef.mods.privateWithin),
cdef.name.toTermName,
- Template(parents, emptyValDef, NoMods, Nil, body, cdef.impl.pos.focus))
+ gen.mkTemplate(parents, emptyValDef, NoMods, Nil, body, cdef.impl.pos.focus))
}
private val caseMods = Modifiers(SYNTHETIC | CASE)
diff --git a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
index 5c6f525c6f..f116e4af34 100644
--- a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
+++ b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala
@@ -8,15 +8,7 @@ package util
import scala.reflect.internal.Chars._
-abstract class CharArrayReader { self =>
-
- val buf: Array[Char]
-
- def decodeUni: Boolean = true
-
- /** An error routine to call on bad unicode escapes \\uxxxx. */
- protected def error(offset: Int, msg: String): Unit
-
+trait CharArrayReaderData {
/** the last read character */
var ch: Char = _
@@ -29,7 +21,26 @@ abstract class CharArrayReader { self =>
/** The start offset of the line before the current one */
var lastLineStartOffset: Int = 0
- private var lastUnicodeOffset = -1
+ protected var lastUnicodeOffset = -1
+
+ def copyFrom(cd: CharArrayReaderData): this.type = {
+ this.ch = cd.ch
+ this.charOffset = cd.charOffset
+ this.lineStartOffset = cd.lineStartOffset
+ this.lastLineStartOffset = cd.lastLineStartOffset
+ this.lastUnicodeOffset = cd.lastUnicodeOffset
+ this
+ }
+}
+
+abstract class CharArrayReader extends CharArrayReaderData { self =>
+
+ val buf: Array[Char]
+
+ def decodeUni: Boolean = true
+
+ /** An error routine to call on bad unicode escapes \\uxxxx. */
+ protected def error(offset: Int, msg: String): Unit
/** Is last character a unicode escape \\uxxxx? */
def isUnicodeEscape = charOffset == lastUnicodeOffset
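Splitting the mutable cursor fields into CharArrayReaderData, together with the copyFrom added above, makes it cheap to snapshot a reader and rewind it after lookahead. A hedged, self-contained analogue (not the compiler's classes):

trait ReaderData {
  var ch: Char = _
  var charOffset: Int = 0
  def copyFrom(other: ReaderData): this.type = {
    this.ch = other.ch
    this.charOffset = other.charOffset
    this
  }
}

class Reader(buf: Array[Char]) extends ReaderData {
  def nextChar(): Unit = { ch = buf(charOffset); charOffset += 1 }
}

object LookaheadDemo {
  def main(args: Array[String]): Unit = {
    val r = new Reader("abc".toCharArray)
    r.nextChar()                                   // ch == 'a'
    val snapshot = (new ReaderData {}).copyFrom(r) // save the cursor
    r.nextChar(); r.nextChar()                     // look ahead: ch == 'c'
    r.copyFrom(snapshot)                           // rewind
    println(r.ch)                                  // a
  }
}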
diff --git a/src/compiler/scala/tools/nsc/util/CommandLine.scala b/src/compiler/scala/tools/nsc/util/CommandLine.scala
deleted file mode 100644
index ef28f6dc53..0000000000
--- a/src/compiler/scala/tools/nsc/util/CommandLine.scala
+++ /dev/null
@@ -1,98 +0,0 @@
-/* NEST (New Scala Test)
- * Copyright 2007-2013 LAMP/EPFL
- * @author Paul Phillips
- */
-
-package scala.tools
-package nsc.util
-
-import scala.collection.mutable.ListBuffer
-
-/**
- * XXX Note this has been completely obsolesced by scala.tools.cmd.
- * I checked it back in as part of rolling partest back a month
- * rather than go down the rabbit hole of unravelling dependencies.
- */
-case class CommandLine(
- args: List[String],
- unaryArguments: List[String],
- binaryArguments: List[String]
-) {
- def this(args: List[String]) = this(args, Nil, Nil)
- def this(args: Array[String]) = this(args.toList, Nil, Nil)
- def this(line: String) = this(cmd.CommandLineParser tokenize line, Nil, Nil)
-
- def withUnaryArgs(xs: List[String]) = copy(unaryArguments = xs)
- def withBinaryArgs(xs: List[String]) = copy(binaryArguments = xs)
-
- def assumeBinary = true
- def enforceArity = true
- def onlyKnownOptions = false
-
- val Terminator = "--"
- val ValueForUnaryOption = "true" // so if --opt is given, x(--opt) = true
-
- def mapForUnary(opt: String) = Map(opt -> ValueForUnaryOption)
- def errorFn(msg: String) = println(msg)
-
- /** argMap is option -> argument (or "" if it is a unary argument)
- * residualArgs are what is left after removing the options and their args.
- */
- lazy val (argMap, residualArgs) = {
- val residualBuffer = new ListBuffer[String]
-
- def stripQuotes(s: String) = {
- def isQuotedBy(c: Char) = s.length > 0 && s.head == c && s.last == c
- if (List('"', '\'') exists isQuotedBy) s.tail.init else s
- }
-
- def isValidOption(s: String) = !onlyKnownOptions || (unaryArguments contains s) || (binaryArguments contains s)
- def isOption(s: String) = (s startsWith "-") && (isValidOption(s) || { unknownOption(s) ; false })
- def isUnary(s: String) = isOption(s) && (unaryArguments contains s)
- def isBinary(s: String) = isOption(s) && !isUnary(s) && (assumeBinary || (binaryArguments contains s))
-
- def unknownOption(opt: String) =
- errorFn("Option '%s' not recognized.".format(opt))
- def missingArg(opt: String, what: String) =
- errorFn("Option '%s' requires argument, found %s instead.".format(opt, what))
-
- def loop(args: List[String]): Map[String, String] = {
- def residual(xs: List[String]) = { residualBuffer ++= xs ; Map[String, String]() }
- if (args.isEmpty) return Map()
- val hd :: rest = args
- if (rest.isEmpty) {
- if (isBinary(hd) && enforceArity)
- missingArg(hd, "EOF")
-
- if (isOption(hd)) mapForUnary(hd) else residual(args)
- }
- else
- if (hd == Terminator) residual(rest)
- else {
- val hd1 :: hd2 :: rest = args
-
- if (hd2 == Terminator) mapForUnary(hd1) ++ residual(rest)
- else if (isUnary(hd1)) mapForUnary(hd1) ++ loop(hd2 :: rest)
- else if (isBinary(hd1)) {
- // Disabling this check so
- // --scalacopts "-verbose" works. We can't tell if it's quoted,
- // the shell does us in.
- //
- // if (isOption(hd2) && enforceArity)
- // missingArg(hd1, hd2)
-
- Map(hd1 -> hd2) ++ loop(rest)
- }
- else { residual(List(hd1)) ++ loop(hd2 :: rest) }
- }
- }
-
- (loop(args), residualBuffer map stripQuotes toList)
- }
-
- def isSet(arg: String) = args contains arg
- def get(arg: String) = argMap get arg
- def apply(arg: String) = argMap(arg)
-
- override def toString() = "CommandLine(\n%s)\n" format (args map (" " + _ + "\n") mkString)
-}
diff --git a/src/compiler/scala/tools/nsc/util/TreeSet.scala b/src/compiler/scala/tools/nsc/util/TreeSet.scala
deleted file mode 100644
index d2e9238e8f..0000000000
--- a/src/compiler/scala/tools/nsc/util/TreeSet.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/* NSC -- new Scala compiler
- * Copyright 2005-2013 LAMP/EPFL
- * @author Martin Odersky
- */
-
-package scala.tools.nsc
-package util
-
-/** Sets implemented as binary trees.
- *
- * @author Martin Odersky
- * @version 1.0
- */
-class TreeSet[T >: Null <: AnyRef](less: (T, T) => Boolean) extends Set[T] {
-
- private class Tree(val elem: T) {
- var l: Tree = null
- var r: Tree = null
- }
-
- private var tree: Tree = null
-
- def findEntry(x: T): T = {
- def find(t: Tree): T = {
- if (t eq null) null
- else if (less(x, t.elem)) find(t.l)
- else if (less(t.elem, x)) find(t.r)
- else t.elem
- }
- find(tree)
- }
-
- def addEntry(x: T) {
- def add(t: Tree): Tree = {
- if (t eq null) new Tree(x)
- else if (less(x, t.elem)) { t.l = add(t.l); t }
- else if (less(t.elem, x)) { t.r = add(t.r); t }
- else t
- }
- tree = add(tree)
- }
-
- def iterator = toList.iterator
-
- override def foreach[U](f: T => U) {
- def loop(t: Tree) {
- if (t ne null) {
- loop(t.l)
- f(t.elem)
- loop(t.r)
- }
- }
- loop(tree)
- }
- override def toList = {
- val xs = scala.collection.mutable.ListBuffer[T]()
- foreach(xs += _)
- xs.toList
- }
-
- override def toString(): String = {
- if (tree eq null) "<empty>" else "(..." + tree.elem + "...)"
- }
-}
diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala
index 5a0ff4f6db..ad1d4c896b 100644
--- a/src/compiler/scala/tools/reflect/FastTrack.scala
+++ b/src/compiler/scala/tools/reflect/FastTrack.scala
@@ -5,7 +5,7 @@ import scala.reflect.reify.Taggers
import scala.tools.nsc.typechecker.{ Analyzer, Macros }
import scala.reflect.runtime.Macros.currentMirror
import scala.reflect.api.Universe
-import scala.reflect.macros.compiler.DefaultMacroCompiler
+import scala.tools.reflect.quasiquotes.{ Quasiquotes => QuasiquoteImpls }
/** Optimizes system macro expansions by hardwiring them directly to their implementations
* bypassing standard reflective load and invoke to avoid the overhead of Java/Scala reflection.
@@ -22,6 +22,8 @@ trait FastTrack {
new { val c: c0.type = c0 } with Taggers
private implicit def context2macroimplementations(c0: MacroContext): MacroImplementations { val c: c0.type } =
new { val c: c0.type = c0 } with MacroImplementations
+ private implicit def context2quasiquote(c0: MacroContext): QuasiquoteImpls { val c: c0.type } =
+ new { val c: c0.type = c0 } with QuasiquoteImpls
private def make(sym: Symbol)(pf: PartialFunction[Applied, MacroContext => Tree]) =
sym -> new FastTrackEntry(pf)
@@ -41,6 +43,8 @@ trait FastTrack {
make( materializeTypeTag) { case Applied(_, ttag :: Nil, (u :: _) :: _) => _.materializeTypeTag(u, EmptyTree, ttag.tpe, concrete = true) },
make( ApiUniverseReify) { case Applied(_, ttag :: Nil, (expr :: _) :: _) => c => c.materializeExpr(c.prefix.tree, EmptyTree, expr) },
make( StringContext_f) { case Applied(Select(Apply(_, ps), _), _, args) => c => c.macro_StringInterpolation_f(ps, args.flatten, c.expandee.pos) },
- make(ReflectRuntimeCurrentMirror) { case _ => c => currentMirror(c).tree }
+ make(ReflectRuntimeCurrentMirror) { case _ => c => currentMirror(c).tree },
+ make( QuasiquoteClass_api_apply) { case _ => _.expandQuasiquote },
+ make(QuasiquoteClass_api_unapply) { case _ => _.expandQuasiquote }
)
}
diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
index c53d10bd87..afaca3396c 100644
--- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
+++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala
@@ -217,7 +217,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf =>
val moduledef = ModuleDef(
obj,
- Template(
+ gen.mkTemplate(
List(TypeTree(ObjectTpe)),
emptyValDef,
NoMods,
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
new file mode 100644
index 0000000000..9d171d52d2
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Holes.scala
@@ -0,0 +1,187 @@
+package scala.tools.reflect
+package quasiquotes
+
+import scala.collection.{immutable, mutable}
+import scala.reflect.internal.Flags._
+
+class Cardinality private[Cardinality](val value: Int) extends AnyVal {
+ def pred = { assert(value - 1 >= 0); new Cardinality(value - 1) }
+ def succ = new Cardinality(value + 1)
+ override def toString = if (value == 0) "no dots" else "." * (value + 1)
+}
+
+object Cardinality {
+ val NoDot = new Cardinality(0)
+ val DotDot = new Cardinality(1)
+ val DotDotDot = new Cardinality(2)
+ object Dot { def unapply(card: Cardinality) = card != NoDot }
+ def parseDots(part: String) = {
+ if (part.endsWith("...")) (part.stripSuffix("..."), DotDotDot)
+ else if (part.endsWith("..")) (part.stripSuffix(".."), DotDot)
+ else (part, NoDot)
+ }
+}
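parseDots peels the trailing dots off the literal part that precedes a hole, yielding the remaining text plus its Cardinality. A hedged stand-alone restatement (with plain Ints standing in for the three cardinalities) to show the intended mapping:

object DotsSketch {
  // 0 = NoDot, 1 = DotDot, 2 = DotDotDot
  def parseDots(part: String): (String, Int) =
    if (part.endsWith("...")) (part.stripSuffix("..."), 2)
    else if (part.endsWith("..")) (part.stripSuffix(".."), 1)
    else (part, 0)

  def main(args: Array[String]): Unit = {
    println(parseDots("f(..."))  // (f(,2)  as in  q"f(...$argss)"
    println(parseDots("f(.."))   // (f(,1)  as in  q"f(..$args)"
    println(parseDots("f("))     // (f(,0)  as in  q"f($arg)"
  }
}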
+
+/** Defines abstractions that provide support for splicing into Scala syntax.
+ */
+trait Holes { self: Quasiquotes =>
+ import global._
+ import Cardinality._
+ import definitions._
+ import universeTypes._
+
+ /** A Location characterizes the kind of non-terminal in Scala syntax where something is going to be spliced.
+ * A location is typically associated with the type of the things that can be spliced there.
+ * The associated type might differ from the actual tpe of the splicee due to lifting.
+ * This is the first pillar of modularity in the quasiquote reifier.
+ */
+ sealed abstract class Location(val tpe: Type)
+ case object UnknownLocation extends Location(NoType)
+ case class TreeLocation(override val tpe: Type) extends Location(tpe)
+ case object NameLocation extends Location(nameType)
+ case object ModsLocation extends Location(modsType)
+ case object FlagsLocation extends Location(flagsType)
+ case object SymbolLocation extends Location(symbolType)
+ case class IterableLocation(card: Cardinality, sublocation: TreeLocation) extends Location(NoType) {
+ override val tpe = {
+ def loop(n: Cardinality, tpe: Type): Type =
+ if (n == NoDot) tpe
+ else appliedType(IterableClass.toType, List(loop(n.pred, tpe)))
+ loop(card, sublocation.tpe)
+ }
+ }
+
+ /** A HoleType describes the location, cardinality and pre-reification routine associated with a hole.
+ * Notably, a HoleType can be completely inferred from the type of the splicee.
+ * This is the second pillar of modularity in the quasiquote reifier.
+ */
+ case class HoleType(preprocessor: Tree => Tree, location: Location, cardinality: Cardinality) {
+ def makeHole(tree: Tree) = Hole(preprocessor(tree), location, cardinality)
+ }
+ object HoleType {
+ def unapply(tpe: Type): Option[HoleType] = tpe match {
+ case NativeType(holeTpe) => Some(holeTpe)
+ case LiftableType(holeTpe) => Some(holeTpe)
+ case IterableTreeType(holeTpe) => Some(holeTpe)
+ case IterableLiftableType(holeTpe) => Some(holeTpe)
+ case _ => None
+ }
+
+ trait HoleTypeExtractor {
+ def unapply(tpe: Type): Option[HoleType] = {
+ for {
+ preprocessor <- this.preprocessor(tpe)
+ location <- this.location(tpe)
+ cardinality <- Some(this.cardinality(tpe))
+ } yield HoleType(preprocessor, location, cardinality)
+ }
+ def preprocessor(tpe: Type): Option[Tree => Tree]
+ def location(tpe: Type): Option[Location]
+ def cardinality(tpe: Type): Cardinality = parseCardinality(tpe)._1
+
+ def lifter(tpe: Type): Option[Tree => Tree] = {
+ val lifterTpe = appliedType(LiftableClass.toType, List(tpe))
+ val lifter = c.inferImplicitValue(lifterTpe, silent = true)
+ if (lifter != EmptyTree) Some(tree => {
+ val lifted = Apply(lifter, List(u, tree))
+ val targetType = Select(u, tpnme.Tree)
+ atPos(tree.pos)(TypeApply(Select(lifted, nme.asInstanceOf_), List(targetType)))
+ }) else None
+ }
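+ // The tree produced by the lifter preprocessor is expected to look roughly like
+ // <inferred Liftable instance>.apply(u, splicee).asInstanceOf[u.Tree].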
+
+ def iterator(tpe: Type)(elementTransform: Tree => Tree): Option[Tree => Tree] = {
+ def reifyIterable(tree: Tree, n: Cardinality): Tree = {
+ def loop(tree: Tree, n: Cardinality) =
+ if (n == NoDot) elementTransform(tree)
+ else {
+ val x: TermName = c.freshName()
+ val wrapped = reifyIterable(Ident(x), n.pred)
+ val xToWrapped = Function(List(ValDef(Modifiers(PARAM), x, TypeTree(), EmptyTree)), wrapped)
+ Select(Apply(Select(tree, nme.map), List(xToWrapped)), nme.toList)
+ }
+ if (tree.tpe != null && (tree.tpe <:< listTreeType || tree.tpe <:< listListTreeType)) tree
+ else atPos(tree.pos)(loop(tree, n))
+ }
+ val (card, elementTpe) = parseCardinality(tpe)
+ if (card != NoDot) Some(reifyIterable(_, card)) else None
+ }
+ }
+
+ object NativeType extends HoleTypeExtractor {
+ def preprocessor(tpe: Type) = Some(identity)
+ def location(tpe: Type) = {
+ if (tpe <:< treeType) Some(TreeLocation(tpe))
+ else if (tpe <:< nameType) Some(NameLocation)
+ else if (tpe <:< modsType) Some(ModsLocation)
+ else if (tpe <:< flagsType) Some(FlagsLocation)
+ else if (tpe <:< symbolType) Some(SymbolLocation)
+ else None
+ }
+ }
+
+ object LiftableType extends HoleTypeExtractor {
+ def preprocessor(tpe: Type) = lifter(tpe)
+ def location(tpe: Type) = Some(TreeLocation(treeType))
+ }
+
+ object IterableTreeType extends HoleTypeExtractor {
+ def preprocessor(tpe: Type) = iterator(tpe)(identity)
+ def location(tpe: Type) = {
+ val (card, elementTpe) = parseCardinality(tpe)
+ if (card != NoDot && elementTpe <:< treeType) Some(IterableLocation(card, TreeLocation(elementTpe)))
+ else None
+ }
+ }
+
+ object IterableLiftableType extends HoleTypeExtractor {
+ def preprocessor(tpe: Type) = {
+ val (_, elementTpe) = parseCardinality(tpe)
+ for {
+ lifter <- this.lifter(elementTpe)
+ iterator <- this.iterator(tpe)(lifter)
+ } yield iterator
+ }
+ def location(tpe: Type) = Some(IterableLocation(cardinality(tpe), TreeLocation(treeType)))
+ }
+ }
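+ // Illustration of the inference above: a splicee of type List[Tree] is matched by IterableTreeType,
+ // yielding a HoleType with DotDot cardinality, while a splicee of type Int is matched by LiftableType
+ // provided an implicit Liftable[Int] is in scope.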
+
+ /** Hole encapsulates information about splices in quasiquotes.
+ * It packs together the cardinality of a splice, the splicee itself (possibly preprocessed)
+ * and a description of the location in Scala syntax where the splicee can be spliced.
+ * This is the third pillar of modularity in the quasiquote reifier.
+ */
+ case class Hole(tree: Tree, location: Location, cardinality: Cardinality)
+
+ object Hole {
+ def apply(splicee: Tree, holeCard: Cardinality): Hole = {
+ if (splicee.tpe == null) return new Hole(splicee, UnknownLocation, holeCard)
+ val (spliceeCard, elementTpe) = parseCardinality(splicee.tpe)
+ def cantSplice() = {
+ val holeCardMsg = if (holeCard != NoDot) s" with $holeCard" else ""
+ val action = "splice " + splicee.tpe + holeCardMsg
+ val suggestCard = holeCard != spliceeCard || holeCard != NoDot
+ val spliceeCardMsg = if (holeCard != spliceeCard && spliceeCard != NoDot) s"using $spliceeCard" else "omitting the dots"
+ val cardSuggestion = if (suggestCard) spliceeCardMsg else ""
+ def canBeLifted(tpe: Type) = HoleType.LiftableType.unapply(tpe).nonEmpty
+ val suggestLifting = (holeCard == NoDot || spliceeCard != NoDot) && !(elementTpe <:< treeType) && !canBeLifted(elementTpe)
+ val liftedTpe = if (holeCard != NoDot) elementTpe else splicee.tpe
+ val liftSuggestion = if (suggestLifting) s"providing an implicit instance of Liftable[$liftedTpe]" else ""
+ val advice = List(cardSuggestion, liftSuggestion).filter(_ != "").mkString(" or ")
+ c.abort(splicee.pos, s"Can't $action, consider $advice")
+ }
+ val holeTpe = splicee.tpe match {
+ case _ if holeCard != spliceeCard => cantSplice()
+ case HoleType(holeTpe) => holeTpe
+ case _ => cantSplice()
+ }
+ holeTpe.makeHole(splicee)
+ }
+ }
+
+ def parseCardinality(tpe: Type): (Cardinality, Type) = {
+ if (tpe != null && isIterableType(tpe)) {
+ val (card, innerTpe) = parseCardinality(tpe.typeArguments.head)
+ (card.succ, innerTpe)
+ } else (NoDot, tpe)
+ }
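+ // For example, parseCardinality is expected to map List[Tree] to (DotDot, Tree),
+ // List[List[Tree]] to (DotDotDot, Tree) and a plain Tree to (NoDot, Tree).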
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
new file mode 100644
index 0000000000..9a6ba56c18
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Parsers.scala
@@ -0,0 +1,134 @@
+package scala.tools.reflect
+package quasiquotes
+
+import scala.tools.nsc.ast.parser.{Parsers => ScalaParser}
+import scala.tools.nsc.ast.parser.Tokens._
+import scala.compat.Platform.EOL
+import scala.reflect.internal.util.{BatchSourceFile, SourceFile}
+import scala.collection.mutable.ListBuffer
+
+/** Builds upon the vanilla Scala parser and works together with Placeholders.scala to emulate holes.
+ * A principled solution to splicing into Scala syntax would be a parser that natively supports holes.
+ * Unfortunately, that's outside of our reach in Scala 2.11, so we have to emulate.
+ */
+trait Parsers { self: Quasiquotes =>
+ import global._
+
+ abstract class Parser extends {
+ val global: self.global.type = self.global
+ } with ScalaParser {
+ /** Wraps the given code to obtain the desired parser mode.
+ * This way we can just re-use the standard parser entry point.
+ */
+ def wrapCode(code: String): String =
+ s"object wrapper { self => $EOL $code $EOL }"
+
+ def unwrapTree(wrappedTree: Tree): Tree = {
+ val PackageDef(_, List(ModuleDef(_, _, Template(_, _, _ :: parsed)))) = wrappedTree
+ parsed match {
+ case tree :: Nil => tree
+ case stats :+ tree => Block(stats, tree)
+ }
+ }
+
+ def parse(code: String): Tree = {
+ try {
+ val wrapped = wrapCode(code)
+ debug(s"wrapped code\n=${wrapped}\n")
+ val file = new BatchSourceFile(nme.QUASIQUOTE_FILE, wrapped)
+ val tree = new QuasiquoteParser(file).parse()
+ unwrapTree(tree)
+ } catch {
+ case mi: MalformedInput => c.abort(c.macroApplication.pos, s"syntax error: ${mi.msg}")
+ }
+ }
+
+ class QuasiquoteParser(source0: SourceFile) extends SourceFileParser(source0) {
+ override val treeBuilder = new ParserTreeBuilder {
+ // q"(..$xs)"
+ override def makeTupleTerm(trees: List[Tree], flattenUnary: Boolean): Tree =
+ Apply(Ident(nme.QUASIQUOTE_TUPLE), trees)
+
+ // tq"(..$xs)"
+ override def makeTupleType(trees: List[Tree], flattenUnary: Boolean): Tree =
+ AppliedTypeTree(Ident(tpnme.QUASIQUOTE_TUPLE), trees)
+
+ // q"{ $x }"
+ override def makeBlock(stats: List[Tree]): Tree = stats match {
+ case (head @ Ident(name)) :: Nil if holeMap.contains(name) => Block(Nil, head)
+ case _ => super.makeBlock(stats)
+ }
+ }
+ import treeBuilder.{global => _, _}
+
+ // q"def foo($x)"
+ override def allowTypelessParams = true
+
+ // q"foo match { case $x }"
+ override def caseClause(): CaseDef =
+ if (isHole && lookingAhead { in.token == CASE || in.token == RBRACE || in.token == SEMI }) {
+ val c = makeCaseDef(Apply(Ident(nme.QUASIQUOTE_CASE), List(Ident(ident()))), EmptyTree, EmptyTree)
+ while (in.token == SEMI) in.nextToken()
+ c
+ } else
+ super.caseClause()
+
+ def isHole = isIdent && holeMap.contains(in.name)
+
+ override def isAnnotation: Boolean = super.isAnnotation || (isHole && lookingAhead { isAnnotation })
+
+ override def isModifier: Boolean = super.isModifier || (isHole && lookingAhead { isModifier })
+
+ override def isLocalModifier: Boolean = super.isLocalModifier || (isHole && lookingAhead { isLocalModifier })
+
+ override def isTemplateIntro: Boolean = super.isTemplateIntro || (isHole && lookingAhead { isTemplateIntro })
+
+ override def isDclIntro: Boolean = super.isDclIntro || (isHole && lookingAhead { isDclIntro })
+
+ // $mods def foo
+ // $mods T
+ override def readAnnots(annot: => Tree): List[Tree] = in.token match {
+ case AT =>
+ in.nextToken()
+ annot :: readAnnots(annot)
+ case _ if isHole && lookingAhead { in.token == AT || isModifier || isDefIntro || isIdent} =>
+ val ann = Apply(Select(New(Ident(tpnme.QUASIQUOTE_MODS)), nme.CONSTRUCTOR), List(Literal(Constant(in.name.toString))))
+ in.nextToken()
+ ann :: readAnnots(annot)
+ case _ =>
+ Nil
+ }
+ }
+ }
+
+ object TermParser extends Parser
+
+ object CaseParser extends Parser {
+ override def wrapCode(code: String) = super.wrapCode("something match { case " + code + " }")
+
+ override def unwrapTree(wrappedTree: Tree): Tree = {
+ val Match(_, head :: tail) = super.unwrapTree(wrappedTree)
+ if (tail.nonEmpty)
+ c.abort(c.macroApplication.pos, "Can't parse more than one casedef, consider generating a match tree instead")
+ head
+ }
+ }
+
+ object PatternParser extends Parser {
+ override def wrapCode(code: String) = super.wrapCode("something match { case " + code + " => }")
+
+ override def unwrapTree(wrappedTree: Tree): Tree = {
+ val Match(_, List(CaseDef(pat, _, _))) = super.unwrapTree(wrappedTree)
+ pat
+ }
+ }
+
+ object TypeParser extends Parser {
+ override def wrapCode(code: String) = super.wrapCode("type T = " + code)
+
+ override def unwrapTree(wrappedTree: Tree): Tree = {
+ val TypeDef(_, _, _, rhs) = super.unwrapTree(wrappedTree)
+ rhs
+ }
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
new file mode 100644
index 0000000000..b680c25f76
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Placeholders.scala
@@ -0,0 +1,123 @@
+package scala.tools.reflect
+package quasiquotes
+
+import java.util.UUID.randomUUID
+import scala.collection.{immutable, mutable}
+
+/** Emulates hole support (see Holes.scala) in the quasiquote parser (see Parsers.scala).
+ * A principled solution to splicing into Scala syntax would be a parser that natively supports holes.
+ * Unfortunately, that's outside of our reach in Scala 2.11, so we have to emulate.
+ * This trait stores knowledge of how to represent the holes as something understandable by the parser
+ * and how to recover holes from the results of parsing the produced representation.
+ */
+trait Placeholders { self: Quasiquotes =>
+ import global._
+ import Cardinality._
+
+ // Step 1: Transform Scala source with holes into vanilla Scala source
+
+ lazy val holeMap = new HoleMap()
+ lazy val code = {
+ val sb = new StringBuilder()
+ val sessionSuffix = randomUUID().toString.replace("-", "").substring(0, 8) + "$"
+
+ foreach2(args, parts.init) { (tree, p) =>
+ val (part, cardinality) = parseDots(p)
+ val placeholderName = c.freshName(TermName(nme.QUASIQUOTE_PREFIX + sessionSuffix))
+ sb.append(part)
+ sb.append(placeholderName)
+ holeMap(placeholderName) = Hole(tree, cardinality)
+ }
+ sb.append(parts.last)
+
+ sb.toString
+ }
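+ // e.g. for q"foo + $x" the rendering above is expected to produce something like
+ // "foo + qq$ab12cd34$1" (ab12cd34$ standing in for the random session suffix),
+ // with the fresh placeholder name registered in holeMap.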
+
+ class HoleMap {
+ private val underlying = mutable.ListMap[String, Hole]()
+ private val accessed = mutable.Set[String]()
+ def unused: Set[Name] = (underlying.keys.toSet -- accessed).map(TermName(_))
+ def contains(key: Name) = underlying.contains(key.toString)
+ def apply(key: Name) = {
+ val s = key.toString
+ accessed += s
+ underlying(s)
+ }
+ def update(key: Name, hole: Hole) = {
+ underlying += key.toString -> hole
+ }
+ def get(key: Name) = {
+ val s = key.toString
+ accessed += s
+ underlying.get(s)
+ }
+ }
+
+ // Step 2: Transform vanilla Scala AST into an AST with holes
+
+ trait HolePlaceholder {
+ def matching: PartialFunction[Any, Name]
+ def unapply(scrutinee: Any): Option[(Tree, Location, Cardinality)] = {
+ val name = matching.lift(scrutinee)
+ name.flatMap { holeMap.get(_).map { case Hole(repr, loc, card) => (repr, loc, card) } }
+ }
+ }
+
+ object Placeholder extends HolePlaceholder {
+ def matching = {
+ case name: Name => name
+ case Ident(name) => name
+ case Bind(name, Ident(nme.WILDCARD)) => name
+ case TypeDef(_, name, List(), TypeBoundsTree(EmptyTree, EmptyTree)) => name
+ case ValDef(_, name, TypeTree(), EmptyTree) => name
+ }
+ }
+
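+ // Recognizes the synthetic annotation manufactured by QuasiquoteParser.readAnnots for a hole
+ // in modifier position; the string literal argument carries the placeholder's name.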
+ object ModsPlaceholder extends HolePlaceholder {
+ def matching = {
+ case Apply(Select(New(Ident(tpnme.QUASIQUOTE_MODS)), nme.CONSTRUCTOR), List(Literal(Constant(s: String)))) => TermName(s)
+ }
+ }
+
+ object AnnotPlaceholder {
+ def unapply(tree: Tree): Option[(Tree, Location, Cardinality, List[Tree])] = tree match {
+ case Apply(Select(New(Placeholder(tree, loc, card)), nme.CONSTRUCTOR), args) => Some((tree, loc, card, args))
+ case _ => None
+ }
+ }
+
+ object TuplePlaceholder {
+ def unapply(tree: Tree): Option[List[Tree]] = tree match {
+ case Apply(Ident(nme.QUASIQUOTE_TUPLE), args) => Some(args)
+ case _ => None
+ }
+ }
+
+ object TupleTypePlaceholder {
+ def unapply(tree: Tree): Option[List[Tree]] = tree match {
+ case AppliedTypeTree(Ident(tpnme.QUASIQUOTE_TUPLE), args) => Some(args)
+ case _ => None
+ }
+ }
+
+ object SymbolPlaceholder {
+ def unapply(scrutinee: Any): Option[Tree] = scrutinee match {
+ case Placeholder(tree, SymbolLocation, _) => Some(tree)
+ case _ => None
+ }
+ }
+
+ object CasePlaceholder {
+ def unapply(tree: Tree): Option[(Tree, Location, Cardinality)] = tree match {
+ case CaseDef(Apply(Ident(nme.QUASIQUOTE_CASE), List(Placeholder(tree, location, card))), EmptyTree, EmptyTree) => Some((tree, location, card))
+ case _ => None
+ }
+ }
+
+ object ClassPlaceholder {
+ def unapply(tree: Tree): Option[Tree] = tree match {
+ case ClassDef(_, _, _, _) => Some(tree)
+ case _ => None
+ }
+ }
+} \ No newline at end of file
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
new file mode 100644
index 0000000000..fe954e0bfd
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Quasiquotes.scala
@@ -0,0 +1,51 @@
+package scala.tools.reflect
+package quasiquotes
+
+import scala.reflect.macros.runtime.Context
+
+abstract class Quasiquotes extends Parsers
+ with Holes
+ with Placeholders
+ with Reifiers {
+ val c: Context
+ val global: c.universe.type = c.universe
+ import c.universe._
+
+ def debug(msg: String): Unit =
+ if (settings.Yquasiquotedebug.value) println(msg)
+
+ lazy val (universe: Tree, args, parts, parse, reify) = c.macroApplication match {
+ case Apply(Select(Select(Apply(Select(universe0, _), List(Apply(_, parts0))), interpolator0), method0), args0) =>
+ val parts1 = parts0.map {
+ case Literal(Constant(s: String)) => s
+ case part => c.abort(part.pos, "Quasiquotes can only be used with literal strings")
+ }
+ val reify0 = method0 match {
+ case nme.apply => new ApplyReifier().reifyFillingHoles(_)
+ case nme.unapply => new UnapplyReifier().reifyFillingHoles(_)
+ case other => global.abort(s"Unknown quasiquote api method: $other")
+ }
+ val parse0 = interpolator0 match {
+ case nme.q => TermParser.parse(_)
+ case nme.tq => TypeParser.parse(_)
+ case nme.cq => CaseParser.parse(_)
+ case nme.pq => PatternParser.parse(_)
+ case other => global.abort(s"Unknown quasiquote flavor: $other")
+ }
+ (universe0, args0, parts1, parse0, reify0)
+ case _ =>
+ global.abort(s"Couldn't parse call prefix tree ${c.macroApplication}.")
+ }
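+ // The macro application is expected to have roughly the shape
+ // universe.Quasiquote(StringContext.apply(<parts>)).q.apply(<args>) (or .unapply for patterns),
+ // which the pattern above takes apart into parts, args and the parser/reifier pair.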
+
+ lazy val u = universe // shortcut
+ lazy val universeTypes = new definitions.UniverseDependentTypes(universe)
+
+ def expandQuasiquote = {
+ debug(s"\ncode to parse=\n$code\n")
+ val tree = parse(code)
+ debug(s"parsed tree\n=${tree}\n=${showRaw(tree)}\n")
+ val reified = reify(tree)
+ debug(s"reified tree\n=${reified}\n=${showRaw(reified)}\n")
+ reified
+ }
+}
diff --git a/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
new file mode 100644
index 0000000000..ec113036a3
--- /dev/null
+++ b/src/compiler/scala/tools/reflect/quasiquotes/Reifiers.scala
@@ -0,0 +1,290 @@
+package scala.tools.reflect
+package quasiquotes
+
+import java.lang.UnsupportedOperationException
+import scala.reflect.reify.{Reifier => ReflectReifier}
+import scala.reflect.internal.Flags._
+
+trait Reifiers { self: Quasiquotes =>
+ import global._
+ import global.build.SyntacticClassDef
+ import global.treeInfo._
+ import global.definitions._
+ import Cardinality._
+ import universeTypes._
+
+ abstract class Reifier extends {
+ val global: self.global.type = self.global
+ } with ReflectReifier {
+ val reifee = EmptyTree
+ val universe = self.universe
+ val mirror = EmptyTree
+ val concrete = false
+ lazy val typer = throw new UnsupportedOperationException
+
+ def isReifyingExpressions: Boolean
+ def isReifyingPatterns: Boolean = !isReifyingExpressions
+ def action = if (isReifyingExpressions) "splice" else "extract"
+ def holesHaveTypes = isReifyingExpressions
+
+ def reifyFillingHoles(tree: Tree): Tree = {
+ val reified = reifyTree(tree)
+ holeMap.unused.foreach { hole =>
+ c.abort(holeMap(hole).tree.pos, s"Don't know how to $action here")
+ }
+ reified
+ }
+
+ override def reifyTree(tree: Tree): Tree = {
+ val reified =
+ reifyTreePlaceholder(tree) orElse
+ reifyTreeSyntactically(tree)
+ //println(s"reified ${showRaw(tree)} as $reified")
+ reified
+ }
+
+ def reifyTreePlaceholder(tree: Tree): Tree = tree match {
+ case Placeholder(tree, TreeLocation(_), _) if isReifyingExpressions => tree
+ case Placeholder(tree, _, NoDot) if isReifyingPatterns => tree
+ case Placeholder(tree, _, card @ Dot()) => c.abort(tree.pos, s"Can't $action with $card here")
+ case TuplePlaceholder(args) => reifyTuple(args)
+ case TupleTypePlaceholder(args) => reifyTupleType(args)
+ case CasePlaceholder(tree, location, _) => reifyCase(tree, location)
+ case ClassPlaceholder(tree) => reifyClass(tree)
+ case _ => EmptyTree
+ }
+
+ override def reifyName(name: Name): Tree = name match {
+ case Placeholder(tree, location, _) =>
+ if (holesHaveTypes && !(location.tpe <:< nameType)) c.abort(tree.pos, s"$nameType expected but ${location.tpe} found")
+ tree
+ case _ =>
+ super.reifyName(name)
+ }
+
+ def reifyCase(tree: Tree, location: Location) = {
+ if (holesHaveTypes && !(location.tpe <:< caseDefType)) c.abort(tree.pos, s"$caseDefType expected but ${location.tpe} found")
+ tree
+ }
+
+ def reifyTuple(args: List[Tree]) = args match {
+ case Nil => reify(Literal(Constant(())))
+ case List(hole @ Placeholder(_, _, NoDot)) => reify(hole)
+ case List(Placeholder(_, _, _)) => reifyBuildCall(nme.TupleN, args)
+ // if we only have a one-element tuple without
+ // any cardinality annotations, then this is
+ // just an expression wrapped in parentheses
+ case List(other) => reify(other)
+ case _ => reifyBuildCall(nme.TupleN, args)
+ }
+
+ def reifyTupleType(args: List[Tree]) = args match {
+ case Nil => reify(Select(Ident(nme.scala_), tpnme.Unit))
+ case List(hole @ Placeholder(_, _, NoDot)) => reify(hole)
+ case List(Placeholder(_, _, _)) => reifyBuildCall(nme.TupleTypeN, args)
+ case List(other) => reify(other)
+ case _ => reifyBuildCall(nme.TupleTypeN, args)
+ }
+
+ def reifyClass(tree: Tree) = {
+ val SyntacticClassDef(mods, name, tparams, constrmods, argss, parents, selfval, body) = tree
+ reifyBuildCall(nme.SyntacticClassDef, mods, name, tparams, constrmods, argss, parents, selfval, body)
+ }
+
+ /** Splits a list into a list of groups where subsequent elements are considered
+ * similar by the given function.
+ *
+ * Example:
+ *
+ * > group(List(1, 1, 0, 0, 1, 0)) { _ == _ }
+ * List(List(1, 1), List(0, 0), List(1), List(0))
+ *
+ */
+ def group[T](lst: List[T])(similar: (T, T) => Boolean) = lst.foldLeft[List[List[T]]](List()) {
+ case (Nil, el) => List(List(el))
+ case (ll :+ (last @ (lastinit :+ lastel)), el) if similar(lastel, el) => ll :+ (last :+ el)
+ case (ll, el) => ll :+ List(el)
+ }
+
+ /** Reifies a list, filling in all the valid holes.
+ *
+ * Reification of a non-trivial list is done in two steps:
+ *
+ * 1. split the list into groups where every placeholder is always
+ * put in a group of its own and all subsequent non-holes are
+ * grouped together; an element is considered to be a placeholder if it's
+ * in the domain of the fill function;
+ *
+ * 2. fold the groups into a sequence of lists added together with ++ using
+ * fill reification for holes and fallback reification for non-holes.
+ *
+ * Example:
+ *
+ * reifyMultiCardinalityList(lst) {
+ * // first we define patterns that extract high-cardinality holes (currently ..)
+ * case Placeholder(CorrespondsTo(tree, tpe)) if tpe <:< iterableTreeType => tree
+ * } {
+ * // in the end we define how single elements are reified, typically with default reify call
+ * reify(_)
+ * }
+ *
+ * Sample execution of previous concrete list reifier:
+ *
+ * > val lst = List(foo, bar, qq$f3948f9s$1)
+ * > reifyMultiCardinalityList(lst) { ... } { ... }
+ * q"List($foo, $bar) ++ ${holeMap(qq$f3948f9s$1).tree}"
+ */
+ def reifyMultiCardinalityList[T](xs: List[T])(fill: PartialFunction[T, Tree])(fallback: T => Tree): Tree
+
+ /** Reifies an arbitrary list, filling ..$x and ...$y holes when they are put
+ * in the correct position. Falls back to regular reification for non-high-cardinality
+ * elements.
+ */
+ override def reifyList(xs: List[Any]): Tree = reifyMultiCardinalityList(xs) {
+ case Placeholder(tree, _, DotDot) => tree
+ case CasePlaceholder(tree, _, DotDot) => tree
+ case List(Placeholder(tree, _, DotDotDot)) => tree
+ } {
+ reify(_)
+ }
+
+ def reifyAnnotList(annots: List[Tree]): Tree
+
+ def ensureNoExplicitFlags(m: Modifiers, pos: Position) =
+ if ((m.flags & ExplicitFlags) != 0L) c.abort(pos, s"Can't $action modifiers together with flags, consider merging flags into modifiers")
+
+ override def mirrorSelect(name: String): Tree =
+ Select(universe, TermName(name))
+
+ override def mirrorCall(name: TermName, args: Tree*): Tree =
+ Apply(Select(universe, name), args.toList)
+
+ override def mirrorBuildCall(name: TermName, args: Tree*): Tree =
+ Apply(Select(Select(universe, nme.build), name), args.toList)
+ }
+
+ class ApplyReifier extends Reifier {
+ def isReifyingExpressions = true
+
+ override def reifyTreeSyntactically(tree: Tree): Tree = tree match {
+ case Block(stats, p @ Placeholder(_, _, _)) => reifyBuildCall(nme.Block, stats :+ p)
+ case Apply(f, List(Placeholder(argss, _, DotDotDot))) => reifyCallWithArgss(f, argss)
+ case RefTree(qual, SymbolPlaceholder(tree)) => mirrorBuildCall(nme.RefTree, reify(qual), tree)
+ case _ => super.reifyTreeSyntactically(tree)
+ }
+
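+ // The tree built below is expected to evaluate, at runtime, to roughly
+ // argss.foldLeft[u.Tree](f1) { (fn, args) => u.Apply(fn, args) },
+ // i.e. the reified function applied to each reified argument list in turn.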
+ def reifyCallWithArgss(f: Tree, argss: Tree) = {
+ val f1 = reifyTree(f)
+ val foldLeftF1 = Apply(TypeApply(Select(argss, nme.foldLeft), List(Select(u, tpnme.Tree))), List(f1))
+ val uDotApply = Function(
+ List(gen.mkSyntheticParam(nme.x_1), gen.mkSyntheticParam(nme.x_2)),
+ Apply(Select(u, nme.Apply), List(Ident(nme.x_1), Ident(nme.x_2))))
+ Apply(foldLeftF1, List(uDotApply))
+ }
+
+ override def reifyMultiCardinalityList[T](xs: List[T])(fill: PartialFunction[T, Tree])(fallback: T => Tree): Tree = xs match {
+ case Nil => mkList(Nil)
+ case _ =>
+ def reifyGroup(group: List[T]): Tree = group match {
+ case List(elem) if fill.isDefinedAt(elem) => fill(elem)
+ case elems => mkList(elems.map(fallback))
+ }
+ val head :: tail = group(xs) { (a, b) => !fill.isDefinedAt(a) && !fill.isDefinedAt(b) }
+ tail.foldLeft[Tree](reifyGroup(head)) { (tree, lst) => Apply(Select(tree, nme.PLUSPLUS), List(reifyGroup(lst))) }
+ }
+
+ override def reifyAnnotList(annots: List[Tree]): Tree = reifyMultiCardinalityList(annots) {
+ case AnnotPlaceholder(tree, _, DotDot, args) =>
+ val x: TermName = c.freshName()
+ val xToAnnotationCtor = Function(
+ List(ValDef(Modifiers(PARAM), x, TypeTree(), EmptyTree)),
+ mirrorBuildCall(nme.mkAnnotationCtor, Ident(x), reify(args)))
+ Apply(Select(tree, nme.map), List(xToAnnotationCtor))
+ } {
+ case AnnotPlaceholder(tree, _: TreeLocation, _, args) =>
+ mirrorBuildCall(nme.mkAnnotationCtor, tree, reify(args))
+ case other => reify(other)
+ }
+
+ override def reifyModifiers(m: Modifiers) = {
+ val (modsPlaceholders, annots) = m.annotations.partition {
+ case ModsPlaceholder(_, _, _) => true
+ case _ => false
+ }
+ val (mods, flags) = modsPlaceholders.map {
+ case ModsPlaceholder(tree, location, card) => (tree, location)
+ }.partition { case (tree, location) =>
+ location match {
+ case ModsLocation => true
+ case FlagsLocation => false
+ case _ => c.abort(tree.pos, s"$flagsType or $modsType expected but ${tree.tpe} found")
+ }
+ }
+ mods match {
+ case (tree, _) :: Nil =>
+ if (flags.nonEmpty) c.abort(flags(0)._1.pos, "Can't splice flags together with modifiers, consider merging flags into modifiers")
+ if (annots.nonEmpty) c.abort(tree.pos, "Can't splice modifiers together with annotations, consider merging annotations into modifiers")
+ ensureNoExplicitFlags(m, tree.pos)
+ tree
+ case _ :: (second, _) :: Nil =>
+ c.abort(second.pos, "Can't splice multiple modifiers, consider merging them into a single modifiers instance")
+ case _ =>
+ val baseFlags = reifyBuildCall(nme.flagsFromBits, m.flags)
+ val reifiedFlags = flags.foldLeft[Tree](baseFlags) { case (flag, (tree, _)) => Apply(Select(flag, nme.OR), List(tree)) }
+ mirrorFactoryCall(nme.Modifiers, reifiedFlags, reify(m.privateWithin), reifyAnnotList(annots))
+ }
+ }
+ }
+
+ class UnapplyReifier extends Reifier {
+ def isReifyingExpressions = false
+
+ override def reifyTreeSyntactically(tree: Tree): Tree = tree match {
+ case treeInfo.Applied(fun, Nil, argss) if fun != tree && !tree.isInstanceOf[AppliedTypeTree] =>
+ reifyBuildCall(nme.Applied, fun, argss)
+ case treeInfo.Applied(fun, targs, argss) if fun != tree && !tree.isInstanceOf[AppliedTypeTree] =>
+ mirrorBuildCall(nme.Applied, reifyBuildCall(nme.TypeApplied, fun, targs), reifyList(argss))
+ case _ =>
+ super.reifyTreeSyntactically(tree)
+ }
+
+ override def scalaFactoryCall(name: String, args: Tree*): Tree =
+ call("scala." + name, args: _*)
+
+ override def reifyMultiCardinalityList[T](xs: List[T])(fill: PartialFunction[T, Tree])(fallback: T => Tree) = xs match {
+ case init :+ last if fill.isDefinedAt(last) =>
+ init.foldRight[Tree](fill(last)) { (el, rest) =>
+ val cons = Select(Select(Select(Ident(nme.scala_), nme.collection), nme.immutable), nme.CONS)
+ Apply(cons, List(fallback(el), rest))
+ }
+ case _ =>
+ mkList(xs.map(fallback))
+ }
+
+ override def reifyAnnotList(annots: List[Tree]): Tree = reifyMultiCardinalityList(annots) {
+ case AnnotPlaceholder(tree, _, DotDot, Nil) => tree
+ } {
+ case AnnotPlaceholder(tree, _, NoDot, Nil) => tree
+ case AnnotPlaceholder(tree, _, NoDot, args) =>
+ val selectCONSTRUCTOR = Apply(Select(u, nme.Select), List(Apply(Select(u, nme.New), List(tree)), Select(Select(u, nme.nmeNme), nme.nmeCONSTRUCTOR)))
+ Apply(Select(u, nme.Apply), List(selectCONSTRUCTOR, reify(args)))
+ case other =>
+ reify(other)
+ }
+
+ override def reifyModifiers(m: Modifiers) = {
+ val mods = m.annotations.collect { case ModsPlaceholder(tree, _, _) => tree }
+ mods match {
+ case tree :: Nil =>
+ if (m.annotations.length != 1) c.abort(tree.pos, "Can't extract modifiers together with annotations, consider extracting just modifiers")
+ ensureNoExplicitFlags(m, tree.pos)
+ tree
+ case _ :: second :: rest =>
+ c.abort(second.pos, "Can't extract multiple modifiers together, consider extracting a single modifiers instance")
+ case Nil =>
+ mirrorFactoryCall(nme.Modifiers, reifyBuildCall(nme.FlagsAsBits, m.flags),
+ reify(m.privateWithin), reifyAnnotList(m.annotations))
+ }
+ }
+ }
+} \ No newline at end of file
diff --git a/src/eclipse/continuations-library/.classpath b/src/eclipse/continuations-library/.classpath
index b3ca4eeb48..61cb3f060e 100644
--- a/src/eclipse/continuations-library/.classpath
+++ b/src/eclipse/continuations-library/.classpath
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
- <classpathentry kind="src" path="library"/>
+ <classpathentry kind="src" path="continuations-library"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
diff --git a/src/eclipse/continuations-library/.project b/src/eclipse/continuations-library/.project
index f3a53a3d97..33cc57d667 100644
--- a/src/eclipse/continuations-library/.project
+++ b/src/eclipse/continuations-library/.project
@@ -22,7 +22,7 @@
<locationURI>SCALA_BASEDIR/build/quick/classes/continuations/library</locationURI>
</link>
<link>
- <name>library</name>
+ <name>continuations-library</name>
<type>2</type>
<locationURI>SCALA_BASEDIR/src/continuations/library</locationURI>
</link>
diff --git a/src/eclipse/interactive/.classpath b/src/eclipse/interactive/.classpath
index 870cc67aec..73a67e45ed 100644
--- a/src/eclipse/interactive/.classpath
+++ b/src/eclipse/interactive/.classpath
@@ -1,10 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="interactive"/>
- <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
- <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
- <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
<classpathentry combineaccessrules="false" kind="src" path="/scaladoc"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
<classpathentry kind="output" path="build-quick-interactive"/>
</classpath>
diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath
index a990c5a1b3..462cbb9c94 100644
--- a/src/eclipse/partest/.classpath
+++ b/src/eclipse/partest/.classpath
@@ -1,15 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="partest"/>
- <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
- <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
- <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
<classpathentry combineaccessrules="false" kind="src" path="/scalap"/>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
- <classpathentry kind="lib" path="lib/ant/ant.jar"/>
- <classpathentry kind="lib" path="lib/jline.jar"/>
<classpathentry combineaccessrules="false" kind="src" path="/asm"/>
- <classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
<classpathentry combineaccessrules="false" kind="src" path="/repl"/>
+ <classpathentry kind="var" path="M2_REPO/com/googlecode/java-diff-utils/diffutils/1.3.0/diffutils-1.3.0.jar"/>
+ <classpathentry kind="var" path="M2_REPO/org/scala-tools/testing/test-interface/0.5/test-interface-0.5.jar"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="output" path="build-quick-partest"/>
</classpath>
diff --git a/src/eclipse/reflect/.classpath b/src/eclipse/reflect/.classpath
index 36e6b6adf1..1eb37e3f5f 100644
--- a/src/eclipse/reflect/.classpath
+++ b/src/eclipse/reflect/.classpath
@@ -2,7 +2,7 @@
<classpath>
<classpathentry kind="src" path="reflect"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
<classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="output" path="build-quick-reflect"/>
</classpath>
diff --git a/src/eclipse/repl/.classpath b/src/eclipse/repl/.classpath
index 30744da306..748fa6c9c8 100644
--- a/src/eclipse/repl/.classpath
+++ b/src/eclipse/repl/.classpath
@@ -1,11 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
- <classpathentry kind="src" path="repl"/>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
- <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
- <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
- <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
- <classpathentry kind="var" path="SCALA_BASEDIR/lib/jline.jar"/>
- <classpathentry combineaccessrules="false" kind="src" path="/asm"/>
- <classpathentry kind="output" path="build-quick-repl"/>
+ <classpathentry kind="src" path="repl"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/asm"/>
+ <classpathentry kind="var" path="M2_REPO/jline/jline/2.11/jline-2.11.jar"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="output" path="build-quick-repl"/>
</classpath>
diff --git a/src/eclipse/repl/.project b/src/eclipse/repl/.project
index ea188bc262..69ad08ab1a 100644
--- a/src/eclipse/repl/.project
+++ b/src/eclipse/repl/.project
@@ -1,35 +1,35 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
- <name>repl</name>
- <comment></comment>
- <projects>
- </projects>
- <buildSpec>
- <buildCommand>
- <name>org.scala-ide.sdt.core.scalabuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- </buildSpec>
- <natures>
- <nature>org.scala-ide.sdt.core.scalanature</nature>
- <nature>org.eclipse.jdt.core.javanature</nature>
- </natures>
- <linkedResources>
- <link>
- <name>build-quick-repl</name>
- <type>2</type>
- <locationURI>SCALA_BASEDIR/build/quick/classes/repl</locationURI>
- </link>
- <link>
- <name>lib</name>
- <type>2</type>
- <locationURI>SCALA_BASEDIR/lib</locationURI>
- </link>
- <link>
- <name>repl</name>
- <type>2</type>
- <locationURI>SCALA_BASEDIR/src/repl</locationURI>
- </link>
- </linkedResources>
+ <name>repl</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-repl</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/repl</locationURI>
+ </link>
+ <link>
+ <name>lib</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/lib</locationURI>
+ </link>
+ <link>
+ <name>repl</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/repl</locationURI>
+ </link>
+ </linkedResources>
</projectDescription>
diff --git a/src/eclipse/scala-compiler/.classpath b/src/eclipse/scala-compiler/.classpath
index 0488a0dc39..b6ef5f35bb 100644
--- a/src/eclipse/scala-compiler/.classpath
+++ b/src/eclipse/scala-compiler/.classpath
@@ -1,12 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="compiler"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/asm"/>
<classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
- <classpathentry combineaccessrules="false" kind="src" path="/asm"/>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
- <classpathentry kind="lib" path="lib/ant/ant.jar"/>
- <classpathentry kind="lib" path="lib/jline.jar"/>
<classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="output" path="build-quick-compiler"/>
</classpath>
diff --git a/src/eclipse/scala-library/.classpath b/src/eclipse/scala-library/.classpath
index a3a4933d34..eff3c8e0b7 100644
--- a/src/eclipse/scala-library/.classpath
+++ b/src/eclipse/scala-library/.classpath
@@ -2,6 +2,6 @@
<classpath>
<classpathentry kind="src" path="library"/>
<classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="output" path="build-quick-lib"/>
</classpath>
diff --git a/src/eclipse/scala-parser-combinators/.classpath b/src/eclipse/scala-parser-combinators/.classpath
new file mode 100644
index 0000000000..7eab7094eb
--- /dev/null
+++ b/src/eclipse/scala-parser-combinators/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="src-parser-combinators"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="output" path="build-quick-parser-combinators"/>
+</classpath>
diff --git a/src/eclipse/scala-parser-combinators/.project b/src/eclipse/scala-parser-combinators/.project
new file mode 100644
index 0000000000..d94523f56d
--- /dev/null
+++ b/src/eclipse/scala-parser-combinators/.project
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>scala-parser-combinators</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-parser-combinators</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/parser-combinators</locationURI>
+ </link>
+ <link>
+ <name>src-parser-combinators</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/parser-combinators</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/scala-xml/.classpath b/src/eclipse/scala-xml/.classpath
new file mode 100644
index 0000000000..b90d951640
--- /dev/null
+++ b/src/eclipse/scala-xml/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" path="src-xml"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry kind="output" path="build-quick-xml"/>
+</classpath>
diff --git a/src/eclipse/scala-xml/.project b/src/eclipse/scala-xml/.project
new file mode 100644
index 0000000000..8b0f7f6864
--- /dev/null
+++ b/src/eclipse/scala-xml/.project
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>scala-xml</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.scala-ide.sdt.core.scalabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.scala-ide.sdt.core.scalanature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ </natures>
+ <linkedResources>
+ <link>
+ <name>build-quick-xml</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/build/quick/classes/xml</locationURI>
+ </link>
+ <link>
+ <name>src-xml</name>
+ <type>2</type>
+ <locationURI>SCALA_BASEDIR/src/xml</locationURI>
+ </link>
+ </linkedResources>
+</projectDescription>
diff --git a/src/eclipse/scaladoc/.classpath b/src/eclipse/scaladoc/.classpath
index f12ba4bb2c..caafcf33b0 100644
--- a/src/eclipse/scaladoc/.classpath
+++ b/src/eclipse/scaladoc/.classpath
@@ -1,13 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
- <classpathentry combineaccessrules="false" kind="src" path="/partest"/>
<classpathentry kind="src" path="scaladoc"/>
- <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
- <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
- <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/partest"/>
<classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
- <classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
- <classpathentry combineaccessrules="false" kind="src" path="/asm"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="output" path="build-quick-scaladoc"/>
</classpath>
diff --git a/src/eclipse/scalap/.classpath b/src/eclipse/scalap/.classpath
index 0a55745702..3b635cf56e 100644
--- a/src/eclipse/scalap/.classpath
+++ b/src/eclipse/scalap/.classpath
@@ -1,12 +1,9 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="scalap"/>
- <classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
- <classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
- <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_CONTAINER"/>
+ <classpathentry kind="con" path="org.scala-ide.sdt.launching.SCALA_COMPILER_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
- <classpathentry kind="lib" path="lib/ant/ant.jar"/>
- <classpathentry kind="lib" path="lib/jline.jar"/>
- <classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
<classpathentry kind="output" path="build-quick-scalap"/>
</classpath>
diff --git a/src/eclipse/test-junit/.classpath b/src/eclipse/test-junit/.classpath
index 718f7b6ece..8e4f88e0f0 100644
--- a/src/eclipse/test-junit/.classpath
+++ b/src/eclipse/test-junit/.classpath
@@ -1,12 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="test-junit"/>
+ <classpathentry kind="var" path="SCALA_BASEDIR/lib/ant/ant.jar"/>
+ <classpathentry kind="var" path="M2_REPO/junit/junit/4.10/junit-4.10.jar"/>
<classpathentry combineaccessrules="false" kind="src" path="/reflect"/>
<classpathentry combineaccessrules="false" kind="src" path="/scala-library"/>
+ <classpathentry combineaccessrules="false" kind="src" path="/continuations-library"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
- <classpathentry kind="lib" path="lib/ant/ant.jar"/>
- <classpathentry kind="lib" path="lib/jline.jar"/>
- <classpathentry combineaccessrules="false" kind="src" path="/scala-compiler"/>
- <classpathentry kind="var" path="M2_REPO/junit/junit/4.10/junit-4.10.jar"/>
<classpathentry kind="output" path="build-test-junit"/>
</classpath>
diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala
index 99f2cd4056..5875a44025 100644
--- a/src/interactive/scala/tools/nsc/interactive/Global.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Global.scala
@@ -9,7 +9,7 @@ import java.io.{ PrintWriter, StringWriter, FileReader, FileWriter }
import scala.collection.mutable
import mutable.{LinkedHashMap, SynchronizedMap, HashSet, SynchronizedSet}
import scala.util.control.ControlThrowable
-import scala.tools.nsc.io.{ AbstractFile, LogReplay, Logger, NullLogger, Replayer }
+import scala.tools.nsc.io.{ AbstractFile }
import scala.tools.nsc.util.MultiHashMap
import scala.reflect.internal.util.{ SourceFile, BatchSourceFile, Position, NoPosition }
import scala.tools.nsc.reporters._
diff --git a/src/compiler/scala/tools/nsc/io/Lexer.scala b/src/interactive/scala/tools/nsc/interactive/Lexer.scala
index 1c926aff6b..82e8de3f3d 100644
--- a/src/compiler/scala/tools/nsc/io/Lexer.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Lexer.scala
@@ -1,4 +1,4 @@
-package scala.tools.nsc.io
+package scala.tools.nsc.interactive
import java.io.Reader
diff --git a/src/compiler/scala/tools/nsc/io/Pickler.scala b/src/interactive/scala/tools/nsc/interactive/Pickler.scala
index 0e7da37c52..83f3fab925 100644
--- a/src/compiler/scala/tools/nsc/io/Pickler.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Pickler.scala
@@ -1,4 +1,4 @@
-package scala.tools.nsc.io
+package scala.tools.nsc.interactive
import Lexer._
import java.io.Writer
diff --git a/src/interactive/scala/tools/nsc/interactive/Picklers.scala b/src/interactive/scala/tools/nsc/interactive/Picklers.scala
index 900a06333d..e75b4a3cc6 100644
--- a/src/interactive/scala/tools/nsc/interactive/Picklers.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Picklers.scala
@@ -7,10 +7,10 @@ package interactive
import util.InterruptReq
import scala.reflect.internal.util.{ SourceFile, BatchSourceFile }
-import io.{ AbstractFile, PlainFile, Pickler, CondPickler }
+import io.{ AbstractFile, PlainFile }
import util.EmptyAction
import scala.reflect.internal.util.{ RangePosition, OffsetPosition, TransparentPosition }
-import io.Pickler._
+import Pickler._
import scala.collection.mutable
import mutable.ListBuffer
diff --git a/src/compiler/scala/tools/nsc/io/PrettyWriter.scala b/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala
index 11d3703983..d7dadcc6a8 100644
--- a/src/compiler/scala/tools/nsc/io/PrettyWriter.scala
+++ b/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala
@@ -1,4 +1,4 @@
-package scala.tools.nsc.io
+package scala.tools.nsc.interactive
import java.io.Writer
diff --git a/src/compiler/scala/tools/nsc/io/Replayer.scala b/src/interactive/scala/tools/nsc/interactive/Replayer.scala
index e3dc8939a3..0e3e2493fe 100644
--- a/src/compiler/scala/tools/nsc/io/Replayer.scala
+++ b/src/interactive/scala/tools/nsc/interactive/Replayer.scala
@@ -1,4 +1,4 @@
-package scala.tools.nsc.io
+package scala.tools.nsc.interactive
import java.io.{Reader, Writer}
diff --git a/src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala b/src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala
index ae70a74b60..2400b97d97 100644
--- a/src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala
+++ b/src/interactive/scala/tools/nsc/interactive/ScratchPadMaker.scala
@@ -47,7 +47,7 @@ trait ScratchPadMaker { self: Global =>
toPrint.clear()
}
- /** The position where to insert an instrumentation statement in front of giuven statement.
+ /** The position where to insert an instrumentation statement in front of given statement.
* This is at the latest `stat.pos.start`. But in order not to mess with column numbers
* in position we try to insert it at the end of the previous token instead.
* Furthermore, `(' tokens have to be skipped because they do not show up
diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala
index a188602543..3b588e261f 100644
--- a/src/library/scala/Predef.scala
+++ b/src/library/scala/Predef.scala
@@ -134,15 +134,6 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef {
@inline def implicitly[T](implicit e: T) = e // for summoning implicit values from the nether world -- TODO: when dependent method types are on by default, give this result type `e.type`, so that inliner has better chance of knowing which method to inline in calls like `implicitly[MatchingStrategy[Option]].zero`
@inline def locally[T](x: T): T = x // to communicate intent and avoid unmoored statements
- // TODO: remove `val $scope = ...` as soon as 2.11.0-M4 is released and used as STARR
- // As it has a '$' in its name, we don't have to deprecate first.
- // The compiler now aliases `scala.xml.TopScope` to `$scope` (unless Predef.$scope is still there).
- // This definition left in place for older compilers and to compile quick with pre-2.11.0-M4 STARR.
- // In principle we don't need it to compile library/reflect/compiler (there's no xml left there),
- // so a new locker can be built without this definition, and locker can build quick
- // (partest, scaladoc still require xml).
- val $scope = scala.xml.TopScope
-
// errors and asserts -------------------------------------------------
// !!! Remove this when possible - ideally for 2.11.
diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala
index d966c7324b..e4976d8f2c 100644
--- a/src/library/scala/collection/GenTraversableOnce.scala
+++ b/src/library/scala/collection/GenTraversableOnce.scala
@@ -341,7 +341,7 @@ trait GenTraversableOnce[+A] extends Any {
*
* @param ord An ordering to be used for comparing elements.
* @tparam A1 The type over which the ordering is defined.
- * @return the smallest element of this $coll with respect to the ordering `cmp`.
+ * @return the smallest element of this $coll with respect to the ordering `ord`.
*
* @usecase def min: A
* @inheritdoc
@@ -354,7 +354,7 @@ trait GenTraversableOnce[+A] extends Any {
*
* @param ord An ordering to be used for comparing elements.
* @tparam A1 The type over which the ordering is defined.
- * @return the largest element of this $coll with respect to the ordering `cmp`.
+ * @return the largest element of this $coll with respect to the ordering `ord`.
*
* @usecase def max: A
* @inheritdoc
@@ -363,8 +363,34 @@ trait GenTraversableOnce[+A] extends Any {
*/
def max[A1 >: A](implicit ord: Ordering[A1]): A
+ /** Finds the first element which yields the largest value measured by function f.
+ *
+ * @param cmp An ordering to be used for comparing elements.
+ * @tparam B The result type of the function f.
+ * @param f The measuring function.
+ * @return the first element of this $coll with the largest value measured by function f
+ * with respect to the ordering `cmp`.
+ *
+ * @usecase def maxBy[B](f: A => B): A
+ * @inheritdoc
+ *
+ * @return the first element of this $coll with the largest value measured by function f.
+ */
def maxBy[B](f: A => B)(implicit cmp: Ordering[B]): A
+ /** Finds the first element which yields the smallest value measured by function f.
+ *
+ * @param cmp An ordering to be used for comparing elements.
+ * @tparam B The result type of the function f.
+ * @param f The measuring function.
+ * @return the first element of this $coll with the smallest value measured by function f
+ * with respect to the ordering `cmp`.
+ *
+ * @usecase def minBy[B](f: A => B): A
+ * @inheritdoc
+ *
+ * @return the first element of this $coll with the smallest value measured by function f.
+ */
def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A
def forall(pred: A => Boolean): Boolean
diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala
index 526c36dda7..634807b29f 100644
--- a/src/library/scala/collection/TraversableOnce.scala
+++ b/src/library/scala/collection/TraversableOnce.scala
@@ -221,13 +221,37 @@ trait TraversableOnce[+A] extends Any with GenTraversableOnce[A] {
if (isEmpty)
throw new UnsupportedOperationException("empty.maxBy")
- reduceLeft((x, y) => if (cmp.gteq(f(x), f(y))) x else y)
+ var maxF: B = null.asInstanceOf[B]
+ var maxElem: A = null.asInstanceOf[A]
+ var first = true
+
+ for (elem <- self) {
+ val fx = f(elem)
+ if (first || cmp.gt(fx, maxF)) {
+ maxElem = elem
+ maxF = fx
+ first = false
+ }
+ }
+ maxElem
}
def minBy[B](f: A => B)(implicit cmp: Ordering[B]): A = {
if (isEmpty)
throw new UnsupportedOperationException("empty.minBy")
- reduceLeft((x, y) => if (cmp.lteq(f(x), f(y))) x else y)
+ var minF: B = null.asInstanceOf[B]
+ var minElem: A = null.asInstanceOf[A]
+ var first = true
+
+ for (elem <- self) {
+ val fx = f(elem)
+ if (first || cmp.lt(fx, minF)) {
+ minElem = elem
+ minF = fx
+ first = false
+ }
+ }
+ minElem
}
/** Copies all elements of this $coll to a buffer.
diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala
index 2e4c72cd71..b072cd653b 100644
--- a/src/library/scala/concurrent/Future.scala
+++ b/src/library/scala/concurrent/Future.scala
@@ -14,7 +14,7 @@ import java.util.concurrent.{ ConcurrentLinkedQueue, TimeUnit, Callable }
import java.util.concurrent.TimeUnit.{ NANOSECONDS => NANOS, MILLISECONDS ⇒ MILLIS }
import java.lang.{ Iterable => JIterable }
import java.util.{ LinkedList => JLinkedList }
-import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicBoolean }
+import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicLong, AtomicBoolean }
import scala.util.control.NonFatal
import scala.Option
@@ -101,7 +101,7 @@ trait Future[+T] extends Awaitable[T] {
// that also have an executor parameter, which
// keeps us from accidentally forgetting to use
// the executor parameter.
- private implicit def internalExecutor: ExecutionContext = Future.InternalCallbackExecutor
+ private def internalExecutor = Future.InternalCallbackExecutor
/* Callbacks */
@@ -116,9 +116,10 @@ trait Future[+T] extends Awaitable[T] {
* $callbackInContext
*/
def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = onComplete {
- case Success(v) if pf isDefinedAt v => pf(v)
+ case Success(v) =>
+ pf.applyOrElse[T, Any](v, Predef.conforms[T]) // Exploiting the cached function to avoid MatchError
case _ =>
- }(executor)
+ }
/** When this future is completed with a failure (i.e. with a throwable),
* apply the provided callback to the throwable.
@@ -134,9 +135,10 @@ trait Future[+T] extends Awaitable[T] {
* $callbackInContext
*/
def onFailure[U](@deprecatedName('callback) pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete {
- case Failure(t) if NonFatal(t) && pf.isDefinedAt(t) => pf(t)
+ case Failure(t) =>
+ pf.applyOrElse[Throwable, Any](t, Predef.conforms[Throwable]) // Exploiting the cached function to avoid MatchError
case _ =>
- }(executor)
+ }
/** When this future is completed, either through an exception, or a value,
* apply the provided function.
@@ -186,13 +188,12 @@ trait Future[+T] extends Awaitable[T] {
* and throws a corresponding exception if the original future fails.
*/
def failed: Future[Throwable] = {
+ implicit val ec = internalExecutor
val p = Promise[Throwable]()
-
onComplete {
case Failure(t) => p success t
case Success(v) => p failure (new NoSuchElementException("Future.failed not completed with a throwable."))
}
-
p.future
}
@@ -203,10 +204,7 @@ trait Future[+T] extends Awaitable[T] {
*
* Will not be called if the future fails.
*/
- def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = onComplete {
- case Success(r) => f(r)
- case _ => // do nothing
- }(executor)
+ def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = onComplete { _ foreach f }
/** Creates a new future by applying the 's' function to the successful result of
* this future, or the 'f' function to the failed result. If there is any non-fatal
@@ -221,19 +219,11 @@ trait Future[+T] extends Awaitable[T] {
*/
def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = {
val p = Promise[S]()
-
+ // transform on Try has the wrong shape for us here
onComplete {
- case result =>
- try {
- result match {
- case Failure(t) => p failure f(t)
- case Success(r) => p success s(r)
- }
- } catch {
- case NonFatal(t) => p failure t
- }
- }(executor)
-
+ case Success(r) => p complete Try(s(r))
+ case Failure(t) => p complete Try(throw f(t)) // will throw fatal errors!
+ }
p.future
}
@@ -245,19 +235,7 @@ trait Future[+T] extends Awaitable[T] {
*/
def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = { // transform(f, identity)
val p = Promise[S]()
-
- onComplete {
- case result =>
- try {
- result match {
- case Success(r) => p success f(r)
- case f: Failure[_] => p complete f.asInstanceOf[Failure[S]]
- }
- } catch {
- case NonFatal(t) => p failure t
- }
- }(executor)
-
+ onComplete { v => p complete (v map f) }
p.future
}
@@ -270,20 +248,10 @@ trait Future[+T] extends Awaitable[T] {
*/
def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = {
val p = Promise[S]()
-
onComplete {
case f: Failure[_] => p complete f.asInstanceOf[Failure[S]]
- case Success(v) =>
- try {
- f(v).onComplete({
- case f: Failure[_] => p complete f.asInstanceOf[Failure[S]]
- case Success(v) => p success v
- })(internalExecutor)
- } catch {
- case NonFatal(t) => p failure t
- }
- }(executor)
-
+ case Success(v) => try f(v) onComplete p.complete catch { case NonFatal(t) => p failure t }
+ }
p.future
}
@@ -303,34 +271,14 @@ trait Future[+T] extends Awaitable[T] {
* Await.result(h, Duration.Zero) // throw a NoSuchElementException
* }}}
*/
- def filter(@deprecatedName('pred) p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = {
- val promise = Promise[T]()
-
- onComplete {
- case f: Failure[_] => promise complete f.asInstanceOf[Failure[T]]
- case Success(v) =>
- try {
- if (p(v)) promise success v
- else promise failure new NoSuchElementException("Future.filter predicate is not satisfied")
- } catch {
- case NonFatal(t) => promise failure t
- }
- }(executor)
-
- promise.future
- }
+ def filter(@deprecatedName('pred) p: T => Boolean)(implicit executor: ExecutionContext): Future[T] =
+ map {
+ r => if (p(r)) r else throw new NoSuchElementException("Future.filter predicate is not satisfied")
+ }
/** Used by for-comprehensions.
*/
final def withFilter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = filter(p)(executor)
- // final def withFilter(p: T => Boolean) = new FutureWithFilter[T](this, p)
-
- // final class FutureWithFilter[+S](self: Future[S], p: S => Boolean) {
- // def foreach(f: S => Unit): Unit = self filter p foreach f
- // def map[R](f: S => R) = self filter p map f
- // def flatMap[R](f: S => Future[R]) = self filter p flatMap f
- // def withFilter(q: S => Boolean): FutureWithFilter[S] = new FutureWithFilter[S](self, x => p(x) && q(x))
- // }
/** Creates a new future by mapping the value of the current future, if the given partial function is defined at that value.
*
@@ -352,22 +300,10 @@ trait Future[+T] extends Awaitable[T] {
* Await.result(h, Duration.Zero) // throw a NoSuchElementException
* }}}
*/
- def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = {
- val p = Promise[S]()
-
- onComplete {
- case f: Failure[_] => p complete f.asInstanceOf[Failure[S]]
- case Success(v) =>
- try {
- if (pf.isDefinedAt(v)) p success pf(v)
- else p failure new NoSuchElementException("Future.collect partial function is not defined at: " + v)
- } catch {
- case NonFatal(t) => p failure t
- }
- }(executor)
-
- p.future
- }
+ def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] =
+ map {
+ r => pf.applyOrElse(r, (t: T) => throw new NoSuchElementException("Future.collect partial function is not defined at: " + t))
+ }
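A hypothetical sketch of filter and collect, which now fail the future via the exception thrown inside map:

  val even  = Future(4) filter (_ % 2 == 0)           // Success(4)
  val odd   = Future(3) filter (_ % 2 == 0)           // fails with NoSuchElementException
  val named = Future(1) collect { case 1 => "one" }   // Success("one")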
/** Creates a new future that will handle any matching throwable that this
* future might contain. If there is no match, or if this future contains
@@ -383,9 +319,7 @@ trait Future[+T] extends Awaitable[T] {
*/
def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = {
val p = Promise[U]()
-
- onComplete { case tr => p.complete(tr recover pf) }(executor)
-
+ onComplete { v => p complete (v recover pf) }
p.future
}
@@ -404,17 +338,10 @@ trait Future[+T] extends Awaitable[T] {
*/
def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = {
val p = Promise[U]()
-
onComplete {
- case Failure(t) if pf isDefinedAt t =>
- try {
- p completeWith pf(t)
- } catch {
- case NonFatal(t) => p failure t
- }
- case otherwise => p complete otherwise
- }(executor)
-
+ case Failure(t) => try pf.applyOrElse(t, (_: Throwable) => this) onComplete p.complete catch { case NonFatal(t) => p failure t }
+ case other => p complete other
+ }
p.future
}
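A hypothetical sketch of recover and recoverWith; recoverWith now falls back to this future when the partial function is not defined at the throwable:

  val risky: Future[Int] = Future { sys.error("boom") }
  val zeroed    = risky recover     { case _: RuntimeException => 0 }             // Success(0)
  val retried   = risky recoverWith { case _: RuntimeException => Future(1) }     // Success(1)
  val untouched = risky recoverWith { case _: java.io.IOException => Future(2) }  // keeps the original failure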
@@ -427,19 +354,12 @@ trait Future[+T] extends Awaitable[T] {
* with the throwable stored in `that`.
*/
def zip[U](that: Future[U]): Future[(T, U)] = {
+ implicit val ec = internalExecutor
val p = Promise[(T, U)]()
-
- this onComplete {
+ onComplete {
case f: Failure[_] => p complete f.asInstanceOf[Failure[(T, U)]]
- case Success(r) =>
- that onSuccess {
- case r2 => p success ((r, r2))
- }
- that onFailure {
- case f => p failure f
- }
+ case Success(s) => that onComplete { c => p.complete(c map { s2 => (s, s2) }) }
}
-
p.future
}
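A hypothetical sketch of zip, which now runs its callbacks on the internal executor and pairs the two results:

  val pair: Future[(Int, String)] = Future(1) zip Future("a")    // Success((1, "a"))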
@@ -458,6 +378,7 @@ trait Future[+T] extends Awaitable[T] {
* }}}
*/
def fallbackTo[U >: T](that: Future[U]): Future[U] = {
+ implicit val ec = internalExecutor
val p = Promise[U]()
onComplete {
case s @ Success(_) => p complete s
@@ -470,23 +391,13 @@ trait Future[+T] extends Awaitable[T] {
* that conforms to `S`'s erased type or a `ClassCastException` otherwise.
*/
def mapTo[S](implicit tag: ClassTag[S]): Future[S] = {
- def boxedType(c: Class[_]): Class[_] = {
+ implicit val ec = internalExecutor
+ val boxedClass = {
+ val c = tag.runtimeClass
if (c.isPrimitive) Future.toBoxed(c) else c
}
-
- val p = Promise[S]()
-
- onComplete {
- case f: Failure[_] => p complete f.asInstanceOf[Failure[S]]
- case Success(t) =>
- p complete (try {
- Success(boxedType(tag.runtimeClass).cast(t).asInstanceOf[S])
- } catch {
- case e: ClassCastException => Failure(e)
- })
- }
-
- p.future
+ require(boxedClass ne null)
+ map(s => boxedClass.cast(s).asInstanceOf[S])
}
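A hypothetical sketch of mapTo; primitive targets are cast through their boxed class (java.lang.Integer here):

  val loose: Future[Any]       = Future(42)
  val narrowed: Future[Int]    = loose.mapTo[Int]      // Success(42), cast via java.lang.Integer
  val mistyped: Future[String] = loose.mapTo[String]   // fails with ClassCastException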
/** Applies the side-effecting function to the result of this future, and returns
@@ -514,11 +425,9 @@ trait Future[+T] extends Awaitable[T] {
*/
def andThen[U](pf: PartialFunction[Try[T], U])(implicit executor: ExecutionContext): Future[T] = {
val p = Promise[T]()
-
onComplete {
- case r => try if (pf isDefinedAt r) pf(r) finally p complete r
- }(executor)
-
+ case r => try pf.applyOrElse[Try[T], Any](r, Predef.conforms[Try[T]]) finally p complete r
+ }
p.future
}
@@ -579,14 +488,12 @@ object Future {
} map (_.result())
}
- /** Returns a `Future` to the result of the first future in the list that is completed.
+ /** Returns a new `Future` to the result of the first future in the list that is completed.
*/
def firstCompletedOf[T](futures: TraversableOnce[Future[T]])(implicit executor: ExecutionContext): Future[T] = {
val p = Promise[T]()
-
val completeFirst: Try[T] => Unit = p tryComplete _
- futures.foreach(_ onComplete completeFirst)
-
+ futures foreach { _ onComplete completeFirst }
p.future
}
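A hypothetical sketch of firstCompletedOf; whichever future completes first, successfully or not, decides the result:

  val fast  = Future { 1 }
  val slow  = Future { Thread.sleep(1000); 2 }
  val first = Future.firstCompletedOf(Seq(fast, slow))   // almost certainly Success(1)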
diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala
index 00a3686585..0a2da16523 100644
--- a/src/library/scala/math/BigDecimal.scala
+++ b/src/library/scala/math/BigDecimal.scala
@@ -158,7 +158,7 @@ object BigDecimal {
* @author Stephane Micheloud
* @version 1.0
*/
-@deprecatedInheritance("This class will me made final.", "2.10.0")
+@deprecatedInheritance("This class will be made final.", "2.10.0")
class BigDecimal(
val bigDecimal: BigDec,
val mc: MathContext)
diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala
index afc4d5c535..b25dbefebd 100644
--- a/src/library/scala/math/BigInt.scala
+++ b/src/library/scala/math/BigInt.scala
@@ -109,7 +109,7 @@ object BigInt {
* @author Martin Odersky
* @version 1.0, 15/07/2003
*/
-@deprecatedInheritance("This class will me made final.", "2.10.0")
+@deprecatedInheritance("This class will be made final.", "2.10.0")
class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericConversions with Serializable {
/** Returns the hash code for this BigInt. */
override def hashCode(): Int =
diff --git a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala b/src/parser-combinators/scala/util/parsing/combinator/ImplicitConversions.scala
index 0683ea927d..0683ea927d 100644
--- a/src/library/scala/util/parsing/combinator/ImplicitConversions.scala
+++ b/src/parser-combinators/scala/util/parsing/combinator/ImplicitConversions.scala
diff --git a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala b/src/parser-combinators/scala/util/parsing/combinator/JavaTokenParsers.scala
index 01288a182e..01288a182e 100644
--- a/src/library/scala/util/parsing/combinator/JavaTokenParsers.scala
+++ b/src/parser-combinators/scala/util/parsing/combinator/JavaTokenParsers.scala
diff --git a/src/library/scala/util/parsing/combinator/PackratParsers.scala b/src/parser-combinators/scala/util/parsing/combinator/PackratParsers.scala
index a11dd18e62..a11dd18e62 100644
--- a/src/library/scala/util/parsing/combinator/PackratParsers.scala
+++ b/src/parser-combinators/scala/util/parsing/combinator/PackratParsers.scala
diff --git a/src/library/scala/util/parsing/combinator/Parsers.scala b/src/parser-combinators/scala/util/parsing/combinator/Parsers.scala
index 4602c3cc53..16754646fd 100644
--- a/src/library/scala/util/parsing/combinator/Parsers.scala
+++ b/src/parser-combinators/scala/util/parsing/combinator/Parsers.scala
@@ -531,10 +531,6 @@ trait Parsers {
}
}
- /*trait ElemFun
- case class EFCons(hd: Elem => ElemFun, tl: ElemFun) extends ElemFun
- case class EFNil(res: Boolean) extends ElemFun*/
-
/** A parser matching input elements that satisfy a given predicate.
*
* `elem(kind, p)` succeeds if the input starts with an element `e` for which `p(e)` is true.
diff --git a/src/library/scala/util/parsing/combinator/RegexParsers.scala b/src/parser-combinators/scala/util/parsing/combinator/RegexParsers.scala
index 8ebbc573ad..8ebbc573ad 100644
--- a/src/library/scala/util/parsing/combinator/RegexParsers.scala
+++ b/src/parser-combinators/scala/util/parsing/combinator/RegexParsers.scala
diff --git a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala b/src/parser-combinators/scala/util/parsing/combinator/lexical/Lexical.scala
index d8029d068f..d8029d068f 100644
--- a/src/library/scala/util/parsing/combinator/lexical/Lexical.scala
+++ b/src/parser-combinators/scala/util/parsing/combinator/lexical/Lexical.scala
diff --git a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala b/src/parser-combinators/scala/util/parsing/combinator/lexical/Scanners.scala
index 2e12915bb8..2e12915bb8 100644
--- a/src/library/scala/util/parsing/combinator/lexical/Scanners.scala
+++ b/src/parser-combinators/scala/util/parsing/combinator/lexical/Scanners.scala
diff --git a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala b/src/parser-combinators/scala/util/parsing/combinator/lexical/StdLexical.scala
index 32d7502cda..32d7502cda 100644
--- a/src/library/scala/util/parsing/combinator/lexical/StdLexical.scala
+++ b/src/parser-combinators/scala/util/parsing/combinator/lexical/StdLexical.scala
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala b/src/parser-combinators/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
index 5b9d14c9a7..5b9d14c9a7 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
+++ b/src/parser-combinators/scala/util/parsing/combinator/syntactical/StandardTokenParsers.scala
diff --git a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala b/src/parser-combinators/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
index adcf85da7a..adcf85da7a 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
+++ b/src/parser-combinators/scala/util/parsing/combinator/syntactical/StdTokenParsers.scala
diff --git a/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala b/src/parser-combinators/scala/util/parsing/combinator/syntactical/TokenParsers.scala
index b06babcd7e..b06babcd7e 100644
--- a/src/library/scala/util/parsing/combinator/syntactical/TokenParsers.scala
+++ b/src/parser-combinators/scala/util/parsing/combinator/syntactical/TokenParsers.scala
diff --git a/src/library/scala/util/parsing/combinator/token/StdTokens.scala b/src/parser-combinators/scala/util/parsing/combinator/token/StdTokens.scala
index a102d1541e..a102d1541e 100644
--- a/src/library/scala/util/parsing/combinator/token/StdTokens.scala
+++ b/src/parser-combinators/scala/util/parsing/combinator/token/StdTokens.scala
diff --git a/src/library/scala/util/parsing/combinator/token/Tokens.scala b/src/parser-combinators/scala/util/parsing/combinator/token/Tokens.scala
index 5c3f1f95b5..5c3f1f95b5 100644
--- a/src/library/scala/util/parsing/combinator/token/Tokens.scala
+++ b/src/parser-combinators/scala/util/parsing/combinator/token/Tokens.scala
diff --git a/src/library/scala/util/parsing/input/CharArrayReader.scala b/src/parser-combinators/scala/util/parsing/input/CharArrayReader.scala
index 22530cb9aa..22530cb9aa 100644
--- a/src/library/scala/util/parsing/input/CharArrayReader.scala
+++ b/src/parser-combinators/scala/util/parsing/input/CharArrayReader.scala
diff --git a/src/library/scala/util/parsing/input/CharSequenceReader.scala b/src/parser-combinators/scala/util/parsing/input/CharSequenceReader.scala
index 8e7751cc82..8e7751cc82 100644
--- a/src/library/scala/util/parsing/input/CharSequenceReader.scala
+++ b/src/parser-combinators/scala/util/parsing/input/CharSequenceReader.scala
diff --git a/src/library/scala/util/parsing/input/NoPosition.scala b/src/parser-combinators/scala/util/parsing/input/NoPosition.scala
index 4a32264b79..4a32264b79 100644
--- a/src/library/scala/util/parsing/input/NoPosition.scala
+++ b/src/parser-combinators/scala/util/parsing/input/NoPosition.scala
diff --git a/src/library/scala/util/parsing/input/OffsetPosition.scala b/src/parser-combinators/scala/util/parsing/input/OffsetPosition.scala
index 23f79c74d1..23f79c74d1 100644
--- a/src/library/scala/util/parsing/input/OffsetPosition.scala
+++ b/src/parser-combinators/scala/util/parsing/input/OffsetPosition.scala
diff --git a/src/library/scala/util/parsing/input/PagedSeqReader.scala b/src/parser-combinators/scala/util/parsing/input/PagedSeqReader.scala
index 468f1f9a5f..468f1f9a5f 100644
--- a/src/library/scala/util/parsing/input/PagedSeqReader.scala
+++ b/src/parser-combinators/scala/util/parsing/input/PagedSeqReader.scala
diff --git a/src/library/scala/util/parsing/input/Position.scala b/src/parser-combinators/scala/util/parsing/input/Position.scala
index b7995a6471..b7995a6471 100644
--- a/src/library/scala/util/parsing/input/Position.scala
+++ b/src/parser-combinators/scala/util/parsing/input/Position.scala
diff --git a/src/library/scala/util/parsing/input/Positional.scala b/src/parser-combinators/scala/util/parsing/input/Positional.scala
index cfde67cadd..cfde67cadd 100644
--- a/src/library/scala/util/parsing/input/Positional.scala
+++ b/src/parser-combinators/scala/util/parsing/input/Positional.scala
diff --git a/src/library/scala/util/parsing/input/Reader.scala b/src/parser-combinators/scala/util/parsing/input/Reader.scala
index 9dbf08a7ca..9dbf08a7ca 100644
--- a/src/library/scala/util/parsing/input/Reader.scala
+++ b/src/parser-combinators/scala/util/parsing/input/Reader.scala
diff --git a/src/library/scala/util/parsing/input/StreamReader.scala b/src/parser-combinators/scala/util/parsing/input/StreamReader.scala
index 30eb097fd7..30eb097fd7 100644
--- a/src/library/scala/util/parsing/input/StreamReader.scala
+++ b/src/parser-combinators/scala/util/parsing/input/StreamReader.scala
diff --git a/src/library/scala/util/parsing/json/JSON.scala b/src/parser-combinators/scala/util/parsing/json/JSON.scala
index b06dddf532..b06dddf532 100644
--- a/src/library/scala/util/parsing/json/JSON.scala
+++ b/src/parser-combinators/scala/util/parsing/json/JSON.scala
diff --git a/src/library/scala/util/parsing/json/Lexer.scala b/src/parser-combinators/scala/util/parsing/json/Lexer.scala
index 7fc4e0bab6..7fc4e0bab6 100644
--- a/src/library/scala/util/parsing/json/Lexer.scala
+++ b/src/parser-combinators/scala/util/parsing/json/Lexer.scala
diff --git a/src/library/scala/util/parsing/json/Parser.scala b/src/parser-combinators/scala/util/parsing/json/Parser.scala
index 521dfc6612..521dfc6612 100644
--- a/src/library/scala/util/parsing/json/Parser.scala
+++ b/src/parser-combinators/scala/util/parsing/json/Parser.scala
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index b5b09a753a..8b88021dbf 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -15,6 +15,7 @@ import java.lang.reflect.Method
import org.apache.tools.ant.Task
import org.apache.tools.ant.types.{ Reference, FileSet}
import org.apache.tools.ant.types.Commandline.Argument
+import scala.tools.ant.ScalaTask
/** An Ant task to execute the Scala test suite (NSC).
*
@@ -34,7 +35,7 @@ import org.apache.tools.ant.types.Commandline.Argument
*
* @author Philipp Haller
*/
-class PartestTask extends Task with CompilationPathProperty {
+class PartestTask extends Task with CompilationPathProperty with ScalaTask {
type Path = org.apache.tools.ant.types.Path
private var kinds: List[String] = Nil
@@ -178,7 +179,7 @@ class PartestTask extends Task with CompilationPathProperty {
val allFailures = _results map (_._2) sum
val allFailedPaths = _results flatMap (_._3)
- def f = if (errorOnFailed && allFailures > 0) (sys error _) else log(_: String)
+ def f = if (errorOnFailed && allFailures > 0) buildError(_: String) else log(_: String)
def s = if (allFailures > 1) "s" else ""
val msg =
if (allFailures > 0)
diff --git a/src/partest/scala/tools/partest/ReplTest.scala b/src/partest/scala/tools/partest/ReplTest.scala
index edd1f705a4..7381b8af54 100644
--- a/src/partest/scala/tools/partest/ReplTest.scala
+++ b/src/partest/scala/tools/partest/ReplTest.scala
@@ -7,6 +7,7 @@ package scala.tools.partest
import scala.tools.nsc.Settings
import scala.tools.nsc.interpreter.ILoop
+import scala.tools.partest.nest.FileUtil
import java.lang.reflect.{ Method => JMethod, Field => JField }
/** A trait for testing repl code. It drops the first line
@@ -29,3 +30,15 @@ abstract class ReplTest extends DirectTest {
}
def show() = eval() foreach println
}
+
+abstract class SessionTest extends ReplTest with FileUtil {
+ def session: String
+ override final def code = expected filter (_.startsWith(prompt)) map (_.drop(prompt.length)) mkString "\n"
+ def expected = session.stripMargin.lines.toList
+ final def prompt = "scala> "
+ override def show() = {
+ val out = eval().toList
+ if (out.size != expected.size) Console println s"Expected ${expected.size} lines, got ${out.size}"
+ if (out != expected) Console print compareContents(expected, out, "expected", "actual")
+ }
+}
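A hypothetical partest test using the new SessionTest helper; the transcript is invented, and the margin bars matter because expected calls stripMargin:

  import scala.tools.partest.SessionTest

  object Test extends SessionTest {
    // lines starting with the prompt become the code fed to the REPL;
    // the whole (stripped) transcript is what the output is compared against
    def session =
      """|scala> 1 + 1
         |res0: Int = 2
         |
         |scala> println("hello")
         |hello"""
  }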
diff --git a/src/partest/scala/tools/partest/TestKinds.scala b/src/partest/scala/tools/partest/TestKinds.scala
index ec682690ca..b4e8afd0d2 100644
--- a/src/partest/scala/tools/partest/TestKinds.scala
+++ b/src/partest/scala/tools/partest/TestKinds.scala
@@ -4,8 +4,7 @@ package partest
import nest.PathSettings.srcDir
object TestKinds {
- val standardKinds = "pos neg run jvm res buildmanager scalacheck scalap specialized instrumented presentation ant" split "\\s+" toList
- val standardArgs = standardKinds map ("--" + _)
+ val standardKinds = ("pos neg run jvm res scalacheck scalap specialized instrumented presentation ant" split "\\s+").toList
def denotesTestFile(p: Path) = p.isFile && p.hasExtension("scala", "res", "xml")
def denotesTestDir(p: Path) = kindOf(p) match {
diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
index 33bf836a7b..332131ca3a 100644
--- a/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ConsoleRunner.scala
@@ -9,13 +9,15 @@ package nest
import utils.Properties._
import scala.tools.nsc.Properties.{ versionMsg, setProp }
-import scala.tools.nsc.util.CommandLine
import scala.collection.{ mutable, immutable }
import PathSettings.srcDir
import TestKinds._
import scala.reflect.internal.util.Collections.distinctBy
+import scala.tools.cmd.{ CommandLine, CommandLineParser, Instance }
-class ConsoleRunner extends DirectRunner {
+class ConsoleRunner(argstr: String) extends {
+ val parsed = ConsoleRunnerSpec.creator(CommandLineParser tokenize argstr)
+} with DirectRunner with ConsoleRunnerSpec with Instance {
import NestUI._
import NestUI.color._
@@ -86,27 +88,15 @@ class ConsoleRunner extends DirectRunner {
}
}
- private val unaryArgs = List(
- "--pack", "--all",
- "--terse", "--verbose", "--show-diff", "--show-log", "--self-test",
- "--failed", "--update-check", "--version", "--ansi", "--debug", "--help"
- ) ::: standardArgs
-
- private val binaryArgs = List(
- "--grep", "--srcpath", "--buildpath", "--classpath", "--timeout"
- )
-
- def main(argstr: String) {
- val parsed = (new CommandLine(argstr)) withUnaryArgs unaryArgs withBinaryArgs binaryArgs
-
- if (parsed isSet "--debug") NestUI.setDebug()
- if (parsed isSet "--verbose") NestUI.setVerbose()
- if (parsed isSet "--terse") NestUI.setTerse()
- if (parsed isSet "--show-diff") NestUI.setDiffOnFail()
+ def run(): Unit = {
+ if (optDebug) NestUI.setDebug()
+ if (optVerbose) NestUI.setVerbose()
+ if (optTerse) NestUI.setTerse()
+ if (optShowDiff) NestUI.setDiffOnFail()
// Early return on no args, version, or invalid args
- if (parsed isSet "--version") return echo(versionMsg)
- if ((argstr == "") || (parsed isSet "--help")) return NestUI.usage()
+ if (optVersion) return echo(versionMsg)
+ if ((argstr == "") || optHelp) return NestUI.usage()
val (individualTests, invalid) = parsed.residualArgs map (p => Path(p)) partition denotesTestPath
if (invalid.nonEmpty) {
@@ -116,27 +106,26 @@ class ConsoleRunner extends DirectRunner {
echoWarning(s"Discarding ${invalid.size} invalid test paths")
}
- parsed get "--srcpath" foreach (x => setProp("partest.srcdir", x))
- parsed get "--timeout" foreach (x => setProp("partest.timeout", x))
+ optSourcePath foreach (x => setProp("partest.srcdir", x))
+ optTimeout foreach (x => setProp("partest.timeout", x))
fileManager =
- if (parsed isSet "--buildpath") new ConsoleFileManager(parsed("--buildpath"))
- else if (parsed isSet "--classpath") new ConsoleFileManager(parsed("--classpath"), true)
- else if (parsed isSet "--pack") new ConsoleFileManager("build/pack")
+ if (optBuildPath.isDefined) new ConsoleFileManager(optBuildPath.get)
+ else if (optClassPath.isDefined) new ConsoleFileManager(optClassPath.get, true)
+ else if (optPack) new ConsoleFileManager("build/pack")
else new ConsoleFileManager // auto detection, see ConsoleFileManager.findLatest
- fileManager.updateCheck = parsed isSet "--update-check"
- fileManager.failed = parsed isSet "--failed"
+ fileManager.updateCheck = optUpdateCheck
+ fileManager.failed = optFailed
val partestTests = (
- if (parsed isSet "--self-test") TestKinds.testsForPartest
+ if (optSelfTest) TestKinds.testsForPartest
else Nil
)
- val grepExpr = parsed get "--grep" getOrElse ""
+ val grepExpr = optGrep getOrElse ""
// If --grep is given we suck in every file it matches.
- var grepMessage = ""
val greppedTests = if (grepExpr == "") Nil else {
val paths = grepFor(grepExpr)
if (paths.isEmpty)
@@ -145,14 +134,14 @@ class ConsoleRunner extends DirectRunner {
paths.sortBy(_.toString)
}
- val isRerun = parsed isSet "--failed"
+ val isRerun = optFailed
val rerunTests = if (isRerun) TestKinds.failedTests else Nil
def miscTests = partestTests ++ individualTests ++ greppedTests ++ rerunTests
- val givenKinds = standardArgs filter parsed.isSet
+ val givenKinds = standardKinds filter parsed.isSet
val kinds = (
- if (parsed isSet "--all") standardKinds
- else if (givenKinds.nonEmpty) givenKinds map (_ stripPrefix "--")
+ if (optAll) standardKinds
+ else if (givenKinds.nonEmpty) givenKinds
else if (invalid.isEmpty && miscTests.isEmpty && !isRerun) standardKinds // If no kinds, --grep, or individual tests were given, assume --all
else Nil
)
@@ -223,4 +212,13 @@ class ConsoleRunner extends DirectRunner {
issueSummaryReport()
System exit ( if (isSuccess) 0 else 1 )
}
+
+ run()
}
+
+object ConsoleRunner {
+ def main(args: Array[String]): Unit = {
+ new ConsoleRunner(args mkString " ")
+ }
+}
+
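For illustration (the options are made up): with run() now invoked from the constructor body, instantiating the runner is enough, and the new companion main simply re-joins the argument array, e.g. from a REPL with partest on the classpath:

  new ConsoleRunner("--pack --show-diff --run")                  // runs immediately via the constructor
  ConsoleRunner.main(Array("--pack", "--show-diff", "--run"))    // same thing through the static entry point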
diff --git a/src/partest/scala/tools/partest/nest/ConsoleRunnerSpec.scala b/src/partest/scala/tools/partest/nest/ConsoleRunnerSpec.scala
new file mode 100644
index 0000000000..f9143013e9
--- /dev/null
+++ b/src/partest/scala/tools/partest/nest/ConsoleRunnerSpec.scala
@@ -0,0 +1,54 @@
+package scala.tools.partest.nest
+
+import language.postfixOps
+
+import scala.tools.cmd.{ CommandLine, Interpolation, Meta, Reference, Spec }
+
+trait ConsoleRunnerSpec extends Spec with Meta.StdOpts with Interpolation {
+ def referenceSpec = ConsoleRunnerSpec
+ def programInfo = Spec.Info(
+ "console-runner",
+ "Usage: NestRunner [options] [test test ...]",
+ "scala.tools.partest.nest.ConsoleRunner")
+
+ heading("Test categories:")
+ val optAll = "all" / "run all tests" --?
+ val optPos = "pos" / "run compilation tests (success)" --?
+ val optNeg = "neg" / "run compilation tests (failure)" --?
+ val optRun = "run" / "run interpreter and backend tests" --?
+ val optJvm = "jvm" / "run JVM backend tests" --?
+ val optRes = "res" / "run resident compiler tests" --?
+ val optAnt = "ant" / "run Ant tests" --?
+ val optScalap = "scalap" / "run scalap tests" --?
+ val optSpecialized = "specialized" / "run specialization tests" --?
+ val optScalacheck = "scalacheck" / "run ScalaCheck tests" --?
+ val optInstrumented = "instrumented" / "run instrumented tests" --?
+ val optPresentation = "presentation" / "run presentation compiler tests" --?
+
+ heading("Test runner options:")
+ val optFailed = "failed" / "run only those tests that failed during the last run" --?
+ val optTimeout = "timeout" / "aborts the test suite after the given amount of time" --|
+ val optPack = "pack" / "pick compiler/reflect/library in build/pack, and run all tests" --?
+ val optGrep = "grep" / "run all tests whose source file contains the expression given to grep" --|
+ val optUpdateCheck = "update-check" / "instead of failing tests with output change, update checkfile (use with care!)" --?
+ val optBuildPath = "buildpath" / "set (relative) path to build jars (ex.: --buildpath build/pack)" --|
+ val optClassPath = "classpath" / "set (absolute) path to build classes" --|
+ val optSourcePath = "srcpath" / "set (relative) path to test source files (ex.: --srcpath pending)" --|
+
+ heading("Test output options:")
+ val optShowDiff = "show-diff" / "show diffs for failed tests" --?
+ val optVerbose = "verbose" / "show verbose progress information" --?
+ val optTerse = "terse" / "show terse progress information" --?
+ val optDebug = "debug" / "enable debugging output" --?
+
+ heading("Other options:")
+ val optVersion = "version" / "show Scala version and exit" --?
+ val optSelfTest = "self-test" / "run tests for partest itself" --?
+ val optHelp = "help" / "show this page and exit" --?
+
+}
+
+object ConsoleRunnerSpec extends ConsoleRunnerSpec with Reference {
+ type ThisCommandLine = CommandLine
+ def creator(args: List[String]): ThisCommandLine = new CommandLine(ConsoleRunnerSpec, args)
+}
diff --git a/src/partest/scala/tools/partest/nest/FileManager.scala b/src/partest/scala/tools/partest/nest/FileManager.scala
index 230ada4803..7bfa8c6e77 100644
--- a/src/partest/scala/tools/partest/nest/FileManager.scala
+++ b/src/partest/scala/tools/partest/nest/FileManager.scala
@@ -68,12 +68,14 @@ trait FileManager extends FileUtil {
else
(SFile(LATEST_LIB).parent.parent / "classes" / what).toAbsolute.path
}
+ def latestParserCBLib = relativeToLibrary("parser-combinators")
+ def latestXmlLib = relativeToLibrary("xml")
def latestScaladoc = relativeToLibrary("scaladoc")
def latestInteractive = relativeToLibrary("interactive")
def latestScalapFile = relativeToLibrary("scalap")
def latestPaths = List(
LATEST_LIB, LATEST_REFLECT, LATEST_COMP, LATEST_PARTEST, LATEST_ACTORS,
- latestScalapFile, latestScaladoc, latestInteractive
+ latestParserCBLib, latestXmlLib, latestScalapFile, latestScaladoc, latestInteractive
)
def latestFiles = latestPaths map (p => new java.io.File(p))
def latestUrls = latestFiles map (_.toURI.toURL)
diff --git a/src/partest/scala/tools/partest/nest/NestUI.scala b/src/partest/scala/tools/partest/nest/NestUI.scala
index 564270e531..5148115905 100644
--- a/src/partest/scala/tools/partest/nest/NestUI.scala
+++ b/src/partest/scala/tools/partest/nest/NestUI.scala
@@ -144,34 +144,8 @@ object NestUI {
}
def usage() {
- println("Usage: NestRunner [options] [test test ...]")
- println
- println(" Test categories:")
- println(" --all run all tests")
- println(" --pos run compilation tests (success)")
- println(" --neg run compilation tests (failure)")
- println(" --run run interpreter and backend tests")
- println(" --jvm run JVM backend tests")
- println(" --res run resident compiler tests")
- println(" --scalacheck run ScalaCheck tests")
- println(" --instrumented run instrumented tests")
- println(" --presentation run presentation compiler tests")
- println
- println(" Other options:")
- println(" --pack pick compiler/reflect/library in build/pack, and run all tests")
- println(" --grep <expr> run all tests whose source file contains <expr>")
- println(" --failed run only those tests that failed during the last run")
- println(" --update-check instead of failing tests with output change, update checkfile. (Use with care!)")
- println(" --verbose show progress information")
- println(" --buildpath set (relative) path to build jars")
- println(" ex.: --buildpath build/pack")
- println(" --classpath set (absolute) path to build classes")
- println(" --srcpath set (relative) path to test source files")
- println(" ex.: --srcpath pending")
- println(" --debug enable debugging output")
- println
- println(utils.Properties.versionString)
- println("maintained by Philipp Haller (EPFL)")
+ println(ConsoleRunnerSpec.programInfo.usage)
+ println(ConsoleRunnerSpec.helpMsg)
sys.exit(1)
}
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
index 734affa153..3c77a03f1e 100644
--- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
+++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
@@ -85,10 +85,9 @@ class ReflectiveRunner {
try {
val sepRunnerClass = sepLoader loadClass sepRunnerClassName
- val sepRunner = sepRunnerClass.newInstance()
- val sepMainMethod = sepRunnerClass.getMethod("main", Array(classOf[String]): _*)
- val cargs: Array[AnyRef] = Array(args)
- sepMainMethod.invoke(sepRunner, cargs: _*)
+ val sepMainMethod = sepRunnerClass.getMethod("main", classOf[Array[String]])
+ val cargs: Array[AnyRef] = Array(Array(args))
+ sepMainMethod.invoke(null, cargs: _*)
}
catch {
case cnfe: ClassNotFoundException =>
diff --git a/src/partest/scala/tools/partest/nest/Runner.scala b/src/partest/scala/tools/partest/nest/Runner.scala
index a53698eb77..d7d87bdcf5 100644
--- a/src/partest/scala/tools/partest/nest/Runner.scala
+++ b/src/partest/scala/tools/partest/nest/Runner.scala
@@ -772,15 +772,8 @@ trait DirectRunner {
import PartestDefaults.{ numThreads, waitTime }
- Thread.setDefaultUncaughtExceptionHandler(
- new Thread.UncaughtExceptionHandler {
- def uncaughtException(thread: Thread, t: Throwable) {
- val t1 = Exceptional unwrap t
- System.err.println(s"Uncaught exception on thread $thread: $t1")
- t1.printStackTrace()
- }
- }
- )
+ setUncaughtHandler
+
def runTestsForFiles(kindFiles: List[File], kind: String): List[TestState] = {
NestUI.resetTestNumber(kindFiles.size)
diff --git a/src/reflect/scala/reflect/api/BuildUtils.scala b/src/reflect/scala/reflect/api/BuildUtils.scala
index 2e95a01176..c568cf74c0 100644
--- a/src/reflect/scala/reflect/api/BuildUtils.scala
+++ b/src/reflect/scala/reflect/api/BuildUtils.scala
@@ -66,6 +66,8 @@ private[reflect] trait BuildUtils { self: Universe =>
def Ident(sym: Symbol): Ident
+ def Block(stats: List[Tree]): Block
+
def TypeTree(tp: Type): TypeTree
def thisPrefix(sym: Symbol): Type
@@ -73,5 +75,45 @@ private[reflect] trait BuildUtils { self: Universe =>
def setType[T <: Tree](tree: T, tpe: Type): T
def setSymbol[T <: Tree](tree: T, sym: Symbol): T
+
+ def mkAnnotationCtor(tree: Tree, args: List[Tree]): Tree
+
+ val FlagsAsBits: FlagsAsBitsExtractor
+
+ trait FlagsAsBitsExtractor {
+ def unapply(flags: Long): Option[Long]
+ }
+
+ val TypeApplied: TypeAppliedExtractor
+
+ trait TypeAppliedExtractor {
+ def unapply(tree: Tree): Some[(Tree, List[Tree])]
+ }
+
+ val Applied: AppliedExtractor
+
+ trait AppliedExtractor {
+ def unapply(tree: Tree): Some[(Tree, List[List[Tree]])]
+ }
+
+ val SyntacticClassDef: SyntacticClassDefExtractor
+
+ trait SyntacticClassDefExtractor {
+ def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef],
+ constrMods: Modifiers, vparamss: List[List[ValDef]], parents: List[Tree],
+ selfdef: ValDef, body: List[Tree]): Tree
+ def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef], Modifiers,
+ List[List[ValDef]], List[Tree], ValDef, List[Tree])]
+ }
+
+ val TupleN: TupleNExtractor
+ val TupleTypeN: TupleNExtractor
+
+ trait TupleNExtractor {
+ def apply(args: List[Tree]): Tree
+ def unapply(tree: Tree): Option[List[Tree]]
+ }
+
+ def RefTree(qual: Tree, sym: Symbol): Tree
}
}
diff --git a/src/reflect/scala/reflect/api/Liftable.scala b/src/reflect/scala/reflect/api/Liftable.scala
new file mode 100644
index 0000000000..8f6fe066dd
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Liftable.scala
@@ -0,0 +1,32 @@
+package scala.reflect
+package api
+
+trait Liftable[T] {
+ def apply(universe: api.Universe, value: T): universe.Tree
+}
+
+object Liftable {
+ private class LiftableConstant[T] extends Liftable[T] {
+ def apply(universe: Universe, value: T): universe.Tree =
+ universe.Literal(universe.Constant(value))
+ }
+
+ implicit lazy val liftByte: Liftable[Byte] = new LiftableConstant[Byte]
+ implicit lazy val liftShort: Liftable[Short] = new LiftableConstant[Short]
+ implicit lazy val liftChar: Liftable[Char] = new LiftableConstant[Char]
+ implicit lazy val liftInt: Liftable[Int] = new LiftableConstant[Int]
+ implicit lazy val liftLong: Liftable[Long] = new LiftableConstant[Long]
+ implicit lazy val liftFloat: Liftable[Float] = new LiftableConstant[Float]
+ implicit lazy val liftDouble: Liftable[Double] = new LiftableConstant[Double]
+ implicit lazy val liftBoolean: Liftable[Boolean] = new LiftableConstant[Boolean]
+ implicit lazy val liftString: Liftable[String] = new LiftableConstant[String]
+ implicit lazy val liftUnit: Liftable[Unit] = new LiftableConstant[Unit]
+
+ implicit lazy val liftScalaSymbol: Liftable[scala.Symbol] = new Liftable[scala.Symbol] {
+ def apply(universe: Universe, value: scala.Symbol): universe.Tree = {
+ import universe._
+ val symbol = Select(Ident(TermName("scala")), TermName("Symbol"))
+ Apply(symbol, List(Literal(Constant(value.name))))
+ }
+ }
+}
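A hypothetical extra instance showing the extension point; Point is invented, and the tree shape mirrors liftScalaSymbol above:

  import scala.reflect.api.{ Liftable, Universe }

  case class Point(x: Int, y: Int)

  object PointLifting {
    implicit lazy val liftPoint: Liftable[Point] = new Liftable[Point] {
      def apply(universe: Universe, value: Point): universe.Tree = {
        import universe._
        // builds the tree `Point(x, y)` from the runtime value
        Apply(Select(Ident(TermName("Point")), TermName("apply")),
              List(Literal(Constant(value.x)), Literal(Constant(value.y))))
      }
    }
  }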
diff --git a/src/reflect/scala/reflect/api/Quasiquotes.scala b/src/reflect/scala/reflect/api/Quasiquotes.scala
new file mode 100644
index 0000000000..3895b8b95f
--- /dev/null
+++ b/src/reflect/scala/reflect/api/Quasiquotes.scala
@@ -0,0 +1,20 @@
+package scala.reflect
+package api
+
+import language.experimental.macros
+
+trait Quasiquotes { self: Universe =>
+
+ // implementation is hardwired to `dispatch` method of `scala.tools.reflect.quasiquotes.Quasiquotes`
+ // using the mechanism implemented in `scala.tools.reflect.FastTrack`
+ implicit class Quasiquote(ctx: StringContext) {
+ protected trait api {
+ def apply(args: Any*): Any = macro ???
+ def unapply(subpatterns: Any*): Option[Any] = macro ???
+ }
+ object q extends api
+ object tq extends api
+ object cq extends api
+ object pq extends api
+ }
+}
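A hypothetical sketch of the interpolators wired up above, assuming a compiler that ships the hardwired implementation in scala.tools.reflect.quasiquotes:

  import scala.reflect.runtime.universe._

  val sum = q"x + y"                 // construct a term tree
  val tpe = tq"List[Int]"            // construct a type tree
  val q"$lhs + $rhs" = sum           // deconstruct via unapply, binding lhs and rhs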
diff --git a/src/reflect/scala/reflect/api/StandardLiftables.scala b/src/reflect/scala/reflect/api/StandardLiftables.scala
new file mode 100644
index 0000000000..ecea550225
--- /dev/null
+++ b/src/reflect/scala/reflect/api/StandardLiftables.scala
@@ -0,0 +1,36 @@
+package scala.reflect
+package api
+
+trait StandardLiftables { self: Universe =>
+
+ private def requireSameUniverse[T](universe: Universe, tp: String, value: T) =
+ require(universe eq self, s"Can't lift $tp ${showRaw(value)} from universe ${showRaw(universe)} using lift$tp defined for ${showRaw(self)}.")
+
+ implicit def liftExpr[T <: Expr[_]]: Liftable[T] = new Liftable[T] {
+ def apply(universe: Universe, value: T): universe.Tree = {
+ requireSameUniverse(universe, "Expr", value)
+ value.tree.asInstanceOf[universe.Tree]
+ }
+ }
+
+ implicit def liftType[T <: Type]: Liftable[T] = new Liftable[T] {
+ def apply(universe: Universe, value: T): universe.Tree = {
+ requireSameUniverse(universe, "Type", value)
+ universe.TypeTree(value.asInstanceOf[universe.Type])
+ }
+ }
+
+ implicit def liftTypeTag[T <: WeakTypeTag[_]]: Liftable[T] = new Liftable[T] {
+ def apply(universe: Universe, value: T): universe.Tree = {
+ requireSameUniverse(universe, "TypeTag", value)
+ universe.TypeTree(value.asInstanceOf[universe.WeakTypeTag[_]].tpe)
+ }
+ }
+
+ implicit def liftConstant[T <: Constant]: Liftable[T] = new Liftable[T] {
+ def apply(universe: Universe, value: T): universe.Tree = {
+ requireSameUniverse(universe, "Constant", value)
+ universe.Literal(value.asInstanceOf[universe.Constant])
+ }
+ }
+}
diff --git a/src/reflect/scala/reflect/api/Universe.scala b/src/reflect/scala/reflect/api/Universe.scala
index cb629f9c5a..77b4827eab 100644
--- a/src/reflect/scala/reflect/api/Universe.scala
+++ b/src/reflect/scala/reflect/api/Universe.scala
@@ -72,10 +72,12 @@ abstract class Universe extends Symbols
with ImplicitTags
with StandardDefinitions
with StandardNames
+ with StandardLiftables
with BuildUtils
with Mirrors
with Printers
with Importers
+ with Quasiquotes
{
/** Use `reify` to produce the abstract syntax tree representing a given Scala expression.
*
diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
index 73ca74f08e..215ab6abd6 100644
--- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala
+++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala
@@ -73,7 +73,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable =>
* - arrays of constants
* - or nested classfile annotations
*/
- abstract class ClassfileAnnotArg extends Product
+ sealed abstract class ClassfileAnnotArg extends Product
implicit val JavaArgumentTag = ClassTag[ClassfileAnnotArg](classOf[ClassfileAnnotArg])
case object UnmappableAnnotArg extends ClassfileAnnotArg
diff --git a/src/reflect/scala/reflect/internal/BuildUtils.scala b/src/reflect/scala/reflect/internal/BuildUtils.scala
index ece2d28be3..cdebfe52f8 100644
--- a/src/reflect/scala/reflect/internal/BuildUtils.scala
+++ b/src/reflect/scala/reflect/internal/BuildUtils.scala
@@ -2,7 +2,10 @@ package scala
package reflect
package internal
+import Flags._
+
trait BuildUtils { self: SymbolTable =>
+ import definitions.{TupleClass, MaxTupleArity, ScalaPackage, UnitClass}
class BuildImpl extends BuildApi {
@@ -52,6 +55,12 @@ trait BuildUtils { self: SymbolTable =>
def Ident(sym: Symbol): Ident = self.Ident(sym)
+ def Block(stats: List[Tree]): Block = stats match {
+ case Nil => self.Block(Nil, Literal(Constant(())))
+ case elem :: Nil => self.Block(Nil, elem)
+ case elems => self.Block(elems.init, elems.last)
+ }
+
def TypeTree(tp: Type): TypeTree = self.TypeTree(tp)
def thisPrefix(sym: Symbol): Type = sym.thisPrefix
@@ -59,6 +68,117 @@ trait BuildUtils { self: SymbolTable =>
def setType[T <: Tree](tree: T, tpe: Type): T = { tree.setType(tpe); tree }
def setSymbol[T <: Tree](tree: T, sym: Symbol): T = { tree.setSymbol(sym); tree }
+
+ def mkAnnotationCtor(tree: Tree, args: List[Tree]): Tree = tree match {
+ case ident: Ident => Apply(self.Select(New(ident), nme.CONSTRUCTOR: TermName), args)
+ case call @ Apply(Select(New(ident: Ident), nme.CONSTRUCTOR), _) =>
+ if (args.nonEmpty)
+ throw new IllegalArgumentException("Can't splice annotation that already contains args with extra args, consider merging these lists together")
+ call
+ case _ => throw new IllegalArgumentException(s"Tree ${showRaw(tree)} isn't a correct representation of annotation, consider passing Ident as a first argument")
+ }
+
+ object FlagsAsBits extends FlagsAsBitsExtractor {
+ def unapply(flags: Long): Some[Long] = Some(flags)
+ }
+
+ object TypeApplied extends TypeAppliedExtractor {
+ def unapply(tree: Tree): Some[(Tree, List[Tree])] = tree match {
+ case TypeApply(fun, targs) => Some((fun, targs))
+ case _ => Some((tree, Nil))
+ }
+ }
+
+ object Applied extends AppliedExtractor {
+ def unapply(tree: Tree): Some[(Tree, List[List[Tree]])] = {
+ val treeInfo.Applied(fun, targs, argss) = tree
+ targs match {
+ case Nil => Some((fun, argss))
+ case _ => Some((TypeApply(fun, targs), argss))
+ }
+ }
+ }
+
+ object SyntacticClassDef extends SyntacticClassDefExtractor {
+ def apply(mods: Modifiers, name: TypeName, tparams: List[TypeDef],
+ constrMods: Modifiers, vparamss: List[List[ValDef]], parents: List[Tree],
+ selfdef: ValDef, body: List[Tree]): Tree =
+ ClassDef(mods, name, tparams, gen.mkTemplate(parents, selfdef, constrMods, vparamss, body, NoPosition))
+
+ def unapply(tree: Tree): Option[(Modifiers, TypeName, List[TypeDef], Modifiers,
+ List[List[ValDef]], List[Tree], ValDef, List[Tree])] = tree match {
+ case ClassDef(mods, name, tparams, Template(parents, selfdef, tbody)) =>
+ // extract generated fieldDefs and constructor
+ val (defs, (ctor: DefDef) :: body) = tbody.splitAt(tbody.indexWhere {
+ case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => true
+ case _ => false
+ })
+ val (earlyDefs, fieldDefs) = defs.span(treeInfo.isEarlyDef)
+
+ // undo conversion from (implicit ... ) to ()(implicit ... ) when it's the only parameter section
+ val vparamssRestoredImplicits = ctor.vparamss match {
+ case Nil :: rest if !rest.isEmpty && !rest.head.isEmpty && rest.head.head.mods.isImplicit => rest
+ case other => other
+ }
+
+ // undo flag modifications by merging flag info from constructor args and fieldDefs
+ val modsMap = fieldDefs.map { case ValDef(mods, name, _, _) => name -> mods }.toMap
+ val vparamss = mmap(vparamssRestoredImplicits) { vd =>
+ val originalMods = modsMap(vd.name) | (vd.mods.flags & DEFAULTPARAM)
+ atPos(vd.pos)(ValDef(originalMods, vd.name, vd.tpt, vd.rhs))
+ }
+
+ Some((mods, name, tparams, ctor.mods, vparamss, parents, selfdef, earlyDefs ::: body))
+ case _ =>
+ None
+ }
+ }
+
+ object TupleN extends TupleNExtractor {
+ def apply(args: List[Tree]): Tree = args match {
+ case Nil => Literal(Constant(()))
+ case _ =>
+ require(args.length <= MaxTupleArity, s"Tuples with arity bigger than $MaxTupleArity aren't supported")
+ self.Apply(TupleClass(args.length).companionModule, args: _*)
+ }
+
+ def unapply(tree: Tree): Option[List[Tree]] = tree match {
+ case Literal(Constant(())) =>
+ Some(Nil)
+ case Apply(id: Ident, args)
+ if args.length <= MaxTupleArity && id.symbol == TupleClass(args.length).companionModule =>
+ Some(args)
+ case Apply(Select(Ident(nme.scala_), TermName(tuple)), args)
+ if args.length <= MaxTupleArity && tuple == TupleClass(args.length).name =>
+ Some(args)
+ case _ =>
+ None
+ }
+ }
+
+ object TupleTypeN extends TupleNExtractor {
+ def apply(args: List[Tree]): Tree = args match {
+ case Nil => self.Select(self.Ident(nme.scala_), tpnme.Unit)
+ case _ =>
+ require(args.length <= MaxTupleArity, s"Tuples with arity bigger than $MaxTupleArity aren't supported")
+ AppliedTypeTree(Ident(TupleClass(args.length)), args)
+ }
+
+ def unapply(tree: Tree): Option[List[Tree]] = tree match {
+ case Select(Ident(nme.scala_), tpnme.Unit) =>
+ Some(Nil)
+ case AppliedTypeTree(id: Ident, args)
+ if args.length <= MaxTupleArity && id.symbol == TupleClass(args.length) =>
+ Some(args)
+ case AppliedTypeTree(Select(id @ Ident(nme.scala_), TermName(tuple)), args)
+ if args.length <= MaxTupleArity && id.symbol == ScalaPackage && tuple == TupleClass(args.length).name =>
+ Some(args)
+ case _ =>
+ None
+ }
+ }
+
+ def RefTree(qual: Tree, sym: Symbol) = self.RefTree(qual, sym.name) setSymbol sym
}
val build: BuildApi = new BuildImpl
diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala
index 3470b05495..6a9fa9a884 100644
--- a/src/reflect/scala/reflect/internal/Definitions.scala
+++ b/src/reflect/scala/reflect/internal/Definitions.scala
@@ -442,6 +442,7 @@ trait Definitions extends api.StandardDefinitions {
// collections classes
lazy val ConsClass = requiredClass[scala.collection.immutable.::[_]]
lazy val IteratorClass = requiredClass[scala.collection.Iterator[_]]
+ lazy val IterableClass = requiredClass[scala.collection.Iterable[_]]
lazy val ListClass = requiredClass[scala.collection.immutable.List[_]]
lazy val SeqClass = requiredClass[scala.collection.Seq[_]]
lazy val StringBuilderClass = requiredClass[scala.collection.mutable.StringBuilder]
@@ -491,10 +492,8 @@ trait Definitions extends api.StandardDefinitions {
lazy val TreesClass = getClassIfDefined("scala.reflect.api.Trees") // defined in scala-reflect.jar, so we need to be careful
lazy val TreesTreeType = TreesClass.map(sym => getTypeMember(sym, tpnme.Tree))
- object TreeType {
- def unapply(tpe: Type): Boolean = unapply(tpe.typeSymbol)
- def unapply(sym: Symbol): Boolean = sym.overrideChain contains TreesTreeType
- }
+ object TreeType { def unapply(tpe: Type): Boolean = tpe.typeSymbol.overrideChain contains TreesTreeType }
+ object SubtreeType { def unapply(tpe: Type): Boolean = tpe.typeSymbol.overrideChain exists (_.tpe <:< TreesTreeType.tpe) }
lazy val ExprsClass = getClassIfDefined("scala.reflect.api.Exprs") // defined in scala-reflect.jar, so we need to be careful
lazy val ExprClass = ExprsClass.map(sym => getMemberClass(sym, tpnme.Expr))
@@ -520,6 +519,7 @@ trait Definitions extends api.StandardDefinitions {
lazy val TypeCreatorClass = getClassIfDefined("scala.reflect.api.TypeCreator") // defined in scala-reflect.jar, so we need to be careful
lazy val TreeCreatorClass = getClassIfDefined("scala.reflect.api.TreeCreator") // defined in scala-reflect.jar, so we need to be careful
+ lazy val LiftableClass = getClassIfDefined("scala.reflect.api.Liftable") // defined in scala-reflect.jar, so we need to be careful
lazy val MacroClass = getClassIfDefined("scala.reflect.macros.Macro") // defined in scala-reflect.jar, so we need to be careful
lazy val MacroContextClass = getClassIfDefined("scala.reflect.macros.Context") // defined in scala-reflect.jar, so we need to be careful
@@ -534,6 +534,11 @@ trait Definitions extends api.StandardDefinitions {
lazy val StringContextClass = requiredClass[scala.StringContext]
def StringContext_f = getMemberMethod(StringContextClass, nme.f)
+ lazy val QuasiquoteClass = if (ApiUniverseClass != NoSymbol) getMember(ApiUniverseClass, tpnme.Quasiquote) else NoSymbol
+ lazy val QuasiquoteClass_api = if (QuasiquoteClass != NoSymbol) getMember(QuasiquoteClass, tpnme.api) else NoSymbol
+ lazy val QuasiquoteClass_api_apply = if (QuasiquoteClass_api != NoSymbol) getMember(QuasiquoteClass_api, nme.apply) else NoSymbol
+ lazy val QuasiquoteClass_api_unapply = if (QuasiquoteClass_api != NoSymbol) getMember(QuasiquoteClass_api, nme.unapply) else NoSymbol
+
lazy val ScalaSignatureAnnotation = requiredClass[scala.reflect.ScalaSignature]
lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature]
@@ -645,6 +650,10 @@ trait Definitions extends api.StandardDefinitions {
isNonTrivial && isMacroCompatible
}
+ def isLiftableType(tp: Type) = tp <:< classExistentialType(LiftableClass)
+
+ def isIterableType(tp: Type) = tp <:< classExistentialType(IterableClass)
+
lazy val ProductRootClass: ClassSymbol = requiredClass[scala.Product]
def Product_productArity = getMemberMethod(ProductRootClass, nme.productArity)
def Product_productElement = getMemberMethod(ProductRootClass, nme.productElement)
@@ -870,6 +879,12 @@ trait Definitions extends api.StandardDefinitions {
lazy val BoxedNumberClass = getClassByName(sn.BoxedNumber)
lazy val BoxedCharacterClass = getClassByName(sn.BoxedCharacter)
lazy val BoxedBooleanClass = getClassByName(sn.BoxedBoolean)
+ lazy val BoxedByteClass = requiredClass[java.lang.Byte]
+ lazy val BoxedShortClass = requiredClass[java.lang.Short]
+ lazy val BoxedIntClass = requiredClass[java.lang.Integer]
+ lazy val BoxedLongClass = requiredClass[java.lang.Long]
+ lazy val BoxedFloatClass = requiredClass[java.lang.Float]
+ lazy val BoxedDoubleClass = requiredClass[java.lang.Double]
lazy val Boxes_isNumberOrBool = getDecl(BoxesRunTimeClass, nme.isBoxedNumberOrBoolean)
lazy val Boxes_isNumber = getDecl(BoxesRunTimeClass, nme.isBoxedNumber)
@@ -1179,5 +1194,28 @@ trait Definitions extends api.StandardDefinitions {
val _ = symbolsNotPresentInBytecode
isInitialized = true
} //init
+
+ class UniverseDependentTypes(universe: Tree) {
+ lazy val universeType = universe.tpe
+ lazy val universeSym = universe.symbol
+ lazy val nameType = universeMemberType(tpnme.Name)
+ lazy val termNameType = universeMemberType(tpnme.TermName)
+ lazy val typeNameType = universeMemberType(tpnme.TypeName)
+ lazy val modsType = universeMemberType(tpnme.Modifiers)
+ lazy val flagsType = universeMemberType(tpnme.FlagSet)
+ lazy val symbolType = universeMemberType(tpnme.Symbol)
+ lazy val treeType0 = universeMemberType(tpnme.Tree)
+ lazy val treeType = universeMemberType(tpnme.Tree)
+ lazy val typeDefType = universeMemberType(tpnme.TypeDef)
+ lazy val caseDefType = universeMemberType(tpnme.CaseDef)
+ lazy val iterableTreeType = appliedType(IterableClass, treeType)
+ lazy val iterableCaseDefType = appliedType(IterableClass, caseDefType)
+ lazy val iterableIterableTreeType = appliedType(IterableClass, iterableTreeType)
+ lazy val listTreeType = appliedType(ListClass, treeType)
+ lazy val listListTreeType = appliedType(ListClass, listTreeType)
+ lazy val optionTreeType = appliedType(OptionClass, treeType)
+ lazy val optionNameType = appliedType(OptionClass, nameType)
+ def universeMemberType(name: TypeName) = universe.tpe.memberType(getTypeMember(universe.symbol, name))
+ }
}
}
diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala
index ed5b92565d..0d78e24548 100644
--- a/src/reflect/scala/reflect/internal/Names.scala
+++ b/src/reflect/scala/reflect/internal/Names.scala
@@ -121,6 +121,41 @@ trait Names extends api.Names {
def newTypeName(bs: Array[Byte], offset: Int, len: Int): TypeName =
newTermName(bs, offset, len).toTypeName
+ /**
+ * Used only by the GenBCode backend, to represent bytecode-level types in a way that makes equals() and hashCode() efficient.
+ * For bytecode-level types of OBJECT sort, its internal name (not its descriptor) is stored.
+ * For those of ARRAY sort, its descriptor is stored, i.e. it has a leading '['
+ * For those of METHOD sort, its descriptor is stored, i.e. it has a leading '('
+ *
+ * can-multi-thread
+ */
+ final def lookupTypeName(cs: Array[Char]): TypeName = { lookupTypeNameIfExisting(cs, true) }
+
+ final def lookupTypeNameIfExisting(cs: Array[Char], failOnNotFound: Boolean): TypeName = {
+
+ val hterm = hashValue(cs, 0, cs.size) & HASH_MASK
+ var nterm = termHashtable(hterm)
+ while ((nterm ne null) && (nterm.length != cs.size || !equals(nterm.start, cs, 0, cs.size))) {
+ nterm = nterm.next
+ }
+ if (nterm eq null) {
+ if (failOnNotFound) { assert(false, "TermName not yet created: " + new String(cs)) }
+ return null
+ }
+
+ val htype = hashValue(chrs, nterm.start, nterm.length) & HASH_MASK
+ var ntype = typeHashtable(htype)
+ while ((ntype ne null) && ntype.start != nterm.start) {
+ ntype = ntype.next
+ }
+ if (ntype eq null) {
+ if (failOnNotFound) { assert(false, "TypeName not yet created: " + new String(cs)) }
+ return null
+ }
+
+ ntype
+ }
+
// Classes ----------------------------------------------------------------------
/** The name class.
diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala
index 30aaaa1f3a..64713b8d41 100644
--- a/src/reflect/scala/reflect/internal/StdNames.scala
+++ b/src/reflect/scala/reflect/internal/StdNames.scala
@@ -216,23 +216,39 @@ trait StdNames {
final val Any: NameType = "Any"
final val AnyVal: NameType = "AnyVal"
+ final val FlagSet: NameType = "FlagSet"
final val Mirror: NameType = "Mirror"
+ final val Modifiers: NameType = "Modifiers"
final val Nothing: NameType = "Nothing"
final val Null: NameType = "Null"
final val Object: NameType = "Object"
+ final val Option: NameType = "Option"
final val PrefixType: NameType = "PrefixType"
final val Product: NameType = "Product"
final val Serializable: NameType = "Serializable"
final val Singleton: NameType = "Singleton"
final val Throwable: NameType = "Throwable"
+ final val api: NameType = "api"
final val Annotation: NameType = "Annotation"
+ final val CaseDef: NameType = "CaseDef"
final val ClassfileAnnotation: NameType = "ClassfileAnnotation"
final val ClassManifest: NameType = "ClassManifest"
final val Enum: NameType = "Enum"
final val Group: NameType = "Group"
+ final val Name: NameType = "Name"
final val Tree: NameType = "Tree"
+ final val TermName: NameType = "TermName"
final val Type : NameType = "Type"
+ final val TypeName: NameType = "TypeName"
+ final val TypeDef: NameType = "TypeDef"
+ final val Tuple: NameType = "Tuple"
+ final val Universe: NameType = "Universe"
+ final val Quasiquote: NameType = "Quasiquote"
+
+ // quasiquote-specific names
+ final val QUASIQUOTE_MODS: NameType = "$quasiquote$mods$"
+ final val QUASIQUOTE_TUPLE: NameType = "$quasiquote$tuple$"
// Annotation simple names, used in Namer
final val BeanPropertyAnnot: NameType = "BeanProperty"
@@ -304,6 +320,10 @@ trait StdNames {
val REIFY_FREE_THIS_SUFFIX: NameType = "$this"
val REIFY_FREE_VALUE_SUFFIX: NameType = "$value"
val REIFY_SYMDEF_PREFIX: NameType = "symdef$"
+ val QUASIQUOTE_PREFIX: String = "qq$"
+ val QUASIQUOTE_FILE: String = "<quasiquote>"
+ val QUASIQUOTE_TUPLE: NameType = "$quasiquote$tuple$"
+ val QUASIQUOTE_CASE: NameType = "$quasiquote$case$"
val MIXIN_CONSTRUCTOR: NameType = "$init$"
val MODULE_INSTANCE_FIELD: NameType = NameTransformer.MODULE_INSTANCE_NAME // "MODULE$"
val OUTER: NameType = "$outer"
@@ -539,17 +559,23 @@ trait StdNames {
val Annotation: NameType = "Annotation"
val Any: NameType = "Any"
val AnyVal: NameType = "AnyVal"
+ val Apply: NameType = "Apply"
+ val Applied: NameType = "Applied"
val ArrayAnnotArg: NameType = "ArrayAnnotArg"
+ val Block: NameType = "Block"
val ConstantType: NameType = "ConstantType"
val EmptyPackage: NameType = "EmptyPackage"
val EmptyPackageClass: NameType = "EmptyPackageClass"
+ val False : NameType = "False"
val Flag : NameType = "Flag"
+ val FlagsAsBits: NameType = "FlagsAsBits"
val Ident: NameType = "Ident"
val Import: NameType = "Import"
val Literal: NameType = "Literal"
val LiteralAnnotArg: NameType = "LiteralAnnotArg"
val Modifiers: NameType = "Modifiers"
val NestedAnnotArg: NameType = "NestedAnnotArg"
+ val New: NameType = "New"
val NoFlags: NameType = "NoFlags"
val NoSymbol: NameType = "NoSymbol"
val Nothing: NameType = "Nothing"
@@ -560,10 +586,15 @@ trait StdNames {
val Select: NameType = "Select"
val SelectFromTypeTree: NameType = "SelectFromTypeTree"
val StringContext: NameType = "StringContext"
+ val SyntacticClassDef: NameType = "SyntacticClassDef"
val This: NameType = "This"
val ThisType: NameType = "ThisType"
+ val True : NameType = "True"
val Tuple2: NameType = "Tuple2"
+ val TupleN: NameType = "TupleN"
+ val TupleTypeN: NameType = "TupleTypeN"
val TYPE_ : NameType = "TYPE"
+ val TypeApplied: NameType = "TypeApplied"
val TypeRef: NameType = "TypeRef"
val TypeTree: NameType = "TypeTree"
val UNIT : NameType = "UNIT"
@@ -593,6 +624,7 @@ trait StdNames {
val checkInitialized: NameType = "checkInitialized"
val classOf: NameType = "classOf"
val clone_ : NameType = "clone"
+ val collection: NameType = "collection"
val conforms: NameType = "conforms"
val copy: NameType = "copy"
val create: NameType = "create"
@@ -619,10 +651,13 @@ trait StdNames {
val find_ : NameType = "find"
val flagsFromBits : NameType = "flagsFromBits"
val flatMap: NameType = "flatMap"
+ val flatten: NameType = "flatten"
+ val foldLeft: NameType = "foldLeft"
val foreach: NameType = "foreach"
val get: NameType = "get"
val hashCode_ : NameType = "hashCode"
val hash_ : NameType = "hash"
+ val immutable: NameType = "immutable"
val implicitly: NameType = "implicitly"
val in: NameType = "in"
val inlinedEquals: NameType = "inlinedEquals"
@@ -644,12 +679,15 @@ trait StdNames {
val materializeWeakTypeTag: NameType = "materializeWeakTypeTag"
val materializeTypeTag: NameType = "materializeTypeTag"
val moduleClass : NameType = "moduleClass"
+ val mkAnnotationCtor: NameType = "mkAnnotationCtor"
val ne: NameType = "ne"
val newArray: NameType = "newArray"
val newFreeTerm: NameType = "newFreeTerm"
val newFreeType: NameType = "newFreeType"
val newNestedSymbol: NameType = "newNestedSymbol"
val newScopeWith: NameType = "newScopeWith"
+ val nmeCONSTRUCTOR: NameType = "CONSTRUCTOR"
+ val nmeNme: NameType = "nme"
val notifyAll_ : NameType = "notifyAll"
val notify_ : NameType = "notify"
val null_ : NameType = "null"
@@ -665,6 +703,7 @@ trait StdNames {
val runtime: NameType = "runtime"
val runtimeClass: NameType = "runtimeClass"
val runtimeMirror: NameType = "runtimeMirror"
+ val RefTree: NameType = "RefTree"
val scala_ : NameType = "scala"
val selectDynamic: NameType = "selectDynamic"
val selectOverloadedMethod: NameType = "selectOverloadedMethod"
@@ -684,6 +723,7 @@ trait StdNames {
val this_ : NameType = "this"
val thisPrefix : NameType = "thisPrefix"
val toArray: NameType = "toArray"
+ val toList: NameType = "toList"
val toObjectArray : NameType = "toObjectArray"
val TopScope: NameType = "TopScope"
val toString_ : NameType = "toString"
@@ -694,6 +734,7 @@ trait StdNames {
val typedProductIterator: NameType = "typedProductIterator"
val TypeName: NameType = "TypeName"
val typeTagToManifest: NameType = "typeTagToManifest"
+
val unapply: NameType = "unapply"
val unapplySeq: NameType = "unapplySeq"
val unbox: NameType = "unbox"
@@ -708,6 +749,12 @@ trait StdNames {
val withFilter: NameType = "withFilter"
val zero: NameType = "zero"
+ // quasiquote interpolators:
+ val q: NameType = "q"
+ val tq: NameType = "tq"
+ val cq: NameType = "cq"
+ val pq: NameType = "pq"
+
// unencoded operators
object raw {
final val BANG : NameType = "!"
@@ -744,6 +791,7 @@ trait StdNames {
val ADD = encode("+")
val AND = encode("&")
val ASR = encode(">>")
+ val CONS = encode("::")
val DIV = encode("/")
val EQ = encode("==")
val EQL = encode("=")
@@ -760,6 +808,7 @@ trait StdNames {
val NE = encode("!=")
val OR = encode("|")
val PLUS = ADD // technically redundant, but ADD looks funny with MINUS
+ val PLUSPLUS = encode("++")
val SUB = MINUS // ... as does SUB with PLUS
val XOR = encode("^")
val ZAND = encode("&&")
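The added entries give symbolic handles for the quasiquote interpolators (q, tq, cq, pq) and a couple of encoded operators. A minimal sketch of how compiler code might test a term name against the new interpolator names; `global` here is a hypothetical Global/SymbolTable instance, not something introduced by this patch:

    import global._
    // true when `name` is one of the quasiquote interpolator names added above
    def isQuasiquoteInterpolator(name: TermName): Boolean =
      name == nme.q || name == nme.tq || name == nme.cq || name == nme.pq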
diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala
index c3596fe62e..424296c212 100644
--- a/src/reflect/scala/reflect/internal/Symbols.scala
+++ b/src/reflect/scala/reflect/internal/Symbols.scala
@@ -3433,7 +3433,7 @@ trait Symbols extends api.Symbols { self: SymbolTable =>
}
/** A class for type histories */
- private sealed case class TypeHistory(var validFrom: Period, info: Type, prev: TypeHistory) {
+ private case class TypeHistory(var validFrom: Period, info: Type, prev: TypeHistory) {
assert((prev eq null) || phaseId(validFrom) > phaseId(prev.validFrom), this)
assert(validFrom != NoPeriod, this)
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala
index b75fd72526..1af8c225f5 100644
--- a/src/reflect/scala/reflect/internal/TreeGen.scala
+++ b/src/reflect/scala/reflect/internal/TreeGen.scala
@@ -2,6 +2,8 @@ package scala
package reflect
package internal
+import Flags._
+
abstract class TreeGen extends macros.TreeBuilder {
val global: SymbolTable
@@ -302,4 +304,78 @@ abstract class TreeGen extends macros.TreeBuilder {
val factory = Select(gen.mkAttributedRef(SeqModule), nme.apply)
Apply(factory, List(arg))
}
+
+ def mkSuperInitCall: Select = Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR)
+
+ /** Generates a template with constructor corresponding to
+ *
+ * constrmods (vparams_1) ... (vparams_n) preSuper { presupers }
+ * extends superclass(args_1) ... (args_n) with mixins { self => body }
+ *
+ * This gets translated to
+ *
+ * extends superclass with mixins { self =>
+ * presupers' // presupers without rhs
+ * vparamss // abstract fields corresponding to value parameters
+ * def <init>(vparamss) {
+ * presupers
+ * super.<init>(args)
+ * }
+ * body
+ * }
+ */
+ def mkTemplate(parents: List[Tree], self: ValDef, constrMods: Modifiers, vparamss: List[List[ValDef]], body: List[Tree], superPos: Position): Template = {
+ /* Add constructor to template */
+
+ // create parameters for <init> as synthetic trees.
+ var vparamss1 = mmap(vparamss) { vd =>
+ atPos(vd.pos.focus) {
+ val mods = Modifiers(vd.mods.flags & (IMPLICIT | DEFAULTPARAM | BYNAMEPARAM) | PARAM | PARAMACCESSOR)
+ ValDef(mods withAnnotations vd.mods.annotations, vd.name, vd.tpt.duplicate, vd.rhs.duplicate)
+ }
+ }
+ val (edefs, rest) = body span treeInfo.isEarlyDef
+ val (evdefs, etdefs) = edefs partition treeInfo.isEarlyValDef
+ val gvdefs = evdefs map {
+ case vdef @ ValDef(_, _, tpt, _) =>
+ copyValDef(vdef)(
+ // atPos for the new tpt is necessary, since the original tpt might have no position
+ // (e.g. when the ValDef carries no type annotation); even though setOriginal modifies the
+ // position of the TypeTree, it would still be NoPosition without the explicit atPos.
+ tpt = atPos(vdef.pos.focus)(TypeTree() setOriginal tpt setPos tpt.pos.focus),
+ rhs = EmptyTree
+ )
+ }
+ val lvdefs = evdefs collect { case vdef: ValDef => copyValDef(vdef)(mods = vdef.mods | PRESUPER) }
+
+ val constrs = {
+ if (constrMods hasFlag TRAIT) {
+ if (body forall treeInfo.isInterfaceMember) List()
+ else List(
+ atPos(wrappingPos(superPos, lvdefs)) (
+ DefDef(NoMods, nme.MIXIN_CONSTRUCTOR, List(), List(Nil), TypeTree(), Block(lvdefs, Literal(Constant())))))
+ } else {
+ // convert (implicit ... ) to ()(implicit ... ) if it's the only parameter section
+ if (vparamss1.isEmpty || !vparamss1.head.isEmpty && vparamss1.head.head.mods.isImplicit)
+ vparamss1 = List() :: vparamss1
+ val superRef: Tree = atPos(superPos)(mkSuperInitCall)
+ val superCall = pendingSuperCall // we can't know in advance which of the parents will end up as a superclass
+ // this requires knowing which of the parents is a type macro and which is not
+ // and that's something that cannot be found out before typer
+ // (the type macros aren't in the trunk yet, but there is a plan for them to land there soon)
+ // this means that we don't know what will be the arguments of the super call
+ // therefore here we emit a dummy which gets populated when the template is named and typechecked
+ List(
+ // TODO: previously this was `wrappingPos(superPos, lvdefs ::: argss.flatten)`
+ // is it going to be a problem that we can no longer include the `argss`?
+ atPos(wrappingPos(superPos, lvdefs)) (
+ DefDef(constrMods, nme.CONSTRUCTOR, List(), vparamss1, TypeTree(), Block(lvdefs ::: List(superCall), Literal(Constant())))))
+ }
+ }
+ constrs foreach (ensureNonOverlapping(_, parents ::: gvdefs, focus=false))
+ // Field definitions for the class - remove defaults.
+ val fieldDefs = vparamss.flatten map (vd => copyValDef(vd)(mods = vd.mods &~ DEFAULTPARAM, rhs = EmptyTree))
+
+ global.Template(parents, self, gvdefs ::: fieldDefs ::: constrs ::: etdefs ::: rest)
+ }
}
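A rough sketch of a call site for the new mkTemplate, assuming a hypothetical `global` SymbolTable with its `gen` TreeGen in scope; the parent, parameter, and positions are illustrative only, not taken from this patch:

    import global._
    // hand-desugared shape of `class C(x: Int) extends AnyRef`, for illustration
    val templ: Template = gen.mkTemplate(
      parents    = List(Ident(newTypeName("AnyRef"))),
      self       = emptyValDef,
      constrMods = NoMods,
      vparamss   = List(List(ValDef(Modifiers(Flag.PARAM), newTermName("x"), Ident(newTypeName("Int")), EmptyTree))),
      body       = Nil,
      superPos   = NoPosition)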
diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala
index b583137059..905f1bf26e 100644
--- a/src/reflect/scala/reflect/internal/util/Statistics.scala
+++ b/src/reflect/scala/reflect/internal/util/Statistics.scala
@@ -133,6 +133,12 @@ quant)
if (this.value < that.value) -1
else if (this.value > that.value) 1
else 0
+ override def equals(that: Any): Boolean =
+ that match {
+ case that: Counter => (this compare that) == 0
+ case _ => false
+ }
+ override def hashCode = value
override def toString = value.toString
}
@@ -184,6 +190,12 @@ quant)
if (this.specificNanos < that.specificNanos) -1
else if (this.specificNanos > that.specificNanos) 1
else 0
+ override def equals(that: Any): Boolean =
+ that match {
+ case that: StackableTimer => (this compare that) == 0
+ case _ => false
+ }
+ override def hashCode = specificNanos.##
override def toString = s"${super.toString} aggregate, ${show(specificNanos)} specific"
}
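These overrides keep equals consistent with compare, and hashCode consistent with equals, so Counter and StackableTimer values behave predictably in hash-based collections. A standalone illustration of the same pattern (not the Statistics internals):

    class Count(val value: Int) extends Ordered[Count] {
      def compare(that: Count) = this.value compare that.value
      // equality delegates to compare; hashCode uses the same underlying value
      override def equals(that: Any) = that match {
        case that: Count => (this compare that) == 0
        case _           => false
      }
      override def hashCode = value
    }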
diff --git a/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala b/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
index 48af261937..cf03ecb480 100644
--- a/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ConsoleReaderHelper.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package interpreter
-import scala.tools.jline.console.{ ConsoleReader, CursorBuffer }
+import jline.console.{ ConsoleReader, CursorBuffer }
trait ConsoleReaderHelper extends ConsoleReader {
def terminal = getTerminal()
diff --git a/src/repl/scala/tools/nsc/interpreter/Delimited.scala b/src/repl/scala/tools/nsc/interpreter/Delimited.scala
index e88a044931..b7f06f1d0a 100644
--- a/src/repl/scala/tools/nsc/interpreter/Delimited.scala
+++ b/src/repl/scala/tools/nsc/interpreter/Delimited.scala
@@ -6,7 +6,7 @@
package scala.tools.nsc
package interpreter
-import scala.tools.jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList }
+import jline.console.completer.ArgumentCompleter.{ ArgumentDelimiter, ArgumentList }
class JLineDelimiter extends ArgumentDelimiter {
def toJLine(args: List[String], cursor: Int) = args match {
diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
index 392aac7c2e..ccc9621fad 100644
--- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala
+++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala
@@ -7,22 +7,22 @@ package scala
package tools.nsc
package interpreter
-import Predef.{ println => _, _ }
-import java.io.{ BufferedReader, FileReader }
-import session._
+import scala.language.{ implicitConversions, existentials }
import scala.annotation.tailrec
+import Predef.{ println => _, _ }
+import interpreter.session._
+import StdReplTags._
import scala.util.Properties.{ jdkHome, javaVersion, versionString, javaVmName }
-import util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
-import io.{ File, Directory }
-import util.ScalaClassLoader
+import scala.tools.nsc.util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream }
+import scala.reflect.classTag
+import scala.reflect.internal.util.{ BatchSourceFile, ScalaClassLoader }
import ScalaClassLoader._
+import scala.reflect.io.{ File, Directory }
import scala.tools.util._
-import scala.language.{implicitConversions, existentials}
-import scala.reflect.classTag
-import StdReplTags._
+import scala.collection.generic.Clearable
import scala.concurrent.{ ExecutionContext, Await, Future, future }
import ExecutionContext.Implicits._
-import scala.reflect.internal.util.BatchSourceFile
+import java.io.{ BufferedReader, FileReader }
/** The Scala interactive shell. It provides a read-eval-print loop
* around the Interpreter class.
@@ -208,12 +208,14 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
/** Standard commands **/
lazy val standardCommands = List(
cmd("cp", "<path>", "add a jar or directory to the classpath", addClasspath),
+ cmd("edit", "<id>|<line>", "edit history", editCommand),
cmd("help", "[command]", "print this summary or command-specific help", helpCommand),
historyCommand,
cmd("h?", "<string>", "search the history", searchHistory),
cmd("imports", "[name name ...]", "show import history, identifying sources of names", importsCommand),
cmd("implicits", "[-v]", "show the implicits in scope", intp.implicitsCommand),
cmd("javap", "<path|class>", "disassemble a file or class name", javapCommand),
+ cmd("line", "<id>|<line>", "place line(s) at the end of history", lineCommand),
cmd("load", "<path>", "load and interpret a Scala file", loadCommand),
nullary("paste", "enter paste mode: all input up to ctrl-D compiled together", pasteCommand),
nullary("power", "enable power user mode", powerCmd),
@@ -222,6 +224,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
nullary("reset", "reset the repl to its initial state, forgetting all session entries", resetCommand),
cmd("save", "<path>", "save replayable session to a file", saveCommand),
shCommand,
+ cmd("settings", "[+|-]<options>", "+enable/-disable flags, set compiler options", changeSettings),
nullary("silent", "disable/enable automatic printing of results", verbosity),
cmd("type", "[-v] <expr>", "display the type of an expression without evaluating it", typeCommand),
cmd("kind", "[-v] <expr>", "display the kind of expression's type", kindCommand),
@@ -301,6 +304,49 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
intp.lastWarnings foreach { case (pos, msg) => intp.reporter.warning(pos, msg) }
}
+ private def changeSettings(args: String): Result = {
+ def showSettings() = {
+ for (s <- settings.userSetSettings.toSeq.sorted) echo(s.toString)
+ }
+ def updateSettings() = {
+ // put aside +flag options
+ val (pluses, rest) = (args split "\\s+").toList partition (_.startsWith("+"))
+ val tmps = new Settings
+ val (ok, leftover) = tmps.processArguments(rest, processAll = true)
+ if (!ok) echo("Bad settings request.")
+ else if (leftover.nonEmpty) echo("Unprocessed settings.")
+ else {
+ // boolean flags set-by-user on tmp copy should be off, not on
+ val offs = tmps.userSetSettings filter (_.isInstanceOf[Settings#BooleanSetting])
+ val (minuses, nonbools) = rest partition (arg => offs exists (_ respondsTo arg))
+ // update non-flags
+ settings.processArguments(nonbools, processAll = true)
+ // also snag multi-value options for clearing, e.g. -Ylog: and -language:
+ for {
+ s <- settings.userSetSettings
+ if s.isInstanceOf[Settings#MultiStringSetting] || s.isInstanceOf[Settings#PhasesSetting]
+ if nonbools exists (arg => arg.head == '-' && arg.last == ':' && (s respondsTo arg.init))
+ } s match {
+ case c: Clearable => c.clear()
+ case _ =>
+ }
+ def update(bs: Seq[String], name: String=>String, setter: Settings#Setting=>Unit) = {
+ for (b <- bs)
+ settings.lookupSetting(name(b)) match {
+ case Some(s) =>
+ if (s.isInstanceOf[Settings#BooleanSetting]) setter(s)
+ else echo(s"Not a boolean flag: $b")
+ case _ =>
+ echo(s"Not an option: $b")
+ }
+ }
+ update(minuses, identity, _.tryToSetFromPropertyValue("false")) // turn off
+ update(pluses, "-" + _.drop(1), _.tryToSet(Nil)) // turn on
+ }
+ }
+ if (args.isEmpty) showSettings() else updateSettings()
+ }
+
private def javapCommand(line: String): Result = {
if (javap == null)
":javap unavailable, no tools.jar at %s. Set JDK_HOME.".format(jdkHome)
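The new :settings command first sets aside the "+flag" toggles and only hands the remaining arguments to Settings.processArguments. A standalone sketch of just that split, with made-up option names:

    val args = "+deprecation -Ylog:typer -language:higherKinds"
    val (pluses, rest) = (args split "\\s+").toList partition (_ startsWith "+")
    // pluses == List("+deprecation"); rest == List("-Ylog:typer", "-language:higherKinds")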
@@ -443,6 +489,90 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter)
unleashAndSetPhase()
}
+ def lineCommand(what: String): Result = editCommand(what, None)
+
+ // :edit id or :edit line
+ def editCommand(what: String): Result = editCommand(what, Properties.envOrNone("EDITOR"))
+
+ def editCommand(what: String, editor: Option[String]): Result = {
+ def diagnose(code: String) = {
+ echo("The edited code is incomplete!\n")
+ val errless = intp compileSources new BatchSourceFile("<pastie>", s"object pastel {\n$code\n}")
+ if (errless) echo("The compiler reports no errors.")
+ }
+ def historicize(text: String) = history match {
+ case jlh: JLineHistory => text.lines foreach jlh.add ; jlh.moveToEnd() ; true
+ case _ => false
+ }
+ def edit(text: String): Result = editor match {
+ case Some(ed) =>
+ val tmp = File.makeTemp()
+ tmp.writeAll(text)
+ try {
+ val pr = new ProcessResult(s"$ed ${tmp.path}")
+ pr.exitCode match {
+ case 0 =>
+ tmp.safeSlurp() match {
+ case Some(edited) if edited.trim.isEmpty => echo("Edited text is empty.")
+ case Some(edited) =>
+ echo(edited.lines map ("+" + _) mkString "\n")
+ val res = intp interpret edited
+ if (res == IR.Incomplete) diagnose(edited)
+ else {
+ historicize(edited)
+ Result(lineToRecord = Some(edited), keepRunning = true)
+ }
+ case None => echo("Can't read edited text. Did you delete it?")
+ }
+ case x => echo(s"Error exit from $ed ($x), ignoring")
+ }
+ } finally {
+ tmp.delete()
+ }
+ case None =>
+ if (historicize(text)) echo("Placing text in recent history.")
+ else echo(f"No EDITOR defined and you can't change history, echoing your text:%n$text")
+ }
+
+ // if what is a number, use it as a line number or range in history
+ def isNum = what forall (c => c.isDigit || c == '-' || c == '+')
+ // except that "-" means last value
+ def isLast = (what == "-")
+ if (isLast || !isNum) {
+ val name = if (isLast) intp.mostRecentVar else what
+ val sym = intp.symbolOfIdent(name)
+ intp.prevRequestList collectFirst { case r if r.defines contains sym => r } match {
+ case Some(req) => edit(req.line)
+ case None => echo(s"No symbol in scope: $what")
+ }
+ } else try {
+ val s = what
+ // line 123, 120+3, -3, 120-123, 120-, note -3 is not 0-3 but (cur-3,cur)
+ val (start, len) =
+ if ((s indexOf '+') > 0) {
+ val (a,b) = s splitAt (s indexOf '+')
+ (a.toInt, b.drop(1).toInt)
+ } else {
+ (s indexOf '-') match {
+ case -1 => (s.toInt, 1)
+ case 0 => val n = s.drop(1).toInt ; (history.index - n, n)
+ case _ if s.last == '-' => val n = s.init.toInt ; (n, history.index - n)
+ case i => val n = s.take(i).toInt ; (n, s.drop(i+1).toInt - n)
+ }
+ }
+ import scala.collection.JavaConverters._
+ val index = (start - 1) max 0
+ val text = history match {
+ case jlh: JLineHistory => jlh.entries(index).asScala.take(len) map (_.value) mkString "\n"
+ case _ => history.asStrings.slice(index, index + len) mkString "\n"
+ }
+ edit(text)
+ } catch {
+ case _: NumberFormatException => echo(s"Bad range '$what'")
+ echo("Use line 123, 120+3, -3, 120-123, 120-, note -3 is not 0-3 but (cur-3,cur)")
+ }
+ }
+
/** fork a shell and run a command */
lazy val shCommand = new LoopCommand("sh", "run a shell command (result is implicitly => List[String])") {
override def usage = "<command line>"
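The :edit and :line commands accept history ranges written as "123", "120+3", "-3", "120-123" or "120-", each reduced to a (start, length) pair. A standalone sketch of that arithmetic, with `current` standing in for history.index:

    def range(s: String, current: Int): (Int, Int) =
      if ((s indexOf '+') > 0) {
        val (a, b) = s splitAt (s indexOf '+')
        (a.toInt, b.drop(1).toInt)
      } else (s indexOf '-') match {
        case -1                 => (s.toInt, 1)
        case 0                  => val n = s.drop(1).toInt ; (current - n, n)
        case _ if s.last == '-' => val n = s.init.toInt ; (n, current - n)
        case i                  => val n = s.take(i).toInt ; (n, s.drop(i + 1).toInt - n)
      }
    // range("120+3", 200) == (120, 3); range("-3", 200) == (197, 3)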
diff --git a/src/repl/scala/tools/nsc/interpreter/JLineReader.scala b/src/repl/scala/tools/nsc/interpreter/JLineReader.scala
index 5d41f1bbb4..d8a876feb2 100644
--- a/src/repl/scala/tools/nsc/interpreter/JLineReader.scala
+++ b/src/repl/scala/tools/nsc/interpreter/JLineReader.scala
@@ -6,8 +6,8 @@
package scala.tools.nsc
package interpreter
-import scala.tools.jline.console.ConsoleReader
-import scala.tools.jline.console.completer._
+import jline.console.ConsoleReader
+import jline.console.completer._
import session._
import Completion._
@@ -42,7 +42,7 @@ class JLineReader(_completion: => Completion) extends InteractiveReader {
def readOneKey(prompt: String) = {
this.print(prompt)
this.flush()
- this.readVirtualKey()
+ this.readCharacter()
}
def eraseLine() = consoleReader.resetPromptLine("", "", 0)
def redrawLineAndFlush(): Unit = { flush() ; drawLine() ; flush() }
diff --git a/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala
index 89998e438a..7c49b91296 100644
--- a/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala
@@ -46,6 +46,11 @@ class SimpleHistory extends JLineHistory {
def entries(): JListIterator[JEntry] = toEntries().asJava.listIterator()
def iterator: JIterator[JEntry] = toEntries().iterator.asJava
+ def remove(idx: Int): CharSequence = buf remove idx
+ def removeFirst(): CharSequence = buf remove 0
+ def removeLast(): CharSequence = buf remove lastIndex
+ def set(idx: Int, to: CharSequence): Unit = buf(idx) = to
+
def current() = if (index >= 0 && index < buf.size) buf(index) else fail("current()")
def previous() = (index > 0) && minusOne
def next() = (index <= lastIndex) && plusOne
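The added mutators fill in the parts of JLine's History interface that SimpleHistory previously lacked. A hypothetical usage sequence, assuming the class's usual add method for populating the buffer:

    val h = new SimpleHistory
    h.add("val x = 1")
    h.add("val y = 2")
    h.set(0, "val x = 42")   // overwrite the first entry in place
    h.removeLast()           // drop "val y = 2"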
diff --git a/src/repl/scala/tools/nsc/interpreter/session/package.scala b/src/repl/scala/tools/nsc/interpreter/session/package.scala
index c62cf21151..a3d7312c98 100644
--- a/src/repl/scala/tools/nsc/interpreter/session/package.scala
+++ b/src/repl/scala/tools/nsc/interpreter/session/package.scala
@@ -14,10 +14,10 @@ package object session {
type JIterator[T] = java.util.Iterator[T]
type JListIterator[T] = java.util.ListIterator[T]
- type JEntry = scala.tools.jline.console.history.History.Entry
- type JHistory = scala.tools.jline.console.history.History
- type JMemoryHistory = scala.tools.jline.console.history.MemoryHistory
- type JPersistentHistory = scala.tools.jline.console.history.PersistentHistory
+ type JEntry = jline.console.history.History.Entry
+ type JHistory = jline.console.history.History
+ type JMemoryHistory = jline.console.history.MemoryHistory
+ type JPersistentHistory = jline.console.history.PersistentHistory
private[interpreter] implicit def charSequenceFix(x: CharSequence): String = x.toString
}
diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala
index 150b293b81..1846f375cd 100644
--- a/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala
+++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala
@@ -10,7 +10,7 @@ import model._
* @author Damien Obrist
* @author Vlad Ureche
*/
-abstract class Diagram {
+sealed abstract class Diagram {
def nodes: List[Node]
def edges: List[(Node, List[Node])]
def isContentDiagram = false // Implemented by ContentDiagram
@@ -44,7 +44,7 @@ trait DepthInfo {
def maxDepth: Int
}
-abstract class Node {
+sealed abstract class Node {
def name = tpe.name
def tpe: TypeEntity
def tpl: Option[TemplateEntity]
diff --git a/src/library/scala/xml/Atom.scala b/src/xml/scala/xml/Atom.scala
index 33e58ba7e7..33e58ba7e7 100644
--- a/src/library/scala/xml/Atom.scala
+++ b/src/xml/scala/xml/Atom.scala
diff --git a/src/library/scala/xml/Attribute.scala b/src/xml/scala/xml/Attribute.scala
index e4b2b69fc6..e4b2b69fc6 100644
--- a/src/library/scala/xml/Attribute.scala
+++ b/src/xml/scala/xml/Attribute.scala
diff --git a/src/library/scala/xml/Comment.scala b/src/xml/scala/xml/Comment.scala
index b8dccdcb16..b8dccdcb16 100644
--- a/src/library/scala/xml/Comment.scala
+++ b/src/xml/scala/xml/Comment.scala
diff --git a/src/library/scala/xml/Document.scala b/src/xml/scala/xml/Document.scala
index 9a725014fc..9a725014fc 100644
--- a/src/library/scala/xml/Document.scala
+++ b/src/xml/scala/xml/Document.scala
diff --git a/src/library/scala/xml/Elem.scala b/src/xml/scala/xml/Elem.scala
index 484cf98744..484cf98744 100755
--- a/src/library/scala/xml/Elem.scala
+++ b/src/xml/scala/xml/Elem.scala
diff --git a/src/library/scala/xml/EntityRef.scala b/src/xml/scala/xml/EntityRef.scala
index 7a58831075..7a58831075 100644
--- a/src/library/scala/xml/EntityRef.scala
+++ b/src/xml/scala/xml/EntityRef.scala
diff --git a/src/library/scala/xml/Equality.scala b/src/xml/scala/xml/Equality.scala
index 021d185812..021d185812 100644
--- a/src/library/scala/xml/Equality.scala
+++ b/src/xml/scala/xml/Equality.scala
diff --git a/src/library/scala/xml/Group.scala b/src/xml/scala/xml/Group.scala
index e3af615008..e3af615008 100644
--- a/src/library/scala/xml/Group.scala
+++ b/src/xml/scala/xml/Group.scala
diff --git a/src/library/scala/xml/MalformedAttributeException.scala b/src/xml/scala/xml/MalformedAttributeException.scala
index d499ad3e10..d499ad3e10 100644
--- a/src/library/scala/xml/MalformedAttributeException.scala
+++ b/src/xml/scala/xml/MalformedAttributeException.scala
diff --git a/src/library/scala/xml/MetaData.scala b/src/xml/scala/xml/MetaData.scala
index 8b5ea187cb..8b5ea187cb 100644
--- a/src/library/scala/xml/MetaData.scala
+++ b/src/xml/scala/xml/MetaData.scala
diff --git a/src/library/scala/xml/NamespaceBinding.scala b/src/xml/scala/xml/NamespaceBinding.scala
index b320466976..b320466976 100644
--- a/src/library/scala/xml/NamespaceBinding.scala
+++ b/src/xml/scala/xml/NamespaceBinding.scala
diff --git a/src/library/scala/xml/Node.scala b/src/xml/scala/xml/Node.scala
index e121284252..e121284252 100755
--- a/src/library/scala/xml/Node.scala
+++ b/src/xml/scala/xml/Node.scala
diff --git a/src/library/scala/xml/NodeBuffer.scala b/src/xml/scala/xml/NodeBuffer.scala
index ae7c7b2bf8..ae7c7b2bf8 100644
--- a/src/library/scala/xml/NodeBuffer.scala
+++ b/src/xml/scala/xml/NodeBuffer.scala
diff --git a/src/library/scala/xml/NodeSeq.scala b/src/xml/scala/xml/NodeSeq.scala
index b8022472fb..b8022472fb 100644
--- a/src/library/scala/xml/NodeSeq.scala
+++ b/src/xml/scala/xml/NodeSeq.scala
diff --git a/src/library/scala/xml/Null.scala b/src/xml/scala/xml/Null.scala
index f763c023c4..f763c023c4 100644
--- a/src/library/scala/xml/Null.scala
+++ b/src/xml/scala/xml/Null.scala
diff --git a/src/library/scala/xml/PCData.scala b/src/xml/scala/xml/PCData.scala
index 31eea2b6d7..31eea2b6d7 100644
--- a/src/library/scala/xml/PCData.scala
+++ b/src/xml/scala/xml/PCData.scala
diff --git a/src/library/scala/xml/PrefixedAttribute.scala b/src/xml/scala/xml/PrefixedAttribute.scala
index 4ab79c8677..4ab79c8677 100644
--- a/src/library/scala/xml/PrefixedAttribute.scala
+++ b/src/xml/scala/xml/PrefixedAttribute.scala
diff --git a/src/library/scala/xml/PrettyPrinter.scala b/src/xml/scala/xml/PrettyPrinter.scala
index 9e01905357..9e01905357 100755
--- a/src/library/scala/xml/PrettyPrinter.scala
+++ b/src/xml/scala/xml/PrettyPrinter.scala
diff --git a/src/library/scala/xml/ProcInstr.scala b/src/xml/scala/xml/ProcInstr.scala
index 189c1c6878..189c1c6878 100644
--- a/src/library/scala/xml/ProcInstr.scala
+++ b/src/xml/scala/xml/ProcInstr.scala
diff --git a/src/library/scala/xml/QNode.scala b/src/xml/scala/xml/QNode.scala
index f9e3f1854b..f9e3f1854b 100644
--- a/src/library/scala/xml/QNode.scala
+++ b/src/xml/scala/xml/QNode.scala
diff --git a/src/library/scala/xml/SpecialNode.scala b/src/xml/scala/xml/SpecialNode.scala
index 5fef8ef66c..5fef8ef66c 100644
--- a/src/library/scala/xml/SpecialNode.scala
+++ b/src/xml/scala/xml/SpecialNode.scala
diff --git a/src/library/scala/xml/Text.scala b/src/xml/scala/xml/Text.scala
index debea0c025..debea0c025 100644
--- a/src/library/scala/xml/Text.scala
+++ b/src/xml/scala/xml/Text.scala
diff --git a/src/library/scala/xml/TextBuffer.scala b/src/xml/scala/xml/TextBuffer.scala
index 514b1701af..514b1701af 100644
--- a/src/library/scala/xml/TextBuffer.scala
+++ b/src/xml/scala/xml/TextBuffer.scala
diff --git a/src/library/scala/xml/TopScope.scala b/src/xml/scala/xml/TopScope.scala
index 474fbbbdb5..474fbbbdb5 100644
--- a/src/library/scala/xml/TopScope.scala
+++ b/src/xml/scala/xml/TopScope.scala
diff --git a/src/library/scala/xml/TypeSymbol.scala b/src/xml/scala/xml/TypeSymbol.scala
index fb371ee340..fb371ee340 100644
--- a/src/library/scala/xml/TypeSymbol.scala
+++ b/src/xml/scala/xml/TypeSymbol.scala
diff --git a/src/library/scala/xml/Unparsed.scala b/src/xml/scala/xml/Unparsed.scala
index bc190eb724..bc190eb724 100644
--- a/src/library/scala/xml/Unparsed.scala
+++ b/src/xml/scala/xml/Unparsed.scala
diff --git a/src/library/scala/xml/UnprefixedAttribute.scala b/src/xml/scala/xml/UnprefixedAttribute.scala
index 6fa827da5f..6fa827da5f 100644
--- a/src/library/scala/xml/UnprefixedAttribute.scala
+++ b/src/xml/scala/xml/UnprefixedAttribute.scala
diff --git a/src/library/scala/xml/Utility.scala b/src/xml/scala/xml/Utility.scala
index 9134476401..9134476401 100755
--- a/src/library/scala/xml/Utility.scala
+++ b/src/xml/scala/xml/Utility.scala
diff --git a/src/library/scala/xml/XML.scala b/src/xml/scala/xml/XML.scala
index 020264e509..020264e509 100755
--- a/src/library/scala/xml/XML.scala
+++ b/src/xml/scala/xml/XML.scala
diff --git a/src/library/scala/xml/Xhtml.scala b/src/xml/scala/xml/Xhtml.scala
index 6a12c1a89a..6a12c1a89a 100644
--- a/src/library/scala/xml/Xhtml.scala
+++ b/src/xml/scala/xml/Xhtml.scala
diff --git a/src/library/scala/xml/dtd/ContentModel.scala b/src/xml/scala/xml/dtd/ContentModel.scala
index 4007985dce..4007985dce 100644
--- a/src/library/scala/xml/dtd/ContentModel.scala
+++ b/src/xml/scala/xml/dtd/ContentModel.scala
diff --git a/src/library/scala/xml/dtd/ContentModelParser.scala b/src/xml/scala/xml/dtd/ContentModelParser.scala
index 71b391c422..71b391c422 100644
--- a/src/library/scala/xml/dtd/ContentModelParser.scala
+++ b/src/xml/scala/xml/dtd/ContentModelParser.scala
diff --git a/src/library/scala/xml/dtd/DTD.scala b/src/xml/scala/xml/dtd/DTD.scala
index 16a824fe2c..16a824fe2c 100644
--- a/src/library/scala/xml/dtd/DTD.scala
+++ b/src/xml/scala/xml/dtd/DTD.scala
diff --git a/src/library/scala/xml/dtd/Decl.scala b/src/xml/scala/xml/dtd/Decl.scala
index e6804478bd..8bf859c460 100644
--- a/src/library/scala/xml/dtd/Decl.scala
+++ b/src/xml/scala/xml/dtd/Decl.scala
@@ -12,9 +12,9 @@ package dtd
import Utility.sbToString
-abstract class Decl
+sealed abstract class Decl
-abstract class MarkupDecl extends Decl {
+sealed abstract class MarkupDecl extends Decl {
def buildString(sb: StringBuilder): StringBuilder
}
@@ -52,7 +52,7 @@ case class AttrDecl(name: String, tpe: String, default: DefaultDecl) {
}
/** an entity declaration */
-abstract class EntityDecl extends MarkupDecl
+sealed abstract class EntityDecl extends MarkupDecl
/** a parsed general entity declaration */
case class ParsedEntityDecl(name: String, entdef: EntityDef) extends EntityDecl {
@@ -85,7 +85,7 @@ case class NotationDecl( name:String, extID:ExternalID ) extends MarkupDecl {
}
}
-abstract class EntityDef {
+sealed abstract class EntityDef {
def buildString(sb: StringBuilder): StringBuilder
}
@@ -133,7 +133,7 @@ case class PEReference(ent:String) extends MarkupDecl {
// default declarations for attributes
-abstract class DefaultDecl {
+sealed abstract class DefaultDecl {
override def toString(): String
def buildString(sb: StringBuilder): StringBuilder
}
diff --git a/src/library/scala/xml/dtd/DocType.scala b/src/xml/scala/xml/dtd/DocType.scala
index 849d560cc9..849d560cc9 100644
--- a/src/library/scala/xml/dtd/DocType.scala
+++ b/src/xml/scala/xml/dtd/DocType.scala
diff --git a/src/library/scala/xml/dtd/ElementValidator.scala b/src/xml/scala/xml/dtd/ElementValidator.scala
index 4830769a7d..4830769a7d 100644
--- a/src/library/scala/xml/dtd/ElementValidator.scala
+++ b/src/xml/scala/xml/dtd/ElementValidator.scala
diff --git a/src/library/scala/xml/dtd/ExternalID.scala b/src/xml/scala/xml/dtd/ExternalID.scala
index 5a1b5d1a19..880633d860 100644
--- a/src/library/scala/xml/dtd/ExternalID.scala
+++ b/src/xml/scala/xml/dtd/ExternalID.scala
@@ -15,7 +15,7 @@ package dtd
*
* @author Burak Emir
*/
-abstract class ExternalID extends parsing.TokenTests {
+sealed abstract class ExternalID extends parsing.TokenTests {
def quoted(s: String) = {
val c = if (s contains '"') '\'' else '"'
c + s + c
diff --git a/src/library/scala/xml/dtd/Scanner.scala b/src/xml/scala/xml/dtd/Scanner.scala
index 5f9d1ccaed..5f9d1ccaed 100644
--- a/src/library/scala/xml/dtd/Scanner.scala
+++ b/src/xml/scala/xml/dtd/Scanner.scala
diff --git a/src/library/scala/xml/dtd/Tokens.scala b/src/xml/scala/xml/dtd/Tokens.scala
index 07e888e77a..07e888e77a 100644
--- a/src/library/scala/xml/dtd/Tokens.scala
+++ b/src/xml/scala/xml/dtd/Tokens.scala
diff --git a/src/library/scala/xml/dtd/ValidationException.scala b/src/xml/scala/xml/dtd/ValidationException.scala
index 1bfae55286..1bfae55286 100644
--- a/src/library/scala/xml/dtd/ValidationException.scala
+++ b/src/xml/scala/xml/dtd/ValidationException.scala
diff --git a/src/library/scala/xml/dtd/impl/Base.scala b/src/xml/scala/xml/dtd/impl/Base.scala
index 91ff03a93a..91ff03a93a 100644
--- a/src/library/scala/xml/dtd/impl/Base.scala
+++ b/src/xml/scala/xml/dtd/impl/Base.scala
diff --git a/src/library/scala/xml/dtd/impl/BaseBerrySethi.scala b/src/xml/scala/xml/dtd/impl/BaseBerrySethi.scala
index f30309b037..f30309b037 100644
--- a/src/library/scala/xml/dtd/impl/BaseBerrySethi.scala
+++ b/src/xml/scala/xml/dtd/impl/BaseBerrySethi.scala
diff --git a/src/library/scala/xml/dtd/impl/DetWordAutom.scala b/src/xml/scala/xml/dtd/impl/DetWordAutom.scala
index 6f8ba4de72..6f8ba4de72 100644
--- a/src/library/scala/xml/dtd/impl/DetWordAutom.scala
+++ b/src/xml/scala/xml/dtd/impl/DetWordAutom.scala
diff --git a/src/library/scala/xml/dtd/impl/Inclusion.scala b/src/xml/scala/xml/dtd/impl/Inclusion.scala
index 07b6afaeba..07b6afaeba 100644
--- a/src/library/scala/xml/dtd/impl/Inclusion.scala
+++ b/src/xml/scala/xml/dtd/impl/Inclusion.scala
diff --git a/src/library/scala/xml/dtd/impl/NondetWordAutom.scala b/src/xml/scala/xml/dtd/impl/NondetWordAutom.scala
index 0bb19a7e3e..0bb19a7e3e 100644
--- a/src/library/scala/xml/dtd/impl/NondetWordAutom.scala
+++ b/src/xml/scala/xml/dtd/impl/NondetWordAutom.scala
diff --git a/src/library/scala/xml/dtd/impl/PointedHedgeExp.scala b/src/xml/scala/xml/dtd/impl/PointedHedgeExp.scala
index 1720604132..1720604132 100644
--- a/src/library/scala/xml/dtd/impl/PointedHedgeExp.scala
+++ b/src/xml/scala/xml/dtd/impl/PointedHedgeExp.scala
diff --git a/src/library/scala/xml/dtd/impl/SubsetConstruction.scala b/src/xml/scala/xml/dtd/impl/SubsetConstruction.scala
index 632ca1eb18..632ca1eb18 100644
--- a/src/library/scala/xml/dtd/impl/SubsetConstruction.scala
+++ b/src/xml/scala/xml/dtd/impl/SubsetConstruction.scala
diff --git a/src/library/scala/xml/dtd/impl/SyntaxError.scala b/src/xml/scala/xml/dtd/impl/SyntaxError.scala
index a5b8a5aba0..a5b8a5aba0 100644
--- a/src/library/scala/xml/dtd/impl/SyntaxError.scala
+++ b/src/xml/scala/xml/dtd/impl/SyntaxError.scala
diff --git a/src/library/scala/xml/dtd/impl/WordBerrySethi.scala b/src/xml/scala/xml/dtd/impl/WordBerrySethi.scala
index 9bf3fa518b..9bf3fa518b 100644
--- a/src/library/scala/xml/dtd/impl/WordBerrySethi.scala
+++ b/src/xml/scala/xml/dtd/impl/WordBerrySethi.scala
diff --git a/src/library/scala/xml/dtd/impl/WordExp.scala b/src/xml/scala/xml/dtd/impl/WordExp.scala
index a4bb54c1ea..a4bb54c1ea 100644
--- a/src/library/scala/xml/dtd/impl/WordExp.scala
+++ b/src/xml/scala/xml/dtd/impl/WordExp.scala
diff --git a/src/library/scala/xml/factory/Binder.scala b/src/xml/scala/xml/factory/Binder.scala
index 947f99e6a4..947f99e6a4 100755
--- a/src/library/scala/xml/factory/Binder.scala
+++ b/src/xml/scala/xml/factory/Binder.scala
diff --git a/src/library/scala/xml/factory/LoggedNodeFactory.scala b/src/xml/scala/xml/factory/LoggedNodeFactory.scala
index bc074bfc83..bc074bfc83 100644
--- a/src/library/scala/xml/factory/LoggedNodeFactory.scala
+++ b/src/xml/scala/xml/factory/LoggedNodeFactory.scala
diff --git a/src/library/scala/xml/factory/NodeFactory.scala b/src/xml/scala/xml/factory/NodeFactory.scala
index 94801bb554..94801bb554 100644
--- a/src/library/scala/xml/factory/NodeFactory.scala
+++ b/src/xml/scala/xml/factory/NodeFactory.scala
diff --git a/src/library/scala/xml/factory/XMLLoader.scala b/src/xml/scala/xml/factory/XMLLoader.scala
index b69f187039..b69f187039 100644
--- a/src/library/scala/xml/factory/XMLLoader.scala
+++ b/src/xml/scala/xml/factory/XMLLoader.scala
diff --git a/src/library/scala/xml/include/CircularIncludeException.scala b/src/xml/scala/xml/include/CircularIncludeException.scala
index 351f403008..351f403008 100644
--- a/src/library/scala/xml/include/CircularIncludeException.scala
+++ b/src/xml/scala/xml/include/CircularIncludeException.scala
diff --git a/src/library/scala/xml/include/UnavailableResourceException.scala b/src/xml/scala/xml/include/UnavailableResourceException.scala
index 47b176e0f3..47b176e0f3 100644
--- a/src/library/scala/xml/include/UnavailableResourceException.scala
+++ b/src/xml/scala/xml/include/UnavailableResourceException.scala
diff --git a/src/library/scala/xml/include/XIncludeException.scala b/src/xml/scala/xml/include/XIncludeException.scala
index 11e1644d83..11e1644d83 100644
--- a/src/library/scala/xml/include/XIncludeException.scala
+++ b/src/xml/scala/xml/include/XIncludeException.scala
diff --git a/src/library/scala/xml/include/sax/EncodingHeuristics.scala b/src/xml/scala/xml/include/sax/EncodingHeuristics.scala
index 57ab5ed91c..57ab5ed91c 100644
--- a/src/library/scala/xml/include/sax/EncodingHeuristics.scala
+++ b/src/xml/scala/xml/include/sax/EncodingHeuristics.scala
diff --git a/src/library/scala/xml/include/sax/XIncludeFilter.scala b/src/xml/scala/xml/include/sax/XIncludeFilter.scala
index 3fa3beefb0..3fa3beefb0 100644
--- a/src/library/scala/xml/include/sax/XIncludeFilter.scala
+++ b/src/xml/scala/xml/include/sax/XIncludeFilter.scala
diff --git a/src/library/scala/xml/include/sax/XIncluder.scala b/src/xml/scala/xml/include/sax/XIncluder.scala
index 1939fa1875..1939fa1875 100644
--- a/src/library/scala/xml/include/sax/XIncluder.scala
+++ b/src/xml/scala/xml/include/sax/XIncluder.scala
diff --git a/src/library/scala/xml/package.scala b/src/xml/scala/xml/package.scala
index 4001cc5ffb..4001cc5ffb 100644
--- a/src/library/scala/xml/package.scala
+++ b/src/xml/scala/xml/package.scala
diff --git a/src/library/scala/xml/parsing/ConstructingHandler.scala b/src/xml/scala/xml/parsing/ConstructingHandler.scala
index ba416e4301..ba416e4301 100755
--- a/src/library/scala/xml/parsing/ConstructingHandler.scala
+++ b/src/xml/scala/xml/parsing/ConstructingHandler.scala
diff --git a/src/library/scala/xml/parsing/ConstructingParser.scala b/src/xml/scala/xml/parsing/ConstructingParser.scala
index 3caeddabf4..3caeddabf4 100644
--- a/src/library/scala/xml/parsing/ConstructingParser.scala
+++ b/src/xml/scala/xml/parsing/ConstructingParser.scala
diff --git a/src/library/scala/xml/parsing/DefaultMarkupHandler.scala b/src/xml/scala/xml/parsing/DefaultMarkupHandler.scala
index 6ec7474843..6ec7474843 100755
--- a/src/library/scala/xml/parsing/DefaultMarkupHandler.scala
+++ b/src/xml/scala/xml/parsing/DefaultMarkupHandler.scala
diff --git a/src/library/scala/xml/parsing/ExternalSources.scala b/src/xml/scala/xml/parsing/ExternalSources.scala
index bb939bca95..bb939bca95 100644
--- a/src/library/scala/xml/parsing/ExternalSources.scala
+++ b/src/xml/scala/xml/parsing/ExternalSources.scala
diff --git a/src/library/scala/xml/parsing/FactoryAdapter.scala b/src/xml/scala/xml/parsing/FactoryAdapter.scala
index 2154bdf5ba..2154bdf5ba 100644
--- a/src/library/scala/xml/parsing/FactoryAdapter.scala
+++ b/src/xml/scala/xml/parsing/FactoryAdapter.scala
diff --git a/src/library/scala/xml/parsing/FatalError.scala b/src/xml/scala/xml/parsing/FatalError.scala
index ab3cb2a74d..ab3cb2a74d 100644
--- a/src/library/scala/xml/parsing/FatalError.scala
+++ b/src/xml/scala/xml/parsing/FatalError.scala
diff --git a/src/library/scala/xml/parsing/MarkupHandler.scala b/src/xml/scala/xml/parsing/MarkupHandler.scala
index 1ebffb9c90..1ebffb9c90 100755
--- a/src/library/scala/xml/parsing/MarkupHandler.scala
+++ b/src/xml/scala/xml/parsing/MarkupHandler.scala
diff --git a/src/library/scala/xml/parsing/MarkupParser.scala b/src/xml/scala/xml/parsing/MarkupParser.scala
index 3bbd136b67..3bbd136b67 100755
--- a/src/library/scala/xml/parsing/MarkupParser.scala
+++ b/src/xml/scala/xml/parsing/MarkupParser.scala
diff --git a/src/library/scala/xml/parsing/MarkupParserCommon.scala b/src/xml/scala/xml/parsing/MarkupParserCommon.scala
index 57c1651558..57c1651558 100644
--- a/src/library/scala/xml/parsing/MarkupParserCommon.scala
+++ b/src/xml/scala/xml/parsing/MarkupParserCommon.scala
diff --git a/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala b/src/xml/scala/xml/parsing/NoBindingFactoryAdapter.scala
index 56ac185f47..56ac185f47 100644
--- a/src/library/scala/xml/parsing/NoBindingFactoryAdapter.scala
+++ b/src/xml/scala/xml/parsing/NoBindingFactoryAdapter.scala
diff --git a/src/library/scala/xml/parsing/TokenTests.scala b/src/xml/scala/xml/parsing/TokenTests.scala
index 8dd9cdfaa3..8dd9cdfaa3 100644
--- a/src/library/scala/xml/parsing/TokenTests.scala
+++ b/src/xml/scala/xml/parsing/TokenTests.scala
diff --git a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala b/src/xml/scala/xml/parsing/ValidatingMarkupHandler.scala
index 1b20901249..1b20901249 100644
--- a/src/library/scala/xml/parsing/ValidatingMarkupHandler.scala
+++ b/src/xml/scala/xml/parsing/ValidatingMarkupHandler.scala
diff --git a/src/library/scala/xml/parsing/XhtmlEntities.scala b/src/xml/scala/xml/parsing/XhtmlEntities.scala
index 3683af202c..3683af202c 100644
--- a/src/library/scala/xml/parsing/XhtmlEntities.scala
+++ b/src/xml/scala/xml/parsing/XhtmlEntities.scala
diff --git a/src/library/scala/xml/parsing/XhtmlParser.scala b/src/xml/scala/xml/parsing/XhtmlParser.scala
index 6ce5bec8d0..6ce5bec8d0 100644
--- a/src/library/scala/xml/parsing/XhtmlParser.scala
+++ b/src/xml/scala/xml/parsing/XhtmlParser.scala
diff --git a/src/library/scala/xml/persistent/CachedFileStorage.scala b/src/xml/scala/xml/persistent/CachedFileStorage.scala
index a1489ef3f4..a1489ef3f4 100644
--- a/src/library/scala/xml/persistent/CachedFileStorage.scala
+++ b/src/xml/scala/xml/persistent/CachedFileStorage.scala
diff --git a/src/library/scala/xml/persistent/Index.scala b/src/xml/scala/xml/persistent/Index.scala
index 9ee45e7086..9ee45e7086 100644
--- a/src/library/scala/xml/persistent/Index.scala
+++ b/src/xml/scala/xml/persistent/Index.scala
diff --git a/src/library/scala/xml/persistent/SetStorage.scala b/src/xml/scala/xml/persistent/SetStorage.scala
index 8db56a2e71..8db56a2e71 100644
--- a/src/library/scala/xml/persistent/SetStorage.scala
+++ b/src/xml/scala/xml/persistent/SetStorage.scala
diff --git a/src/library/scala/xml/pull/XMLEvent.scala b/src/xml/scala/xml/pull/XMLEvent.scala
index 3beb3648e7..3beb3648e7 100644
--- a/src/library/scala/xml/pull/XMLEvent.scala
+++ b/src/xml/scala/xml/pull/XMLEvent.scala
diff --git a/src/library/scala/xml/pull/XMLEventReader.scala b/src/xml/scala/xml/pull/XMLEventReader.scala
index 76e51e17fd..76e51e17fd 100755
--- a/src/library/scala/xml/pull/XMLEventReader.scala
+++ b/src/xml/scala/xml/pull/XMLEventReader.scala
diff --git a/src/library/scala/xml/pull/package.scala b/src/xml/scala/xml/pull/package.scala
index 0e3019446b..0e3019446b 100644
--- a/src/library/scala/xml/pull/package.scala
+++ b/src/xml/scala/xml/pull/package.scala
diff --git a/src/library/scala/xml/transform/BasicTransformer.scala b/src/xml/scala/xml/transform/BasicTransformer.scala
index c98339fd67..c98339fd67 100644
--- a/src/library/scala/xml/transform/BasicTransformer.scala
+++ b/src/xml/scala/xml/transform/BasicTransformer.scala
diff --git a/src/library/scala/xml/transform/RewriteRule.scala b/src/xml/scala/xml/transform/RewriteRule.scala
index 1399ee538d..1399ee538d 100644
--- a/src/library/scala/xml/transform/RewriteRule.scala
+++ b/src/xml/scala/xml/transform/RewriteRule.scala
diff --git a/src/library/scala/xml/transform/RuleTransformer.scala b/src/xml/scala/xml/transform/RuleTransformer.scala
index 3a222ba759..3a222ba759 100644
--- a/src/library/scala/xml/transform/RuleTransformer.scala
+++ b/src/xml/scala/xml/transform/RuleTransformer.scala
diff --git a/test/files/codelib/.gitignore b/test/files/codelib/.gitignore
new file mode 100644
index 0000000000..f77a26afb7
--- /dev/null
+++ b/test/files/codelib/.gitignore
@@ -0,0 +1 @@
+code.jar
diff --git a/test/files/jvm/bytecode-test-example/Foo_1.flags b/test/files/jvm/bytecode-test-example/Foo_1.flags
new file mode 100644
index 0000000000..49f2d2c4c8
--- /dev/null
+++ b/test/files/jvm/bytecode-test-example/Foo_1.flags
@@ -0,0 +1 @@
+-Ybackend:GenASM
diff --git a/test/files/jvm/nooptimise/Foo_1.flags b/test/files/jvm/nooptimise/Foo_1.flags
index 9686c20775..f493cf9f3f 100644
--- a/test/files/jvm/nooptimise/Foo_1.flags
+++ b/test/files/jvm/nooptimise/Foo_1.flags
@@ -1 +1 @@
--optimise -Ynooptimise \ No newline at end of file
+-Ybackend:GenASM -optimise -Ynooptimise \ No newline at end of file
diff --git a/test/files/jvm/scala-concurrent-tck.check b/test/files/jvm/scala-concurrent-tck.check
deleted file mode 100644
index 18fc456acb..0000000000
--- a/test/files/jvm/scala-concurrent-tck.check
+++ /dev/null
@@ -1,3 +0,0 @@
-scala-concurrent-tck.scala:429: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
- f
- ^
diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala
index 438ee310e6..6e2b8ca033 100644
--- a/test/files/jvm/scala-concurrent-tck.scala
+++ b/test/files/jvm/scala-concurrent-tck.scala
@@ -15,21 +15,17 @@ import scala.reflect.{ classTag, ClassTag }
import scala.tools.partest.TestUtil.intercept
trait TestBase {
-
- def once(body: (() => Unit) => Unit) {
- val sv = new SyncVar[Boolean]
- body(() => sv put true)
- sv.take(2000)
+ trait Done { def apply(proof: => Boolean): Unit }
+ def once(body: Done => Unit) {
+ import java.util.concurrent.{ LinkedBlockingQueue, TimeUnit }
+ val q = new LinkedBlockingQueue[Try[Boolean]]
+ body(new Done {
+ def apply(proof: => Boolean): Unit = q offer Try(proof)
+ })
+ assert(q.poll(2000, TimeUnit.MILLISECONDS).get)
+ // Check that we don't get more than one completion
+ assert(q.poll(50, TimeUnit.MILLISECONDS) eq null)
}
-
- // def assert(cond: => Boolean) {
- // try {
- // Predef.assert(cond)
- // } catch {
- // case e => e.printStackTrace()
- // }
- // }
-
}
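With the reworked once, a test signals completion exactly once by handing a boolean proof to done, and any second completion is caught by the trailing poll. An illustrative test in the new style, assuming the file's implicit ExecutionContext is in scope:

    def testExample(): Unit = once {
      done =>
        val f = future { 21 * 2 }
        f onSuccess { case x => done(x == 42) }
        f onFailure { case _ => done(false) }
    }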
@@ -39,99 +35,52 @@ trait FutureCallbacks extends TestBase {
def testOnSuccess(): Unit = once {
done =>
var x = 0
- val f = future {
- x = 1
- }
- f onSuccess {
- case _ =>
- done()
- assert(x == 1)
- }
+ val f = future { x = 1 }
+ f onSuccess { case _ => done(x == 1) }
}
def testOnSuccessWhenCompleted(): Unit = once {
done =>
var x = 0
- val f = future {
- x = 1
- }
+ val f = future { x = 1 }
f onSuccess {
- case _ =>
- assert(x == 1)
+ case _ if x == 1 =>
x = 2
- f onSuccess {
- case _ =>
- assert(x == 2)
- done()
- }
+ f onSuccess { case _ => done(x == 2) }
}
}
def testOnSuccessWhenFailed(): Unit = once {
done =>
- val f = future[Unit] {
- done()
- throw new Exception
- }
- f onSuccess {
- case _ => assert(false)
- }
+ val f = future[Unit] { throw new Exception }
+ f onSuccess { case _ => done(false) }
+ f onFailure { case _ => done(true) }
}
def testOnFailure(): Unit = once {
done =>
- var x = 0
- val f = future[Unit] {
- x = 1
- throw new Exception
- }
- f onSuccess {
- case _ =>
- done()
- assert(false)
- }
- f onFailure {
- case _ =>
- done()
- assert(x == 1)
- }
+ val f = future[Unit] { throw new Exception }
+ f onSuccess { case _ => done(false) }
+ f onFailure { case _ => done(true) }
}
def testOnFailureWhenSpecialThrowable(num: Int, cause: Throwable): Unit = once {
done =>
- val f = future[Unit] {
- throw cause
- }
- f onSuccess {
- case _ =>
- done()
- assert(false)
- }
+ val f = future[Unit] { throw cause }
+ f onSuccess { case _ => done(false) }
f onFailure {
- case e: ExecutionException if (e.getCause == cause) =>
- done()
- case _ =>
- done()
- assert(false)
+ case e: ExecutionException if e.getCause == cause => done(true)
+ case _ => done(false)
}
}
def testOnFailureWhenTimeoutException(): Unit = once {
done =>
- val f = future[Unit] {
- throw new TimeoutException()
- }
- f onSuccess {
- case _ =>
- done()
- assert(false)
- }
+ val f = future[Unit] { throw new TimeoutException() }
+ f onSuccess { case _ => done(false) }
f onFailure {
- case e: TimeoutException =>
- done()
- case other =>
- done()
- assert(false)
+ case e: TimeoutException => done(true)
+ case _ => done(false)
}
}
@@ -151,7 +100,6 @@ trait FutureCallbacks extends TestBase {
//testOnFailureWhenSpecialThrowable(7, new InterruptedException)
testThatNestedCallbacksDoNotYieldStackOverflow()
testOnFailureWhenTimeoutException()
-
}
@@ -162,207 +110,120 @@ trait FutureCombinators extends TestBase {
done =>
val f = future { 5 }
val g = f map { x => "result: " + x }
- g onSuccess {
- case s =>
- done()
- assert(s == "result: 5")
- }
- g onFailure {
- case _ =>
- done()
- assert(false)
- }
+ g onSuccess { case s => done(s == "result: 5") }
+ g onFailure { case _ => done(false) }
}
def testMapFailure(): Unit = once {
done =>
- val f = future {
- throw new Exception("exception message")
- }
+ val f = future[Unit] { throw new Exception("exception message") }
val g = f map { x => "result: " + x }
- g onSuccess {
- case _ =>
- done()
- assert(false)
- }
- g onFailure {
- case t =>
- done()
- assert(t.getMessage() == "exception message")
- }
+ g onSuccess { case _ => done(false) }
+ g onFailure { case t => done(t.getMessage() == "exception message") }
}
def testMapSuccessPF(): Unit = once {
done =>
val f = future { 5 }
val g = f map { case r => "result: " + r }
- g onSuccess {
- case s =>
- done()
- assert(s == "result: 5")
- }
- g onFailure {
- case _ =>
- done()
- assert(false)
- }
+ g onSuccess { case s => done(s == "result: 5") }
+ g onFailure { case _ => done(false) }
}
def testTransformSuccess(): Unit = once {
done =>
val f = future { 5 }
val g = f.transform(r => "result: " + r, identity)
- g onSuccess {
- case s =>
- done()
- assert(s == "result: 5")
- }
- g onFailure {
- case _ =>
- done()
- assert(false)
- }
+ g onSuccess { case s => done(s == "result: 5") }
+ g onFailure { case _ => done(false) }
}
def testTransformSuccessPF(): Unit = once {
done =>
val f = future { 5 }
val g = f.transform( { case r => "result: " + r }, identity)
- g onSuccess {
- case s =>
- done()
- assert(s == "result: 5")
- }
- g onFailure {
- case _ =>
- done()
- assert(false)
- }
+ g onSuccess { case s => done(s == "result: 5") }
+ g onFailure { case _ => done(false) }
+ }
+
+ def testTransformFailure(): Unit = once {
+ done =>
+ val transformed = new Exception("transformed")
+ val f = future { throw new Exception("expected") }
+ val g = f.transform(identity, _ => transformed)
+ g onSuccess { case _ => done(false) }
+ g onFailure { case e => done(e eq transformed) }
+ }
+
+ def testTransformFailurePF(): Unit = once {
+ done =>
+ val e = new Exception("expected")
+ val transformed = new Exception("transformed")
+ val f = future[Unit] { throw e }
+ val g = f.transform(identity, { case `e` => transformed })
+ g onSuccess { case _ => done(false) }
+ g onFailure { case e => done(e eq transformed) }
}
def testFoldFailure(): Unit = once {
done =>
- val f = future {
- throw new Exception("exception message")
- }
+ val f = future[Unit] { throw new Exception("expected") }
val g = f.transform(r => "result: " + r, identity)
- g onSuccess {
- case _ =>
- done()
- assert(false)
- }
- g onFailure {
- case t =>
- done()
- assert(t.getMessage() == "exception message")
- }
+ g onSuccess { case _ => done(false) }
+ g onFailure { case t => done(t.getMessage() == "expected") }
}
def testFlatMapSuccess(): Unit = once {
done =>
val f = future { 5 }
val g = f flatMap { _ => future { 10 } }
- g onSuccess {
- case x =>
- done()
- assert(x == 10)
- }
- g onFailure {
- case _ =>
- done()
- assert(false)
- }
+ g onSuccess { case x => done(x == 10) }
+ g onFailure { case _ => done(false) }
}
def testFlatMapFailure(): Unit = once {
done =>
- val f = future {
- throw new Exception("exception message")
- }
+ val f = future[Unit] { throw new Exception("expected") }
val g = f flatMap { _ => future { 10 } }
- g onSuccess {
- case _ =>
- done()
- assert(false)
- }
- g onFailure {
- case t =>
- done()
- assert(t.getMessage() == "exception message")
- }
+ g onSuccess { case _ => done(false) }
+ g onFailure { case t => done(t.getMessage() == "expected") }
}
def testFilterSuccess(): Unit = once {
done =>
val f = future { 4 }
val g = f filter { _ % 2 == 0 }
- g onSuccess {
- case x: Int =>
- done()
- assert(x == 4)
- }
- g onFailure {
- case _ =>
- done()
- assert(false)
- }
+ g onSuccess { case x: Int => done(x == 4) }
+ g onFailure { case _ => done(false) }
}
def testFilterFailure(): Unit = once {
done =>
val f = future { 4 }
val g = f filter { _ % 2 == 1 }
- g onSuccess {
- case x: Int =>
- done()
- assert(false)
- }
+ g onSuccess { case x: Int => done(false) }
g onFailure {
- case e: NoSuchElementException =>
- done()
- assert(true)
- case _ =>
- done()
- assert(false)
+ case e: NoSuchElementException => done(true)
+ case _ => done(false)
}
}
def testCollectSuccess(): Unit = once {
done =>
val f = future { -5 }
- val g = f collect {
- case x if x < 0 => -x
- }
- g onSuccess {
- case x: Int =>
- done()
- assert(x == 5)
- }
- g onFailure {
- case _ =>
- done()
- assert(false)
- }
+ val g = f collect { case x if x < 0 => -x }
+ g onSuccess { case x: Int => done(x == 5) }
+ g onFailure { case _ => done(false) }
}
def testCollectFailure(): Unit = once {
done =>
val f = future { -5 }
- val g = f collect {
- case x if x > 0 => x * 2
- }
- g onSuccess {
- case _ =>
- done()
- assert(false)
- }
+ val g = f collect { case x if x > 0 => x * 2 }
+ g onSuccess { case _ => done(false) }
g onFailure {
- case e: NoSuchElementException =>
- done()
- assert(true)
- case _ =>
- done()
- assert(false)
+ case e: NoSuchElementException => done(true)
+ case _ => done(false)
}
}
@@ -376,16 +237,8 @@ trait FutureCombinators extends TestBase {
f foreach { x => p.success(x * 2) }
val g = p.future
- g.onSuccess {
- case res: Int =>
- done()
- assert(res == 10)
- }
- g.onFailure {
- case _ =>
- done()
- assert(false)
- }
+ g.onSuccess { case res: Int => done(res == 10) }
+ g.onFailure { case _ => done(false) }
}
def testForeachFailure(): Unit = once {
@@ -396,16 +249,8 @@ trait FutureCombinators extends TestBase {
f onFailure { case _ => p.failure(new Exception) }
val g = p.future
- g.onSuccess {
- case _ =>
- done()
- assert(false)
- }
- g.onFailure {
- case _ =>
- done()
- assert(true)
- }
+ g.onSuccess { case _ => done(false) }
+ g.onFailure { case _ => done(true) }
}
def testRecoverSuccess(): Unit = once {
@@ -415,18 +260,9 @@ trait FutureCombinators extends TestBase {
throw cause
} recover {
case re: RuntimeException =>
- "recovered"
- }
- f onSuccess {
- case x =>
- done()
- assert(x == "recovered")
- }
- f onFailure { case any =>
- done()
- assert(false)
- }
- f
+ "recovered" }
+ f onSuccess { case x => done(x == "recovered") }
+ f onFailure { case any => done(false) }
}
def testRecoverFailure(): Unit = once {
@@ -437,15 +273,8 @@ trait FutureCombinators extends TestBase {
} recover {
case te: TimeoutException => "timeout"
}
- f onSuccess {
- case x =>
- done()
- assert(false)
- }
- f onFailure { case any =>
- done()
- assert(any == cause)
- }
+ f onSuccess { case _ => done(false) }
+ f onFailure { case any => done(any == cause) }
}
def testRecoverWithSuccess(): Unit = once {
@@ -457,15 +286,8 @@ trait FutureCombinators extends TestBase {
case re: RuntimeException =>
future { "recovered" }
}
- f onSuccess {
- case x =>
- done()
- assert(x == "recovered")
- }
- f onFailure { case any =>
- done()
- assert(false)
- }
+ f onSuccess { case x => done(x == "recovered") }
+ f onFailure { case any => done(false) }
}
def testRecoverWithFailure(): Unit = once {
@@ -477,15 +299,8 @@ trait FutureCombinators extends TestBase {
case te: TimeoutException =>
future { "timeout" }
}
- f onSuccess {
- case x =>
- done()
- assert(false)
- }
- f onFailure { case any =>
- done()
- assert(any == cause)
- }
+ f onSuccess { case x => done(false) }
+ f onFailure { case any => done(any == cause) }
}
def testZipSuccess(): Unit = once {
@@ -493,52 +308,28 @@ trait FutureCombinators extends TestBase {
val f = future { 5 }
val g = future { 6 }
val h = f zip g
- h onSuccess {
- case (l: Int, r: Int) =>
- done()
- assert(l+r == 11)
- }
- h onFailure {
- case _ =>
- done()
- assert(false)
- }
+ h onSuccess { case (l: Int, r: Int) => done(l+r == 11) }
+ h onFailure { case _ => done(false) }
}
def testZipFailureLeft(): Unit = once {
done =>
- val cause = new Exception("exception message")
+ val cause = new Exception("expected")
val f = future { throw cause }
val g = future { 6 }
val h = f zip g
- h onSuccess {
- case _ =>
- done()
- assert(false)
- }
- h onFailure {
- case e: Exception =>
- done()
- assert(e.getMessage == "exception message")
- }
+ h onSuccess { case _ => done(false) }
+ h onFailure { case e: Exception => done(e.getMessage == "expected") }
}
def testZipFailureRight(): Unit = once {
done =>
- val cause = new Exception("exception message")
+ val cause = new Exception("expected")
val f = future { 5 }
val g = future { throw cause }
val h = f zip g
- h onSuccess {
- case _ =>
- done()
- assert(false)
- }
- h onFailure {
- case e: Exception =>
- done()
- assert(e.getMessage == "exception message")
- }
+ h onSuccess { case _ => done(false) }
+ h onFailure { case e: Exception => done(e.getMessage == "expected") }
}
def testFallbackTo(): Unit = once {
@@ -546,17 +337,8 @@ trait FutureCombinators extends TestBase {
val f = future { sys.error("failed") }
val g = future { 5 }
val h = f fallbackTo g
-
- h onSuccess {
- case x: Int =>
- done()
- assert(x == 5)
- }
- h onFailure {
- case _ =>
- done()
- assert(false)
- }
+ h onSuccess { case x: Int => done(x == 5) }
+ h onFailure { case _ => done(false) }
}
def testFallbackToFailure(): Unit = once {
@@ -566,16 +348,8 @@ trait FutureCombinators extends TestBase {
val g = future { throw cause }
val h = f fallbackTo g
- h onSuccess {
- case _ =>
- done()
- assert(false)
- }
- h onFailure {
- case e: Exception =>
- done()
- assert(e == cause)
- }
+ h onSuccess { case _ => done(false) }
+ h onFailure { case e => done(e eq cause) }
}
testMapSuccess()
@@ -597,6 +371,8 @@ trait FutureCombinators extends TestBase {
testZipFailureRight()
testFallbackTo()
testFallbackToFailure()
+ testTransformSuccess()
+ testTransformSuccessPF()
}
@@ -606,40 +382,26 @@ trait FutureProjections extends TestBase {
def testFailedFailureOnComplete(): Unit = once {
done =>
val cause = new RuntimeException
- val f = future {
- throw cause
- }
+ val f = future { throw cause }
f.failed onComplete {
- case Success(t) =>
- assert(t == cause)
- done()
- case Failure(t) =>
- assert(false)
+ case Success(t) => done(t == cause)
+ case Failure(t) => done(false)
}
}
def testFailedFailureOnSuccess(): Unit = once {
done =>
val cause = new RuntimeException
- val f = future {
- throw cause
- }
- f.failed onSuccess {
- case t =>
- assert(t == cause)
- done()
- }
+ val f = future { throw cause }
+ f.failed onSuccess { case t => done(t == cause) }
}
def testFailedSuccessOnComplete(): Unit = once {
done =>
val f = future { 0 }
f.failed onComplete {
- case Success(t) =>
- assert(false)
- case Failure(t) =>
- assert(t.isInstanceOf[NoSuchElementException])
- done()
+ case Failure(_: NoSuchElementException) => done(true)
+ case _ => done(false)
}
}
@@ -647,19 +409,17 @@ trait FutureProjections extends TestBase {
done =>
val f = future { 0 }
f.failed onFailure {
- case nsee: NoSuchElementException =>
- done()
+ case e: NoSuchElementException => done(true)
+ case _ => done(false)
}
+ f.failed onSuccess { case _ => done(false) }
}
def testFailedFailureAwait(): Unit = once {
done =>
val cause = new RuntimeException
- val f = future {
- throw cause
- }
- assert(Await.result(f.failed, Duration(500, "ms")) == cause)
- done()
+ val f = future { throw cause }
+ done(Await.result(f.failed, Duration(500, "ms")) == cause)
}
def testFailedSuccessAwait(): Unit = once {
@@ -667,9 +427,10 @@ trait FutureProjections extends TestBase {
val f = future { 0 }
try {
Await.result(f.failed, Duration(500, "ms"))
- assert(false)
+ done(false)
} catch {
- case nsee: NoSuchElementException => done()
+ case nsee: NoSuchElementException => done(true)
+ case _: Throwable => done(false)
}
}
@@ -682,8 +443,8 @@ trait FutureProjections extends TestBase {
Await.ready(f, Duration.Zero)
Await.ready(f, Duration(500, "ms"))
Await.ready(f, Duration.Inf)
- done()
- } onFailure { case x => throw x }
+ done(true)
+ } onFailure { case x => done(throw x) }
}
def testAwaitNegativeDuration(): Unit = once { done =>
@@ -692,8 +453,8 @@ trait FutureProjections extends TestBase {
intercept[TimeoutException] { Await.ready(f, Duration.Zero) }
intercept[TimeoutException] { Await.ready(f, Duration.MinusInf) }
intercept[TimeoutException] { Await.ready(f, Duration(-500, "ms")) }
- done()
- } onFailure { case x => throw x }
+ done(true)
+ } onFailure { case x => done(throw x) }
}
testFailedFailureOnComplete()
@@ -704,7 +465,6 @@ trait FutureProjections extends TestBase {
testFailedSuccessAwait()
testAwaitPositiveDuration()
testAwaitNegativeDuration()
-
}
@@ -714,33 +474,25 @@ trait Blocking extends TestBase {
def testAwaitSuccess(): Unit = once {
done =>
val f = future { 0 }
- Await.result(f, Duration(500, "ms"))
- done()
+ done(Await.result(f, Duration(500, "ms")) == 0)
}
def testAwaitFailure(): Unit = once {
done =>
val cause = new RuntimeException
- val f = future {
- throw cause
- }
+ val f = future { throw cause }
try {
Await.result(f, Duration(500, "ms"))
- assert(false)
+ done(false)
} catch {
- case t: Throwable =>
- assert(t == cause)
- done()
+ case t: Throwable => done(t == cause)
}
}
def testFQCNForAwaitAPI(): Unit = once {
done =>
-
- assert(classOf[CanAwait].getName == "scala.concurrent.CanAwait")
- assert(Await.getClass.getName == "scala.concurrent.Await")
-
- done()
+ done(classOf[CanAwait].getName == "scala.concurrent.CanAwait" &&
+ Await.getClass.getName == "scala.concurrent.Await")
}
testAwaitSuccess()
@@ -813,22 +565,26 @@ trait Promises extends TestBase {
val p = promise[Int]()
val f = p.future
- f onSuccess {
- case x =>
- done()
- assert(x == 5)
- }
- f onFailure {
- case any =>
- done()
- assert(false)
- }
+ f onSuccess { case x => done(x == 5) }
+ f onFailure { case any => done(false) }
p.success(5)
}
- testSuccess()
+ def testFailure(): Unit = once {
+ done =>
+ val e = new Exception("expected")
+ val p = promise[Int]()
+ val f = p.future
+
+ f onSuccess { case x => done(false) }
+ f onFailure { case any => done(any eq e) }
+
+ p.failure(e)
+ }
+ testSuccess()
+ testFailure()
}
@@ -888,11 +644,11 @@ trait CustomExecutionContext extends TestBase {
val count = countExecs { implicit ec =>
blocking {
once { done =>
- val f = future({ assertNoEC() })(defaultEC)
+ val f = future(assertNoEC())(defaultEC)
f onSuccess {
case _ =>
assertEC()
- done()
+ done(true)
}
assertNoEC()
}
@@ -911,7 +667,7 @@ trait CustomExecutionContext extends TestBase {
f onSuccess {
case _ =>
assertEC()
- done()
+ done(true)
}
}
}
@@ -935,15 +691,10 @@ trait CustomExecutionContext extends TestBase {
Promise.successful(x + 1).future.map(addOne).map(addOne)
} onComplete {
case Failure(t) =>
- try {
- throw new AssertionError("error in test: " + t.getMessage, t)
- } finally {
- done()
- }
+ done(throw new AssertionError("error in test: " + t.getMessage, t))
case Success(x) =>
assertEC()
- assert(x == 14)
- done()
+ done(x == 14)
}
assertNoEC()
}
@@ -999,21 +750,14 @@ trait ExecutionContextPrepare extends TestBase {
done =>
theLocal.set("secret")
val fut = future { 42 }
- fut onComplete {
- case _ =>
- assert(theLocal.get == "secret")
- done()
- }
+ fut onComplete { case _ => done(theLocal.get == "secret") }
}
def testMap(): Unit = once {
done =>
theLocal.set("secret2")
val fut = future { 42 }
- fut map { x =>
- assert(theLocal.get == "secret2")
- done()
- }
+ fut map { x => done(theLocal.get == "secret2") }
}
testOnComplete()
diff --git a/test/files/lib/.gitignore b/test/files/lib/.gitignore
new file mode 100644
index 0000000000..b4ac0b8789
--- /dev/null
+++ b/test/files/lib/.gitignore
@@ -0,0 +1,8 @@
+annotations.jar
+enums.jar
+genericNest.jar
+javac-artifacts.jar
+jsoup-1.3.1.jar
+methvsfield.jar
+nest.jar
+scalacheck.jar
diff --git a/test/files/neg/case-collision.flags b/test/files/neg/case-collision.flags
index 85d8eb2ba2..14c1069dee 100644
--- a/test/files/neg/case-collision.flags
+++ b/test/files/neg/case-collision.flags
@@ -1 +1 @@
--Xfatal-warnings
+-Ybackend:GenASM -Xfatal-warnings
diff --git a/test/files/neg/case-collision2.check b/test/files/neg/case-collision2.check
new file mode 100644
index 0000000000..b8481f46bb
--- /dev/null
+++ b/test/files/neg/case-collision2.check
@@ -0,0 +1,12 @@
+case-collision2.scala:5: warning: Class foo.BIPPY differs only in case from foo.Bippy. Such classes will overwrite one another on case-insensitive filesystems.
+class BIPPY
+ ^
+case-collision2.scala:8: warning: Class foo.DINGO$ differs only in case from foo.Dingo$. Such classes will overwrite one another on case-insensitive filesystems.
+object DINGO
+ ^
+case-collision2.scala:11: warning: Class foo.HyRaX$ differs only in case from foo.Hyrax$. Such classes will overwrite one another on case-insensitive filesystems.
+object HyRaX
+ ^
+error: No warnings can be incurred under -Xfatal-warnings.
+three warnings found
+one error found
diff --git a/test/files/neg/case-collision2.flags b/test/files/neg/case-collision2.flags
new file mode 100644
index 0000000000..5bfa9da5c5
--- /dev/null
+++ b/test/files/neg/case-collision2.flags
@@ -0,0 +1 @@
+-Ynooptimize -Ybackend:GenBCode -Xfatal-warnings
diff --git a/test/files/neg/case-collision2.scala b/test/files/neg/case-collision2.scala
new file mode 100644
index 0000000000..924e33005a
--- /dev/null
+++ b/test/files/neg/case-collision2.scala
@@ -0,0 +1,12 @@
+package foo
+
+class Bippy
+
+class BIPPY
+
+object Dingo
+object DINGO
+
+case class Hyrax()
+object HyRaX
+
diff --git a/test/files/neg/macro-quasiquotes.check b/test/files/neg/macro-quasiquotes.check
new file mode 100644
index 0000000000..a2d48723b5
--- /dev/null
+++ b/test/files/neg/macro-quasiquotes.check
@@ -0,0 +1,7 @@
+Macros_1.scala:14: error: macro implementation has wrong shape:
+ required: (x: Impls.this.c.Expr[Int]): Impls.this.c.Expr[Any]
+ found : (x: Impls.this.c.universe.Block): Impls.this.c.universe.Apply
+type mismatch for parameter x: Impls.this.c.Expr[Int] does not conform to Impls.this.c.universe.Block
+ def m3(x: Int) = macro Impls.impl3
+ ^
+one error found
diff --git a/test/files/neg/macro-quasiquotes/Macros_1.scala b/test/files/neg/macro-quasiquotes/Macros_1.scala
new file mode 100644
index 0000000000..17c1034720
--- /dev/null
+++ b/test/files/neg/macro-quasiquotes/Macros_1.scala
@@ -0,0 +1,15 @@
+import language.experimental.macros
+import scala.reflect.macros.Macro
+
+trait Impls extends Macro {
+ import c.universe._
+ def impl1(x: Expr[Int]) = q"println(x)"
+ def impl2(x: Tree) = q"println(x)"
+ def impl3(x: Block) = q"println(x)"
+}
+
+object Macros {
+ def m1(x: Int) = macro Impls.impl1
+ def m2(x: Int) = macro Impls.impl2
+ def m3(x: Int) = macro Impls.impl3
+} \ No newline at end of file
diff --git a/test/files/neg/macro-quasiquotes/Test_2.scala b/test/files/neg/macro-quasiquotes/Test_2.scala
new file mode 100644
index 0000000000..c7b8948d79
--- /dev/null
+++ b/test/files/neg/macro-quasiquotes/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ Macros.m1
+ Macros.m2
+ Macros.m3
+}
diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check
index f6bd703e1f..cdc12c2490 100644
--- a/test/files/neg/names-defaults-neg.check
+++ b/test/files/neg/names-defaults-neg.check
@@ -125,9 +125,12 @@ names-defaults-neg.scala:134: error: missing parameter type for expanded functio
names-defaults-neg.scala:135: error: parameter 'a' is already specified at parameter position 1
val taf3 = testAnnFun(b = _: String, a = get(8))
^
-names-defaults-neg.scala:136: error: wrong number of parameters; expected = 2
+names-defaults-neg.scala:136: error: missing parameter type for expanded function ((x$3) => testAnnFun(x$3, ((x$4) => b = x$4)))
val taf4: (Int, String) => Unit = testAnnFun(_, b = _)
- ^
+ ^
+names-defaults-neg.scala:136: error: missing parameter type for expanded function ((x$4) => b = x$4)
+ val taf4: (Int, String) => Unit = testAnnFun(_, b = _)
+ ^
names-defaults-neg.scala:144: error: variable definition needs type because 'x' is used as a named argument in its body.
def t3 { var x = t.f(x = 1) }
^
@@ -165,4 +168,4 @@ names-defaults-neg.scala:180: error: reference to x is ambiguous; it is both a m
class u18 { var x: Int = u.f(x = 1) }
^
four warnings found
-41 errors found
+42 errors found
diff --git a/test/files/neg/t1181.check b/test/files/neg/t1181.check
index 3724752a85..13b73d5381 100644
--- a/test/files/neg/t1181.check
+++ b/test/files/neg/t1181.check
@@ -1,8 +1,10 @@
t1181.scala:8: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses
case (Nil, Nil) => map
^
-t1181.scala:9: error: missing parameter type
+t1181.scala:9: error: type mismatch;
+ found : scala.collection.immutable.Map[Symbol,Symbol]
+ required: Symbol
_ => buildMap(map.updated(keyList.head, valueList.head), keyList.tail, valueList.tail)
- ^
+ ^
one warning found
one error found
diff --git a/test/files/neg/t556.check b/test/files/neg/t556.check
index c278e13991..5135dc92ef 100644
--- a/test/files/neg/t556.check
+++ b/test/files/neg/t556.check
@@ -1,4 +1,4 @@
-t556.scala:3: error: wrong number of parameters; expected = 1
+t556.scala:3: error: missing parameter type
def g:Int = f((x,y)=>x)
- ^
+ ^
one error found
diff --git a/test/files/neg/t6574.check b/test/files/neg/t6574.check
new file mode 100644
index 0000000000..c67b4ed804
--- /dev/null
+++ b/test/files/neg/t6574.check
@@ -0,0 +1,7 @@
+t6574.scala:4: error: could not optimize @tailrec annotated method notTailPos$extension: it contains a recursive call not in tail position
+ println("tail")
+ ^
+t6574.scala:8: error: could not optimize @tailrec annotated method differentTypeArgs$extension: it is called recursively with different type arguments
+ {(); new Bad[String, Unit](0)}.differentTypeArgs
+ ^
+two errors found
diff --git a/test/files/neg/t6574.scala b/test/files/neg/t6574.scala
new file mode 100644
index 0000000000..bba97ad62e
--- /dev/null
+++ b/test/files/neg/t6574.scala
@@ -0,0 +1,10 @@
+class Bad[X, Y](val v: Int) extends AnyVal {
+ @annotation.tailrec final def notTailPos[Z](a: Int)(b: String) {
+ this.notTailPos[Z](a)(b)
+ println("tail")
+ }
+
+ @annotation.tailrec final def differentTypeArgs {
+ {(); new Bad[String, Unit](0)}.differentTypeArgs
+ }
+}
diff --git a/test/files/neg/valueclasses-impl-restrictions.check b/test/files/neg/valueclasses-impl-restrictions.check
index 63924493aa..0af9173f74 100644
--- a/test/files/neg/valueclasses-impl-restrictions.check
+++ b/test/files/neg/valueclasses-impl-restrictions.check
@@ -6,12 +6,8 @@ valueclasses-impl-restrictions.scala:9: error: implementation restriction: neste
This restriction is planned to be removed in subsequent releases.
trait I2 {
^
-valueclasses-impl-restrictions.scala:15: error: implementation restriction: nested class is not allowed in value class
-This restriction is planned to be removed in subsequent releases.
- val i2 = new I2 { val q = x.s }
- ^
-valueclasses-impl-restrictions.scala:21: error: implementation restriction: nested class is not allowed in value class
+valueclasses-impl-restrictions.scala:23: error: implementation restriction: nested class is not allowed in value class
This restriction is planned to be removed in subsequent releases.
private[this] class I2(val q: String)
^
-four errors found
+three errors found
diff --git a/test/files/neg/valueclasses-impl-restrictions.scala b/test/files/neg/valueclasses-impl-restrictions.scala
index 137f3f854c..f0577a94aa 100644
--- a/test/files/neg/valueclasses-impl-restrictions.scala
+++ b/test/files/neg/valueclasses-impl-restrictions.scala
@@ -12,8 +12,10 @@ class X1(val s: String) extends AnyVal {
}
def y(x: X1) = {
- val i2 = new I2 { val q = x.s }
+ val i2 = new I2 { val q = x.s } // allowed as of SI-7571
i2.z
+
+ { case x => x } : PartialFunction[Int, Int] // allowed
}
}
diff --git a/test/files/pos/SI-7638.scala b/test/files/pos/SI-7638.scala
new file mode 100644
index 0000000000..da16e0bd2c
--- /dev/null
+++ b/test/files/pos/SI-7638.scala
@@ -0,0 +1,51 @@
+package miniboxing.tests.compile
+
+trait Ordering[@specialized(Int) A] {
+ def eqv(x: Array[A], y: Array[A]): Boolean = false
+}
+
+trait ArrayVectorOrder[@specialized(Int) A] extends Ordering[A] {
+ override def eqv(x: Array[A], y: Array[A]): Boolean = super.eqv(x, y)
+}
+
+object vectorOrder {
+ implicit def arrayOrder[@specialized(Int) A]() =
+ /*
+ * Before applying patch:
+ *
+ * while compiling: SI-7638.scala
+ * during phase: mixin
+ * library version: version 2.10.3-20130625-164027-d22e8d282c
+ * compiler version: version 2.10.3-20130627-153946-54cb6af7db
+ * reconstructed args:
+ *
+ * last tree to typer: TypeTree(class Array)
+ * symbol: class Array in package scala (flags: final)
+ * symbol definition: final class Array[T >: ? <: ?] extends Object
+ * tpe: Array[Int]
+ * symbol owners: class Array -> package scala
+ * context owners: anonymous class anon$1 -> package compile
+ *
+ * == Expanded type of tree ==
+ *
+ * TypeRef(
+ * TypeSymbol(final class Array[T >: ? <: ?] extends Object)
+ * args = List(TypeRef(TypeSymbol(final abstract class Int extends )))
+ * )
+ *
+ * unhandled exception while transforming SI-7638.scala
+ * error: uncaught exception during compilation: java.lang.UnsupportedOperationException
+ * error: java.lang.UnsupportedOperationException: tail of empty list
+ * at scala.collection.immutable.Nil$.tail(List.scala:339)
+ * at scala.collection.immutable.Nil$.tail(List.scala:334)
+ * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$rebindSuper$1.apply(Mixin.scala:123)
+ * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$rebindSuper$1.apply(Mixin.scala:122)
+ * at scala.reflect.internal.SymbolTable.atPhase(SymbolTable.scala:207)
+ * at scala.reflect.internal.SymbolTable.afterPhase(SymbolTable.scala:216)
+ * at scala.tools.nsc.Global.afterPickler(Global.scala:1104)
+ * at scala.tools.nsc.transform.Mixin.scala$tools$nsc$transform$Mixin$$rebindSuper(Mixin.scala:122)
+ * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$mixinTraitMembers$1$1.apply(Mixin.scala:339)
+ * at scala.tools.nsc.transform.Mixin$$anonfun$scala$tools$nsc$transform$Mixin$$mixinTraitMembers$1$1.apply(Mixin.scala:292)
+ */
+ new ArrayVectorOrder[A] { }
+}
diff --git a/test/files/pos/t6221.scala b/test/files/pos/t6221.scala
new file mode 100644
index 0000000000..dd7776f596
--- /dev/null
+++ b/test/files/pos/t6221.scala
@@ -0,0 +1,29 @@
+class MyFunc[-A, +B] extends (A => B) { def apply(x: A): B = ??? }
+
+class MyCollection[A] {
+ def map[B](f: MyFunc[A, B]): MyCollection[B] = new MyCollection[B]
+}
+
+class OtherFunc[-A, +B] {}
+
+object Test {
+ implicit def functionToMyFunc[A, B](f: A => B): MyFunc[A, B] = new MyFunc
+
+ implicit def otherFuncToMyFunc[A, B](f: OtherFunc[A, B]): MyFunc[A, B] = new MyFunc
+
+ def main(args: Array[String]) {
+ val col = new MyCollection[Int]
+
+ // Doesn't compile: error: missing parameter type for expanded function ((x$1) => x$1.toString)
+ println(col.map(_.toString))
+
+ // Doesn't compile: error: missing parameter type
+ println(col.map(x => x.toString))
+
+ // Does compile
+ println(col.map((x: Int) => x.toString))
+
+ // Does compile (even though type params of OtherFunc not given)
+ println(col.map(new OtherFunc))
+ }
+} \ No newline at end of file
diff --git a/test/files/pos/t6574.scala b/test/files/pos/t6574.scala
new file mode 100644
index 0000000000..59c1701eb4
--- /dev/null
+++ b/test/files/pos/t6574.scala
@@ -0,0 +1,19 @@
+class Bad[X, Y](val v: Int) extends AnyVal {
+ def vv = v
+ @annotation.tailrec final def foo[Z](a: Int)(b: String) {
+ this.foo[Z](a)(b)
+ }
+
+ @annotation.tailrec final def differentReceiver {
+ {(); new Bad[X, Y](0)}.differentReceiver
+ }
+
+ @annotation.tailrec final def dependent[Z](a: Int)(b: String): b.type = {
+ this.dependent[Z](a)(b)
+ }
+}
+
+class HK[M[_]](val v: Int) extends AnyVal {
+ def hk[N[_]]: Unit = if (false) hk[M] else ()
+}
+
diff --git a/src/compiler/scala/tools/cmd/Demo.scala b/test/files/pos/t7591/Demo.scala
index fc90140f8f..696d53585b 100644
--- a/src/compiler/scala/tools/cmd/Demo.scala
+++ b/test/files/pos/t7591/Demo.scala
@@ -3,9 +3,7 @@
* @author Paul Phillips
*/
-package scala
-package tools
-package cmd
+import scala.tools.cmd._
/** A sample command specification for illustrative purposes.
* First take advantage of the meta-options:
diff --git a/test/files/run/macro-quasiquotes.check b/test/files/run/macro-quasiquotes.check
new file mode 100644
index 0000000000..94ebaf9001
--- /dev/null
+++ b/test/files/run/macro-quasiquotes.check
@@ -0,0 +1,4 @@
+1
+2
+3
+4
diff --git a/test/files/run/macro-quasiquotes/Macros_1.scala b/test/files/run/macro-quasiquotes/Macros_1.scala
new file mode 100644
index 0000000000..b64eec8743
--- /dev/null
+++ b/test/files/run/macro-quasiquotes/Macros_1.scala
@@ -0,0 +1,15 @@
+import language.experimental.macros
+import scala.reflect.macros.Macro
+
+trait Impls extends Macro {
+ import c.universe._
+ def impl1 = q"println(1)"
+ def impl2 = q"{ println(2); println(3) }"
+ def impl3 = q"4"
+}
+
+object Macros {
+ def m1 = macro Impls.impl1
+ def m2 = macro Impls.impl2
+ def m3 = macro Impls.impl3
+} \ No newline at end of file
diff --git a/test/files/run/macro-quasiquotes/Test_2.scala b/test/files/run/macro-quasiquotes/Test_2.scala
new file mode 100644
index 0000000000..4be193938f
--- /dev/null
+++ b/test/files/run/macro-quasiquotes/Test_2.scala
@@ -0,0 +1,5 @@
+object Test extends App {
+ Macros.m1
+ Macros.m2
+ println(Macros.m3)
+}
diff --git a/test/files/run/t4594-repl-settings.scala b/test/files/run/t4594-repl-settings.scala
new file mode 100644
index 0000000000..d2335460e5
--- /dev/null
+++ b/test/files/run/t4594-repl-settings.scala
@@ -0,0 +1,26 @@
+
+import scala.tools.partest.SessionTest
+
+// Detected repl transcript paste: ctrl-D to finish.
+object Test extends SessionTest {
+ def session =
+""" |Type in expressions to have them evaluated.
+ |Type :help for more information.
+ |
+ |scala> @deprecated(message="Please don't do that.", since="Time began.") def depp = "john"
+ |depp: String
+ |
+ |scala> def a = depp
+ |warning: there were 1 deprecation warning(s); re-run with -deprecation for details
+ |a: String
+ |
+ |scala> :settings +deprecation
+ |
+ |scala> def b = depp
+ |<console>:8: warning: method depp is deprecated: Please don't do that.
+ | def b = depp
+ | ^
+ |b: String
+ |
+ |scala> """
+}
diff --git a/test/files/run/t6221.check b/test/files/run/t6221.check
new file mode 100644
index 0000000000..aa1bdd0e6e
--- /dev/null
+++ b/test/files/run/t6221.check
@@ -0,0 +1 @@
+((x) => x.$percent(2).$eq$eq(0))
diff --git a/test/files/run/t6221/Macros_1.scala b/test/files/run/t6221/Macros_1.scala
new file mode 100644
index 0000000000..c9500626d8
--- /dev/null
+++ b/test/files/run/t6221/Macros_1.scala
@@ -0,0 +1,22 @@
+import language.experimental.macros
+import language.implicitConversions
+import scala.reflect.macros.Context
+import scala.reflect.runtime.universe.Tree
+
+class ReflectiveClosure[A, B](val tree: Tree, fn: A => B) extends (A => B) {
+ def apply(x: A) = fn(x)
+}
+
+object ReflectiveClosure {
+ implicit def reflectClosure[A, B](f: A => B): ReflectiveClosure[A, B] = macro Macros.reflectiveClosureImpl[A, B]
+}
+
+object Macros {
+ def reflectiveClosureImpl[A, B](c: Context)(f: c.Expr[A => B]): c.Expr[ReflectiveClosure[A, B]] = {
+ import c.universe._
+ val u = treeBuild.mkRuntimeUniverseRef
+ val m = EmptyTree
+ val tree = c.Expr[scala.reflect.runtime.universe.Tree](Select(c.reifyTree(u, m, f.tree), newTermName("tree")))
+ c.universe.reify(new ReflectiveClosure(tree.splice, f.splice))
+ }
+}
diff --git a/test/files/run/t6221/Test_2.scala b/test/files/run/t6221/Test_2.scala
new file mode 100644
index 0000000000..9f6b2280a7
--- /dev/null
+++ b/test/files/run/t6221/Test_2.scala
@@ -0,0 +1,10 @@
+object Test extends App {
+ implicit class PimpedList[T](val list: List[T]) {
+ def query(predicate: ReflectiveClosure[T, Boolean]): List[T] = {
+ println(predicate.tree)
+ list filter predicate
+ }
+ }
+
+ List(1, 2, 3).query(x => x % 2 == 0)
+} \ No newline at end of file
diff --git a/test/files/run/t6331.scala b/test/files/run/t6331.scala
index 5ac627a8ea..d9d46f10ea 100644
--- a/test/files/run/t6331.scala
+++ b/test/files/run/t6331.scala
@@ -1,9 +1,4 @@
-import scala.tools.partest._
-import java.io._
-import scala.tools.nsc._
-import scala.tools.cmd.CommandLineParser
-import scala.tools.nsc.{Global, Settings, CompilerCommand}
-import scala.tools.nsc.reporters.ConsoleReporter
+import scala.tools.partest.DirectTest
// Test of Constant#equals, which must account for floating point intricacies.
object Test extends DirectTest {
diff --git a/test/files/run/t6331b.scala b/test/files/run/t6331b.scala
index c567455c5c..3e09965ee8 100644
--- a/test/files/run/t6331b.scala
+++ b/test/files/run/t6331b.scala
@@ -1,12 +1,5 @@
-import scala.tools.partest._
-import java.io._
-import scala.tools.nsc._
-import scala.tools.cmd.CommandLineParser
-import scala.tools.nsc.{Global, Settings, CompilerCommand}
-import scala.tools.nsc.reporters.ConsoleReporter
-
import scala.tools.partest.trace
-import scala.util.control.Exception._
+import scala.util.control.Exception.allCatch
object Test extends App {
diff --git a/test/files/run/t6574b.check b/test/files/run/t6574b.check
new file mode 100644
index 0000000000..e10fa4f810
--- /dev/null
+++ b/test/files/run/t6574b.check
@@ -0,0 +1 @@
+List(5, 4, 3, 2, 1)
diff --git a/test/files/run/t6574b.scala b/test/files/run/t6574b.scala
new file mode 100644
index 0000000000..df329a31ca
--- /dev/null
+++ b/test/files/run/t6574b.scala
@@ -0,0 +1,7 @@
+object Test extends App {
+ implicit class AnyOps(val i: Int) extends AnyVal {
+ private def parentsOf(x: Int): List[Int] = if (x == 0) Nil else x :: parentsOf(x - 1)
+ def parents: List[Int] = parentsOf(i)
+ }
+ println((5).parents)
+}
diff --git a/test/files/run/t7008-scala-defined.flags b/test/files/run/t7008-scala-defined.flags
new file mode 100644
index 0000000000..49f2d2c4c8
--- /dev/null
+++ b/test/files/run/t7008-scala-defined.flags
@@ -0,0 +1 @@
+-Ybackend:GenASM
diff --git a/test/files/run/t7271.scala b/test/files/run/t7271.scala
index 55c388b7f5..69d5ea377e 100644
--- a/test/files/run/t7271.scala
+++ b/test/files/run/t7271.scala
@@ -1,5 +1,4 @@
import scala.tools.partest._
-import java.io._
import scala.tools.nsc._
import scala.tools.cmd.CommandLineParser
import scala.tools.nsc.{Global, Settings, CompilerCommand}
diff --git a/test/files/run/t7439/Test_2.scala b/test/files/run/t7439/Test_2.scala
index 3ebbcfe753..ce9b907145 100644
--- a/test/files/run/t7439/Test_2.scala
+++ b/test/files/run/t7439/Test_2.scala
@@ -23,7 +23,8 @@ object Test extends StoreReporterDirectTest {
val a1Class = new File(testOutput.path, "A_1.class")
assert(a1Class.exists)
assert(a1Class.delete())
- println(s"Recompiling after deleting $a1Class")
+ // testIdent normalizes to separate names using '/' regardless of platform, drops all but last two parts
+ println(s"Recompiling after deleting ${a1Class.testIdent}")
// bad symbolic reference error expected (but no stack trace!)
compileCode(C)
diff --git a/test/files/run/t7571.scala b/test/files/run/t7571.scala
new file mode 100644
index 0000000000..00b9695168
--- /dev/null
+++ b/test/files/run/t7571.scala
@@ -0,0 +1,12 @@
+class Foo(val a: Int) extends AnyVal {
+ def foo = { {case x => x + a}: PartialFunction[Int, Int]}
+
+ def bar = (new {}).toString
+}
+
+object Test extends App {
+ val x = new Foo(1).foo.apply(2)
+ assert(x == 3, x)
+ val s = new Foo(1).bar
+ assert(s.nonEmpty, s)
+}
diff --git a/test/files/run/t7582-private-within.check b/test/files/run/t7582-private-within.check
new file mode 100644
index 0000000000..b2743ffa06
--- /dev/null
+++ b/test/files/run/t7582-private-within.check
@@ -0,0 +1,12 @@
+private[package pack] class JavaPackagePrivate
+private[package pack] module JavaPackagePrivate
+private[package pack] module class JavaPackagePrivate
+private[package pack] field field
+private[package pack] primary constructor <init>
+private[package pack] method meth
+private[package pack] field staticField
+private[package pack] method staticMeth
+private[package pack] method <clinit>
+private[package pack] field staticField
+private[package pack] method staticMeth
+private[package pack] method <clinit>
diff --git a/test/files/run/t7582-private-within/JavaPackagePrivate.java b/test/files/run/t7582-private-within/JavaPackagePrivate.java
new file mode 100644
index 0000000000..672d19b57e
--- /dev/null
+++ b/test/files/run/t7582-private-within/JavaPackagePrivate.java
@@ -0,0 +1,8 @@
+package pack;
+
+class JavaPackagePrivate {
+ int field = 0;
+ static int staticField = 0;
+ void meth() { }
+ static void staticMeth() { }
+}
diff --git a/test/files/run/t7582-private-within/Test.scala b/test/files/run/t7582-private-within/Test.scala
new file mode 100644
index 0000000000..3d581f063b
--- /dev/null
+++ b/test/files/run/t7582-private-within/Test.scala
@@ -0,0 +1,22 @@
+import scala.tools.partest.DirectTest
+
+// Testing that the `privateWithin` field is correctly populated on all
+// the related symbols (e.g. module class) under separate compilation.
+object Test extends DirectTest {
+ def code = ???
+
+ def show(): Unit = {
+ val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
+ val global = newCompiler("-usejavacp", "-cp", classpath, "-d", testOutput.path)
+ import global._
+ withRun(global) { _ =>
+ def check(sym: Symbol) = {
+ sym.initialize
+ println(f"${sym.accessString}%12s ${sym.accurateKindString} ${sym.name.decode}") // we want to see private[pack] for all of these.
+ }
+ val sym = rootMirror.getRequiredClass("pack.JavaPackagePrivate")
+ val syms = Seq(sym, sym.companionModule, sym.companionModule.moduleClass)
+ (syms ++ syms.flatMap(_.info.decls)).foreach(check)
+ }
+ }
+}
diff --git a/test/files/run/t7582.check b/test/files/run/t7582.check
new file mode 100644
index 0000000000..225fb1ace8
--- /dev/null
+++ b/test/files/run/t7582.check
@@ -0,0 +1,2 @@
+warning: there were 1 inliner warning(s); re-run with -Yinline-warnings for details
+2
diff --git a/test/files/run/t7582.flags b/test/files/run/t7582.flags
new file mode 100644
index 0000000000..1182725e86
--- /dev/null
+++ b/test/files/run/t7582.flags
@@ -0,0 +1 @@
+-optimize \ No newline at end of file
diff --git a/test/files/run/t7582/InlineHolder.scala b/test/files/run/t7582/InlineHolder.scala
new file mode 100644
index 0000000000..a18b9effaa
--- /dev/null
+++ b/test/files/run/t7582/InlineHolder.scala
@@ -0,0 +1,16 @@
+package p1 {
+ object InlineHolder {
+ @inline def inlinable = p1.PackageProtectedJava.protectedMethod() + 1
+ }
+}
+
+object O {
+ @noinline
+ def x = p1.InlineHolder.inlinable
+}
+
+object Test {
+ def main(args: Array[String]) {
+ println(O.x)
+ }
+}
diff --git a/test/files/run/t7582/PackageProtectedJava.java b/test/files/run/t7582/PackageProtectedJava.java
new file mode 100644
index 0000000000..b7ea2a7676
--- /dev/null
+++ b/test/files/run/t7582/PackageProtectedJava.java
@@ -0,0 +1,6 @@
+package p1;
+
+// public class, protected method
+public class PackageProtectedJava {
+ static final int protectedMethod() { return 1; }
+}
diff --git a/test/files/run/t7582b.check b/test/files/run/t7582b.check
new file mode 100644
index 0000000000..225fb1ace8
--- /dev/null
+++ b/test/files/run/t7582b.check
@@ -0,0 +1,2 @@
+warning: there were 1 inliner warning(s); re-run with -Yinline-warnings for details
+2
diff --git a/test/files/run/t7582b.flags b/test/files/run/t7582b.flags
new file mode 100644
index 0000000000..1182725e86
--- /dev/null
+++ b/test/files/run/t7582b.flags
@@ -0,0 +1 @@
+-optimize \ No newline at end of file
diff --git a/test/files/run/t7582b/InlineHolder.scala b/test/files/run/t7582b/InlineHolder.scala
new file mode 100644
index 0000000000..a18b9effaa
--- /dev/null
+++ b/test/files/run/t7582b/InlineHolder.scala
@@ -0,0 +1,16 @@
+package p1 {
+ object InlineHolder {
+ @inline def inlinable = p1.PackageProtectedJava.protectedMethod() + 1
+ }
+}
+
+object O {
+ @noinline
+ def x = p1.InlineHolder.inlinable
+}
+
+object Test {
+ def main(args: Array[String]) {
+ println(O.x)
+ }
+}
diff --git a/test/files/run/t7582b/PackageProtectedJava.java b/test/files/run/t7582b/PackageProtectedJava.java
new file mode 100644
index 0000000000..55a44b79f9
--- /dev/null
+++ b/test/files/run/t7582b/PackageProtectedJava.java
@@ -0,0 +1,6 @@
+package p1;
+
+// protected class, public method
+class PackageProtectedJava {
+ public static final int protectedMethod() { return 1; }
+}
diff --git a/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala b/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala
new file mode 100644
index 0000000000..03f8aa58d3
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/ArbitraryTreesAndNames.scala
@@ -0,0 +1,307 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+import scala.reflect.api._
+import scala.reflect.runtime.universe._
+import scala.reflect.runtime.universe.Flag._
+
+trait ArbitraryTreesAndNames {
+ def smallList[T](size: Int, g: Gen[T]) = {
+ val n: Int = choose(0, size / 2 + 1).sample match {
+ case Some(i) => i
+ case None => 0
+ }
+ containerOfN[List, T](n, g)
+ }
+
+ def shortIdent(len: Int) =
+ for(name <- identifier)
+ yield if(name.length <= len) name
+ else name.substring(0, len - 1)
+
+ def genTermName = for(name <- shortIdent(8)) yield TermName(name)
+ def genTypeName = for(name <- shortIdent(8)) yield TypeName(name)
+ def genName = oneOf(genTermName, genTypeName)
+
+ def genFlagSet = oneOf(
+ TRAIT, INTERFACE, MUTABLE, MACRO,
+ DEFERRED, ABSTRACT, FINAL, SEALED,
+ IMPLICIT, LAZY, OVERRIDE, PRIVATE,
+ PROTECTED, LOCAL, CASE, ABSOVERRIDE,
+ BYNAMEPARAM, PARAM, COVARIANT, CONTRAVARIANT,
+ DEFAULTPARAM, PRESUPER, DEFAULTINIT
+ )
+
+ def genModifiers = for(flagset <- genFlagSet) yield Modifiers(flagset)
+
+ def genConstant =
+ for(value <- oneOf(arbitrary[Byte], arbitrary[Short], arbitrary[Char],
+ arbitrary[Int], arbitrary[Long], arbitrary[Float],
+ arbitrary[Double], arbitrary[Boolean], arbitrary[String]))
+ yield Constant(value)
+
+ def genAnnotated(size: Int, argGen: Int => Gen[Tree]) =
+ for(annot <- genTree(size - 1); arg <- argGen(size - 1))
+ yield Annotated(annot, arg)
+
+ def genAlternative(size: Int): Gen[Alternative] =
+ for(trees <- smallList(size, genTree(size - 1)))
+ yield Alternative(trees)
+
+ def genAppliedTypeTree(size: Int) =
+ for(tpt <- genTree(size - 1) if tpt.isType;
+ args <- smallList(size, genTree(size - 1)))
+ yield AppliedTypeTree(tpt, args)
+
+ def genApply(size: Int) =
+ for(fun <- genTree(size - 1);
+ args <- smallList(size, genTree(size - 1)))
+ yield Apply(fun, args)
+
+ def genAssign(size: Int) =
+ for(lhs <- genTree(size - 1); rhs <- genTree(size - 1))
+ yield Assign(lhs, rhs)
+
+ def genAssignOrNamedArg(size: Int) =
+ for(lhs <- genTree(size - 1); rhs <- genTree(size - 1))
+ yield AssignOrNamedArg(lhs, rhs)
+
+ def genBind(size: Int, nameGen: Gen[Name]) =
+ for(name <- nameGen; body <- genTree(size - 1))
+ yield Bind(name, body)
+
+ def genBlock(size: Int) =
+ for(stats <- smallList(size, genTree(size - 1)); expr <- genTree(size - 1))
+ yield Block(stats, expr)
+
+ def genCaseDef(size: Int) =
+ for(pat <- genTree(size - 1); guard <- genTree(size - 1); body <- genTree(size - 1))
+ yield CaseDef(pat, guard, body)
+
+ def genClassDef(size: Int) =
+ for(mods <- genModifiers; name <- genTypeName;
+ tparams <- smallList(size, genTypeDef(size - 1));
+ impl <- genTemplate(size - 1))
+ yield ClassDef(mods, name, tparams, impl)
+
+ def genCompoundTypeTree(size: Int) =
+ for(templ <- genTemplate(size - 1))
+ yield CompoundTypeTree(templ)
+
+ def genDefDef(size: Int) =
+ for(mods <- genModifiers; name <- genName;
+ tpt <- genTree(size -1); rhs <- genTree(size - 1);
+ tparams <- smallList(size, genTypeDef(size - 1));
+ vparamss <- smallList(size, smallList(size, genValDef(size - 1))))
+ yield DefDef(mods, name, tparams, vparamss, tpt, rhs)
+
+ def genExistentialTypeTree(size: Int) =
+ for(tpt <- genTree(size - 1); where <- smallList(size, genTree(size - 1)))
+ yield ExistentialTypeTree(tpt, where)
+
+ def genFunction(size: Int) =
+ for(vparams <- smallList(size, genValDef(size - 1)); body <- genTree(size - 1))
+ yield Function(vparams, body)
+
+ def genIdent(nameGen: Gen[Name] = genName) =
+ for(name <- nameGen) yield Ident(name)
+
+ def genIf(size: Int) =
+ for(cond <- genTree(size - 1); thenp <- genTree(size - 1); elsep <- genTree(size - 1))
+ yield If(cond, thenp, elsep)
+
+ def genImport(size: Int) =
+ for(expr <- genTree(size - 1); selectors <- smallList(size, genImportSelector(size - 1)))
+ yield Import(expr, selectors)
+
+ def genImportSelector(size: Int) =
+ for(name <- genName; namePos <- arbitrary[Int]; rename <- genName; renamePos <- arbitrary[Int])
+ yield ImportSelector(name, namePos, rename, renamePos)
+
+ def genTemplate(size: Int) =
+ for(parents <- smallList(size, genTree(size - 1));
+ self <- genValDef(size - 1);
+ body <- smallList(size, genTree(size - 1)))
+ yield Template(parents, self, body)
+
+ def genLabelDef(size: Int) =
+ for(name <- genTermName; params <- smallList(size, genIdent()); rhs <- genTree(size - 1))
+ yield LabelDef(name, params, rhs)
+
+ def genLiteral =
+ for(const <- genConstant) yield Literal(const)
+
+ def genMatch(size: Int) =
+ for(selector <- genTree(size - 1); cases <- smallList(size, genCaseDef(size - 1)))
+ yield Match(selector, cases)
+
+ def genModuleDef(size: Int) =
+ for(mods <- genModifiers; name <- genTermName; impl <- genTemplate(size - 1))
+ yield ModuleDef(mods, name, impl)
+
+ def genNew(size: Int) =
+ for(tpt <- genTree(size - 1))
+ yield New(tpt)
+
+ def genRefTree(size: Int) =
+ oneOf(genSelect(size), genIdent(), genSelectFromTypeTree(size))
+
+ def genPackageDef(size: Int) =
+ for(reftree <- genRefTree(size - 1); stats <- smallList(size, genTree(size - 1)))
+ yield PackageDef(reftree, stats)
+
+ def genTypeSelect(size: Int) =
+ for(qual <- genTree(size - 1); name <- genTypeName)
+ yield Select(qual, name)
+
+ def genSelect(size: Int, nameGen: Gen[Name] = genName) =
+ for(qual <- genTree(size - 1); name <- nameGen)
+ yield Select(qual, name)
+
+ def genSelectFromTypeTree(size: Int) =
+ for(qual <- genTreeIsType(size - 1); name <- genTypeName)
+ yield SelectFromTypeTree(qual, name)
+
+ def genReferenceToBoxed(size: Int) =
+ for(ident <- genIdent())
+ yield ReferenceToBoxed(ident)
+
+ def genReturn(size: Int) =
+ for(expr <- genTree(size - 1))
+ yield Return(expr)
+
+ def genSingletonTypeTree(size: Int) =
+ for(expr <- genTree(size - 1))
+ yield SingletonTypeTree(expr)
+
+ def genStar(size: Int) =
+ for(expr <- genTree(size - 1))
+ yield Star(expr)
+
+ def genSuper(size: Int) =
+ for(qual <- genTree(size - 1); mix <- genTypeName)
+ yield Super(qual, mix)
+
+ def genThis(size: Int) =
+ for(qual <- genTypeName)
+ yield This(qual)
+
+ def genThrow(size: Int) =
+ for(expr <- genTree(size - 1))
+ yield Throw(expr)
+
+ def genTry(size: Int) =
+ for(block <- genTree(size - 1);
+ catches <- smallList(size, genCaseDef(size - 1));
+ finalizer <- genTree(size - 1))
+ yield Try(block, catches, finalizer)
+
+ def genTypeApply(size: Int) =
+ for(fun <- genTreeIsTerm(size - 1); args <- smallList(size, genTree(size - 1)))
+ yield TypeApply(fun, args)
+
+ def genTypeBoundsTree(size: Int) =
+ for(lo <- genTree(size - 1); hi <- genTree(size - 1))
+ yield TypeBoundsTree(lo, hi)
+
+ def genTypeDef(size: Int): Gen[TypeDef] =
+ for(mods <- genModifiers; name <- genTypeName;
+ tparams <- smallList(size, genTypeDef(size - 1)); rhs <- genTree(size - 1))
+ yield TypeDef(mods, name, tparams, rhs)
+
+ def genTypeTree: Gen[TypeTree] = TypeTree()
+
+ def genTyped(size: Int) =
+ for(expr <- genTree(size - 1); tpt <- genTree(size - 1))
+ yield Typed(expr, tpt)
+
+ def genUnApply(size: Int) =
+ for(fun <- genTree(size - 1); args <- smallList(size, genTree(size - 1)))
+ yield UnApply(fun, args)
+
+ def genValDef(size: Int) =
+ for(mods <- genModifiers; name <- genTermName;
+ tpt <- genTree(size - 1); rhs <- genTree(size - 1))
+ yield ValDef(mods, name, tpt, rhs)
+
+ def genTree(size: Int): Gen[Tree] =
+ if (size <= 1) oneOf(EmptyTree, genTreeIsTerm(size), genTreeIsType(size))
+ else oneOf(genTree(1),
+ // these trees are neither terms nor types
+ genPackageDef(size - 1), genModuleDef(size - 1),
+ genCaseDef(size - 1), genDefDef(size - 1),
+ genTypeDef(size - 1), genTemplate(size - 1),
+ genClassDef(size - 1), genValDef(size - 1),
+ genImport(size - 1))
+
+ def genTreeIsTerm(size: Int): Gen[Tree] =
+ if (size <= 1) oneOf(genLiteral, genIdent(genTermName))
+ else oneOf(genTreeIsTerm(1), genBind(size - 1, genTermName),
+ genAnnotated(size - 1, genTreeIsTerm), genSelect(size - 1, genTermName),
+ genAlternative(size - 1), genApply(size - 1), genAssign(size - 1),
+ genAssignOrNamedArg(size - 1), genBlock(size - 1), genFunction(size - 1),
+ genIf(size - 1), genLabelDef(size - 1), genMatch(size - 1), genNew(size - 1),
+ genReturn(size - 1), genStar(size - 1), genSuper(size - 1), genThis(size - 1),
+ genThrow(size - 1), genTry(size - 1), genTypeApply(size - 1),
+ genTyped(size - 1), genUnApply(size - 1))
+
+ def genTreeIsType(size: Int): Gen[Tree] =
+ if (size <= 1) genIdent(genTypeName)
+ else oneOf(genTreeIsType(1), genAnnotated(size - 1, genTreeIsType),
+ genBind(size - 1, genTypeName), genSelect(size - 1, genTypeName),
+ genSingletonTypeTree(size - 1), genSelectFromTypeTree(size - 1),
+ genExistentialTypeTree(size - 1), genCompoundTypeTree(size - 1),
+ genAppliedTypeTree(size - 1), genTypeBoundsTree(size - 1))
+
+  /* These are marker types that allow writing tests that
+   * depend specifically on Trees that are terms or types.
+   * They are transparently transformed to trees through
+   * implicit conversions and liftables for quasiquotes.
+   */
+
+ case class TreeIsTerm(tree: Tree) { require(tree.isTerm, showRaw(tree)) }
+ case class TreeIsType(tree: Tree) { require(tree.isType, showRaw(tree)) }
+
+ def genTreeIsTermWrapped(size: Int) =
+ for(tit <- genTreeIsTerm(size)) yield TreeIsTerm(tit)
+
+ def genTreeIsTypeWrapped(size: Int) =
+ for(tit <- genTreeIsType(size)) yield TreeIsType(tit)
+
+ implicit object liftTreeIsTerm extends Liftable[TreeIsTerm] {
+ def apply(universe: Universe, value: TreeIsTerm): universe.Tree =
+ value.tree.asInstanceOf[universe.Tree]
+ }
+ implicit object liftTreeIsType extends Liftable[TreeIsType] {
+ def apply(universe: Universe, value: TreeIsType): universe.Tree =
+ value.tree.asInstanceOf[universe.Tree]
+ }
+ implicit def treeIsTerm2tree(tit: TreeIsTerm) = tit.tree
+ implicit def treeIsType2tree(tit: TreeIsType) = tit.tree
+
+ implicit val arbConstant: Arbitrary[Constant] = Arbitrary(genConstant)
+ implicit val arbModifiers: Arbitrary[Modifiers] = Arbitrary(genModifiers)
+ implicit val arbTermName: Arbitrary[TermName] = Arbitrary(genTermName)
+ implicit val arbTypeName: Arbitrary[TypeName] = Arbitrary(genTypeName)
+ implicit val arbName: Arbitrary[Name] = Arbitrary(genName)
+
+  // Tree generators are bound by this size to make
+ // generation times shorter and less memory hungry.
+ // TODO: is there any better solution?
+ val maxTreeSize = 5
+
+ def arbitrarySized[T](gen: Int => Gen[T]) =
+ Arbitrary(sized(s => gen(s.min(maxTreeSize))))
+
+ implicit val arbLiteral: Arbitrary[Literal] = Arbitrary(genLiteral)
+ implicit val arbIdent: Arbitrary[Ident] = Arbitrary(genIdent())
+ implicit val arbValDef: Arbitrary[ValDef] = arbitrarySized(genValDef)
+ implicit val arbDefDef: Arbitrary[DefDef] = arbitrarySized(genDefDef)
+ implicit val arbTypeDef: Arbitrary[TypeDef] = arbitrarySized(genTypeDef)
+ implicit val arbBind: Arbitrary[Bind] = arbitrarySized(genBind(_, genName))
+ implicit val arbTree: Arbitrary[Tree] = arbitrarySized(genTree)
+ implicit val arbTreeIsTerm: Arbitrary[TreeIsTerm] = arbitrarySized(genTreeIsTermWrapped)
+ implicit val arbTreeIsType: Arbitrary[TreeIsType] = arbitrarySized(genTreeIsTypeWrapped)
+} \ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/ErrorProps.scala b/test/files/scalacheck/quasiquotes/ErrorProps.scala
new file mode 100644
index 0000000000..044a332a04
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/ErrorProps.scala
@@ -0,0 +1,199 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+import scala.reflect.runtime.universe._
+import Flag._
+
+object ErrorProps extends QuasiquoteProperties("errors") {
+ property("can't extract two .. cardinalities in a row") = fails(
+ "Can't extract with .. here",
+ """
+ val xs = List(q"x1", q"x2")
+ val q"f(..$xs1, ..$xs2)" = xs
+ """)
+
+ property("can't splice with given cardinality") = fails(
+ "Can't splice List[reflect.runtime.universe.Ident], consider using ..",
+ """
+ val xs = List(q"x", q"x")
+ q"$xs"
+ """)
+
+ property("splice typename into typedef with default bounds") = fails(
+ "reflect.runtime.universe.Name expected but reflect.runtime.universe.TypeDef found",
+ """
+ val T1 = TypeName("T1")
+ val T2 = q"type T"
+ val t = EmptyTree
+ q"type $T1[$T2 >: _root_.scala.Any <: _root_.scala.Nothing] = $t" ≈
+ TypeDef(Modifiers(), T1, List(T2), t)
+ """)
+
+ property("can't splice annotations with ... cardinality") = fails(
+ "Can't splice with ... here",
+ """
+ val annots = List(List(q"Foo"))
+ q"@...$annots def foo"
+ """)
+
+ property("@..$first @$rest def foo") = fails(
+ "Can't extract with .. here",
+ """
+ val a = annot("a")
+ val b = annot("b")
+ val c = annot("c")
+ val q"@..$first @$rest def foo" = q"@$a @$b @$c def foo"
+ """)
+
+ property("only literal string arguments") = fails(
+ "Quasiquotes can only be used with literal strings",
+ """
+ val s: String = "foo"
+ StringContext(s).q()
+ """)
+
+ property("don't know how to splice inside of strings") = fails(
+ "Don't know how to splice here",
+ """
+ val x: Tree = EmptyTree
+ StringContext("\"", "\"").q(x)
+ """)
+
+ property("expected different cardinality") = fails(
+ "Can't splice List[reflect.runtime.universe.Tree] with ..., consider using ..",
+ """
+ val args: List[Tree] = Nil
+ q"f(...$args)"
+ """)
+
+ property("non-liftable type ..") = fails(
+ "Can't splice List[StringBuilder] with .., consider omitting the dots or providing an implicit instance of Liftable[StringBuilder]",
+ """
+ import java.lang.StringBuilder
+ val bazs = List(new StringBuilder)
+ q"f(..$bazs)"
+ """)
+
+ property("non-liftable type ...") = fails(
+ "Can't splice List[List[StringBuilder]] with .., consider using ... or providing an implicit instance of Liftable[StringBuilder]",
+ """
+ import java.lang.StringBuilder
+ val bazs = List(List(new StringBuilder))
+ q"f(..$bazs)"
+ """)
+
+ property("use .. card or provide liftable") = fails(
+ "Can't splice List[StringBuilder], consider using .. or providing an implicit instance of Liftable[List[StringBuilder]]",
+ """
+ import java.lang.StringBuilder
+ val lst: List[StringBuilder] = Nil
+ q"f($lst)"
+ """)
+
+ property("use ... card or provide liftable") = fails(
+ "Can't splice List[List[reflect.runtime.universe.Ident]], consider using ...",
+ """
+ val xs = List(List(q"x", q"x"))
+ q"$xs"
+ """)
+
+ property("use zero card") = fails(
+ "Can't splice reflect.runtime.universe.Tree with .., consider omitting the dots",
+ """
+ val t = EmptyTree
+ q"f(..$t)"
+ """)
+
+ property("not liftable or natively supported") = fails(
+ "Can't splice StringBuilder, consider providing an implicit instance of Liftable[StringBuilder]",
+ """
+ import java.lang.StringBuilder
+ val sb = new StringBuilder
+ q"f($sb)"
+ """)
+
+ property("casedef expected") = fails(
+ "reflect.runtime.universe.CaseDef expected but reflect.runtime.universe.Tree found",
+ """
+ val t = EmptyTree
+ q"_ { case $t }"
+ """)
+
+ property("can't splice with ... card here") = fails(
+ "Can't splice with ... here",
+ """
+ val lst: List[List[Tree]] = Nil; val t = EmptyTree
+ q"f(...$lst, $t)"
+ """)
+
+ property("name expected") = fails(
+ "reflect.runtime.universe.Name expected but reflect.runtime.universe.Tree found",
+ """
+ val t = EmptyTree
+ q"class $t"
+ """)
+
+ property("flags or mods expected") = fails(
+ "reflect.runtime.universe.FlagSet or reflect.runtime.universe.Modifiers expected but reflect.runtime.universe.Tree found",
+ """
+ val t = EmptyTree
+ q"$t def foo"
+ """)
+
+ property("cant splice flags together with mods") = fails(
+ "Can't splice flags together with modifiers, consider merging flags into modifiers",
+ """
+ val f = Flag.IMPLICIT; val m = NoMods
+ q"$f $m def foo"
+ """)
+
+ property("can't splice mods with annots") = fails(
+ "Can't splice modifiers together with annotations, consider merging annotations into modifiers",
+ """
+ val m = NoMods
+ q"@annot $m def foo"
+ """)
+
+ property("can't splice modifiers with inline flags") = fails(
+ "Can't splice modifiers together with flags, consider merging flags into modifiers",
+ """
+ val m = NoMods
+ q"$m implicit def foo"
+ """)
+
+ property("can't splice multiple mods") = fails(
+ "Can't splice multiple modifiers, consider merging them into a single modifiers instance",
+ """
+ val m1 = NoMods; val m2 = NoMods
+ q"$m1 $m2 def foo"
+ """)
+
+ property("can't extract with .. card here") = fails(
+ "Can't extract with .. here",
+ """
+ val q"f(..$xs, $y)" = EmptyTree
+ """)
+
+ property("can't extract mods with annots") = fails(
+ "Can't extract modifiers together with annotations, consider extracting just modifiers",
+ """
+ val q"@$annot $mods def foo" = EmptyTree
+ """)
+
+ property("can't extract multiple mods") = fails(
+ "Can't extract multiple modifiers together, consider extracting a single modifiers instance",
+ """
+ val q"$m1 $m2 def foo" = EmptyTree
+ """)
+
+ property("can't parse more than one casedef") = fails(
+ "Can't parse more than one casedef, consider generating a match tree instead",
+ """
+ cq"1 => 2 case 3 => 5"
+ """)
+
+ // // Make sure a nice error is reported in this case
+ // { import Flag._; val mods = NoMods; q"lazy $mods val x: Int" }
+} \ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/LiftableProps.scala b/test/files/scalacheck/quasiquotes/LiftableProps.scala
new file mode 100644
index 0000000000..510ab99068
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/LiftableProps.scala
@@ -0,0 +1,84 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+import scala.reflect.runtime.universe._
+import Flag._
+
+object LiftableProps extends QuasiquoteProperties("liftable") {
+ property("splice byte") = test {
+ val c: Byte = 0
+ assert(q"$c" ≈ Literal(Constant(c)))
+ }
+
+ property("splice short") = test {
+ val c: Short = 0
+ assert(q"$c" ≈ Literal(Constant(c)))
+ }
+
+ property("splice char") = test {
+ val c: Char = 'c'
+ assert(q"$c" ≈ Literal(Constant(c)))
+ }
+
+ property("splice int") = test {
+ val c: Int = 0
+ assert(q"$c" ≈ Literal(Constant(c)))
+ }
+
+ property("splice long") = test {
+ val c: Long = 0
+ assert(q"$c" ≈ Literal(Constant(c)))
+ }
+
+ property("splice float") = test {
+ val c: Float = 0.0f
+ assert(q"$c" ≈ Literal(Constant(c)))
+ }
+
+ property("splice double") = test {
+ val c: Double = 0.0
+ assert(q"$c" ≈ Literal(Constant(c)))
+ }
+
+ property("splice boolean") = test {
+ val c: Boolean = false
+ assert(q"$c" ≈ Literal(Constant(c)))
+ }
+
+ property("splice string") = test {
+ val c: String = "s"
+ assert(q"$c" ≈ Literal(Constant(c)))
+ }
+
+ property("splice unit") = test {
+ val c: Unit = ()
+ assert(q"$c" ≈ Literal(Constant(c)))
+ }
+
+ property("lift symbol") = test {
+ val s = rootMirror.staticClass("scala.Int")
+ assert(q"$s" ≈ Ident(s))
+ }
+
+ property("lift type") = test {
+ val tpe = rootMirror.staticClass("scala.Int").toType
+ assert(q"$tpe" ≈ TypeTree(tpe))
+ }
+
+ property("lift type tag") = test {
+ val tag = TypeTag.Int
+ assert(q"$tag" ≈ TypeTree(tag.tpe))
+ }
+
+ property("lift weak type tag") = test {
+ val tag = WeakTypeTag.Int
+ assert(q"$tag" ≈ TypeTree(tag.tpe))
+ }
+
+ property("lift constant") = test {
+ val const = Constant(0)
+ assert(q"$const" ≈ q"0")
+ }
+} \ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/PatternConstructionProps.scala b/test/files/scalacheck/quasiquotes/PatternConstructionProps.scala
new file mode 100644
index 0000000000..aee50c9c5f
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/PatternConstructionProps.scala
@@ -0,0 +1,37 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+import scala.reflect.runtime.universe._
+import Flag._
+
+object PatternConstructionProps extends QuasiquoteProperties("pattern construction") {
+ property("splice bind") = forAll { (bind: Bind) =>
+ pq"$bind" ≈ bind
+ }
+
+ property("splice name into bind") = forAll { (name: TermName) =>
+ pq"$name" ≈ Bind(name, Ident(nme.WILDCARD))
+ }
+
+ property("splice name and tree into bind") = forAll { (name: TermName, tree: Tree) =>
+ pq"$name @ $tree" ≈ Bind(name, tree)
+ }
+
+ property("splice type name into typed") = forAll { (name: TypeName) =>
+ pq"_ : $name" ≈ Typed(Ident(nme.WILDCARD), Ident(name))
+ }
+
+ property("splice tree into typed") = forAll { (typ: Tree) =>
+ pq"_ : $typ" ≈ Typed(Ident(nme.WILDCARD), typ)
+ }
+
+ property("splice into apply") = forAll { (pat: Tree, subpat: Tree) =>
+ pq"$pat($subpat)" ≈ Apply(pat, List(subpat))
+ }
+
+ property("splice into casedef") = forAll { (pat: Tree, cond: Tree, body: Tree) =>
+ cq"$pat if $cond => $body" ≈ CaseDef(pat, cond, Block(List(), body))
+ }
+} \ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala
new file mode 100644
index 0000000000..f73fd29b22
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/PatternDeconstructionProps.scala
@@ -0,0 +1,35 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+import scala.reflect.runtime.universe._
+import Flag._
+import definitions._
+
+object PatternDeconstructionProps extends QuasiquoteProperties("pattern deconstruction") {
+ property("extract bind") = forAll { (bind: Bind) =>
+ val pq"$bind0" = pq"$bind"
+ bind0 ≈ bind
+ }
+
+ property("extract bind and subpattern") = forAll { (name: TermName, subp: Tree) =>
+ val pq"$name0 @ $subp0" = pq"$name @ $subp"
+ name0 ≈ name && subp0 ≈ subp
+ }
+
+ property("extract typed") = forAll { (typ: Tree) =>
+ val pq"_ : $typ0" = pq"_ : $typ"
+ typ0 ≈ typ
+ }
+
+ property("extract apply") = forAll { (pat: Tree, subpat: Tree) =>
+ val pq"$pat0($subpat0)" = pq"$pat($subpat)"
+ pat0 ≈ pat && subpat0 ≈ subpat
+ }
+
+ property("extract casedef") = forAll { (pat: Tree, cond: Tree, body: Tree) =>
+ val cq"$pat0 if $cond0 => $body0" = cq"$pat if $cond => $body"
+ pat0 ≈ pat && cond0 ≈ cond && body0 ≈ body
+ }
+} \ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala b/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala
new file mode 100644
index 0000000000..5e87aa57cc
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/QuasiquoteProperties.scala
@@ -0,0 +1,89 @@
+import scala.reflect.runtime.universe._
+import scala.tools.reflect.ToolBox
+import scala.tools.reflect.ToolBoxError
+import scala.reflect.macros.TypecheckException
+
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+class QuasiquoteProperties(name: String) extends Properties(name) with ArbitraryTreesAndNames with Helpers
+
+trait Helpers {
+ /** Runs a code block and returns proof confirmation
+   *  if no exception has been thrown while executing the code
+ * block. This is useful for simple one-off tests.
+ */
+  def test[T](block: => T) =
+ Prop { (params) =>
+ block
+ Result(Prop.Proof)
+ }
+
+ implicit class TestSimilarTree(tree1: Tree) {
+ def ≈(tree2: Tree) = tree1.equalsStructure(tree2)
+ }
+
+ implicit class TestSimilarListTree(lst: List[Tree]) {
+ def ≈(other: List[Tree]) = (lst.length == other.length) && lst.zip(other).forall { case (t1, t2) => t1 ≈ t2 }
+ }
+
+ implicit class TestSimilarListListTree(lst: List[List[Tree]]) {
+ def ≈(other: List[List[Tree]]) = (lst.length == other.length) && lst.zip(other).forall { case (l1, l2) => l1 ≈ l2 }
+ }
+
+ implicit class TestSimilarName(name: Name) {
+ def ≈(other: Name) = name == other
+ }
+
+ implicit class TestSimilarMods(mods: Modifiers) {
+ def ≈(other: Modifiers) = (mods.flags == other.flags) && (mods.privateWithin ≈ other.privateWithin) && (mods.annotations ≈ other.annotations)
+ }
+
+ def assertThrows[T <: AnyRef](f: => Any)(implicit manifest: Manifest[T]): Unit = {
+ val clazz = manifest.erasure.asInstanceOf[Class[T]]
+ val thrown =
+ try {
+ f
+ false
+ } catch {
+ case u: Throwable =>
+ if (!clazz.isAssignableFrom(u.getClass))
+ assert(false, s"wrong exception: $u")
+ true
+ }
+ if(!thrown)
+ assert(false, "exception wasn't thrown")
+ }
+
+ def fails(msg: String, block: String) = {
+ def result(ok: Boolean, description: String = "") = {
+ val status = if (ok) Prop.Proof else Prop.False
+ val labels = if (description != "") Set(description) else Set.empty[String]
+ Prop { new Prop.Result(status, Nil, Set.empty, labels) }
+ }
+ try {
+ val tb = rootMirror.mkToolBox()
+ val tree = tb.parse(s"""
+ object Wrapper extends Helpers {
+ import scala.reflect.runtime.universe._
+ $block
+ }
+ """)
+ tb.compile(tree)
+ result(false, "given code doesn't fail to typecheck")
+ } catch {
+ case ToolBoxError(emsg, _) =>
+ if (!emsg.contains(msg))
+ result(false, s"error message '${emsg}' is not the same as expected '$msg'")
+ else
+ result(true)
+ }
+ }
+
+ def annot(name: String): Tree = annot(TypeName(name), Nil)
+ def annot(name: TypeName): Tree = annot(name, Nil)
+ def annot(name: String, args: List[Tree]): Tree = annot(TypeName(name), args)
+ def annot(name: TypeName, args: List[Tree]): Tree = q"new $name(..$args)"
+} \ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/TermConstructionProps.scala b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
new file mode 100644
index 0000000000..b14945f24b
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/TermConstructionProps.scala
@@ -0,0 +1,341 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+import scala.reflect.runtime.universe._
+import Flag._
+
+object TermConstructionProps extends QuasiquoteProperties("term construction") {
+ val anyRef = Select(Ident(TermName("scala")), TypeName("AnyRef"))
+ val emtpyConstructor =
+ DefDef(
+ Modifiers(), nme.CONSTRUCTOR, List(),
+ List(List()), TypeTree(), Block(List(Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List())), Literal(Constant(()))))
+
+ def classWithMethods(name: TypeName, methods: List[DefDef] = Nil) =
+ ClassDef(
+ Modifiers(), name, List(),
+ Template(List(anyRef), emptyValDef, List(emtpyConstructor) ++ methods))
+
+ property("splice single tree return tree itself") = forAll { (t: Tree) =>
+ q"$t" ≈ t
+ }
+
+ property("splice trees into if expression") = forAll { (t1: Tree, t2: Tree, t3: Tree) =>
+ q"if($t1) $t2 else $t3" ≈ If(t1, t2, t3)
+ }
+
+ property("splice term name into val") = forAll { (name: TermName) =>
+ q"val $name = 0" ≈ ValDef(Modifiers(), name, TypeTree(), Literal(Constant(0)))
+ }
+
+ property("splice type name into typedef") = forAll { (name1: TypeName, name2: TypeName) =>
+ q"type $name1 = $name2" ≈ TypeDef(Modifiers(), name1, List(), Ident(name2))
+ }
+
+ property("splice term name into class") = forAll { (name: TypeName) =>
+ q"class $name" ≈ classWithMethods(name)
+ }
+
+ property("splice method into class") = forAll { (name: TypeName, method: DefDef) =>
+ q"class $name { $method }" ≈ classWithMethods(name, List(method))
+ }
+
+  property("splice trees into ascription") = forAll { (t1: Tree, t2: Tree) =>
+ q"$t1 : $t2" ≈ Typed(t1, t2)
+ }
+
+ property("splice trees into apply") = forAll { (t1: Tree, t2: Tree, t3: Tree) =>
+ q"$t1($t2, $t3)" ≈ Apply(t1, List(t2, t3))
+ }
+
+ property("splice trees with .. cardinality into apply") = forAll { (ts: List[Tree]) =>
+ q"f(..$ts)" ≈ Apply(q"f", ts)
+ }
+
+ property("splice iterable into apply") = forAll { (trees: List[Tree]) =>
+ val itrees: Iterable[Tree] = trees
+ q"f(..$itrees)" ≈ Apply(q"f", trees)
+ }
+
+ property("splice trees with ... cardinality into apply") = forAll { (ts1: List[Tree], ts2: List[Tree]) =>
+ val argss = List(ts1, ts2)
+ q"f(...$argss)" ≈ Apply(Apply(q"f", ts1), ts2)
+ }
+
+ property("splice term name into assign") = forAll { (name: TermName, t: Tree) =>
+ q"$name = $t" ≈ Assign(Ident(name), t)
+ }
+
+ property("splice trees into block") = forAll { (t1: Tree, t2: Tree, t3: Tree) =>
+ q"""{
+ $t1
+ $t2
+ $t3
+ }""" ≈ Block(List(t1, t2), t3)
+ }
+
+ property("splice type name into class parents") = forAll { (name: TypeName, parent: TypeName) =>
+ q"class $name extends $parent" ≈
+ ClassDef(
+ Modifiers(), name, List(),
+ Template(List(Ident(parent)), emptyValDef, List(emptyConstructor)))
+ }
+
+ property("splice tree into new") = forAll { (tree: Tree) =>
+ q"new $tree" ≈ Apply(Select(New(tree), nme.CONSTRUCTOR), List())
+ }
+
+ property("splice tree into return") = forAll { (tree: Tree) =>
+ q"return $tree" ≈ Return(tree)
+ }
+
+ property("splice a list of arguments") = forAll { (fun: Tree, args: List[Tree]) =>
+ q"$fun(..$args)" ≈ Apply(fun, args)
+ }
+
+ property("splice list and non-list fun arguments") = forAll { (fun: Tree, arg1: Tree, arg2: Tree, args: List[Tree]) =>
+ q"$fun(..$args, $arg1, $arg2)" ≈ Apply(fun, args ++ List(arg1) ++ List(arg2)) &&
+ q"$fun($arg1, ..$args, $arg2)" ≈ Apply(fun, List(arg1) ++ args ++ List(arg2)) &&
+ q"$fun($arg1, $arg2, ..$args)" ≈ Apply(fun, List(arg1) ++ List(arg2) ++ args)
+ }
+
+ property("splice members into class") = forAll { (name: TypeName, defs: List[DefDef], extra: DefDef) =>
+ q"""class $name {
+ ..$defs
+ $extra
+ }""" ≈ classWithMethods(name, defs ++ List(extra))
+ }
+
+ property("splice into new") = forAll { (name: TypeName, body: List[Tree]) =>
+ q"new $name { ..$body }" ≈
+ q"""{
+ final class $$anon extends $name {
+ ..$body
+ }
+ new $$anon
+ }"""
+ }
+
+
+ property("splice tree into singleton type tree") = forAll { (name: TypeName, t: Tree) =>
+ q"type $name = $t.type" ≈ q"type $name = ${SingletonTypeTree(t)}"
+ }
+
+ property("splice type name into this") = forAll { (T: TypeName) =>
+ q"$T.this" ≈ This(T)
+ }
+
+ property("splice tree into throw") = forAll { (t: Tree) =>
+ q"throw $t" ≈ Throw(t)
+ }
+
+ property("splice trees into type apply") = forAll { (fun: TreeIsTerm, types: List[Tree]) =>
+ q"$fun[..$types]" ≈ TypeApply(fun, types)
+ }
+
+ property("splice type names into type bounds") = forAll { (T1: TypeName, T2: TypeName, T3: TypeName) =>
+ q"type $T1 >: $T2 <: $T3" ≈
+ TypeDef(
+ Modifiers(DEFERRED), T1, List(),
+ TypeBoundsTree(Ident(T2), Ident(T3)))
+ }
+
+ property("splice trees names into type bounds") = forAll { (T: TypeName, t1: Tree, t2: Tree) =>
+ q"type $T >: $t1 <: $t2" ≈
+ TypeDef(
+ Modifiers(DEFERRED), T, List(),
+ TypeBoundsTree(t1, t2))
+ }
+
+ property("splice tparams into typedef (1)") = forAll { (T: TypeName, targs: List[TypeDef], t: Tree) =>
+ q"type $T[..$targs] = $t" ≈ TypeDef(Modifiers(), T, targs, t)
+ }
+
+ property("splice tparams into typedef (2)") = forAll { (T: TypeName, targs1: List[TypeDef], targs2: List[TypeDef], t: Tree) =>
+ q"type $T[..$targs1, ..$targs2] = $t" ≈ TypeDef(Modifiers(), T, targs1 ++ targs2, t)
+ }
+
+ property("splice tparams into typedef (3)") = forAll { (T: TypeName, targ: TypeDef, targs: List[TypeDef], t: Tree) =>
+ q"type $T[$targ, ..$targs] = $t" ≈ TypeDef(Modifiers(), T, targ :: targs, t)
+ }
+
+ property("splice typename into typedef with default bounds") = forAll { (T1: TypeName, T2: TypeName, t: Tree) =>
+ q"type $T1[$T2 >: Any <: Nothing] = $t" ≈
+ TypeDef(
+ Modifiers(), T1,
+ List(TypeDef(
+ Modifiers(PARAM), T2,
+ List(),
+ TypeBoundsTree(
+ Ident(TypeName("Any")),
+ Ident(TypeName("Nothing"))))),
+ t)
+ }
+
+ property("splice type names into compound type tree") = forAll { (T: TypeName, A: TypeName, B: TypeName) =>
+ q"type $T = $A with $B" ≈
+ TypeDef(
+ Modifiers(), T, List(),
+ CompoundTypeTree(
+ Template(List(Ident(A), Ident(B)), ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(), EmptyTree), List())))
+ }
+
+ property("splice trees into existential type tree") = forAll {
+ (T1: TypeName, T2: TypeName, X: TypeName, Lo: TypeName, Hi: TypeName) =>
+
+ q"type $T1 = $T2[$X] forSome { type $X >: $Lo <: $Hi }" ≈
+ TypeDef(
+ Modifiers(), T1, List(),
+ ExistentialTypeTree(
+ AppliedTypeTree(Ident(T2), List(Ident(X))),
+ List(
+ TypeDef(Modifiers(DEFERRED), X, List(), TypeBoundsTree(Ident(Lo), Ident(Hi))))))
+ }
+
+ property("splice names into import selector") = forAll {
+ (expr: Tree, plain: Name, oldname: Name, newname: Name, discard: Name) =>
+
+ val Import(expr1, List(
+ ImportSelector(plain11, _, plain12, _),
+ ImportSelector(oldname1, _, newname1, _),
+ ImportSelector(discard1, _, wildcard, _))) =
+ q"import $expr.{$plain, $oldname => $newname, $discard => _}"
+
+ expr1 ≈ expr && plain11 == plain12 && plain12 == plain &&
+ oldname1 == oldname && newname1 == newname && discard1 == discard && wildcard == nme.WILDCARD
+ }
+
+ property("splice trees into while loop") = forAll { (cond: Tree, body: Tree) =>
+ val LabelDef(_, List(), If(cond1, Block(List(body1), Apply(_, List())), Literal(Constant(())))) = q"while($cond) $body"
+ body1 ≈ body && cond1 ≈ cond
+ }
+
+ property("splice trees into do while loop") = forAll { (cond: Tree, body: Tree) =>
+ val LabelDef(_, List(), Block(List(body1), If(cond1, Apply(_, List()), Literal(Constant(()))))) = q"do $body while($cond)"
+ body1 ≈ body && cond1 ≈ cond
+ }
+
+ property("splice trees into alternative") = forAll { (c: Tree, A: Tree, B: Tree) =>
+ q"$c match { case $A | $B => }" ≈
+ Match(c, List(
+ CaseDef(Alternative(List(A, B)), EmptyTree, Literal(Constant(())))))
+ }
+
+ property("splice into applied type tree") = forAll { (T1: TypeName, T2: TypeName, args: List[Tree]) =>
+ q"type $T1 = $T2[..$args]" ≈
+ TypeDef(
+ Modifiers(), T1, List(),
+ AppliedTypeTree(Ident(T2), args))
+ }
+
+ property("splice list of trees into block (1)") = forAll { (trees: List[Tree]) =>
+ q"{ ..$trees }" ≈ (trees match {
+ case Nil => Block(Nil, q"()")
+ case _ => Block(trees.init, trees.last)
+ })
+ }
+
+ property("splice list of trees into block (2)") = forAll { (trees1: List[Tree], trees2: List[Tree]) =>
+ q"{ ..$trees1 ; ..$trees2 }" ≈ ((trees1 ++ trees2) match {
+ case Nil => Block(Nil, Literal(Constant(())))
+ case trees => Block(trees.init, trees.last)
+ })
+ }
+
+ property("splice list of trees into block (3)") = forAll { (trees: List[Tree], tree: Tree) =>
+ q"{ ..$trees; $tree }" ≈ Block(trees, tree)
+ }
+
+ def assertSameAnnots(tree: {def mods: Modifiers}, annots: List[Tree]) =
+ assert(tree.mods.annotations ≈ annots,
+ s"${tree.mods.annotations} =/= ${annots}")
+
+ def assertSameAnnots(tree1: {def mods: Modifiers}, tree2: {def mods: Modifiers}) =
+ assert(tree1.mods.annotations ≈ tree2.mods.annotations,
+ s"${tree1.mods.annotations} =/= ${tree2.mods.annotations}")
+
+ property("splice type name into annotation") = test {
+ val name = TypeName("annot")
+ assertSameAnnots(q"@$name def foo", List(annot(name)))
+ }
+
+ property("splice ident into annotation") = test {
+ val name = TypeName("annot")
+ val ident = Ident(name)
+ assertSameAnnots(q"@$ident def foo", List(annot(name)))
+ }
+
+ property("splice idents into annotation") = test {
+ val idents = List(Ident(TypeName("annot1")), Ident(TypeName("annot2")))
+ assertSameAnnots(q"@..$idents def foo",
+ idents.map { ident => Apply(Select(New(ident), nme.CONSTRUCTOR), List()) })
+ }
+
+ property("splice constructor calls into annotation") = test {
+ val ctorcalls = List(annot("a1"), annot("a2"))
+ assertSameAnnots(q"@..$ctorcalls def foo", ctorcalls)
+ }
+
+ property("splice multiple annotations (1)") = test {
+ val annot1 = annot("a1")
+ val annot2 = annot("a2")
+ val res = q"@$annot1 @$annot2 def foo"
+ assertSameAnnots(res, List(annot1, annot2))
+ }
+
+ property("splice multiple annotations (2)") = test {
+ val annot1 = annot("a1")
+ val annots = List(annot("a2"), annot("a3"))
+ val res = q"@$annot1 @..$annots def foo"
+ assertSameAnnots(res, annot1 :: annots)
+ }
+
+ property("splice annotations with arguments (1)") = test {
+ val a = annot("a", List(q"x"))
+ assertSameAnnots(q"@$a def foo", q"@a(x) def foo")
+ }
+
+ property("splice annotations with arguments (2)") = test {
+ val a = newTypeName("a")
+ assertSameAnnots(q"@$a(x) def foo", q"@a(x) def foo")
+ }
+
+ property("splice annotations with arguments (3") = test {
+ val a = Ident(newTypeName("a"))
+ assertSameAnnots(q"@$a(x) def foo", q"@a(x) def foo")
+ }
+
+ property("can't splice annotations with arguments specificed twice") = test {
+ val a = annot("a", List(q"x"))
+ assertThrows[IllegalArgumentException] {
+ q"@$a(y) def foo"
+ }
+ }
+
+ property("splice term into brackets") = test {
+ val a = q"a"
+ assert(q"($a)" ≈ a)
+ }
+
+ property("splice terms into tuple") = test {
+ val a1 = q"a1"
+ val a2 = q"a2"
+ val as = List(a1, a2)
+ assert(q"(..$as)" ≈ q"Tuple2($a1, $a2)")
+ assert(q"(a0, ..$as)" ≈ q"Tuple3(a0, $a1, $a2)")
+ }
+
+ property("splice empty list into tuple") = test {
+ val empty = List[Tree]()
+ assert(q"(..$empty)" ≈ q"()")
+ }
+
+ property("splice improper tree into annot") = test {
+ val t = tq"Foo[Baz]"
+ assertThrows[IllegalArgumentException] {
+ q"@$t def foo"
+ }
+ }
+}
diff --git a/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
new file mode 100644
index 0000000000..114c9f112b
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/TermDeconstructionProps.scala
@@ -0,0 +1,122 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+import scala.reflect.runtime.universe._
+import Flag._
+
+object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction") {
+
+ property("f(..x) = f") = test {
+ assertThrows[MatchError] {
+ val q"f(..$argss)" = q"f"
+ }
+ }
+
+ property("f(x)") = forAll { (x: Tree) =>
+ val q"f($x1)" = q"f($x)"
+ x1 ≈ x
+ }
+
+ property("f(..xs)") = forAll { (x1: Tree, x2: Tree) =>
+ val q"f(..$xs)" = q"f($x1, $x2)"
+ xs ≈ List(x1, x2)
+ }
+
+ property("f(y, ..ys)") = forAll { (x1: Tree, x2: Tree, x3: Tree) =>
+ val q"f($y, ..$ys)" = q"f($x1, $x2, $x3)"
+ y ≈ x1 && ys ≈ List(x2, x3)
+ }
+
+ property("f(y1, y2, ..ys)") = forAll { (x1: Tree, x2: Tree, x3: Tree) =>
+ val q"f($y1, $y2, ..$ys)" = q"f($x1, $x2, $x3)"
+ y1 ≈ x1 && y2 ≈ x2 && ys ≈ List(x3)
+ }
+
+ property("f(...xss)") = forAll { (x1: Tree, x2: Tree) =>
+ val q"f(...$argss)" = q"f($x1)($x2)"
+ argss ≈ List(List(x1), List(x2))
+ }
+
+ property("f(...xss) = f") = forAll { (x1: Tree, x2: Tree) =>
+ val q"f(...$argss)" = q"f"
+ argss ≈ List()
+ }
+
+ property("@$annot def foo") = forAll { (annotName: TypeName) =>
+ val q"@$annot def foo" = q"@$annotName def foo"
+ annot ≈ Apply(Select(New(Ident(annotName)), nme.CONSTRUCTOR), List())
+ }
+
+ property("@$annot(..$args) def foo") = forAll { (annotName: TypeName, tree: Tree) =>
+ val q"@$annot(..$args) def foo" = q"@$annotName($tree) def foo"
+ annot ≈ Ident(annotName) && args ≈ List(tree)
+ }
+
+ property("@..$annots def foo") = test {
+ val a = annot("a")
+ val b = annot("b")
+ val q"@..$annots def foo" = q"@$a @$b def foo"
+ annots ≈ List(a, b)
+ }
+
+ property("@$annot @..$annots def foo") = test {
+ val a = annot("a")
+ val b = annot("b")
+ val c = annot("c")
+ val q"@$first @..$rest def foo" = q"@$a @$b @$c def foo"
+ first ≈ a && rest ≈ List(b, c)
+ }
+
+ property("class without params") = test {
+ val q"class $name { ..$body }" = q"class Foo { def bar = 3 }"
+ assert(body ≈ List(q"def bar = 3"))
+ }
+
+ property("class constructor") = test {
+ val q"class $name(...$argss)" = q"class Foo(x: Int)(y: Int)"
+ assert(argss.length == 2)
+ }
+
+ property("class parents") = test {
+ val q"class $name extends ..$parents" = q"class Foo extends Bar with Blah"
+ assert(parents ≈ List(tq"Bar", tq"Blah"))
+ }
+
+ property("class selfdef") = test {
+ val q"class $name { $self => }" = q"class Foo { self: T => }"
+ assert(self.name ≈ TermName("self") && self.tpt ≈ tq"T")
+ }
+
+ property("class tparams") = test {
+ val q"class $name[..$tparams]" = q"class Foo[A, B]"
+ assert(tparams.map { _.name } == List(TypeName("A"), TypeName("B")))
+ }
+
+ property("deconstruct unit as tuple") = test {
+ val q"(..$xs)" = q"()"
+ assert(xs.isEmpty)
+ }
+
+ property("deconstruct tuple") = test {
+ val q"(..$xs)" = q"(a, b)"
+ assert(xs ≈ List(q"a", q"b"))
+ }
+
+ property("deconstruct tuple mixed") = test {
+ val q"($first, ..$rest)" = q"(a, b, c)"
+ assert(first ≈ q"a" && rest ≈ List(q"b", q"c"))
+ }
+
+ property("deconstruct cases") = test {
+ val q"$x match { case ..$cases }" = q"x match { case 1 => case 2 => }"
+ x ≈ q"x" && cases ≈ List(cq"1 =>", cq"2 =>")
+ }
+
+ property("deconstruct mods") = test {
+ val mods = Modifiers(IMPLICIT | PRIVATE, TermName("foobar"), Nil)
+ val q"$mods0 def foo" = q"$mods def foo"
+ assert(mods0 ≈ mods)
+ }
+} \ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/Test.scala b/test/files/scalacheck/quasiquotes/Test.scala
new file mode 100644
index 0000000000..2387a9b008
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/Test.scala
@@ -0,0 +1,12 @@
+import org.scalacheck._
+
+object Test extends Properties("quasiquotes") {
+ include(TermConstructionProps)
+ include(TermDeconstructionProps)
+ include(TypeConstructionProps)
+ include(TypeDeconstructionProps)
+ include(PatternConstructionProps)
+ include(PatternDeconstructionProps)
+ include(LiftableProps)
+ include(ErrorProps)
+} \ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala b/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala
new file mode 100644
index 0000000000..535ed8ecbf
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/TypeConstructionProps.scala
@@ -0,0 +1,25 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+import scala.reflect.runtime.universe._
+import Flag._
+
+object TypeConstructionProps extends QuasiquoteProperties("type construction") {
+ property("bare idents contain type names") = test {
+ tq"x" ≈ Ident(TypeName("x"))
+ }
+
+ property("splice type names into AppliedTypeTree") = forAll { (name1: TypeName, name2: TypeName) =>
+ tq"$name1[$name2]" ≈ AppliedTypeTree(Ident(name1), List(Ident(name2)))
+ }
+
+ property("tuple type") = test {
+ val empty = List[Tree]()
+ val ts = List(tq"t1", tq"t2")
+ assert(tq"(..$empty)" ≈ tq"scala.Unit")
+ assert(tq"(..$ts)" ≈ tq"Tuple2[t1, t2]")
+ assert(tq"(t0, ..$ts)" ≈ tq"Tuple3[t0, t1, t2]")
+ }
+} \ No newline at end of file
diff --git a/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala b/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala
new file mode 100644
index 0000000000..6ab699d4f0
--- /dev/null
+++ b/test/files/scalacheck/quasiquotes/TypeDeconstructionProps.scala
@@ -0,0 +1,29 @@
+import org.scalacheck._
+import Prop._
+import Gen._
+import Arbitrary._
+
+import scala.reflect.runtime.universe._
+import Flag._
+
+object TypeDeconstructionProps extends QuasiquoteProperties("type deconstruction") {
+ property("ident(type name)") = forAll { (name: TypeName) =>
+ val t = Ident(name)
+ val tq"$t1" = t
+ t1 ≈ t
+ }
+
+ property("applied type tree") = forAll { (name1: TypeName, name2: TypeName) =>
+ val tq"$a[$b]" = AppliedTypeTree(Ident(name1), List(Ident(name2)))
+ a ≈ Ident(name1) && b ≈ Ident(name2)
+ }
+
+ property("tuple type") = test {
+ val tq"(..$empty)" = tq"scala.Unit"
+ assert(empty.isEmpty)
+ val tq"(..$ts)" = tq"(t1, t2)"
+ assert(ts ≈ List(tq"t1", tq"t2"))
+ val tq"($head, ..$tail)" = tq"(t0, t1, t2)"
+ assert(head ≈ tq"t0" && tail ≈ List(tq"t1", tq"t2"))
+ }
+} \ No newline at end of file
diff --git a/test/files/specialized/SI-7343.scala b/test/files/specialized/SI-7343.scala
new file mode 100644
index 0000000000..5ee683064c
--- /dev/null
+++ b/test/files/specialized/SI-7343.scala
@@ -0,0 +1,55 @@
+class Parent[@specialized(Int) T]
+
+object Test extends App {
+
+ /**
+ * This method checks that specialization correctly rewires parents
+ * for classes defined inside methods. The pattern is important since this
+ * is how closures are currently represented: as locally-defined anonymous
+ * classes, which usually end up inside methods. For these closures we do
+ * want their parents rewired correctly:
+ *
+ * ```
+ * def checkSuperClass$mIc$sp[T](t: T, ...) = {
+ * class X extends Parent$mcI$sp // instead of just Parent
+ * ...
+ * }
+ * ```
+ */
+ def checkSuperClass[@specialized(Int) T](t: T, expectedXSuper: String) = {
+ // test target:
+ // - in checkSuperClass, X should extend Parent
+ // - in checkSuperClass$mIc$sp, X should extend Parent$mcI$sp
+ class X extends Parent[T]()
+
+ // get the superclass for X and make sure it's correct
+ val actualXSuper = (new X).getClass().getSuperclass().getSimpleName()
+ assert(actualXSuper == expectedXSuper, actualXSuper + " != " + expectedXSuper)
+ }
+
+ checkSuperClass("x", "Parent")
+ checkSuperClass(101, "Parent$mcI$sp")
+
+ /**
+ * This is the same check, but performed in a val initializer. It should
+ * work exactly the same as its method counterpart.
+ */
+ class Val[@specialized(Int) T](t: T, expectedXSuper: String) {
+ val check: T = {
+ class X extends Parent[T]()
+
+ // get the superclass for X and make sure it's correct
+ val actualXSuper = (new X).getClass().getSuperclass().getSimpleName()
+ assert(actualXSuper == expectedXSuper, actualXSuper + " != " + expectedXSuper)
+ t
+ }
+ }
+
+ new Val("x", "Parent")
+ new Val(101, "Parent$mcI$sp")
+
+ /**
+ * NOTE: The same check, only modified to affect constructors, won't
+ * work since the class X definition will always be lifted to become a
+ * member of the class, making it impossible to force its duplication.
+ */
+}
diff --git a/test/files/specialized/spec-ame.check b/test/files/specialized/spec-ame.check
index 9c1713cc8a..cf18c01191 100644
--- a/test/files/specialized/spec-ame.check
+++ b/test/files/specialized/spec-ame.check
@@ -1,3 +1,3 @@
abc
10
-3 \ No newline at end of file
+2
diff --git a/test/files/specialized/spec-ame.scala b/test/files/specialized/spec-ame.scala
index 79ee4217ed..129fb9f447 100644
--- a/test/files/specialized/spec-ame.scala
+++ b/test/files/specialized/spec-ame.scala
@@ -13,6 +13,9 @@ object Test {
def main(args: Array[String]) {
println((new A("abc")).foo.value)
println((new A(10)).foo.value)
+ // before fixing SI-7343, this was printing 3. Now it's printing 2,
+ // since the anonymous class created by doing new B[T] { ... } when
+ // T = Int is now rewired to B$mcI$sp instead of just B[Int]
println(runtime.BoxesRunTime.integerBoxCount)
}
}
diff --git a/test/files/speclib/.gitignore b/test/files/speclib/.gitignore
new file mode 100644
index 0000000000..2b26f5dfc5
--- /dev/null
+++ b/test/files/speclib/.gitignore
@@ -0,0 +1 @@
+instrumented.jar
diff --git a/test/junit/scala/collection/TraversableOnceTest.scala b/test/junit/scala/collection/TraversableOnceTest.scala
new file mode 100644
index 0000000000..56d8312336
--- /dev/null
+++ b/test/junit/scala/collection/TraversableOnceTest.scala
@@ -0,0 +1,70 @@
+package scala.collection
+
+import org.junit.Assert._
+import org.junit.Test
+import org.junit.runner.RunWith
+import org.junit.runners.JUnit4
+import scala.util.Random
+
+@RunWith(classOf[JUnit4])
+/* Test for SI-7614 */
+class TraversableOnceTest {
+ val list = List.fill(1000)(scala.util.Random.nextInt(10000) - 5000)
+
+ // Basic emptiness check
+ @Test
+ def checkEmpty {
+ def hasException(code: => Any): Boolean = try {
+ code
+ false
+ } catch {
+ case u: UnsupportedOperationException => true
+ case t: Throwable => false
+ }
+ assert(hasException({ List[Int]().maxBy(_ * 3) }), "maxBy: on empty list should throw UnsupportedOperationException.")
+ assert(hasException({ List[Int]().minBy(_ * 3) }), "minBy: on empty list should throw UnsupportedOperationException.")
+ }
+
+ // Basic definition of minBy/maxBy.
+ @Test
+ def testCorrectness() = {
+ def f(x: Int) = -1 * x
+ val max = list.maxBy(f)
+ assert(list.forall(f(_) <= f(max)), "f(list.maxBy(f)) should ≥ f(x) where x is any element of list.")
+
+ val min = list.minBy(f)
+ assert(list.forall(f(_) >= f(min)), "f(list.minBy(f)) should ≤ f(x) where x is any element of list.")
+ }
+
+ // Ensure that it always returns the first match if more than one element has the same largest/smallest f(x).
+ // Note that this behavior was not explicitly documented before.
+ // To stay compatible with the previous implementation, this behavior has been added to the docs.
+ @Test
+ def testReturnTheFirstMatch() = {
+ val d = List(1, 2, 3, 4, 5, 6, 7, 8)
+ def f(x: Int) = x % 3
+ assert(d.maxBy(f) == 2, "If multiple elements evaluate to the largest value, maxBy should return the first one.")
+ assert(d.minBy(f) == 3, "If multiple elements evaluate to the smallest value, minBy should return the first one.")
+ }
+
+ // Make sure it evaluates f no more than list.length times.
+ @Test
+ def testOnlyEvaluateOnce() = {
+ var evaluatedCountOfMaxBy = 0
+
+ val max = list.maxBy(x => {
+ evaluatedCountOfMaxBy += 1
+ x * 10
+ })
+ assert(evaluatedCountOfMaxBy == list.length, s"maxBy: should evaluate f only ${list.length} times, but it evaluated $evaluatedCountOfMaxBy times.")
+
+ var evaluatedCountOfMinBy = 0
+
+ val min = list.minBy(x => {
+ evaluatedCountOfMinBy += 1
+ x * 10
+ })
+ assert(evaluatedCountOfMinBy == list.length, s"minBy: should evaluate f only ${list.length} times, but it evaluated $evaluatedCountOfMinBy times.")
+ }
+
+}
diff --git a/test/partest b/test/partest
index 99a731a49b..0259cdb791 100755
--- a/test/partest
+++ b/test/partest
@@ -64,7 +64,7 @@ if [ -z "$EXT_CLASSPATH" ] ; then
fi
done
elif [ -f "$SCALA_HOME/build/pack/lib/scala-partest.jar" ] ; then
- for lib in `echo "scala-partest scala-library scala-reflect scala-compiler diffutils"`; do
+ for lib in `echo "scala-partest scala-library scala-parser-combinators scala-xml scala-reflect scala-compiler diffutils"`; do
ext="$SCALA_HOME/build/pack/lib/$lib.jar"
if [ -z "$EXT_CLASSPATH" ] ; then
EXT_CLASSPATH="$ext"
diff --git a/tools/.gitignore b/tools/.gitignore
new file mode 100644
index 0000000000..57701c8353
--- /dev/null
+++ b/tools/.gitignore
@@ -0,0 +1 @@
+push.jar